├── .editorconfig ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── pyproject.toml ├── requirements-dev.txt ├── requirements.txt ├── src ├── __init__.py ├── base │ ├── 0.0.0 │ │ ├── end-clean-__upgrade__-crons.py │ │ ├── end-moved0.py │ │ ├── end-no-respawn-fields.py │ │ ├── end-user_groups_view.py │ │ ├── post-01-modules-auto-discovery.py │ │ ├── post-02-force-upgrade-installed-modules.py │ │ ├── post-commercial_partner_id.py │ │ ├── pre-00-upgrade-start.py │ │ ├── pre-base_version.py │ │ ├── pre-models-ir_model_relation.py │ │ └── pre-moved0.py │ ├── 17.0.1.3 │ │ └── attr_domains2expr.py │ ├── 8.0.1.3 │ │ ├── pre-00-base_version.py │ │ └── pre-00-upgrade-start.py │ ├── 9.0.1.3 │ │ ├── pre-00-base_version.py │ │ └── pre-00-upgrade-start.py │ └── tests │ │ ├── __init__.py │ │ ├── test_ensure_has_pk.py │ │ ├── test_moved0.py │ │ └── test_util.py ├── mail │ └── 0.0.0 │ │ └── pre-report-migration.py ├── spreadsheet │ └── tests │ │ ├── __init__.py │ │ └── test_spreadsheet_tokenizer.py ├── testing.py └── util │ ├── __init__.py │ ├── _inherit.py │ ├── accounting.py │ ├── const.py │ ├── convert_bootstrap.py │ ├── data.py │ ├── domains.py │ ├── exceptions.py │ ├── fields.py │ ├── helpers.py │ ├── hr_payroll.py │ ├── inconsistencies.py │ ├── indirect_references.py │ ├── inherit.py │ ├── jinja_to_qweb.py │ ├── json.py │ ├── logger.py │ ├── misc.py │ ├── models.py │ ├── modules.py │ ├── orm.py │ ├── pg.py │ ├── records.py │ ├── release-note.xml │ ├── report-migration.xml │ ├── report.py │ ├── snippets.py │ ├── specific.py │ └── spreadsheet │ ├── __init__.py │ ├── misc.py │ └── tokenizer.py └── tools ├── compile23.py ├── fetch-release-notes-video-id.py ├── generate-inherit.py └── graph-upgrade-timing.py /.editorconfig: -------------------------------------------------------------------------------- 1 | # install the editorconfig plugin for your editor: http://editorconfig.org/#download 2 | 3 | root = true 4 | 5 | [*] 6 | charset = utf-8 7 | end_of_line = 
lf 8 | insert_final_newline = true 9 | indent_style = space 10 | indent_size = 4 11 | trim_trailing_whitespace = true 12 | 13 | [*.py] 14 | # 120 + 10% 15 | # See error B950 https://github.com/PyCQA/flake8-bugbear#opinionated-warnings 16 | max_line_length = 132 17 | 18 | [*.xml] 19 | indent_size = 2 20 | 21 | [*.md] 22 | trim_trailing_whitespace = false 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # dotfiles 2 | .* 3 | !.gitignore 4 | # compiled python files 5 | *.py[co] 6 | # setup.py egg_info 7 | *.egg-info 8 | # emacs backup files 9 | *~ 10 | # hg stuff 11 | *.orig 12 | status 13 | 14 | # artefacts 15 | src/_version.py 16 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: compile23 5 | name: Compile python files using the expected runtime version 6 | entry: ./tools/compile23.py 7 | language: script 8 | require_serial: true 9 | verbose: true 10 | - id: bad-import-000 11 | name: Incompatible import with old versions in tests and `0.0.0` scripts 12 | language: pygrep 13 | entry: '(from|import) odoo.upgrade\b' 14 | files: '^src/\w+/(tests|0\.0\.0)/.*\.py$' 15 | 16 | - repo: https://github.com/astral-sh/ruff-pre-commit 17 | rev: v0.11.3 18 | hooks: 19 | - id: ruff 20 | name: Check code with Ruff, apply automatic fixes 21 | args: [ --exit-non-zero-on-fix ] 22 | - id: ruff-format 23 | name: Format code with Ruff 24 | - repo: https://github.com/crate-ci/typos 25 | rev: v1.31.1 26 | hooks: 27 | - id: typos 28 | - repo: https://github.com/pre-commit/pre-commit-hooks 29 | rev: v5.0.0 30 | hooks: 31 | - id: check-xml 32 | - id: check-yaml 33 | - id: end-of-file-fixer 34 | - id: trailing-whitespace 35 | - id: check-symlinks 36 | - id: debug-statements 
37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 
36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 
75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 
115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 150 | 151 | Each version is given a distinguishing version number. 
If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 166 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🧰 Upgrade Utils 2 | 3 | This repository contains helper functions[^1] to facilitate the writing of upgrade scripts. 4 | 5 | The functions in this repo are meant to work (sometimes just not fail) from Odoo 7.0 up to latest version. 6 | Thus the only supported version of this repo is `master` head. 7 | 8 | ## Installation 9 | 10 | ### Recommended 11 | 12 | Once you have cloned this repository locally, start `odoo` with the `src` directory prepended to the `--upgrade-path` option. 13 | ```shell-session 14 | $ ./odoo-bin --upgrade-path=/path/to/upgrade-util/src,/path/to/other/upgrade/script/directory [...] 
15 | ``` 16 | 17 | ### Alternative 18 | 19 | On platforms where you don't manage Odoo yourself, you can install this package via pip: 20 | ```shell-session 21 | $ python3 -m pip install git+https://github.com/odoo/upgrade-util@master 22 | ``` 23 | 24 | You can freeze the hash version when installing in this fashion. Just replace `master` by the hash of the commit you want to target. 25 | 26 | On [Odoo.sh](https://www.odoo.sh/) it is recommended to add it to the `requirements.txt` of your repository: 27 | ``` 28 | odoo_upgrade @ git+https://github.com/odoo/upgrade-util@master 29 | ``` 30 | 31 | ## How to use the helper functions? 32 | 33 | Once installed, the following packages are available 34 | - `odoo.upgrade.util`: the helper functions. 35 | - `odoo.upgrade.testing`: base `TestCase` classes 36 | 37 | ## Documentation 38 | 39 | - [Basic guide on how to write upgrade scripts](https://www.odoo.com/documentation/master/developer/reference/upgrades/upgrade_scripts.html) 40 | - [The reference documentation](https://www.odoo.com/documentation/master/developer/reference/upgrades/upgrade_utils.html) 41 | 42 | [^1]: We call them "utils". 
43 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "odoo_upgrade" 3 | authors = [ 4 | { name = "Odoo Upgrade Team", email = "upgrade@odoo.com" } 5 | ] 6 | dynamic = ["version"] 7 | dependencies = ["markdown"] 8 | 9 | [build-system] 10 | requires = ["hatchling", "hatch-vcs"] 11 | build-backend = "hatchling.build" 12 | 13 | [tool.hatch.build] 14 | only-include = ["src"] 15 | 16 | [tool.hatch.build.sources] 17 | "src" = "odoo/upgrade" 18 | 19 | [tool.hatch.version] 20 | source = "vcs" 21 | raw-options.version_scheme = "calver-by-date" 22 | 23 | [tool.hatch.build.hooks.vcs] 24 | version-file = "src/_version.py" 25 | 26 | [tool.ruff] 27 | required-version = ">=0.10.0" 28 | fix = true 29 | show-fixes = true 30 | output-format = "full" 31 | line-length = 120 32 | target-version = "py37" 33 | 34 | [tool.ruff.lint] 35 | ignore = [ 36 | "B904", # raise-without-from-inside-except; not python2 compatible 37 | "B905", # zip-without-explicit-strict; not python2 compatible 38 | "D1", # undocumented-* 39 | "E501", # line-too-long; handled by auto-formatting 40 | "E731", # lambda-assignment 41 | "PERF203", # try-except-in-loop 42 | "PLR09", # too-many-*; unwanted code complexity checks 43 | "RUF012", # mutable-class-default; we know about the risk 44 | 45 | "PLR2004", # magic-value-comparison; not all comparisons to int or str are magic 46 | "TRY003", # raise-vanilla-args; we can live without it 47 | "RET505", # only true for simple if/elif branches (like in the ruff doc example). 
if/elif blocks are easier to read in most cases 48 | 49 | "ISC001", # avoid incompatibility with the ruff formatter 50 | # not (yet) supported rules 51 | # "E301", 52 | # "E302", 53 | # "E265", 54 | # "E241", 55 | # "W503", 56 | # "E203", 57 | # "B907", 58 | ] 59 | select = [ 60 | # full rule-sets 61 | "A", # flake8-builtins 62 | "B", # flake8-bugbear 63 | "C4", # flake8-comprehensions 64 | "D", # pydocstyle 65 | "E", # pycodestyle 66 | "ERA", # eradicate 67 | "F", # Pyflakes 68 | "G", # flake8-logging-format 69 | "I", # isort 70 | "ISC", # flake8-implicit-str-concat 71 | "PERF",# perflint 72 | "PIE", # flake8-pie 73 | "PL", # pylint 74 | "RET", # flake8-return 75 | "RUF", # ruff specific rules 76 | "SIM", # flake8-simplify 77 | "TRY", # tryceratops 78 | "T20", # flake8-print 79 | "W", # pycodestyle 80 | 81 | # explicit rules 82 | "COM818", # trailing-comma-on-bare-tuple; other rules handled by autoformatter 83 | "FBT003", # boolean-positional-value-in-call; other rules not python2 compatible 84 | "UP005", # deprecated-unittest-alias 85 | "S704", # unsafe-markup-use; replaces RUF035 86 | 87 | ] 88 | 89 | [tool.ruff.lint.flake8-builtins] 90 | ignorelist = ["format", "id", "type"] 91 | allowed-modules = ["json"] 92 | 93 | [tool.ruff.lint.isort] 94 | section-order = ["future", "standard-library", "third-party", "first-party", "odoo-addons", "local-folder"] 95 | known-first-party = ["odoo", "openerp"] 96 | known-local-folder = ["odoo.upgrade", "odoo.addons.base.maintenance.migrations", "openerp.addons.base.maintenance.migrations"] 97 | 98 | [tool.ruff.lint.isort.sections] 99 | odoo-addons = ["odoo.addons", "openerp.addons"] 100 | 101 | [tool.ruff.lint.pydocstyle] 102 | convention = "pep257" 103 | 104 | [tool.ruff.lint.per-file-ignores] 105 | "*/__init__.py" = [ 106 | "F401", 107 | "F403", 108 | ] 109 | "src/util/*.py" = [ 110 | # python3 only rules 111 | "RUF005", 112 | "RUF007", 113 | ] 114 | # ignore docstring lint for tests files 115 | "src/*/tests/*.py" = ["D"] 116 
| # and for upgrade scripts 117 | "src/*/*/{pre,post,end}-*.py" = ["D"] 118 | 119 | 120 | [tool.typos.files] 121 | extend-exclude = [ 122 | # auto-generated file 123 | "src/util/_inherit.py", 124 | 125 | # Use weird words. And it's just a test file, typos can be tolerated. 126 | "src/spreadsheet/tests/test_spreadsheet_tokenizer.py", 127 | ] 128 | 129 | [tool.typos.type.py] 130 | extend-ignore-re = [ 131 | "\\brelease\\.serie\\b", 132 | # ignore `datas` as the whole string 133 | '"datas"', 134 | ] 135 | 136 | [tool.typos.default.extend-identifiers] 137 | inh = "inh" 138 | _inh = "_inh" 139 | ressource_type_id = "ressource_type_id" 140 | # Used as alias in SQL queries. 141 | fpt = "fpt" 142 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | pre-commit 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | markdown 2 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/odoo/upgrade-util/86409e52629c0d77c8ff83871fee45078fd9960e/src/__init__.py -------------------------------------------------------------------------------- /src/base/0.0.0/end-clean-__upgrade__-crons.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from odoo.addons.base.maintenance.migrations import util 3 | 4 | 5 | def migrate(cr, version): 6 | server_act_array = ( 7 | "array_agg(c.ir_actions_server_id)" if util.column_exists(cr, "ir_cron", "ir_actions_server_id") else "NULL" 8 | ) 9 | xid_date = ( 10 | ", x.date_init" if util.column_exists(cr, "ir_model_data", "date_init") else "" 11 | ) # field gone in 
saas-13.4 12 | cr.execute( 13 | """ 14 | SELECT array_agg(c.id), {server_act_array} 15 | FROM ir_cron c 16 | JOIN ir_model_data x ON x.model = 'ir.cron' AND x.res_id = c.id 17 | WHERE x.module = '__upgrade__' 18 | AND now() - COALESCE(c.create_date, x.create_date {xid_date}) > interval '1 month' 19 | """.format( 20 | server_act_array=server_act_array, 21 | xid_date=xid_date, 22 | ) 23 | ) 24 | cron_ids, server_act_ids = cr.fetchone() 25 | util.remove_records(cr, "ir.cron", cron_ids) 26 | util.remove_records(cr, "ir.actions.server", server_act_ids) 27 | -------------------------------------------------------------------------------- /src/base/0.0.0/end-moved0.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from odoo.addons.base.maintenance.migrations import util 3 | 4 | 5 | def migrate(cr, version): 6 | if not util.ENVIRON.get("manual_moved0"): 7 | # let the test verify the invariant. 8 | return 9 | 10 | pre = util.import_script("base/0.0.0/pre-moved0.py") 11 | 12 | cr.execute("SELECT value FROM upgrade_test_data WHERE key = %s", [pre.KEY]) 13 | expected = [tuple(i) for i in cr.fetchone()[0]] if cr.rowcount else [] 14 | moved_fields = set(pre.get_moved0_columns(cr)) - set(expected) 15 | if moved_fields: 16 | raise util.UpgradeError( 17 | "New `moved0` field. 
It happen when the ORM cannot change a column type by itself.\n%s" 18 | % "\n".join("\t- %s.%s" % m for m in sorted(moved_fields)) 19 | ) 20 | -------------------------------------------------------------------------------- /src/base/0.0.0/end-no-respawn-fields.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import logging 3 | import os 4 | 5 | from psycopg2.extras import execute_values 6 | 7 | from odoo.addons.base.maintenance.migrations import util 8 | 9 | _logger = logging.getLogger("odoo.addons.base.maintenance.migrations.base.000.no_respawn") 10 | 11 | 12 | def migrate(cr, version): 13 | # Ensure that we didn't `remove_field` that shouldnt' 14 | cr.execute( 15 | """ 16 | CREATE TEMPORARY TABLE no_respawn( 17 | model varchar, 18 | field varchar 19 | ) 20 | """ 21 | ) 22 | execute_values( 23 | cr._obj, 24 | "INSERT INTO no_respawn(model, field) VALUES %s", 25 | # fmt:off 26 | [ 27 | (model, field) 28 | for model, fields in util.ENVIRON["__renamed_fields"].items() 29 | for field, new_name in fields.items() 30 | if new_name is None # means removed :p 31 | ], 32 | # fmt:on 33 | ) 34 | cr.execute( 35 | """ 36 | SELECT m.model, f.name, m.transient, f.store 37 | FROM ir_model_fields f 38 | JOIN ir_model m ON m.id = f.model_id 39 | JOIN no_respawn r ON (m.model = r.model AND f.name = r.field) 40 | ORDER BY m.model, f.name 41 | """ 42 | ) 43 | 44 | key = "field_respawn:" 45 | ignored_fields_respawn = { 46 | e[len(key) :] 47 | for e in os.environ.get("suppress_upgrade_warnings", "").split(",") # noqa: SIM112 48 | if e.startswith(key) 49 | } 50 | 51 | for model, field, transient, store in cr.fetchall(): 52 | qualifier = "field" if store else "non-stored field" 53 | if transient: 54 | qualifier = "transient " + qualifier 55 | lvl = util.NEARLYWARN if transient or not store else logging.CRITICAL 56 | action = "" 57 | 58 | if "{}/{}".format(model, field) in ignored_fields_respawn: 59 | lvl = util.NEARLYWARN 
60 | action = "; explicitly ignored" 61 | 62 | _logger.log(lvl, "%s %s/%s has respawn%s.", qualifier, model, field, action) 63 | -------------------------------------------------------------------------------- /src/base/0.0.0/end-user_groups_view.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from odoo.addons.base.maintenance.migrations import util 3 | 4 | 5 | def migrate(cr, version): 6 | if util.version_gte("saas~18.2"): 7 | return 8 | util.env(cr)["res.groups"]._update_user_groups_view() 9 | -------------------------------------------------------------------------------- /src/base/0.0.0/post-01-modules-auto-discovery.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from odoo.addons.base.maintenance.migrations import util 3 | from odoo.addons.base.maintenance.migrations.util.modules import _trigger_auto_discovery 4 | 5 | 6 | def migrate(cr, version): 7 | if util.version_gte("saas~14.5"): 8 | _trigger_auto_discovery(cr) 9 | -------------------------------------------------------------------------------- /src/base/0.0.0/post-02-force-upgrade-installed-modules.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from odoo.addons.base.maintenance.migrations import util 3 | 4 | 5 | def migrate(cr, version): 6 | # Short-circuit the state change made by Odoo during the loading process (at STEP 2). 7 | # In Odoo, it's done by calling the method `button_upgrade` on all modules passed 8 | # in the command line option `-u`. This method drills down through all downstream 9 | # dependencies. That's why it works when upgrading the `base` module. 10 | # This technique works well during updates (keep the same major version) where the 11 | # modules' dependencies don't change. 
12 | # However, during upgrades (to the next version), it may happen that modules (A) got new 13 | # dependencies (B) that are not installed yet (being a new module or not). 14 | # As `button_update` won't update the state of non installed modules, if the modules (A) 15 | # only dependencies are the new ones (B), their state will remain `installed`. Still, the 16 | # corresponding packages (in the graph) will have the `update` flag, meaning the modules 17 | # will still be upgraded. 18 | # But partially. Due to their initial `installed` state, the `end-` scripts won't be 19 | # applied, leading to an incomplete upgrade. 20 | # This is the case for the `account_asset` module in `saas~12.3`. 21 | # This can be observed at https://upgradeci.odoo.com/upgradeci/run/3665 22 | # NOTE: This behavior has been fixed by https://github.com/odoo/odoo/pull/85516 23 | # but we need to keep this for older versions. 24 | query = "UPDATE ir_module_module SET state = 'to upgrade' WHERE state = 'installed'" 25 | if util.column_exists(cr, "ir_module_module", "imported"): 26 | query += " AND COALESCE(imported, false) = false" 27 | 28 | cr.execute(query) 29 | -------------------------------------------------------------------------------- /src/base/0.0.0/post-commercial_partner_id.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from odoo.addons.base.maintenance.migrations import util 3 | 4 | 5 | def migrate(cr, version): 6 | # The `commercial_partner_id` field is expected to always be set. Although the column is not marked as `NOT NULL`. 7 | # Fight the Murphy's Law, and recompute the value on partners with a NULL value. 
8 | cr.execute("SELECT id FROM res_partner WHERE commercial_partner_id IS NULL") 9 | if cr.rowcount: 10 | util.recompute_fields(cr, "res.partner", ["commercial_partner_id"], ids=[id_ for (id_,) in cr.fetchall()]) 11 | -------------------------------------------------------------------------------- /src/base/0.0.0/pre-00-upgrade-start.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | def migrate(cr, version): 5 | cr.execute( 6 | """ 7 | INSERT 8 | INTO ir_config_parameter(key, value) 9 | VALUES ('upgrade.start.time', now() at time zone 'utc') 10 | ON CONFLICT (key) 11 | DO UPDATE 12 | SET value = (now() at time zone 'utc') 13 | WHERE EXCLUDED.value::timestamp - ir_config_parameter.value::timestamp > interval '72 hours' 14 | """ 15 | ) 16 | -------------------------------------------------------------------------------- /src/base/0.0.0/pre-base_version.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | try: 3 | from odoo.addons.base.maintenance.migrations import util 4 | except ImportError: 5 | # for symlinked versions 6 | from openerp.addons.base.maintenance.migrations import util 7 | 8 | 9 | def migrate(cr, version): 10 | util.inherit._get_base_version(cr) 11 | -------------------------------------------------------------------------------- /src/base/0.0.0/pre-models-ir_model_relation.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from odoo import api, models 3 | 4 | from odoo.addons.base.maintenance.migrations import util 5 | 6 | try: 7 | from odoo.addons.base.models import ir_model as _ignore 8 | except ImportError: 9 | # version 10 10 | from odoo.addons.base.ir import ir_model as _ignore # noqa 11 | 12 | 13 | def migrate(cr, version): 14 | pass 15 | 16 | 17 | class ModelRelation(models.Model): 18 | _name = "ir.model.relation" 19 | _inherit = 
["ir.model.relation"] 20 | _module = "base" 21 | 22 | @api.model 23 | def _register_hook(self): 24 | super(ModelRelation, self)._register_hook() 25 | 26 | query = """ 27 | DELETE FROM ir_model_relation WHERE id IN ( 28 | SELECT r.id 29 | FROM ir_model_relation r 30 | JOIN ir_module_module m ON m.id = r.module 31 | LEFT JOIN information_schema.tables t ON t.table_name = r.name 32 | WHERE m.state = 'installed' 33 | AND t.table_name IS NULL 34 | ) 35 | """ 36 | 37 | self.env.cr.execute(query) 38 | 39 | gone_m2m = util.ENVIRON.get("_gone_m2m") 40 | if gone_m2m: 41 | query = """ 42 | SELECT table_name 43 | FROM information_schema.tables 44 | WHERE table_name IN %s 45 | """ 46 | self.env.cr.execute(query, [tuple(gone_m2m)]) 47 | back_m2m = "\n".join(" - %s via %s" % (tn, gone_m2m[tn]) for (tn,) in self.env.cr.fetchall()) 48 | if back_m2m: 49 | raise util.MigrationError("The following m2m relations have respawn:\n%s" % back_m2m) 50 | -------------------------------------------------------------------------------- /src/base/0.0.0/pre-moved0.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from psycopg2.extras import Json 3 | 4 | from odoo.addons.base.maintenance.migrations import util 5 | 6 | KEY = "base.tests.test_moved0.TestMoved0" 7 | 8 | 9 | def get_moved0_columns(cr): 10 | cr.execute( 11 | """ 12 | SELECT table_name, column_name 13 | FROM information_schema.columns 14 | WHERE column_name ~ '_moved[0-9]+' 15 | ORDER BY table_name, column_name 16 | """ 17 | ) 18 | return cr.fetchall() 19 | 20 | 21 | def migrate(cr, version): 22 | if util.version_gte("16.0"): 23 | # Starting Odoo 16, no more `moved0` columns are created 24 | # See https://github.com/odoo/odoo/commit/50767ef90eadeca2ed05b9400238af8bdbe77fb3 25 | return 26 | 27 | if util.table_exists(cr, "upgrade_test_data"): 28 | cr.execute("SELECT 1 FROM upgrade_test_data WHERE key = %s", [KEY]) 29 | if cr.rowcount: 30 | # Already ran as test. 
ignore 31 | return 32 | else: 33 | # Test not run or not a version that support upgrade tests (<= 12) 34 | cr.execute( 35 | """ 36 | CREATE TABLE upgrade_test_data ( 37 | key VARCHAR(255) PRIMARY KEY, 38 | value JSONB NOT NULL 39 | ) 40 | """ 41 | ) 42 | 43 | util.ENVIRON["manual_moved0"] = True 44 | 45 | value = get_moved0_columns(cr) 46 | if value: 47 | cr.execute( 48 | "INSERT INTO upgrade_test_data(key, value) VALUES (%s, %s)", 49 | [KEY, Json(value)], 50 | ) 51 | -------------------------------------------------------------------------------- /src/base/8.0.1.3/pre-00-base_version.py: -------------------------------------------------------------------------------- 1 | ../0.0.0/pre-base_version.py -------------------------------------------------------------------------------- /src/base/8.0.1.3/pre-00-upgrade-start.py: -------------------------------------------------------------------------------- 1 | ../0.0.0/pre-00-upgrade-start.py -------------------------------------------------------------------------------- /src/base/9.0.1.3/pre-00-base_version.py: -------------------------------------------------------------------------------- 1 | ../0.0.0/pre-base_version.py -------------------------------------------------------------------------------- /src/base/9.0.1.3/pre-00-upgrade-start.py: -------------------------------------------------------------------------------- 1 | ../0.0.0/pre-00-upgrade-start.py -------------------------------------------------------------------------------- /src/base/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
class TestTablesHavePK(IntegrityCase):
    """Integrity check: every table of the current schema must carry a primary key."""

    def invariant(self):
        if not util.version_gte("14.0"):
            # Older versions generated m2m tables without PK
            return

        cr = self.env.cr
        # Ordinary ('r') and partitioned ('p') tables of the current schema
        # that have no primary-key constraint (contype 'p') attached.
        cr.execute(
            """
            SELECT c.relname
              FROM pg_class c
              JOIN pg_namespace ns on ns.oid = c.relnamespace
              LEFT JOIN pg_constraint p on p.conrelid = c.oid and p.contype = 'p'
             WHERE c.relkind IN ('r', 'p')
               AND ns.nspname = current_schema
               AND p.oid IS NULL
             ORDER BY c.relname
            """
        )
        offenders = [name for (name,) in cr.fetchall()]
        if offenders:
            msg = "Some tables do not have any primary key:\n{}".format(
                "\n".join(" - " + name for name in offenders)
            )
            _logger.critical(msg)
            # Only hard-fail on CI; on customer upgrades we just log loudly.
            if util.on_CI():
                raise AssertionError(msg)
class MailMessage(models.Model):
    """Hook on mail.message registration to post upgrade communications.

    At registry setup time, post the release note and, when any migration
    report entries were collected during the upgrade, the migration report.
    """

    _inherit = "mail.message"
    _module = "mail"

    @model_cr
    def _register_hook(self):
        util.announce_release_note(self.env.cr)
        # idiomatic truthiness check instead of `if len(...):`
        if util.migration_reports:
            util.announce_migration_report(self.env.cr)
        return super(MailMessage, self)._register_hook()
SpreadsheetTokenizeTest(UnitTestCase): 6 | def test_simple_token(self): 7 | self.assertEqual(tokenize("1"), [("NUMBER", "1")]) 8 | 9 | def test_number_with_decimal_token(self): 10 | self.assertEqual( 11 | tokenize("=1.5"), 12 | [("OPERATOR", "="), ("NUMBER", "1.5")], 13 | ) 14 | 15 | def test_formula_token(self): 16 | self.assertEqual( 17 | tokenize("=1"), 18 | [("OPERATOR", "="), ("NUMBER", "1")], 19 | ) 20 | 21 | def test_longer_operators(self): 22 | self.assertEqual( 23 | tokenize("= >= <= < <>"), 24 | [ 25 | ("OPERATOR", "="), 26 | ("SPACE", " "), 27 | ("OPERATOR", ">="), 28 | ("SPACE", " "), 29 | ("OPERATOR", "<="), 30 | ("SPACE", " "), 31 | ("OPERATOR", "<"), 32 | ("SPACE", " "), 33 | ("OPERATOR", "<>"), 34 | ], 35 | ) 36 | 37 | def test_concat_operator(self): 38 | self.assertEqual(tokenize("=&"), [("OPERATOR", "="), ("OPERATOR", "&")]) 39 | 40 | def test_not_equal_operator(self): 41 | self.assertEqual(tokenize("=<>"), [("OPERATOR", "="), ("OPERATOR", "<>")]) 42 | 43 | def test_can_tokenize_various_number_expressions(self): 44 | self.assertEqual( 45 | tokenize("1%"), 46 | [("NUMBER", "1"), ("OPERATOR", "%")], 47 | ) 48 | self.assertEqual(tokenize("1 %"), [("NUMBER", "1"), ("SPACE", " "), ("OPERATOR", "%")]) 49 | self.assertEqual(tokenize("1.1"), [("NUMBER", "1.1")]) 50 | self.assertEqual(tokenize("1e3"), [("NUMBER", "1e3")]) 51 | 52 | def test_debug_formula_token(self): 53 | self.assertEqual( 54 | tokenize("=?1"), 55 | [("OPERATOR", "="), ("DEBUGGER", "?"), ("NUMBER", "1")], 56 | ) 57 | 58 | def test_REF_formula_token(self): 59 | tokens = tokenize("=#REF+1") 60 | self.assertEqual( 61 | tokens, 62 | [("OPERATOR", "="), ("UNKNOWN", "#"), ("SYMBOL", "REF"), ("OPERATOR", "+"), ("NUMBER", "1")], 63 | ) 64 | 65 | def test_string(self): 66 | self.assertEqual(tokenize('"hello"'), [("STRING", '"hello"')]) 67 | self.assertEqual(tokenize("'hello'"), [("SYMBOL", "'hello'")]) 68 | self.assertEqual(tokenize("'hello"), [("UNKNOWN", "'hello")]) 69 | 
self.assertEqual(tokenize('"he\\"l\\"lo"'), [("STRING", '"he\\"l\\"lo"')]) 70 | self.assertEqual(tokenize("\"hel'l'o\""), [("STRING", "\"hel'l'o\"")]) 71 | self.assertEqual( 72 | tokenize('"hello""test"'), 73 | [ 74 | ("STRING", '"hello"'), 75 | ("STRING", '"test"'), 76 | ], 77 | ) 78 | 79 | def test_function_missing_closing_parenthesis(self): 80 | tokens = tokenize("SUM(") 81 | self.assertEqual(tokens, [("SYMBOL", "SUM"), ("LEFT_PAREN", "(")]) 82 | 83 | def test_function_token_with_point(self): 84 | self.assertEqual(tokenize("CEILING.MATH"), [("SYMBOL", "CEILING.MATH")]) 85 | self.assertEqual(tokenize("ceiling.math"), [("SYMBOL", "ceiling.math")]) 86 | self.assertEqual( 87 | tokenize("CEILING.MATH()"), 88 | [("SYMBOL", "CEILING.MATH"), ("LEFT_PAREN", "("), ("RIGHT_PAREN", ")")], 89 | ) 90 | self.assertEqual( 91 | tokenize("ceiling.math()"), 92 | [("SYMBOL", "ceiling.math"), ("LEFT_PAREN", "("), ("RIGHT_PAREN", ")")], 93 | ) 94 | 95 | def test_boolean(self): 96 | self.assertEqual(tokenize("true"), [("SYMBOL", "true")]) 97 | self.assertEqual(tokenize("false"), [("SYMBOL", "false")]) 98 | self.assertEqual(tokenize("TRUE"), [("SYMBOL", "TRUE")]) 99 | self.assertEqual(tokenize("FALSE"), [("SYMBOL", "FALSE")]) 100 | self.assertEqual(tokenize("TrUe"), [("SYMBOL", "TrUe")]) 101 | self.assertEqual(tokenize("FalSe"), [("SYMBOL", "FalSe")]) 102 | self.assertEqual( 103 | tokenize("=AND(true,false)"), 104 | [ 105 | ("OPERATOR", "="), 106 | ("SYMBOL", "AND"), 107 | ("LEFT_PAREN", "("), 108 | ("SYMBOL", "true"), 109 | ("ARG_SEPARATOR", ","), 110 | ("SYMBOL", "false"), 111 | ("RIGHT_PAREN", ")"), 112 | ], 113 | ) 114 | self.assertEqual( 115 | tokenize("=trueee"), 116 | [("OPERATOR", "="), ("SYMBOL", "trueee")], 117 | ) 118 | 119 | def test_references(self): 120 | self.assertEqual( 121 | tokenize("=A1"), 122 | [("OPERATOR", "="), ("REFERENCE", "A1")], 123 | ) 124 | self.assertEqual( 125 | tokenize("= A1 "), 126 | [ 127 | ("OPERATOR", "="), 128 | ("SPACE", " "), 129 | ("REFERENCE", 
"A1"), 130 | ("SPACE", " "), 131 | ], 132 | ) 133 | self.assertEqual( 134 | tokenize("=A1:A4"), 135 | [ 136 | ("OPERATOR", "="), 137 | ("REFERENCE", "A1"), 138 | ("OPERATOR", ":"), 139 | ("REFERENCE", "A4"), 140 | ], 141 | ) 142 | 143 | def test_fixed_references(self): 144 | self.assertEqual(tokenize("=$A$1"), [("OPERATOR", "="), ("REFERENCE", "$A$1")]) 145 | self.assertEqual(tokenize("=A$1"), [("OPERATOR", "="), ("REFERENCE", "A$1")]) 146 | self.assertEqual(tokenize("=$A1"), [("OPERATOR", "="), ("REFERENCE", "$A1")]) 147 | self.assertEqual(tokenize("=Sheet1!$A1"), [("OPERATOR", "="), ("REFERENCE", "Sheet1!$A1")]) 148 | self.assertEqual(tokenize("=Sheet1!A$1"), [("OPERATOR", "="), ("REFERENCE", "Sheet1!A$1")]) 149 | self.assertEqual(tokenize("='Sheet1'!$A1"), [("OPERATOR", "="), ("REFERENCE", "'Sheet1'!$A1")]) 150 | self.assertEqual(tokenize("='Sheet1'!A$1"), [("OPERATOR", "="), ("REFERENCE", "'Sheet1'!A$1")]) 151 | 152 | def test_reference_and_sheets(self): 153 | self.assertEqual( 154 | tokenize("=Sheet1!A1"), 155 | [("OPERATOR", "="), ("REFERENCE", "Sheet1!A1")], 156 | ) 157 | self.assertEqual( 158 | tokenize("=Sheet1!A1:A2"), 159 | [("OPERATOR", "="), ("REFERENCE", "Sheet1!A1"), ("OPERATOR", ":"), ("REFERENCE", "A2")], 160 | ) 161 | self.assertEqual( 162 | tokenize("='Sheet1'!A1"), 163 | [("OPERATOR", "="), ("REFERENCE", "'Sheet1'!A1")], 164 | ) 165 | self.assertEqual( 166 | tokenize("='Aryl Nibor Xela Nalim'!A1"), 167 | [("OPERATOR", "="), ("REFERENCE", "'Aryl Nibor Xela Nalim'!A1")], 168 | ) 169 | self.assertEqual( 170 | tokenize("='a '' b'!A1"), 171 | [("OPERATOR", "="), ("REFERENCE", "'a '' b'!A1")], 172 | ) 173 | 174 | def test_wrong_references(self): 175 | self.assertEqual( 176 | tokenize("='Sheet1!A1"), 177 | [("OPERATOR", "="), ("UNKNOWN", "'Sheet1!A1")], 178 | ) 179 | self.assertEqual( 180 | tokenize("=!A1"), 181 | [("OPERATOR", "="), ("SYMBOL", "!A1")], 182 | ) 183 | self.assertEqual( 184 | tokenize("=''!A1"), 185 | [("OPERATOR", "="), ("SYMBOL", 
def parametrize(argvalues):
    """
    Parametrize a test function.

    Decorator for UnitTestCase test functions to parametrize the decorated test.

    Usage:
    ```python
    @parametrize([
        (1, 2),
        (2, 4),
        (-1, -2),
        (0, 0),
    ])
    def test_double(self, input, expected):
        self.assertEqual(input * 2, expected)
    ```

    It works by injecting test functions in the containing class.
    Idea taken from the `parameterized` package (https://pypi.org/project/parameterized/).
    """

    def _bind(func, name, args):
        # Freeze `args` into a zero-extra-argument test method named `name`.
        @functools.wraps(func)
        def bound(self):
            return func(self, *args)

        bound.__name__ = name
        return bound

    def decorator(func):
        # The decorator runs while the class body executes; its caller frame
        # locals are the class namespace, so injected names become methods.
        caller_locals = inspect.currentframe().f_back.f_locals

        width = len(str(len(argvalues)))
        for index, args in enumerate(argvalues):
            new_name = f"{func.__name__}__{index:0>{width}}"
            caller_locals[new_name] = _bind(func, new_name, args)

    return decorator
    @contextmanager
    def assertNotUpdated(self, table, ids=None, msg=None):
        """Assert that no row of ``table`` is updated while the block runs.

        Implemented with a PL/pgSQL trigger that records every touched row in a
        scratch table; the assertion is evaluated when the context exits.

        :param table: table name to watch
        :param ids: optional ids to restrict the watch to; when ``None`` the
                    trigger also fires on INSERT, not only UPDATE
        :param msg: custom assertion message
        """
        cr = self.env.cr
        # Drop any leftover trigger from a previous (failed) run.
        cr.execute(util.format_query(cr, "DROP TRIGGER IF EXISTS no_update ON {}", table))
        cr.execute(
            """
            DROP TABLE IF EXISTS _upg_test_no_upd_id;
            CREATE UNLOGGED TABLE _upg_test_no_upd_id(id int PRIMARY KEY, record json);
            CREATE OR REPLACE
                FUNCTION fail_assert_not_updated() RETURNS TRIGGER AS $$
                BEGIN
                    INSERT INTO _upg_test_no_upd_id VALUES (NEW.id, row_to_json(NEW, true))
                        ON CONFLICT DO NOTHING;
                    RETURN NEW;
                END
                $$ LANGUAGE PLPGSQL
            """,
        )
        # NOTE(review): with ids=[] the trigger watches INSERTs too (`ids is not None`
        # picks UPDATE-only) but the WHEN clause is omitted (`if ids` is falsy) —
        # presumably intentional; confirm before relying on empty-list semantics.
        cr.execute(
            util.format_query(
                cr,
                """
                CREATE TRIGGER no_update
                    BEFORE {when}
                        ON {table}
                    FOR EACH ROW {cond} EXECUTE
                        FUNCTION fail_assert_not_updated()
                """,
                when=util.SQLStr("UPDATE" if ids is not None else "UPDATE or INSERT"),
                table=table,
                cond=util.SQLStr("WHEN (new.id = ANY(%s))" if ids else ""),
            ),
            [list(ids) if ids is not None else None],
        )
        # Cleanups registered before the yield so they run even if the body raises.
        self.addCleanup(cr.execute, "DROP TABLE IF EXISTS _upg_test_no_upd_id")
        self.addCleanup(cr.execute, util.format_query(cr, "DROP TRIGGER IF EXISTS no_update ON {}", table))
        yield
        cr.execute("SELECT record FROM _upg_test_no_upd_id")
        updated_records = [r[0] for r in cr.fetchall()]
        if updated_records:
            raise AssertionError(msg or "Some {} records were updated {}".format(table, updated_records))
EXISTS _upg_test_upd_id; 175 | CREATE UNLOGGED TABLE _upg_test_upd_id(id int PRIMARY KEY); 176 | CREATE OR REPLACE 177 | FUNCTION save_updated() RETURNS TRIGGER AS $$ 178 | BEGIN 179 | INSERT INTO _upg_test_upd_id VALUES (NEW.id) 180 | ON CONFLICT DO NOTHING; 181 | RETURN NEW; 182 | END 183 | $$ LANGUAGE PLPGSQL 184 | """, 185 | ) 186 | cr.execute( 187 | util.format_query( 188 | cr, 189 | """ 190 | CREATE TRIGGER assert_update 191 | BEFORE {when} 192 | ON {table} 193 | FOR EACH ROW {cond} EXECUTE 194 | FUNCTION save_updated() 195 | """, 196 | when=util.SQLStr("UPDATE" if ids is not None else "UPDATE or INSERT"), 197 | table=table, 198 | cond=util.SQLStr("WHEN (NEW.id = ANY(%s))" if ids else ""), 199 | ), 200 | [list(ids) if ids is not None else None], 201 | ) 202 | self.addCleanup(cr.execute, "DROP TABLE IF EXISTS _upg_test_upd_id") 203 | self.addCleanup(cr.execute, util.format_query(cr, "DROP TRIGGER IF EXISTS assert_update ON {}", table)) 204 | yield 205 | cr.execute("SELECT id FROM _upg_test_upd_id") 206 | updated_ids = [r[0] for r in cr.fetchall()] 207 | if not ids: 208 | self.assertTrue(updated_ids, msg or "No record was updated.") 209 | else: 210 | self.assertEqual(set(updated_ids), set(ids), msg or "Records were not updated.") 211 | 212 | 213 | class UpgradeCommon(BaseCase): 214 | __initialized = False 215 | 216 | change_version = (None, None) 217 | _abstract = True 218 | allow_inherited_tests_method = True 219 | 220 | @property 221 | def key(self): 222 | return "%s.%s" % (".".join(self.__class__.__module__.split(".")[-3:]), self.__class__.__name__) 223 | 224 | def _set_value(self, key, value): 225 | self._init_db() 226 | value = json.dumps(value, sort_keys=True) 227 | query = """ 228 | INSERT INTO {} (key, value) VALUES (%s, %s) 229 | ON CONFLICT (key) DO UPDATE SET value=EXCLUDED.value 230 | """.format(DATA_TABLE) 231 | self._data_table_cr.execute(query, (key, value)) 232 | self._data_table_cr._cnx.commit() 233 | 234 | def _get_value(self, key): 235 | 
self._init_db() 236 | query = "SELECT value FROM {} WHERE key = %s".format(DATA_TABLE) 237 | self._data_table_cr.execute(query, [key]) 238 | result = self._data_table_cr.fetchone() 239 | if not result: 240 | raise KeyError(key) 241 | return result[0] 242 | 243 | def _key_exists(self, key): 244 | self._init_db() 245 | query = "SELECT 1 FROM {} WHERE key = %s".format(DATA_TABLE) 246 | self._data_table_cr.execute(query, [key]) 247 | return bool(self._data_table_cr.rowcount) 248 | 249 | def _init_db(self): 250 | if not UpgradeCommon.__initialized: 251 | self._data_table_cr.execute("SELECT 1 FROM pg_class WHERE relname=%s", [DATA_TABLE]) 252 | if not self._data_table_cr.rowcount: 253 | _logger.info("Creating table %s", DATA_TABLE) 254 | query = """ CREATE TABLE {} ( 255 | key VARCHAR(255) PRIMARY KEY, 256 | value JSONB NOT NULL 257 | )""".format(DATA_TABLE) 258 | self._data_table_cr.execute(query) 259 | self._data_table_cr._cnx.commit() 260 | UpgradeCommon.__initialized = True 261 | 262 | def _setup_registry(self): 263 | self.registry = Registry(get_db_name()) 264 | self._data_table_cr = ( 265 | self.registry.cursor() 266 | ) # use to commit in upgrade_test_data, dont use it for anything else 267 | self.addCleanup(self._data_table_cr.close) 268 | 269 | def setUp(self): 270 | super().setUp() 271 | self._setup_registry() 272 | self.cr = self.registry.cursor() 273 | self.env = api.Environment(self.cr, SUPERUSER_ID, {}) 274 | self.addCleanup(self.env.clear) 275 | self.addCleanup(self.cr.close) 276 | 277 | # could be reworked that to either call prepare or check in a unique test_method 278 | # -> but in this case impossible to filter on prepare or check with test_tags 279 | def test_prepare(self): 280 | if self._abstract: 281 | self.skipTest("abstract test class") 282 | return 283 | (version, sub_version) = self.change_version 284 | if version is not None: 285 | current_version = parse_version(release.series) 286 | if current_version >= parse_version("%s.%s" % 
    def test_check(self):
        """Run the `check` phase on the target version.

        Skipped when the class is abstract, when the current version lies outside
        the bounds derived from ``change_version``, or when the matching `prepare`
        phase never stored a value for this test's key.
        """
        if self._abstract:
            self.skipTest("abstract test class")
            return
        (version, sub_version) = self.change_version
        if version is not None:
            current_version = parse_version(release.series)
            if current_version < parse_version("%s.%s" % self.change_version):
                self.skipTest("out of bounds version (<)")
                return
            if current_version > parse_version("%s.%s" % get_next_major(version, sub_version)):
                self.skipTest("out of bounds version (>)")
                return

        key = self.key
        try:
            value = self._get_value(key)
        except KeyError:
            _logger.info("No value found for %s, skipping check", key)
            # we don't want to check is corresponding prepare was not executed
            # Example: change_version 13.1, testing from 13.2 to 13.3
        else:
            _logger.info("Calling %s.check", self.__class__.__name__)
            self.check(value)
# helpers to get the version on which a test is expected to run depending on the value
# specified with the `change_version` decorator
FAKE_MAJORS = [(12, 3)]  # non dot-zero versions which will run tests


def get_next_major(major, minor):
    """Return the first "major-like" version strictly after ``(major, minor)``."""
    for fake_major, fake_minor in FAKE_MAJORS:
        if major == fake_major and minor < fake_minor:
            return (fake_major, fake_minor)
    # Any non-zero minor rolls over to the next dot-zero major.
    return (major + 1, 0) if minor != 0 else (major, 0)


def get_previous_major(major, minor):
    """Return the last "major-like" version strictly before ``(major, minor)``."""
    previous = major - 1 if minor == 0 else major
    for fake_major, fake_minor in FAKE_MAJORS:
        if previous == fake_major and (minor == 0 or minor > fake_minor):
            return (fake_major, fake_minor)
    return (previous, 0)
# pylint: disable=inherit-non-class
class IntegrityCase(UpgradeCommon, _create_meta(20, "integrity_case")):
    """
    Test case to check invariant through any version.

    User must define a "invariant" method.
    invariant return value will be compared between the two version.

    invariant implementation may contains version conditional code to match api changes.
    """

    # default assertion message used by `check`
    message = "Invariant check fail"

    def __init_subclass__(cls, abstract=False):
        # Abstract subclasses may omit `invariant`; concrete ones must define it.
        cls._abstract = abstract
        if not abstract and not hasattr(cls, "invariant"):
            _logger.error("%s (IntegrityCase) must define an invariant method", cls.__name__)

    # IntegrityCase should not alterate database:
    # TODO give a test cursor, don't commit after prepare, use a protected cursor to set_value

    def prepare(self):
        """Capture the invariant value on the origin version."""
        return self.invariant()

    def check(self, value):
        """Compare the stored origin value with the invariant on the target version."""
        self.assertEqual(value, self.convert_check(self.invariant()), self.message)

    def _setup_registry(self):
        # Run the whole case in registry test mode so nothing leaks to the DB.
        super(IntegrityCase, self)._setup_registry()
        cr = self.registry.cursor()
        self.addCleanup(cr.close)
        if hasattr(self, "registry_enter_test_mode"):
            # newer API handles leave_test_mode cleanup itself
            self.registry_enter_test_mode(cr=cr)
        else:
            self.registry.enter_test_mode(cr)
            self.addCleanup(self.registry.leave_test_mode)

    def setUp(self):
        super(IntegrityCase, self).setUp()

        # Patch Cursor.commit for the duration of the test: only the logging
        # database connection may commit; anything else is a test bug.
        def commit(self):
            if self.dbname == config["log_db"].split("/")[-1]:
                self._cnx.commit()
            else:
                raise RuntimeError("Commit is forbidden in integrity cases")

        patcher = patch.object(odoo.sql_db.Cursor, "commit", commit)
        patcher.start()
        self.addCleanup(patcher.stop)

    def skip_if_demo(self):
        """Skip the current test when the database was loaded with demo data."""
        self.env.cr.execute("SELECT 1 FROM ir_module_module WHERE name='base' AND demo")
        if self.env.cr.rowcount:
            self.skipTest("This invariant is not guaranteed with demo data.")
@contextmanager
def no_deprecated_accounts(cr):
    """Temporarily clear the `deprecated` flag on all accounts.

    Flags are restored on the accounts that were deprecated when the context
    exits normally (not on exception, matching the historical behavior).
    """
    cr.execute(
        """
        UPDATE account_account
           SET deprecated = false
         WHERE deprecated = true
     RETURNING id
        """
    )
    # tuple, not list: psycopg2 adapts a tuple for the `IN %s` clause below
    deprecated_ids = tuple(row[0] for row in cr.fetchall())
    yield
    if deprecated_ids:
        cr.execute(
            """
            UPDATE account_account
               SET deprecated = true
             WHERE id IN %s
            """,
            [deprecated_ids],
        )
@contextmanager
def skip_failing_python_taxes(env, skipped=None):
    """Make python-code taxes non-fatal inside the block.

    While active, `account.tax._compute_amount` swallows ``ValueError`` raised by
    taxes of `amount_type == "code"` and returns 0 instead; other tax types are
    untouched. Failures are collected into ``skipped`` (``{tax_id: (name, error)}``)
    when a dict is provided. No-op if `account_tax_python` is not installed.
    """
    if module_installed(env.cr, "account_tax_python"):
        origin_compute_amount = env.registry["account.tax"]._compute_amount

        def _compute_amount(self, *args, **kwargs):
            if self.amount_type != "code":
                return origin_compute_amount(self, *args, **kwargs)
            try:
                return origin_compute_amount(self, *args, **kwargs)
            except ValueError as e:
                if skipped is not None:
                    skipped[self.id] = (self.name, e.args[0])
                return 0

        # monkey-patch restored on normal exit only (no try/finally), as before
        env.registry["account.tax"]._compute_amount = _compute_amount
        yield
        env.registry["account.tax"]._compute_amount = origin_compute_amount
    else:
        yield


def upgrade_analytic_distribution(cr, model, tag_table=None, account_field=None, tag_field=None):
    """Migrate analytic tags/account of ``model`` to the `analytic_distribution` JSON field.

    Builds, for each record, a ``{analytic_account_id: percentage}`` mapping from
    both the m2m analytic tags (via their distribution lines) and the legacy
    many2one analytic account (counted as 100%), summing percentages per account.
    The legacy columns are dropped afterwards via `remove_field`.

    :param cr: database cursor
    :param model: model to migrate
    :param tag_table: m2m relation table to the analytic tags
                      (defaults to ``account_analytic_tag_<table>_rel``)
    :param account_field: legacy m2o column (defaults to ``analytic_account_id``)
    :param tag_field: legacy m2m field name (defaults to ``analytic_tag_ids``)
    """
    table = table_of_model(cr, model)
    tag_table = tag_table or "account_analytic_tag_{table}_rel".format(table=table)
    account_field = account_field or "analytic_account_id"
    tag_field = tag_field or "analytic_tag_ids"
    # `{{parallel_filter}}` is left as a literal placeholder for explode_query_range.
    query = """
        WITH _items AS (
            SELECT id, {account_field} FROM {table} item WHERE {{parallel_filter}}
        ),
        table_union AS (
            SELECT item.id AS line_id,
                   distribution.account_id AS account_id,
                   distribution.percentage AS percentage
              FROM _items item
              JOIN {tag_table} analytic_rel ON analytic_rel.{table}_id = item.id
              JOIN account_analytic_distribution distribution ON analytic_rel.account_analytic_tag_id = distribution.tag_id
            UNION ALL
            SELECT item.id AS line_id,
                   item.{account_field} AS account_id,
                   100 AS percentage
              FROM _items item
             WHERE item.{account_field} IS NOT NULL
        ),
        summed_union AS (
            SELECT line_id,
                   account_id,
                   SUM(percentage) AS percentage
              FROM table_union
          GROUP BY line_id, account_id
        ),
        distribution AS (
            SELECT line_id,
                   json_object_agg(account_id, percentage) AS distribution
              FROM summed_union
          GROUP BY line_id
        )
        UPDATE {table} item
           SET analytic_distribution = distribution.distribution
          FROM distribution
         WHERE item.id = distribution.line_id
    """.format(
        account_field=account_field,
        table=table,
        tag_table=tag_table,
    )

    create_column(cr, table, "analytic_distribution", "jsonb")
    parallel_execute(cr, explode_query_range(cr, query, table=table, alias="item"))
    remove_field(cr, model, account_field)
    remove_field(cr, model, tag_field)
def uniq_tags(cr, model, uniq_column="name", order="id"):
    """
    Deduplicate "tag" models entries.

    In standard, should only be referenced as many2many
    But with a customization, could be referenced as many2one

    By using `uniq_column=lower(name)` and `order=name`
    you can prioritize tags in CamelCase/UPPERCASE.
    """
    table = table_of_model(cr, model)

    # for each group of duplicates: keep the first id (per `order`), list the others
    cr.execute(
        format_query(
            cr,
            """
            SELECT unnest((array_agg(id ORDER BY {order}))[2:]),
                   (array_agg(id ORDER BY {order}))[1]
              FROM {table}
             GROUP BY {uniq_column}
            HAVING count(id) > 1
            """,
            table=table,
            order=SQLStr(order),
            uniq_column=SQLStr(uniq_column),
        )
    )
    if not cr.rowcount:
        return

    # duplicate id -> kept id
    dup_to_kept = dict(cr.fetchall())
    replace_record_references_batch(cr, dup_to_kept, model)
    remove_records(cr, model, dup_to_kept.keys())
def split_group(cr, from_groups, to_group):
    """Add users having all `from_groups` into `to_group`."""

    def _resolve(group):
        # xmlids are resolved to database ids; unknown xmlids yield a falsy id
        if not isinstance(group, basestring):
            return group
        gid = ref(cr, group)
        if not gid:
            _logger.warning("split_group(): Unknown group: %r", group)
        return gid

    if not isinstance(from_groups, (list, tuple, set)):
        from_groups = [from_groups]

    from_groups = [gid for gid in (_resolve(g) for g in from_groups) if gid]
    if not from_groups:
        return

    if isinstance(to_group, basestring):
        to_group = ref(cr, to_group)

    assert to_group

    cr.execute(
        """
        INSERT INTO res_groups_users_rel(uid, gid)
             SELECT uid, %s
               FROM res_groups_users_rel
              GROUP BY uid
             HAVING array_agg(gid) @> %s
             EXCEPT
             SELECT uid, gid
               FROM res_groups_users_rel
              WHERE gid = %s
        """,
        [to_group, from_groups, to_group],
    )
def drop_workflow(cr, osv):
    """Delete the workflow attached to model `osv` with all its instances, workitems and triggers."""
    if not table_exists(cr, "wkf"):
        # workflows have been removed in 10.saas~14
        # noop if there is no workflow tables anymore...
        return

    query = """
        -- we want to first drop the foreign keys on the workitems because
        -- it slows down the process a lot
        ALTER TABLE wkf_triggers DROP CONSTRAINT wkf_triggers_workitem_id_fkey;
        ALTER TABLE wkf_workitem DROP CONSTRAINT wkf_workitem_act_id_fkey;
        ALTER TABLE wkf_workitem DROP CONSTRAINT wkf_workitem_inst_id_fkey;
        ALTER TABLE wkf_triggers DROP CONSTRAINT wkf_triggers_instance_id_fkey;

        -- if this workflow is used as a subflow, complete workitem running this subflow
        UPDATE wkf_workitem wi
           SET state = 'complete'
          FROM wkf_instance i JOIN wkf w ON (w.id = i.wkf_id)
         WHERE wi.subflow_id = i.id
           AND w.osv = %(osv)s
           AND wi.state = 'running'
        ;

        -- delete the workflow and dependencies
        WITH deleted_wkf AS (
            DELETE FROM wkf WHERE osv = %(osv)s RETURNING id
        ),
        deleted_wkf_instance AS (
            DELETE FROM wkf_instance i
                  USING deleted_wkf w
                  WHERE i.wkf_id = w.id
              RETURNING i.id
        ),
        _delete_triggers AS (
            DELETE FROM wkf_triggers t
                  USING deleted_wkf_instance i
                  WHERE t.instance_id = i.id
        ),
        deleted_wkf_activity AS (
            DELETE FROM wkf_activity a
                  USING deleted_wkf w
                  WHERE a.wkf_id = w.id
              RETURNING a.id
        )
        DELETE FROM wkf_workitem wi
              USING deleted_wkf_instance i
              WHERE wi.inst_id = i.id
        ;

        -- recreate constraints
        ALTER TABLE wkf_triggers ADD CONSTRAINT wkf_triggers_workitem_id_fkey
            FOREIGN KEY (workitem_id) REFERENCES wkf_workitem(id)
            ON DELETE CASCADE;
        ALTER TABLE wkf_workitem ADD CONSTRAINT wkf_workitem_act_id_fkey
            FOREIGN key (act_id) REFERENCES wkf_activity(id)
            ON DELETE CASCADE;
        ALTER TABLE wkf_workitem ADD CONSTRAINT wkf_workitem_inst_id_fkey
            FOREIGN KEY (inst_id) REFERENCES wkf_instance(id)
            ON DELETE CASCADE;
        ALTER TABLE wkf_triggers ADD CONSTRAINT wkf_triggers_instance_id_fkey
            FOREIGN KEY (instance_id) REFERENCES wkf_instance(id)
            ON DELETE CASCADE;
    """
    cr.execute(query, {"osv": osv})
def table_of_model(cr, model):
    """
    Return the name of the SQL table backing `model`.

    Most models follow the ``model.replace(".", "_")`` convention; the dict below
    lists the historical exceptions.  Using a dict literal instead of re-parsing a
    text table on every call is both clearer and cheaper.
    """
    # `mail.notification` was a "normal" model in versions <9.0,
    # a named m2m in >=saas~13, and renamed as a "normal" model table in >=saas~14.3
    if model == "mail.notification" and version_gte("9.saas~13") and not version_gte("saas~14.3"):
        return "mail_message_res_partner_needaction_rel"

    exceptions = {
        "ir.actions.actions": "ir_actions",
        "ir.actions.act_url": "ir_act_url",
        "ir.actions.act_window": "ir_act_window",
        "ir.actions.act_window_close": "ir_actions",
        "ir.actions.act_window.view": "ir_act_window_view",
        "ir.actions.client": "ir_act_client",
        "ir.actions.report.xml": "ir_act_report_xml",
        "ir.actions.report": "ir_act_report_xml",
        "ir.actions.server": "ir_act_server",
        "ir.actions.wizard": "ir_act_wizard",
        "stock.picking.in": "stock_picking",
        "stock.picking.out": "stock_picking",
        "workflow": "wkf",
        "workflow.activity": "wkf_activity",
        "workflow.instance": "wkf_instance",
        "workflow.transition": "wkf_transition",
        "workflow.triggers": "wkf_triggers",
        "workflow.workitem": "wkf_workitem",
        # mass_mailing
        "mail.mass_mailing.list_contact_rel": "mail_mass_mailing_contact_list_rel",
        "mailing.contact.subscription": "mailing_contact_list_rel",
        "project.task.stage.personal": "project_task_user_rel",
    }
    return exceptions.get(model, model.replace(".", "_"))
def model_of_table(cr, table):
    """
    Return the model whose records live in the SQL table `table`.

    Falls back on a lookup in `ir_model` (and ultimately on the naming
    convention) when the table is not a known exception.
    """
    if table == "ir_act_report_xml":
        # model renamed in 10.saas~17
        return "ir.actions.report" if version_gte("10.saas~17") else "ir.actions.report.xml"
    # Not a real model until saas~13; became a regular table again in saas~14.3
    if table == "mail_message_res_partner_needaction_rel" and version_gte("9.saas~13") and not version_gte("saas~14.3"):
        return "mail.notification"

    exceptions = {
        # can also be act_window_close, but there are chances it wont be useful for anyone...
        "ir_actions": "ir.actions.actions",
        "ir_act_url": "ir.actions.act_url",
        "ir_act_window": "ir.actions.act_window",
        "ir_act_window_view": "ir.actions.act_window.view",
        "ir_act_client": "ir.actions.client",
        "ir_act_server": "ir.actions.server",
        "ir_act_wizard": "ir.actions.wizard",
        "wkf": "workflow",
        "wkf_activity": "workflow.activity",
        "wkf_instance": "workflow.instance",
        "wkf_transition": "workflow.transition",
        "wkf_triggers": "workflow.triggers",
        "wkf_workitem": "workflow.workitem",
        "_unknown": "_unknown",
        "mail_mass_mailing_contact_list_rel": "mail.mass_mailing.list_contact_rel",
        "mailing_contact_list_rel": "mailing.contact.subscription",
        "project_task_user_rel": "project.task.stage.personal",
    }
    try:
        return exceptions[table]
    except KeyError:
        cr.execute(
            """
            SELECT model
              FROM ir_model
             WHERE replace(model, '.', '_') = %s
            """,
            [table],
        )
        candidates = [m for (m,) in cr.fetchall()]
        if candidates:
            if len(candidates) > 1:
                _logger.critical("cannot determine model of table %r. Multiple candidates: %r", table, candidates)
            return candidates[0]

        fallback = table.replace("_", ".")
        _logger.critical(
            "cannot determine model of table %r. No candidates found in the `ir_model` table. Fallback to %r",
            table,
            fallback,
        )
        return fallback
not in model and not model.startswith("x_") and model not in _VALID_MODELS: 142 | raise SleepyDeveloperError("`{}` seems to be a table name instead of model name".format(model)) 143 | return model 144 | 145 | 146 | def _validate_table(table): 147 | if "." in table: 148 | raise SleepyDeveloperError("`{}` seems to be a model name instead of table name".format(table)) 149 | return table 150 | 151 | 152 | def _ir_values_value(cr, prefix=None): 153 | # returns the casting from bytea to text needed in saas~17 for column `value` of `ir_values` 154 | # returns tuple(column_read, cast_write) 155 | cache = getattr(_ir_values_value, "cache", None) 156 | 157 | if cache is None: 158 | from .pg import column_type 159 | 160 | if column_type(cr, "ir_values", "value") == "bytea": 161 | cr.execute("SELECT character_set_name FROM information_schema.character_sets") 162 | (charset,) = cr.fetchone() 163 | column_read = "convert_from(%%svalue, '%s')" % charset 164 | cast_write = "convert_to(%%s, '%s')" % charset 165 | else: 166 | column_read = "%svalue" 167 | cast_write = "%s" 168 | _ir_values_value.cache = (column_read, cast_write) 169 | else: 170 | column_read, cast_write = cache 171 | 172 | prefix = prefix + "." 
if prefix else "" 173 | return column_read % prefix, cast_write 174 | 175 | 176 | def _dashboard_actions(cr, arch_match, *models): 177 | """Yield (dashboard_id, action) of dashboards that match `arch_match` and apply on `models` (if specified).""" 178 | q = """ 179 | SELECT id, arch 180 | FROM ir_ui_view_custom 181 | WHERE arch ~ %s 182 | """ 183 | cr.execute(q, [arch_match]) 184 | for dash_id, arch in cr.fetchall(): 185 | try: 186 | if isinstance(arch, unicode): 187 | arch = arch.encode("utf-8") # noqa: PLW2901 188 | dash = lxml.etree.fromstring(arch) 189 | except lxml.etree.XMLSyntaxError: 190 | _logger.exception("Cannot parse dashboard %s", dash_id) 191 | continue 192 | for act in dash.xpath("//action"): 193 | if models: 194 | try: 195 | act_id = int(act.get("name", "FAIL")) 196 | except ValueError: 197 | continue 198 | 199 | cr.execute("SELECT res_model FROM ir_act_window WHERE id = %s", [act_id]) 200 | [act_model] = cr.fetchone() or [None] 201 | if act_model not in models: 202 | continue 203 | yield dash_id, act 204 | 205 | cr.execute( 206 | "UPDATE ir_ui_view_custom SET arch = %s WHERE id = %s", 207 | [lxml.etree.tostring(dash, encoding="unicode"), dash_id], 208 | ) 209 | 210 | 211 | def _get_theme_models(): 212 | return { 213 | "theme.ir.ui.view": "ir.ui.view", 214 | "theme.ir.asset": "ir.asset", 215 | "theme.website.page": "website.page", 216 | "theme.website.menu": "website.menu", 217 | "theme.ir.attachment": "ir.attachment", 218 | } 219 | 220 | 221 | FieldsPathPart = namedtuple("FieldsPathPart", "field_model field_name relation_model") 222 | if sys.version_info[0] >= 3: 223 | FieldsPathPart.__doc__ = """ 224 | Encapsulate information about a field within a fields path. 
def resolve_model_fields_path(cr, model, path):
    """
    Resolve model fields paths.

    This function returns a list of :class:`~odoo.upgrade.util.helpers.FieldsPathPart`
    where each item describes a field in ``path`` (in the same order). The returned list
    could be shorter than the original ``path`` due to a missing field or model, or
    because there is a non-relational field in the path. The only non-relational field
    allowed in a fields path is the last one, in which case the returned list has the same
    length as the input ``path``.

    .. example::

        .. code-block:: python

            >>> util.resolve_model_fields_path(cr, "res.partner", "user_ids.partner_id.title".split("."))
            [FieldsPathPart(field_model='res.partner', field_name='user_ids', relation_model='res.users'),
             FieldsPathPart(field_model='res.users', field_name='partner_id', relation_model='res.partner'),
             FieldsPathPart(field_model='res.partner', field_name='title', relation_model='res.partner.title')]

        A broken or non-relational intermediate step truncates the result:

        .. code-block:: python

            >>> resolve_model_fields_path(cr, "res.partner", "user_ids.active.name".split("."))
            [FieldsPathPart(field_model='res.partner', field_name='user_ids', relation_model='res.users'),
             FieldsPathPart(field_model='res.users', field_name='active', relation_model=None)]

    :param str model: starting model of the fields path
    :param typing.Sequence[str] path: fields path
    :return: resolved fields path parts
    :rtype: list(:class:`~odoo.upgrade.util.helpers.FieldsPathPart`)
    """
    # materialize once; the original converted `path` to a list a second time in
    # the query parameters
    path = list(path)
    if not path:
        return []
    cr.execute(
        """
        WITH RECURSIVE resolved_fields_path AS (
            -- non-recursive term
            SELECT imf.model AS field_model,
                   imf.name AS field_name,
                   imf.relation AS relation_model,
                   p.path AS path,
                   1 AS part_index
              FROM (VALUES (%(model)s, %(path)s)) p(model, path)
              JOIN ir_model_fields imf
                ON imf.model = p.model
               AND imf.name = p.path[1]

             UNION ALL

            -- recursive term
            SELECT rimf.model AS field_model,
                   rimf.name AS field_name,
                   rimf.relation AS relation_model,
                   rfp.path AS path,
                   rfp.part_index + 1 AS part_index
              FROM resolved_fields_path rfp
              JOIN ir_model_fields rimf
                ON rimf.model = rfp.relation_model
               AND rimf.name = rfp.path[rfp.part_index + 1]
             WHERE cardinality(rfp.path) > rfp.part_index
        )
        SELECT field_model,
               field_name,
               relation_model
          FROM resolved_fields_path
         ORDER BY part_index
        """,
        {"model": model, "path": path},
    )
    return [FieldsPathPart(**row) for row in cr.dictfetchall()]
def remove_salary_rule(cr, xmlid):
    """Remove the salary rule `xmlid`, dropping first the payroll report field generated for it."""
    rule_id = ref(cr, xmlid)
    # the report field name is derived from the structure's country code and the rule code
    cr.execute(
        r"""
        SELECT f.name
          FROM ir_model_fields f,
               hr_salary_rule r
          JOIN hr_payroll_structure s
            ON r.struct_id = s.id
     LEFT JOIN res_country c
            ON s.country_id = c.id
         WHERE r.id = %s
           AND f.model = 'hr.payroll.report'
           AND f.name = regexp_replace(
                   concat_ws(
                       '_',
                       'x_l10n',
                       COALESCE(lower(c.code), 'xx'),
                       lower(r.code)
                   ),
                   '[\.\- ]',
                   '_'
               )
        """,
        [rule_id],
    )
    for (field_name,) in cr.fetchall():
        _logger.info(
            "Removing field %r from model 'hr.payroll.report' since salary rule %r is being removed",
            field_name,
            xmlid,
        )
        remove_field(cr, "hr.payroll.report", field_name)
    delete_unused(cr, xmlid)
INCLUDE_ARCHIVED_PRODUCTS = str2bool(
    os.environ.get("ODOO_MIG_DO_NOT_IGNORE_ARCHIVED_PRODUCTS_FOR_UOM_INCONSISTENCIES"),
    default=False,
)
FIX_PRODUCT_UOM = str2bool(os.environ.get("ODOO_MIG_FIX_ALL_UOM_INCONSISTENCIES"), default=False)

FROM_ENV = Sentinel("FROM_ENV")


def break_recursive_loops(cr, model, field, name_field="name"):
    """
    Break reference cycles in the self-referencing many2one `field` of `model`.

    One record per detected cycle gets `field` reset to NULL; the affected
    records are listed in the migration report using `name_field` as label.
    """
    # TODO add a variant to verify loops on m2m
    _validate_model(model)

    table = table_of_model(cr, model)
    target = target_of(cr, table, field)
    if not target or target[:2] != (table, "id"):
        raise ValueError("The column `{}` is not FK on itself".format(field))

    cr.execute(
        format_query(
            cr,
            """
            WITH RECURSIVE __loop AS (
                SELECT array[{field}] AS path,
                       False AS cycle
                  FROM {table}
                 WHERE {field} IS NOT NULL
                 GROUP BY {field}
                 UNION ALL
                SELECT child.{field} || curr.path AS path,
                       child.{field} = any(curr.path) AS cycle
                  FROM __loop AS curr
                  JOIN {table} AS child
                    ON child.id = curr.path[1]
                 WHERE child.{field} IS NOT NULL
                   AND NOT curr.cycle
            )
            SELECT path FROM __loop WHERE cycle
            """,
            table=table,
            field=field,
        )
    )
    if not cr.rowcount:
        return

    # pick one record to unset per cycle; `done` avoids breaking the same cycle twice
    ids = []
    done = set()
    for (cycle,) in cr.fetchall():
        to_break = min(cycle[: cycle.index(cycle[0], 1)])
        if to_break not in done:
            ids.append(to_break)
            done.update(cycle)

    cr.execute(
        format_query(
            cr,
            """
            UPDATE {table}
               SET {field} = NULL
             WHERE id IN %s
         RETURNING id, {name}
            """,
            table=table,
            field=field,
            name=SQL(get_value_or_en_translation(cr, table, name_field)),
        ),
        [tuple(ids)],
    )
    n_updates = cr.rowcount
    _logger.warning("%s records in %s got their %r column unset to break a recursive loop.", n_updates, table, field)
    N_UPDATES_IN_REPORT = 20
    bad_data = cr.fetchmany(N_UPDATES_IN_REPORT)

    cr.execute(
        format_query(
            cr,
            """
            SELECT m.{}, f.{}
              FROM ir_model_fields f
              JOIN ir_model m
                ON m.id = f.model_id
             WHERE m.model = %s
               AND f.name = %s
            """,
            SQL(get_value_or_en_translation(cr, "ir_model", "name")),
            SQL(get_value_or_en_translation(cr, "ir_model_fields", "field_description")),
        ),
        [model, field],
    )
    model_label, field_label = cr.fetchone()

    disclaimer = (
        " Find below a list of the first {} (out of {}) affected records.".format(N_UPDATES_IN_REPORT, n_updates)
        if n_updates > N_UPDATES_IN_REPORT
        else ""
    )
    # NOTE(review): the report markup below was mangled by text extraction in the
    # reviewed copy; reconstructed as <details>/<summary>/<ul>/<li> -- confirm
    # against the original source.
    add_to_migration_reports(
        """
        <details>
          <summary>
            The following {model} were found to be recursive. Their "{field}" field has been reset.{disclaimer}
          </summary>
          <ul>{li}</ul>
        </details>
        """.format(
            model=html_escape(model_label),
            field=html_escape(field_label),
            disclaimer=disclaimer,
            li="".join("<li>{}</li>".format(get_anchor_link_to_record(model, id_, name)) for id_, name in bad_data),
        ),
        format="html",
        category="Inconsistencies",
    )
  • ".format(get_anchor_link_to_record(model, id_, name)) for id_, name in bad_data), 121 | ), 122 | format="html", 123 | category="Inconsistencies", 124 | ) 125 | 126 | 127 | def verify_companies( 128 | cr, model, field_name, logger=_logger, model_company_field="company_id", comodel_company_field="company_id" 129 | ): 130 | _validate_model(model) 131 | cr.execute( 132 | """ 133 | SELECT ttype, relation, relation_table, column1, column2 134 | FROM ir_model_fields 135 | WHERE name = %s 136 | AND model = %s 137 | AND store IS TRUE 138 | AND ttype IN ('many2one', 'many2many') 139 | """, 140 | [field_name, model], 141 | ) 142 | 143 | field_values = cr.dictfetchone() 144 | 145 | if not field_values: 146 | _logger.warning("Field %s not found on model %s.", field_name, model) 147 | return 148 | 149 | table = table_of_model(cr, model) 150 | comodel = field_values["relation"] 151 | cotable = table_of_model(cr, comodel) 152 | 153 | limit = 15 154 | 155 | if field_values["ttype"] == "many2one": 156 | query = """ 157 | SELECT a.id, a.{model_company_field}, b.id, b.{comodel_company_field}, count(*) OVER () 158 | FROM {table} a 159 | JOIN {cotable} b ON b.id = a.{field_name} 160 | WHERE a.{model_company_field} IS NOT NULL 161 | AND b.{comodel_company_field} IS NOT NULL 162 | AND a.{model_company_field} != b.{comodel_company_field} 163 | LIMIT {limit} 164 | """.format(**locals()) 165 | else: # many2many 166 | m2m_relation = field_values["relation_table"] 167 | f1, f2 = field_values["column1"], field_values["column2"] 168 | query = """ 169 | SELECT a.id, a.{model_company_field}, b.id, b.{comodel_company_field}, count(*) OVER () 170 | FROM {m2m_relation} m 171 | JOIN {table} a ON a.id = m.{f1} 172 | JOIN {cotable} b ON b.id = m.{f2} 173 | WHERE a.{model_company_field} IS NOT NULL 174 | AND b.{comodel_company_field} IS NOT NULL 175 | AND a.{model_company_field} != b.{comodel_company_field} 176 | LIMIT {limit} 177 | """.format(**locals()) 178 | 179 | cr.execute(query) 180 | if 
cr.rowcount: 181 | logger.warning( 182 | "Company field %s/%s is not consistent with %s/%s for %d records (through %s relation %s)", 183 | model, 184 | model_company_field, 185 | comodel, 186 | comodel_company_field, 187 | cr.rowcount, 188 | field_values["ttype"], 189 | field_name, 190 | ) 191 | 192 | bad_rows = cr.fetchall() 193 | total = bad_rows[-1][-1] 194 | lis = "\n".join("
  • record #%s (company=%s) -> record #%s (company=%s)
  • " % bad[:-1] for bad in bad_rows) 195 | 196 | add_to_migration_reports( 197 | message="""\ 198 |
    199 | 200 | Some inconsistencies have been found on field {model}/{field_name} ({total} records affected; show top {limit}) 201 | 202 |
      203 | {lis} 204 |
    205 |
    206 | """.format(**locals()), 207 | category="Multi-company inconsistencies", 208 | format="html", 209 | ) 210 | 211 | 212 | def verify_uoms( 213 | cr, 214 | model, 215 | uom_field="product_uom_id", 216 | product_field="product_id", 217 | include_archived_products=FROM_ENV, 218 | auto_fix=FROM_ENV, 219 | ids=None, 220 | ): 221 | """ 222 | Check if the category of uom on `model` is the same as the category of uom on `product.template`. 223 | 224 | When `ids` is not provided, every ids would be verified. 225 | 226 | Returns list of ids if inconsistencies found, else [] 227 | """ 228 | _validate_model(model) 229 | table = table_of_model(cr, model) 230 | 231 | q = lambda s: quote_ident(s, cr._cnx) 232 | 233 | if include_archived_products is FROM_ENV: 234 | include_archived_products = INCLUDE_ARCHIVED_PRODUCTS 235 | 236 | if auto_fix is FROM_ENV: 237 | auto_fix = FIX_PRODUCT_UOM 238 | 239 | query = """ 240 | SELECT t.id line_id, 241 | t.{uom_column} line_uom_id, 242 | tu.{uom_name} line_uom_name, 243 | tuc.{category_name} line_uom_categ_name, 244 | pt.uom_id product_uom_id, 245 | ptu.{uom_name} product_uom_name, 246 | pt.id product_template_id, 247 | pt.{product_template_name} product_template_name, 248 | ptuc.{category_name} product_uom_categ_name 249 | FROM {table} t 250 | JOIN uom_uom tu ON t.{uom_column} = tu.id 251 | JOIN uom_category tuc ON tu.category_id = tuc.id 252 | JOIN product_product pp ON t.{product_column} = pp.id 253 | JOIN product_template pt ON pp.product_tmpl_id = pt.id 254 | JOIN uom_uom ptu ON pt.uom_id = ptu.id 255 | JOIN uom_category ptuc ON ptu.category_id = ptuc.id 256 | WHERE tu.category_id != ptu.category_id 257 | {ids} 258 | {active} 259 | """.format( 260 | table=q(table), 261 | uom_column=q(uom_field), 262 | product_column=q(product_field), 263 | uom_name=get_value_or_en_translation(cr, "uom_uom", "name"), 264 | category_name=get_value_or_en_translation(cr, "uom_category", "name"), 265 | 
product_template_name=get_value_or_en_translation(cr, "product_template", "name"), 266 | ids=" AND t.id IN %s" if ids else "", 267 | active=" AND pp.active" if not include_archived_products else "", 268 | ) 269 | 270 | rows = [] 271 | if ids is None: 272 | cr.execute(query) 273 | rows = cr.fetchall() 274 | elif ids: 275 | ids_chunks = chunks(ids, size=cr.IN_MAX, fmt=tuple) 276 | for chunk in ids_chunks: 277 | cr.execute(query, [chunk]) 278 | rows.extend(cr.fetchall()) 279 | 280 | if not rows: 281 | return [] 282 | 283 | title = model.replace(".", " ").title() 284 | 285 | if auto_fix: 286 | line_new_ids = {line_id: prod_uom_id for line_id, _, _, _, prod_uom_id, _, _, _, _ in rows} 287 | cr.execute( 288 | """ 289 | UPDATE {table} t 290 | SET {uom_column} = (%s::jsonb->t.id::text)::int 291 | WHERE t.id IN %s 292 | """.format( 293 | table=q(table), 294 | uom_column=q(uom_field), 295 | ), 296 | [ 297 | Json(line_new_ids), 298 | tuple(line_new_ids), 299 | ], 300 | ) 301 | 302 | msg = dedent( 303 | """ 304 | Upon your request, we have automatically fixed the faulty UoMs by picking it from 305 | the Product Template and setting it on the {title}s. 306 | 307 | Please, take the time to check that the following {title}s inconsistencies have 308 | been updated to the right UoM:\n\n{updated_uoms} 309 | """ 310 | ).format( 311 | title=title, 312 | updated_uoms="\n".join( 313 | " * {}(id={}): Updated UoM from `{}`(id={}, category: `{}`) to `{}`(id={}, category: `{}`) for Product Template `{}`(id={})".format( 314 | title, 315 | line_id, 316 | line_uom, 317 | line_uom_id, 318 | line_uom_categ, 319 | prod_uom, 320 | prod_uom_id, 321 | prod_uom_categ, 322 | prod_temp, 323 | prod_temp_id, 324 | ) 325 | for line_id, line_uom_id, line_uom, line_uom_categ, prod_uom_id, prod_uom, prod_temp_id, prod_temp, prod_uom_categ in rows 326 | ), 327 | ) 328 | faulty_ids = [] 329 | 330 | else: 331 | msg = """ 332 | There is a UoM mismatch in some {title}s. 
def verify_products(
    cr,
    model,
    foreign_model,
    foreign_model_reference_field,
    model_product_field="product_id",
    foreign_model_product_field="product_id",
    include_archived_products=FROM_ENV,
    ids=None,
):
    """
    Check if the product on the `foreign_model` is the same as the product on the `model`.

    When `ids` is not provided, every ids would be verified.

    The `foreign_model` should be the one that have a reference to the `model` using this
    schema:
    >>> `foreign_model`.`foreign_reference_field` = `model`.id

    In case where the model/foreign model own a specific product field (different than `product_id`),
    you NEED to provide it using model_product_field/foreign_model_product_field

    As a function example, if you want to check if the product defined on the `account_move_line`
    is the same as the product defined on the `purchase_order_line` using `purchase_line_id`
    as reference, you should call this function in this way:
    >>> verify_products(cr, "purchase.order.line", "account.move.line", "purchase_line_id", ids=ids)

    Returns list of ids if inconsistencies found, else [].
    """
    _validate_model(model)
    _validate_model(foreign_model)
    table = table_of_model(cr, model)
    foreign_table = table_of_model(cr, foreign_model)

    quote = lambda s: quote_ident(s, cr._cnx)  # noqa: E731

    if include_archived_products is FROM_ENV:
        include_archived_products = INCLUDE_ARCHIVED_PRODUCTS

    query = """
        SELECT f.id,
               f.{foreign_model_product_field},
               fpt.{name},
               t.id,
               t.{model_product_field},
               tpt.{name}
          FROM {table} t
          JOIN {foreign_table} f ON f.{foreign_model_reference_field} = t.id
          JOIN product_product tpp ON t.{model_product_field} = tpp.id
          JOIN product_template tpt ON tpp.product_tmpl_id = tpt.id
          JOIN product_product fpp ON f.{foreign_model_product_field} = fpp.id
          JOIN product_template fpt ON fpp.product_tmpl_id = fpt.id
         WHERE f.{foreign_model_product_field} != t.{model_product_field}
         {ids}
         {active}
    """.format(
        name=get_value_or_en_translation(cr, "product_template", "name"),
        table=quote(table),
        foreign_table=quote(foreign_table),
        foreign_model_reference_field=quote(foreign_model_reference_field),
        model_product_field=quote(model_product_field),
        foreign_model_product_field=quote(foreign_model_product_field),
        ids=" AND t.id IN %s" if ids else "",
        active=" AND tpp.active" if not include_archived_products else "",
    )

    rows = []
    if ids is None:
        cr.execute(query)
        rows = cr.fetchall()
    elif ids:
        # chunk explicit id lists to stay below the cursor's IN clause limit
        for chunk in chunks(ids, size=cr.IN_MAX, fmt=tuple):
            cr.execute(query, [chunk])
            rows.extend(cr.fetchall())

    if not rows:
        return []

    title = model.replace(".", " ").title()
    foreign_title = foreign_model.replace(".", " ").title()
    msg = """
    There is a product mismatch in some {foreign_title}. The product defined on the {foreign_title}
    is different from that defined on the {title}. To allow the upgrade to continue, the product
    on the {foreign_title} and on the {title} must be the same.
    These {foreign_title} have inconsistencies:
    """.format(**locals())
    msg += "\n".join(
        " * {}(id={}) has Product `{}`(id={}), {}(id={}) has Product `{}`(id={})".format(
            foreign_title, fline_id, fline_product, fline_product_id, title, line_id, line_product, line_product_id
        )
        for fline_id, fline_product_id, fline_product, line_id, line_product_id, line_product in rows
    )

    add_to_migration_reports(
        category=title + " - " + foreign_title + " Products Inconsistencies",
        message=msg,
        format="md",
    )
    _logger.warning("\n%s\n", msg)
    return [r[0] for r in rows]
458 | These {foreign_title} have inconsistencies: 459 | """.format(**locals()) 460 | msg += "\n".join( 461 | " * {}(id={}) has Product `{}`(id={}), {}(id={}) has Product `{}`(id={})".format( 462 | foreign_title, fline_id, fline_product, fline_product_id, title, line_id, line_product, line_product_id 463 | ) 464 | for fline_id, fline_product_id, fline_product, line_id, line_product_id, line_product in rows 465 | ) 466 | 467 | add_to_migration_reports( 468 | category=title + " - " + foreign_title + " Products Inconsistencies", 469 | message=msg, 470 | format="md", 471 | ) 472 | _logger.warning("\n%s\n", msg) 473 | return [r[0] for r in rows] 474 | -------------------------------------------------------------------------------- /src/util/indirect_references.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import collections 3 | 4 | from .helpers import model_of_table, table_of_model 5 | from .pg import SQLStr, column_exists, table_exists 6 | 7 | 8 | class IndirectReference( 9 | collections.namedtuple( 10 | "IndirectReference", "table res_model res_id res_model_id set_unknown company_dependent_comodel" 11 | ) 12 | ): 13 | def model_filter(self, prefix="", placeholder="%s"): 14 | if prefix and prefix[-1] != ".": 15 | prefix += "." 16 | if self.res_model_id: 17 | placeholder = "(SELECT id FROM ir_model WHERE model={})".format(placeholder) 18 | column = self.res_model_id 19 | else: 20 | column = self.res_model 21 | 22 | if column is None: 23 | # `model` is not set when `company_dependent_comodel` is. 
24 | return SQLStr("(false AND {} IS NULL)".format(placeholder)) 25 | 26 | return SQLStr('{}"{}"={}'.format(prefix, column, placeholder)) 27 | 28 | 29 | # By default, there is no `res_id`, no `res_model_id` and it is deleted when the linked model is removed 30 | # warning: defaults are from the last fields in the namedtuple 31 | IndirectReference.__new__.__defaults__ = (None, None, False, None) # https://stackoverflow.com/a/18348004 32 | _IR = IndirectReference 33 | 34 | INDIRECT_REFERENCES = [ 35 | _IR("ir_attachment", "res_model", "res_id"), 36 | _IR("ir_cron", "model", None, set_unknown=True), 37 | _IR("ir_act_report_xml", "model", None, set_unknown=True), 38 | _IR("ir_act_window", "res_model", "res_id"), 39 | _IR("ir_act_window", "res_model", None), 40 | _IR("ir_act_window", "src_model", None), 41 | _IR("ir_act_server", "wkf_model_name", None), 42 | _IR("ir_act_server", "crud_model_name", None), 43 | _IR("ir_act_server", "model_name", None, "model_id", set_unknown=True), 44 | _IR("ir_act_client", "res_model", None, set_unknown=True), 45 | _IR("ir_embedded_actions", "parent_res_model", "parent_res_id"), 46 | _IR("ir_model", "model", None), 47 | _IR("ir_model_fields", "model", None), 48 | _IR("ir_model_fields", "relation", None), # destination of a relation field 49 | _IR("ir_model_data", "model", "res_id"), 50 | _IR("ir_filters", "model_id", None, set_unknown=True), # YUCK!, not an id 51 | # duplicated for versions where the `res_id` column does not exists 52 | _IR("ir_filters", "model_id", "embedded_parent_res_id"), 53 | _IR("ir_exports", "resource", None), 54 | _IR("ir_ui_view", "model", None, set_unknown=True), 55 | _IR("ir_values", "model", "res_id"), 56 | _IR("wkf_transition", "trigger_model", None), 57 | _IR("wkf_triggers", "model", None), 58 | _IR("ir_model_fields_anonymization", "model_name", None), 59 | _IR("ir_model_fields_anonymization_migration_fix", "model_name", None), 60 | _IR("base_import_mapping", "res_model", None), 61 | _IR("calendar_event", 
"res_model", "res_id"), # new in saas~18 62 | _IR("data_cleaning_model", "res_model_name", None), 63 | _IR("data_cleaning_record", "res_model_name", "res_id"), 64 | _IR("data_cleaning_rule", "res_model_name", None), 65 | _IR("data_merge_group", "res_model_name", None), 66 | _IR("data_merge_model", "res_model_name", None), 67 | _IR("data_merge_record", "res_model_name", "res_id"), 68 | _IR("documents_document", "res_model", "res_id"), 69 | _IR("email_template", "model", None, set_unknown=True), # stored related 70 | _IR("iap_extracted_words", "res_model", "res_id"), 71 | _IR("mail_template", "model", None, set_unknown=True), # model renamed in saas~6 72 | _IR("mail_activity", "res_model", "res_id", "res_model_id"), 73 | _IR("mail_activity_type", "res_model", None), 74 | _IR("mail_alias", None, "alias_force_thread_id", "alias_model_id"), 75 | _IR("mail_alias", None, "alias_parent_thread_id", "alias_parent_model_id"), 76 | _IR("mail_followers", "res_model", "res_id"), 77 | _IR("mail_message_subtype", "res_model", None), 78 | _IR("mail_message", "model", "res_id"), 79 | _IR("mail_compose_message", "model", "res_id"), 80 | _IR("mail_wizard_invite", "res_model", "res_id"), 81 | _IR("mail_mail_statistics", "model", "res_id"), 82 | _IR("mailing_trace", "model", "res_id"), 83 | _IR("mail_mass_mailing", "mailing_model", None, "mailing_model_id", set_unknown=True), 84 | _IR("mailing_mailing", None, None, "mailing_model_id", set_unknown=True), 85 | _IR("marketing_campaign", "model_name", None, set_unknown=True), # stored related 86 | _IR("marketing_participant", "model_name", "res_id", "model_id", set_unknown=True), 87 | _IR("payment_transaction", None, "callback_res_id", "callback_model_id"), 88 | _IR("project_project", "alias_model", None, set_unknown=True), 89 | # IR("pos_blackbox_be_log", "model_name", None), # ACTUALLY NOT. 
We need to keep records intact, even when renaming a model # noqa: ERA001 90 | _IR("quality_point", "worksheet_model_name", None), 91 | _IR("rating_rating", "res_model", "res_id", "res_model_id"), 92 | _IR("rating_rating", "parent_res_model", "parent_res_id", "parent_res_model_id"), 93 | _IR("snailmail_letter", "model", "res_id", set_unknown=True), 94 | _IR("sms_template", "model", None), 95 | _IR("studio_approval_rule", "model_name", None), 96 | _IR("spreadsheet_revision", "res_model", "res_id"), 97 | _IR("studio_approval_entry", "model", "res_id"), 98 | _IR("timer_timer", "res_model", "res_id"), 99 | _IR("timer_timer", "parent_res_model", "parent_res_id"), 100 | _IR("worksheet_template", "res_model", None), 101 | ] 102 | 103 | 104 | def indirect_references(cr, bound_only=False): 105 | for ir in INDIRECT_REFERENCES: 106 | if bound_only and not ir.res_id: 107 | continue 108 | if ir.res_id and not column_exists(cr, ir.table, ir.res_id): 109 | continue 110 | 111 | # some `res_model/res_model_id` combination may change between 112 | # versions (i.e. rating_rating.res_model_id was added in saas~15). 113 | # we need to verify existence of columns before using them. 
114 | if ir.res_model and not column_exists(cr, ir.table, ir.res_model): 115 | ir = ir._replace(res_model=None) # noqa: PLW2901 116 | if ir.res_model_id and not column_exists(cr, ir.table, ir.res_model_id): 117 | ir = ir._replace(res_model_id=None) # noqa: PLW2901 118 | if not ir.res_model and not ir.res_model_id: 119 | continue 120 | 121 | yield ir 122 | 123 | if column_exists(cr, "ir_model_fields", "company_dependent"): 124 | cr.execute( 125 | """ 126 | SELECT model, name, relation 127 | FROM ir_model_fields 128 | WHERE company_dependent IS TRUE 129 | AND ttype = 'many2one' 130 | """, 131 | ) 132 | for model_name, column_name, comodel_name in cr.fetchall(): 133 | yield _IR(table_of_model(cr, model_name), None, column_name, company_dependent_comodel=comodel_name) 134 | 135 | # XXX Once we will get the model field of `many2one_reference` fields in the database, we should get them also 136 | # (and filter the one already hardcoded) 137 | 138 | 139 | def generate_indirect_reference_cleaning_queries(cr, ir): 140 | """Yield queries to clean an `IndirectReference`.""" 141 | assert not ir.company_dependent_comodel # not supported for now 142 | if ir.res_model: 143 | query = """ 144 | SELECT {ir.res_model} 145 | FROM {ir.table} 146 | WHERE {ir.res_model} IS NOT NULL 147 | GROUP BY {ir.res_model} 148 | """ 149 | else: 150 | query = """ 151 | SELECT m.model 152 | FROM {ir.table} t 153 | JOIN ir_model m ON m.id = t.{ir.res_model_id} 154 | GROUP BY m.model 155 | """ 156 | cr.execute(query.format(ir=ir)) 157 | for (model,) in cr.fetchall(): 158 | res_table = table_of_model(cr, model) 159 | if table_exists(cr, res_table): 160 | cond = "NOT EXISTS (SELECT 1 FROM {res_table} r WHERE r.id = t.{ir.res_id})".format(**locals()) 161 | else: 162 | cond = "true" 163 | 164 | model_filter = ir.model_filter() 165 | yield cr.mogrify( 166 | "DELETE FROM {ir.table} t WHERE {model_filter} AND {cond}".format(**locals()), [model] 167 | ).decode() 168 | 169 | 170 | def res_model_res_id(cr, 
filtered=True): 171 | for ir in indirect_references(cr): 172 | if ir.res_model: 173 | yield model_of_table(cr, ir.table), ir.res_model, ir.res_id 174 | -------------------------------------------------------------------------------- /src/util/inherit.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import logging 3 | import operator 4 | import os 5 | 6 | from .const import ENVIRON, NEARLYWARN 7 | from .misc import _cached, parse_version, version_gte 8 | 9 | _logger = logging.getLogger(__name__) 10 | 11 | 12 | if version_gte("saas~17.1"): 13 | from ._inherit import Inherit, frozendict 14 | 15 | @_cached 16 | def _get_inheritance_data(cr): 17 | base_version = _get_base_version(cr)[:2] + ("*final",) 18 | cr.execute( 19 | """ 20 | SELECT p.model, 21 | array_agg(m.model ORDER BY i.id), 22 | array_agg(f.name ORDER BY i.id) 23 | FROM ir_model_inherit i 24 | JOIN ir_model m 25 | ON m.id = i.model_id 26 | JOIN ir_model p 27 | ON p.id = i.parent_id 28 | LEFT JOIN ir_model_fields f 29 | ON f.id = i.parent_field_id 30 | GROUP BY p.model 31 | """ 32 | ) 33 | return frozendict( 34 | { 35 | parent: [ 36 | Inherit(model=model, born=base_version, dead=None, via=via) 37 | for model, via in zip(children, vias, strict=True) 38 | ] 39 | for parent, children, vias in cr.fetchall() 40 | } 41 | ) 42 | 43 | else: 44 | from ._inherit import inheritance_data 45 | 46 | def _get_inheritance_data(cr): 47 | return inheritance_data 48 | 49 | 50 | def _get_base_version(cr): 51 | # base_version is normally computed in `base/0.0.0/pre-base_version.py` (and symlinks) 52 | # However, if theses scripts are used to upgrade custom modules afterward (like the P.S. do), 53 | # as the `base` module not being updated, the *base_version* MUST be set as an environment variable. 
54 | bv = ENVIRON.get("__base_version") 55 | if bv: 56 | return bv 57 | # trust env variable if set 58 | bv = os.getenv("ODOO_BASE_VERSION") 59 | if bv: 60 | bv = ENVIRON["__base_version"] = parse_version(bv) 61 | else: 62 | cr.execute("SELECT state, latest_version FROM ir_module_module WHERE name='base'") 63 | state, version = cr.fetchone() 64 | if state != "to upgrade": 65 | major = ".".join(version.split(".")[:2]) 66 | _logger.log( 67 | NEARLYWARN, 68 | "Assuming upgrading from Odoo %s. If it's not the case, specify the environment variable `ODOO_BASE_VERSION`.", 69 | major, 70 | ) 71 | bv = ENVIRON["__base_version"] = parse_version(version) 72 | return bv 73 | 74 | 75 | def _version_comparator(cr, interval): 76 | if interval not in {"[]", "()", "[)", "(]"}: 77 | raise ValueError("Invalid interval: %r" % (interval,)) 78 | 79 | op_lower = operator.le if interval[0] == "[" else operator.lt 80 | op_upper = operator.le if interval[1] == "]" else operator.lt 81 | base_version = _get_base_version(cr) 82 | 83 | return lambda inh: op_lower(inh.born, base_version) and (inh.dead is None or op_upper(base_version, inh.dead)) 84 | 85 | 86 | def for_each_inherit(cr, model, skip=(), interval="[)"): 87 | if skip == "*": 88 | return 89 | cmp_ = _version_comparator(cr, interval) 90 | for inh in _get_inheritance_data(cr).get(model, []): 91 | if inh.model in skip: 92 | continue 93 | if cmp_(inh): 94 | yield inh 95 | 96 | 97 | def direct_inherit_parents(cr, model, skip=(), interval="[)"): 98 | """Yield the *direct* inherits parents.""" 99 | if skip == "*": 100 | return 101 | skip = set(skip) 102 | cmp_ = _version_comparator(cr, interval) 103 | for parent, inhs in _get_inheritance_data(cr).items(): 104 | if parent in skip: 105 | continue 106 | for inh in inhs: 107 | if inh.model == model and cmp_(inh): 108 | yield parent, inh 109 | skip.add(parent) 110 | 111 | 112 | def inherit_parents(cr, model, skip=(), interval="[)"): 113 | """Recursively yield all inherit parents model names.""" 
114 | if skip == "*": 115 | return 116 | skip = set(skip) 117 | for parent, _inh in direct_inherit_parents(cr, model, skip=skip, interval=interval): 118 | if parent in skip: 119 | continue 120 | yield parent 121 | skip.add(parent) 122 | for grand_parent in inherit_parents(cr, parent, skip=skip, interval=interval): 123 | yield grand_parent 124 | -------------------------------------------------------------------------------- /src/util/jinja_to_qweb.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import functools 3 | import html 4 | import logging 5 | import re 6 | 7 | import babel 8 | import lxml 9 | from dateutil import relativedelta 10 | from jinja2.sandbox import SandboxedEnvironment 11 | from markupsafe import Markup 12 | from werkzeug import urls 13 | 14 | from odoo import tools 15 | from odoo.tools import is_html_empty, mute_logger, safe_eval 16 | 17 | from .helpers import _validate_table, model_of_table 18 | from .orm import env as get_env 19 | from .pg import named_cursor, table_exists 20 | from .report import add_to_migration_reports, html_escape 21 | 22 | _logger = logging.getLogger(__name__) 23 | 24 | REMOVE_SAFE_REGEX = re.compile(r"\s*\|\s*safe\s*", re.IGNORECASE) 25 | 26 | JINJA_EXPRESSION = r""" 27 | \$\{ 28 | (?=(?P(?: 29 | [^}\'\"\\] 30 | |' 31 | (?=(?P 32 | (?:[^'\\]|\\.)* 33 | ))(?P=singlequote)'? 34 | |\" 35 | (?=(?P 36 | (?:[^\"\\]|\\.)* 37 | ))(?P=doublequote)\"? 38 | | 39 | (?:\\.) 40 | )*))(?P=insidebracket) 41 | } 42 | """ 43 | 44 | JINJA_EXPRESSION_REGEX = re.compile(JINJA_EXPRESSION, flags=re.X | re.DOTALL) 45 | 46 | JINJA_REGEX = re.compile( 47 | rf""" 48 | (?=(?P 49 | (?: 50 | (?:\\\\) 51 | |(?:\\\$) 52 | |(?:(?!\$\{{).) 53 | )* 54 | ))(?P=string) 55 | (?:{JINJA_EXPRESSION})? 
56 | """, 57 | re.X | re.DOTALL, 58 | ) 59 | 60 | templates_to_check = {} 61 | 62 | 63 | def _remove_safe(expression): 64 | return re.sub(REMOVE_SAFE_REGEX, " ", expression).strip() 65 | 66 | 67 | def _transform_to_t_out(expression): 68 | return str(Markup('').format(_remove_safe(expression))) 69 | 70 | 71 | def convert_jinja_to_inline(string): 72 | result = [] 73 | for element in re.finditer(JINJA_REGEX, string): 74 | static_string = element.group("string") 75 | expression = element.group("insidebracket") 76 | if not static_string and not expression: 77 | continue 78 | 79 | if static_string: 80 | result.append(static_string) 81 | if expression: 82 | result.append("{{ %s }}" % (_remove_safe(expression),)) 83 | return "".join(result) 84 | 85 | 86 | def _convert_jinja_to_t_out_text(node): 87 | comment = isinstance(node, lxml.etree._Comment) 88 | last_node = None 89 | 90 | index = 0 91 | for element in re.finditer(JINJA_REGEX, node.text): 92 | static_string = element.group("string") 93 | expression = element.group("insidebracket") 94 | if not static_string and not expression: 95 | continue 96 | 97 | if last_node is None: 98 | node.text = static_string 99 | last_node = node 100 | elif comment and static_string: 101 | node.text += static_string 102 | elif static_string: 103 | last_node.tail = static_string 104 | 105 | if expression: 106 | if comment: 107 | node.text += _transform_to_t_out(expression) 108 | else: 109 | new_node = lxml.html.fragment_fromstring(_transform_to_t_out(expression)) 110 | node.insert(index, new_node) 111 | index += 1 112 | last_node = new_node 113 | 114 | 115 | def _convert_jinja_to_t_out_tail(node): 116 | last_node = node 117 | 118 | for element in re.finditer(JINJA_REGEX, node.tail): 119 | static_string = element.group("string") 120 | expression = element.group("insidebracket") 121 | if not static_string and not expression: 122 | continue 123 | 124 | last_node.tail = "" 125 | if expression: 126 | node = 
lxml.html.fragment_fromstring(_transform_to_t_out(expression)) 127 | last_node.addnext(node) 128 | if static_string: 129 | last_node.tail = static_string 130 | if expression: 131 | last_node = node 132 | 133 | 134 | def convert_jinja_to_qweb(string): 135 | string = re.sub(r"""^<\?xml version=("|')1\.0\1\?>\s*""", "", string, flags=re.M) 136 | # Create a parent in case there is multiples root nodes 137 | element = lxml.html.fragment_fromstring(string, create_parent="div") 138 | for el in element.getiterator(): 139 | if el.text: 140 | _convert_jinja_to_t_out_text(el) 141 | if el.tail: 142 | _convert_jinja_to_t_out_tail(el) 143 | for key, value in el.attrib.items(): 144 | if re.search(JINJA_EXPRESSION_REGEX, value): 145 | del el.attrib[key] 146 | el.set("t-attf-" + key, convert_jinja_to_inline(value)) 147 | result = lxml.html.tostring(element, encoding="unicode") 148 | # Remove the technically created parent div, otherwise the first jinja 149 | # instruction will not match a jinja regex. 150 | result = result[5:-6] 151 | 152 | for func in [ 153 | _replace_set, 154 | _replace_for, 155 | _replace_endfor, 156 | _replace_if, 157 | _replace_elif, 158 | _replace_else, 159 | _replace_endif, 160 | ]: 161 | result = func(result) 162 | 163 | # Make sure the html is correct 164 | result = lxml.etree.tostring(lxml.html.fragment_fromstring(result, create_parent="div"), encoding="unicode") 165 | 166 | # Remove the parent div 167 | return result[5:-6] 168 | 169 | 170 | def _get_set(matchobj): 171 | return Markup("""{}""").format( 172 | matchobj.group(1), 173 | html.unescape(matchobj.group(2).strip()), 174 | html.unescape(matchobj.group(3).strip()), 175 | ) 176 | 177 | 178 | def _replace_set(string): 179 | reg = re.compile(r"^(\s*)%\s*set([^=]*)=(.*)", re.IGNORECASE | re.MULTILINE) 180 | return reg.sub(_get_set, string) 181 | 182 | 183 | def _get_for(matchobj): 184 | return Markup("""{}""").format( 185 | matchobj.group(1), html.unescape(matchobj.group(3).strip()), 
html.unescape(matchobj.group(2).strip()) 186 | ) 187 | 188 | 189 | def _replace_for(string): 190 | reg = re.compile(r"^(\s*)%\s*for((?:(?! in ).)*?) in (.*?):?\s*$", re.IGNORECASE | re.MULTILINE) 191 | return reg.sub(_get_for, string) 192 | 193 | 194 | def _replace_endfor(string): 195 | reg = re.compile(r"^(\s*)%\s*endfor.*", re.IGNORECASE | re.MULTILINE) 196 | return reg.sub(r"\1", string) 197 | 198 | 199 | def _get_if(matchobj): 200 | return Markup("""{}""").format(matchobj.group(1), html.unescape(matchobj.group(2).strip())) 201 | 202 | 203 | def _replace_if(string): 204 | reg = re.compile(r"^(\s*)%\s*if(.*?):?\s*$", re.IGNORECASE | re.MULTILINE) 205 | return reg.sub(_get_if, string) 206 | 207 | 208 | def _get_elif(matchobj): 209 | return Markup("""{}\n{}""").format( 210 | matchobj.group(1), matchobj.group(1), html.unescape(matchobj.group(2).strip()) 211 | ) 212 | 213 | 214 | def _replace_elif(string): 215 | reg = re.compile(r"^(\s*)%\s*elif(.*?):?\s*$", re.IGNORECASE | re.MULTILINE) 216 | return reg.sub(_get_elif, string) 217 | 218 | 219 | def _replace_else(string): 220 | reg = re.compile(r"^(\s*)%\s*else.*", re.IGNORECASE | re.MULTILINE) 221 | return reg.sub(r'\1\n\1', string) 222 | 223 | 224 | def _replace_endif(string): 225 | reg = re.compile(r"^(\s*)%\s*endif.*", re.IGNORECASE | re.MULTILINE) 226 | return reg.sub(r"\1", string) 227 | 228 | 229 | def upgrade_jinja_fields( 230 | cr, 231 | table_name, 232 | inline_template_fields, 233 | qweb_fields, 234 | name_field="name", 235 | model_name=None, 236 | table_model_name="model", 237 | fetch_model_name=False, 238 | ): 239 | _validate_table(table_name) 240 | all_field = inline_template_fields + qweb_fields 241 | if not model_name: 242 | all_field = [table_model_name] + all_field 243 | sql_fields = ", ".join(all_field) 244 | 245 | sql_where_inline_fields = [field + " like '%${%'" for field in inline_template_fields] 246 | sql_where_qweb_fields = [field + r"~ '(\$\{|%\s*(if|for))'" for field in qweb_fields] 247 | 
sql_where_fields = " OR ".join(sql_where_inline_fields + sql_where_qweb_fields) 248 | 249 | templates_to_check[table_name] = [] 250 | model = model_of_table(cr, table_name) 251 | 252 | cr.commit() # ease the processing for PG 253 | cr.execute( 254 | f""" 255 | SELECT id, {name_field}, {sql_fields} 256 | FROM {table_name} 257 | WHERE {sql_where_fields} 258 | """ 259 | ) 260 | for data in cr.dictfetchall(): 261 | _logger.info("process %s(%s) %s", table_name, data["id"], data[name_field]) 262 | 263 | # convert the fields 264 | templates_converted = {} 265 | 266 | for field in inline_template_fields: 267 | _logger.info(" `- convert inline field %s", field) 268 | template = data[field] 269 | templates_converted[field] = convert_jinja_to_inline(template) if template else "" 270 | 271 | for field in qweb_fields: 272 | _logger.info(" `- convert qweb field %s", field) 273 | template = data[field] 274 | templates_converted[field] = convert_jinja_to_qweb(template) if template else "" 275 | 276 | fields = [f for f in (inline_template_fields + qweb_fields) if data[f] != templates_converted[f]] 277 | if fields: 278 | sql_fields = ",".join([field + "=%s" for field in fields]) 279 | field_values = [templates_converted[field] for field in fields] 280 | 281 | cr.execute( 282 | f""" 283 | UPDATE {table_name} 284 | SET {sql_fields} 285 | WHERE id = %s 286 | """, 287 | field_values + [data["id"]], 288 | ) 289 | # prepare data to check later 290 | 291 | # only for mailing.mailing 292 | if fetch_model_name: 293 | cr.execute( 294 | """ 295 | SELECT model FROM ir_model WHERE id=%s 296 | """, 297 | [data[table_model_name]], 298 | ) 299 | model_name = cr.fetchone()[0] 300 | else: 301 | model_name = model_name or data[table_model_name] 302 | 303 | templates_to_check[table_name].append( 304 | ( 305 | data, 306 | name_field, 307 | model_name, 308 | inline_template_fields, 309 | qweb_fields, 310 | templates_converted, 311 | ) 312 | ) 313 | 314 | if not table_exists(cr, "ir_translation"): 315 | 
return 316 | 317 | _logger.info("process translations for model %s", model) 318 | # NOTE: Not all translations may be updated. 319 | # Different jinja values can give the same qweb value. 320 | # `${object.company_id.name|safe}` and `${object.company_id.name}` both give `{{ object.company_id.name }}` 321 | # Which will violate the unique constraint. 322 | # In this case, we just ignore the update and remove the duplicated row. 323 | 324 | inline_entries = [f"{model},{name}" for name in inline_template_fields] 325 | if inline_entries: 326 | cr.execute( 327 | """ 328 | SELECT id, src, value 329 | FROM ir_translation 330 | WHERE name IN %s 331 | AND (src LIKE '%%${%%' OR value LIKE '%%${%%') 332 | """, 333 | [tuple(inline_entries)], 334 | ) 335 | for tid, src, value in cr.fetchall(): 336 | converted_src = convert_jinja_to_inline(src) if src else "" 337 | converted_value = convert_jinja_to_inline(value) if value else "" 338 | cr.execute( 339 | """ 340 | DELETE FROM ir_translation orig 341 | USING ir_translation dup 342 | WHERE orig.id = %s 343 | AND dup.id != orig.id 344 | -- "ir_translation_unique" UNIQUE, btree (type, name, lang, res_id, md5(src)) 345 | AND dup.type = orig.type 346 | AND dup.name = orig.name 347 | AND dup.lang = orig.lang 348 | AND dup.res_id = orig.res_id 349 | AND dup.src = %s 350 | RETURNING orig.id 351 | """, 352 | [tid, converted_src], 353 | ) 354 | if not cr.rowcount: 355 | # no duplicate found, update the translation 356 | cr.execute( 357 | "UPDATE ir_translation SET src=%s, value=%s WHERE id=%s", 358 | [converted_src, converted_value, tid], 359 | ) 360 | 361 | cr.commit() # commit changes for the named cursor below 362 | qweb_entries = [f"{model},{name}" for name in qweb_fields] 363 | if qweb_entries: 364 | ncr = named_cursor(cr, 1000) 365 | ncr.execute( 366 | r""" 367 | SELECT id, src, value 368 | FROM ir_translation 369 | WHERE name IN %s 370 | AND (src ~ '(\$\{|%%\s*(if|for))' OR value ~ '(\$\{|%%\s*(if|for))') 371 | """, 372 |
[tuple(qweb_entries)], 373 | ) 374 | 375 | for tid, src, value in ncr: 376 | converted_src = convert_jinja_to_qweb(src) if src else "" 377 | converted_value = convert_jinja_to_qweb(value) if value else "" 378 | cr.execute( 379 | """ 380 | DELETE FROM ir_translation orig 381 | USING ir_translation dup 382 | WHERE orig.id = %s 383 | AND dup.id != orig.id 384 | -- "ir_translation_unique" UNIQUE, btree (type, name, lang, res_id, md5(src)) 385 | AND dup.type = orig.type 386 | AND dup.name = orig.name 387 | AND dup.lang = orig.lang 388 | AND dup.res_id = orig.res_id 389 | AND dup.src = %s 390 | RETURNING orig.id 391 | """, 392 | [tid, converted_src], 393 | ) 394 | if not cr.rowcount: 395 | # no duplicate found, update the translation 396 | cr.execute( 397 | "UPDATE ir_translation SET src=%s, value=%s WHERE id=%s", 398 | [converted_src, converted_value, tid], 399 | ) 400 | ncr.close() 401 | 402 | 403 | def verify_upgraded_jinja_fields(cr): 404 | env = get_env(cr) 405 | for table_name, template_data in templates_to_check.items(): 406 | field_errors = {} 407 | missing_records = [] 408 | for ( 409 | data, 410 | name_field, 411 | model_name, 412 | inline_template_fields, 413 | qweb_fields, 414 | templates_converted, 415 | ) in template_data: 416 | if model_name not in env: 417 | # custom model not loaded yet. 
Ignore 418 | continue 419 | model = env[model_name] 420 | record = model.with_context({"active_test": False}).search([], limit=1, order="id") 421 | 422 | key = (data["id"], data[name_field]) 423 | field_errors[key] = [] 424 | 425 | if not record: 426 | missing_records.append(key) 427 | 428 | for field in inline_template_fields: 429 | if not data[field]: 430 | continue 431 | is_valid = is_converted_template_valid( 432 | env, data[field], templates_converted[field], model_name, record.id, engine="inline_template" 433 | ) 434 | if not is_valid: 435 | field_errors[key].append(field) 436 | 437 | for field in qweb_fields: 438 | is_valid = is_converted_template_valid( 439 | env, data[field], templates_converted[field], model_name, record.id, engine="qweb" 440 | ) 441 | if not is_valid: 442 | field_errors[key].append(field) 443 | 444 | if missing_records: 445 | list_items = "\n".join( 446 | f'
  • id: "{id}", {html_escape(name_field)}: "{html_escape(name)}"
  • ' 447 | for id, name in missing_records 448 | ) 449 | add_to_migration_reports( 450 | f""" 451 |
    452 | 453 | Some of the records for the table {html_escape(table_name)} could not be tested because there is no 454 | record in the database. 455 | The {html_escape(table_name)} records are: 456 | 457 |
      {list_items}
    458 |
    459 | """, 460 | "Jinja upgrade", 461 | format="html", 462 | ) 463 | field_errors = dict(filter(lambda x: bool(x[1]), field_errors.items())) 464 | 465 | if field_errors: 466 | string = [] 467 | for (id, name), fields in field_errors.items(): 468 | fields_string = "\n".join(f"
  • {html_escape(field)}
  • " for field in fields) 469 | string.append( 470 | f"""
  • id: {id}, {html_escape(name_field)}: {html_escape(name)}, 471 | fields:
      {fields_string}
  • """ 472 | ) 473 | 474 | string = "\n".join(string) 475 | add_to_migration_reports( 476 | f""" 477 |
    478 | 479 | Some of the fields of the table {html_escape(table_name)} does not render the same value 480 | before and after being converted. 481 | The mail.template are: 482 | 483 |
      {string}
    484 |
    485 | """, 486 | "Jinja upgrade", 487 | format="html", 488 | ) 489 | 490 | 491 | def is_converted_template_valid(env, template_before, template_after, model_name, record_id, engine="inline_template"): 492 | render_before = None 493 | with contextlib.suppress(Exception): 494 | render_before = _render_template_jinja(env, template_before, model_name, record_id) 495 | 496 | render_after = None 497 | if render_before is not None: 498 | try: 499 | with mute_logger("odoo.addons.mail.models.mail_render_mixin"): 500 | render_after = env["mail.render.mixin"]._render_template( 501 | template_after, model_name, [record_id], engine=engine 502 | )[record_id] 503 | except Exception: 504 | pass 505 | 506 | # post process qweb render to remove comments from the rendered jinja in 507 | # order to avoid false negative because qweb never render comments. 508 | if render_before and render_after and engine == "qweb": 509 | element_before = lxml.html.fragment_fromstring(render_before, create_parent="div") 510 | for comment_element in element_before.xpath("//comment()"): 511 | comment_element.getparent().remove(comment_element) 512 | render_before = lxml.html.tostring(element_before, encoding="unicode") 513 | render_after = lxml.html.tostring( 514 | lxml.html.fragment_fromstring(render_after, create_parent="div"), encoding="unicode" 515 | ) 516 | 517 | return render_before is not None and render_before == render_after 518 | 519 | 520 | # jinja render 521 | 522 | 523 | def format_date(env, date, pattern=False, lang_code=False): 524 | try: 525 | return tools.format_date(env, date, date_format=pattern, lang_code=lang_code) 526 | except babel.core.UnknownLocaleError: 527 | return date 528 | 529 | 530 | def format_datetime(env, dt, tz=False, dt_format="medium", lang_code=False): 531 | try: 532 | return tools.format_datetime(env, dt, tz=tz, dt_format=dt_format, lang_code=lang_code) 533 | except babel.core.UnknownLocaleError: 534 | return dt 535 | 536 | 537 | def format_time(env, time, 
tz=False, time_format="medium", lang_code=False): 538 | try: 539 | return tools.format_time(env, time, tz=tz, time_format=time_format, lang_code=lang_code) 540 | except babel.core.UnknownLocaleError: 541 | return time 542 | 543 | 544 | def relativedelta_proxy(*args, **kwargs): 545 | # dateutil.relativedelta is an old-style class and cannot be directly 546 | # instantiated within a jinja2 expression, so a lambda "proxy" is 547 | # is needed, apparently 548 | return relativedelta.relativedelta(*args, **kwargs) 549 | 550 | 551 | template_env_globals = { 552 | "str": str, 553 | "quote": urls.url_quote, 554 | "urlencode": urls.url_encode, 555 | "datetime": safe_eval.datetime, 556 | "len": len, 557 | "abs": abs, 558 | "min": min, 559 | "max": max, 560 | "sum": sum, 561 | "filter": filter, 562 | "reduce": functools.reduce, 563 | "map": map, 564 | "relativedelta": relativedelta_proxy, 565 | "round": round, 566 | } 567 | 568 | jinja_template_env = SandboxedEnvironment( 569 | block_start_string="<%", 570 | block_end_string="%>", 571 | variable_start_string="${", 572 | variable_end_string="}", 573 | comment_start_string="<%doc>", 574 | comment_end_string="", 575 | line_statement_prefix="%", 576 | line_comment_prefix="##", 577 | trim_blocks=True, # do not output newline after blocks 578 | autoescape=True, # XML/HTML automatic escaping 579 | ) 580 | 581 | jinja_template_env.globals.update(template_env_globals) 582 | 583 | 584 | def _render_template_jinja(env, template_txt, model, res_id): 585 | if not template_txt: 586 | return "" 587 | 588 | template = jinja_template_env.from_string(tools.ustr(template_txt)) 589 | 590 | record = env[model].browse([res_id]) 591 | variables = { 592 | "format_date": functools.partial(format_date, env), 593 | "format_datetime": functools.partial(format_datetime, env), 594 | "format_time": functools.partial(format_time, env), 595 | "format_amount": functools.partial(tools.format_amount, env), 596 | "format_duration": tools.format_duration, 597 | 
"user": env.user, 598 | "ctx": {}, 599 | "is_html_empty": is_html_empty, 600 | "object": record, 601 | } 602 | 603 | safe_eval.check_values(variables) 604 | render_result = template.render(variables) 605 | if render_result == "False": 606 | render_result = "" 607 | return render_result 608 | -------------------------------------------------------------------------------- /src/util/json.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | __all__ = ["dumps", "load", "loads"] 4 | 5 | try: 6 | import orjson 7 | except ImportError: 8 | import json 9 | 10 | def dumps(value, sort_keys=False): 11 | return json.dumps(value, sort_keys=sort_keys, separators=(",", ":")) 12 | 13 | def loads(value): 14 | return json.loads(value) 15 | 16 | def load(fp): 17 | return json.load(fp) 18 | else: 19 | 20 | def dumps(value, sort_keys=False): 21 | if isinstance(value, tuple): 22 | # downcast namedtuples 23 | value = tuple(value) 24 | 25 | option = orjson.OPT_NON_STR_KEYS 26 | if sort_keys: 27 | option |= orjson.OPT_SORT_KEYS 28 | return orjson.dumps(value, option=option).decode() 29 | 30 | def loads(value): 31 | return orjson.loads(value) 32 | 33 | def load(fp): 34 | return orjson.loads(fp.read()) 35 | -------------------------------------------------------------------------------- /src/util/logger.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import atexit 3 | import logging 4 | import os 5 | 6 | from .misc import on_CI 7 | 8 | _REGISTERED = False 9 | 10 | _logger = logging.getLogger(__name__.rpartition(".")[0]) 11 | 12 | 13 | class CriticalHandler(logging.Handler): 14 | def __init__(self): 15 | super(CriticalHandler, self).__init__(logging.CRITICAL) 16 | 17 | def emit(self, record): 18 | global _REGISTERED # noqa: PLW0603 19 | if _REGISTERED: 20 | return 21 | 22 | # force exit with status_code=1 if any critical log is emitted during
class CriticalHandler(logging.Handler):
    """Logging handler that makes the upgrade process fail on CRITICAL records.

    The first CRITICAL record emitted schedules `os._exit(1)` via `atexit`,
    forcing the process exit code to 1 at interpreter shutdown.
    """

    def __init__(self):
        # only interested in CRITICAL records
        super(CriticalHandler, self).__init__(logging.CRITICAL)

    def emit(self, record):
        global _REGISTERED  # noqa: PLW0603
        if not _REGISTERED:
            # force exit with status_code=1 if any critical log is emitted
            # during the upgrade; register the exit hook only once
            atexit.register(os._exit, 1)
            _REGISTERED = True
    6 | 7 |
    8 | 9 | Meet Odoo . now ! 10 |
    11 | 12 |
    13 |
    14 |
    15 |
    16 |

    Want to know more? Check out the full release note.

    17 |

    Want to know more? Check out the full release note.

    18 |

    19 |
    20 | -------------------------------------------------------------------------------- /src/util/report-migration.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 |

    Congratulations, you have just upgraded to Odoo

    4 |

    Here are changes that may impact day to day flows in this new version.

    5 |

    Want to know more? Check out the full functional release note.

    6 |

    Want to know more? Check out the full functional release note.

    7 |

    8 |
    9 | 10 |

    11 |
      12 | 13 | 17 | 18 |
    • 19 | 20 | 21 | 22 |
        23 | 24 | 25 |
      • 26 | 27 | 28 | &nbsp; 29 | 30 | &nbsp; 31 | 32 | 33 | 34 | 35 | 36 |
      • 37 |
        38 |
      39 |
    • 40 |
      41 | 42 |
    • 43 | During the upgrade some fields have been removed. The records below have been automatically corrected. 44 |
        45 | 46 | 47 | 48 |
      49 |
    • 50 |
      51 | 52 | 53 |
    • 54 |
      55 |
      56 |
    57 |
    58 |
    59 | -------------------------------------------------------------------------------- /src/util/report.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import logging 3 | import os 4 | import re 5 | import sys 6 | from textwrap import dedent 7 | 8 | import lxml 9 | from docutils.core import publish_string 10 | 11 | from .helpers import _validate_model 12 | from .misc import parse_version 13 | 14 | # python3 shims 15 | try: 16 | basestring # noqa: B018 17 | except NameError: 18 | basestring = unicode = str 19 | 20 | try: 21 | from markupsafe import Markup, escape 22 | 23 | from odoo.tools.misc import html_escape 24 | 25 | if html_escape is not escape: 26 | Markup = None 27 | except ImportError: 28 | Markup = None 29 | 30 | try: 31 | try: 32 | from odoo.api import SUPERUSER_ID 33 | except ImportError: 34 | from odoo import SUPERUSER_ID 35 | from odoo import release 36 | from odoo.tools.mail import html_sanitize 37 | except ImportError: 38 | from openerp import SUPERUSER_ID, release 39 | from openerp.tools.mail import html_sanitize 40 | 41 | 42 | if sys.version_info > (3,): 43 | from odoo.tools import html_escape 44 | else: 45 | # In python2, `html_escape` always returns a byte-string with non-ascii characters replaced 46 | # by their html entities. 
47 | 48 | import werkzeug.utils 49 | 50 | # Avoid DeprecationWarning while still remaining compatible with werkzeug pre-0.9 51 | if parse_version(getattr(werkzeug, "__version__", "0.0")) < parse_version("0.9.0"): 52 | 53 | def html_escape(text): 54 | return werkzeug.utils.escape(text, quote=True).encode("ascii", "xmlcharrefreplace") 55 | 56 | else: 57 | 58 | def html_escape(text): 59 | return werkzeug.utils.escape(text).encode("ascii", "xmlcharrefreplace") 60 | 61 | 62 | try: 63 | from odoo.addons.base.models.ir_module import MyWriter # > 11.0 64 | except ImportError: 65 | try: 66 | from odoo.addons.base.module.module import MyWriter 67 | except ImportError: 68 | from openerp.addons.base.module.module import MyWriter 69 | 70 | from .exceptions import MigrationError 71 | from .misc import has_enterprise, split_osenv, version_between, version_gte 72 | from .orm import env, get_admin_channel, guess_admin_id 73 | 74 | migration_reports = {} 75 | _logger = logging.getLogger(__name__) 76 | 77 | 78 | _ENV_AM = set(split_osenv("UPG_ANNOUNCE_MEDIA", default="discuss")) 79 | ANNOUNCE_MEDIA = _ENV_AM & {"", "discuss", "logger"} 80 | if _ENV_AM - ANNOUNCE_MEDIA: 81 | raise ValueError( 82 | "Invalid value for the environment variable `UPG_ANNOUNCE_MEDIA`: {!r}. 
def add_to_migration_reports(message, category="Other", format="text"):
    """Queue `message` under `category` for the final upgrade report.

    :param str message: the message; "md" and "rst" sources are converted to
        HTML before being stored
    :param str category: report section the message is appended to
    :param str format: one of "text", "html", "md", "rst"
    """
    assert format in {"text", "html", "md", "rst"}
    if format == "md":
        message = md2html(dedent(message))
    elif format == "rst":
        message = rst2html(message)

    raw = False
    if format != "text":
        if Markup:
            message = Markup(message)
        else:
            raw = True

    migration_reports.setdefault(category, []).append((message, raw))

    # keep an eye on the total size of the accumulated report
    total_length = sum(len(msg) for msgs in migration_reports.values() for msg, _ in msgs)
    total_length += sum(map(len, migration_reports))
    if total_length > 1000000:
        _logger.warning("Upgrade report is growing suspiciously long: %s characters so far.", total_length)
def announce_migration_report(cr):
    """Render report-migration.xml with the accumulated messages and post it to the admins."""
    filepath = os.path.join(os.path.dirname(__file__), "report-migration.xml")
    with open(filepath, "rb") as fp:
        contents = fp.read()
    if Markup:
        # when markupsafe escaping is in use, render through t-out instead of t-raw
        contents = contents.replace(b"t-raw", b"t-out")
    report = lxml.etree.fromstring(contents)

    e = env(cr)
    major_version, minor_version = re.findall(r"\d+", release.major_version)
    values = {
        "action_view_id": e.ref("base.action_ui_view").id,
        "version": release.major_version,
        "major_version": major_version,
        "minor_version": minor_version,
        "messages": migration_reports,
        "get_anchor_link_to_record": get_anchor_link_to_record,
    }
    _logger.info(migration_reports)

    qweb = e["ir.qweb"]
    render = qweb.render if hasattr(qweb, "render") else qweb._render
    _announce_to_db(cr, render(report, values=values))

    # To avoid posting multiple time the same messages in case this method is called multiple times.
    migration_reports.clear()
def rst2html(rst):
    """Render an RST fragment into sanitized HTML."""
    overrides = {
        "embed_stylesheet": False,
        "doctitle_xform": False,
        "output_encoding": "unicode",
        "xml_declaration": False,
    }
    html = publish_string(source=dedent(rst), settings_overrides=overrides, writer=MyWriter())
    return html_sanitize(html, silent=False)


def md2html(md):
    """Render a Markdown fragment into HTML."""
    import markdown

    if hasattr(markdown, "__version_info__"):
        mdversion = markdown.__version_info__
    else:
        mdversion = markdown.version_info

    extensions = [
        "markdown.extensions.nl2br",
        "markdown.extensions.sane_lists",
    ]
    # smart_strong was merged into the core in markdown 3.x
    if mdversion[0] < 3:
        extensions.append("markdown.extensions.smart_strong")

    return markdown.markdown(md, extensions=extensions)

    Odoo has been upgraded to version {version}.

    207 |

    What's new in this upgrade?

    208 | """ 209 | 210 | _DEFAULT_FOOTER = "

    Enjoy the new Odoo Online!

    " 211 | 212 | _DEFAULT_RECIPIENT = "mail.%s_all_employees" % ["group", "channel"][version_gte("9.0")] 213 | 214 | 215 | def announce( 216 | cr, 217 | version, 218 | msg, 219 | format="rst", 220 | recipient=_DEFAULT_RECIPIENT, 221 | header=_DEFAULT_HEADER, 222 | footer=_DEFAULT_FOOTER, 223 | pluses_for_enterprise=None, 224 | ): 225 | if not ANNOUNCE_MEDIA: 226 | return 227 | if pluses_for_enterprise is None: 228 | # default value depend on format and version 229 | major = version[0] 230 | pluses_for_enterprise = (major == "s" or int(major) >= 9) and format == "md" 231 | 232 | if pluses_for_enterprise: 233 | plus_re = r"^(\s*)\+ (.+)\n" 234 | replacement = r"\1- \2\n" if has_enterprise() else "" 235 | msg = re.sub(plus_re, replacement, msg, flags=re.M) 236 | 237 | if format == "rst": 238 | msg = rst2html(msg) 239 | elif format == "md": 240 | msg = md2html(msg) 241 | 242 | message = ((header or "") + msg + (footer or "")).format(version=version) 243 | if "logger" in ANNOUNCE_MEDIA: 244 | _logger.info(message) 245 | 246 | if "discuss" not in ANNOUNCE_MEDIA: 247 | return 248 | 249 | # do not notify early, in case the migration fails halfway through 250 | ctx = {"mail_notify_force_send": False, "mail_notify_author": True} 251 | 252 | uid = guess_admin_id(cr) 253 | try: 254 | registry = env(cr) 255 | user = registry["res.users"].browse([uid])[0].with_context(ctx) 256 | 257 | def ref(xid): 258 | return registry.ref(xid).with_context(ctx) 259 | 260 | except MigrationError: 261 | try: 262 | from openerp.modules.registry import RegistryManager 263 | except ImportError: 264 | from openerp.modules.registry import Registry as RegistryManager 265 | registry = RegistryManager.get(cr.dbname) 266 | user = registry["res.users"].browse(cr, SUPERUSER_ID, uid, context=ctx) 267 | 268 | def ref(xid): 269 | rmod, _, rxid = recipient.partition(".") 270 | return registry["ir.model.data"].get_object(cr, SUPERUSER_ID, rmod, rxid, context=ctx) 271 | 272 | # default recipient 273 | poster = 
user.message_post if hasattr(user, "message_post") else user.partner_id.message_post 274 | 275 | if recipient: 276 | try: 277 | if isinstance(recipient, str): # noqa: SIM108 278 | recipient = ref(recipient) 279 | else: 280 | recipient = recipient.with_context(**ctx) 281 | poster = recipient.message_post 282 | except (ValueError, AttributeError): 283 | # Cannot find record, post the message on the wall of the admin 284 | pass 285 | 286 | type_field = ["type", "message_type"][version_gte("9.0")] 287 | # From 12.0, system notificatications are sent by email, 288 | # and do not increment the upper right notification counter. 289 | # While comments, in a mail.channel, do. 290 | # We want the notification counter to appear for announcements, so we force the comment type from 12.0. 291 | type_value = ["notification", "comment"][version_gte("12.0")] 292 | subtype_key = ["subtype", "subtype_xmlid"][version_gte("saas~13.1")] 293 | 294 | kw = {type_field: type_value, subtype_key: "mail.mt_comment"} 295 | 296 | try: 297 | poster(body=message, partner_ids=[user.partner_id.id], **kw) 298 | except Exception: 299 | _logger.warning("Cannot announce message", exc_info=True) 300 | else: 301 | # Chat window with the report will be open post-upgrade for the admin user 302 | if version_between("9.0", "saas~18.1") and user.partner_id and recipient: 303 | channel_member_model = ( 304 | "discuss.channel.member" 305 | if version_gte("saas~16.3") 306 | else "mail.channel.member" 307 | if version_gte("16.0") 308 | else "mail.channel.partner" 309 | ) 310 | domain = [("partner_id", "=", user.partner_id.id), ("channel_id", "=", recipient.id)] 311 | try: 312 | registry[channel_member_model].search(domain)[:1].with_context(ctx).fold_state = "open" 313 | except Exception: 314 | _logger.warning("Cannot unfold chat window", exc_info=True) 315 | 316 | 317 | def get_anchor_link_to_record(model, id, name, action_id=None): 318 | _validate_model(model) 319 | if not name: 320 | name = 
def get_anchor_link_to_record(model, id, name, action_id=None):
    """Return an HTML anchor opening the form view of the given record.

    :param str model: model name (validated against existing models)
    :param int id: record id
    :param str name: link label; defaults to "model(id=...)" when falsy
    :param int action_id: optional action used to open the record
    :return: the anchor markup (a `Markup` when markupsafe is available)
    """
    _validate_model(model)
    if not name:
        name = "{}(id={})".format(model, id)

    if version_gte("saas~17.2"):
        part1 = "action-{}".format(action_id) if action_id else model
        url = "/odoo/{}/{}?debug=1".format(part1, id)
    else:
        url = "/web?debug=1#view_type=form&model={}&action={}&id={}".format(model, action_id or "", id)

    # NOTE(review): the anchor template was garbled in this copy of the file
    # ('{}'.format(url, ...) only formatted the url); reconstructed as a
    # standard new-tab link. Only the label needs escaping: the url is built
    # from the validated model name and numeric ids.
    anchor_tag = '<a href="{}" target="_blank">{}</a>'.format(url, html_escape(name))
    if Markup:
        anchor_tag = Markup(anchor_tag)
    return anchor_tag
def add_snippet_names(cr, table, column, snippets, select_query):
    """
    Execute `select_query` then, for each snippet found in the arch, set the matching data-snippet attribute on the right element.

    :param str table: the table we are working on
    :param str column: the column we are working on
    :param list snippets: list of all snippets to migrate
    :param str select_query: a query that when executed will return (id, list of snippets contained in the arch, arch)
    """
    _logger.info("Add snippet names on %s.%s", table, column)
    cr.execute(select_query)

    rows = log_progress(cr.fetchall(), _logger, qualifier="rows", size=cr.rowcount, log_hundred_percent=True)

    def quote(ident):
        return quote_ident(ident, cr._cnx)

    for res_id, regex_matches, arch in rows:
        matched_classes = {match[0] for match in regex_matches}
        # drop \r, otherwise the html parser below transforms it into &#13;
        body = html.fromstring(arch.replace("\r", ""), parser=utf8_parser)
        changed = False
        for snippet in snippets:
            if snippet.klass in matched_classes:
                for el in body.xpath(snippet.selector):
                    el.attrib["data-snippet"] = snippet.name
                    changed = True
        if changed:
            new_arch = etree.tostring(body, encoding="unicode")
            cr.execute(f"UPDATE {quote(table)} SET {quote(column)} = %s WHERE id = %s", [new_arch, res_id])
def parse_style(attr):
    """
    Convert an HTML style attribute's text into a dict mapping property names to property values.

    :param str attr: value of an HTML style attribute
    :return: dict of CSS property values per property name
    """
    # Two capture groups:
    #  - property name: word characters or hyphens, followed by a colon
    #  - value: a run of non-semicolon characters, where semicolons are
    #    allowed inside single- or double-quoted substrings (with escaped
    #    quotes supported)
    regex = r"""([\w\-]+)\s*:\s*((?:[^;\"']|'(?:[^']|(?:\\'))*'|\"(?:[^\"]|(?:\\\"))*\")+)"""
    return dict(re.findall(regex, attr))


def format_style(styles):
    """
    Convert a dict of CSS property names to property values into an HTML style attribute string.

    :param dict styles: CSS property value per property name
    :return: str HTML style attribute
    """
    style = "; ".join("%s: %s" % pair for pair in styles.items())
    if style and not style.endswith(";"):
        style += ";"
    return style
def make_pickleable_callback(callback):
    """
    Make a callable importable.

    `ProcessPoolExecutor.map` arguments need to be pickleable, and functions
    can only be pickled if they are importable. The callback's original file
    is not importable due to the dash in the filename, so the function is
    re-imported from its file under a generated, importable module name.
    """
    callback_filepath = inspect.getfile(callback)
    module_name = f"_upgrade_{uuid.uuid4().hex}"
    module = import_script(callback_filepath, name=module_name)
    sys.modules[module_name] = module
    try:
        return getattr(module, callback.__name__)
    except AttributeError:
        error_msg = (
            f"The converter callback `{callback.__name__}` is a nested function in `{callback.__module__}`.\n"
            "Move it outside the `migrate()` function to make it top-level."
        )
        raise MigrationError(error_msg) from None
class Convertor:
    """Pickleable row processor applying a converter callback to fetched columns."""

    def __init__(self, converters, callback):
        # converters: column name -> "" (plain column) or a jsonb accessor marker
        self.converters = converters
        self.callback = callback

    def __call__(self, row):
        converters = self.converters
        convert = self.callback
        res_id, *contents = row
        changes = {}
        for column, content in zip(converters.keys(), contents):
            if content and converters[column]:
                # jsonb column; convert all language values, keyed on en_US
                new_content = {}
                has_changed, new_content["en_US"] = convert(content.pop("en_US"))
                if has_changed:
                    for lang, value in content.items():
                        _, new_content[lang] = convert(value)
                new_content = Json(new_content)
            else:
                has_changed, new_content = convert(content)
            changes[column] = new_content
            if has_changed:
                # mark the row as needing an UPDATE
                changes["id"] = res_id
        return changes
def convert_html_columns(cr, table, columns, converter_callback, where_column="IS NOT NULL", extra_where="true"):
    r"""
    Convert HTML content for the given table columns.

    Matching rows are processed in chunks of bounded payload size, each chunk
    being converted in parallel worker processes.

    :param cursor cr: database cursor
    :param str table: table name
    :param list(str) columns: column names
    :param func converter_callback: conversion function that converts the HTML
        text content and returns a tuple with a boolean that indicates whether a
        change happened and the new content must be saved
    :param str where_column: filtering such as
        - "like '%abc%xyz%'"
        - "~* '\yabc.*xyz\y'"
    :param str extra_where: extra filtering on the where clause
    """
    assert "id" not in columns

    # translated (jsonb) columns are filtered and converted on their en_US value
    converters = {column: "->>'en_US'" if column_type(cr, table, column) == "jsonb" else "" for column in columns}
    select = ", ".join(f'"{column}"' for column in columns)
    where = " OR ".join(f'"{column}"{converters[column]} {where_column}' for column in columns)

    base_select_query = f"""
        SELECT id, {select}
          FROM {table}
         WHERE ({where})
           AND ({extra_where})
    """
    # one query per id range, as computed by determine_chunk_limit_ids
    split_queries = [
        (base_select_query + "\n AND id BETWEEN {} AND {}".format(*x))
        for x in determine_chunk_limit_ids(cr, table, columns, "({}) AND ({})".format(where, extra_where))
    ]

    update_sql = ", ".join(f'"{column}" = %({column})s' for column in columns)
    update_query = f"UPDATE {table} SET {update_sql} WHERE id = %(id)s"

    with ProcessPoolExecutor(max_workers=get_max_workers()) as executor:
        convert = Convertor(converters, converter_callback)
        for query in log_progress(split_queries, logger=_logger, qualifier=f"{table} updates"):
            cr.execute(query)
            # Convertor only includes an "id" key when the row actually changed
            for data in executor.map(convert, cr.fetchall(), chunksize=1000):
                if "id" in data:
                    cr.execute(update_query, data)
def convert_html_content(
    cr,
    converter_callback,
    where_column="IS NOT NULL",
    **kwargs,
):
    r"""
    Convert HTML content.

    :param cursor cr: database cursor
    :param func converter_callback: conversion function that converts the HTML
        text content and returns a tuple with a boolean that indicates whether a
        change happened and the new content must be saved
    :param str where_column: filtering such as
        - "like '%abc%xyz%'"
        - "~* '\yabc.*xyz\y'"
    :param dict kwargs: extra keyword arguments to pass to :func:`convert_html_column`
    """
    if hasattr(converter_callback, "for_html"):
        html_conv = converter_callback.for_html()
    else:
        # trust the given converter to handle HTML
        html_conv = converter_callback

    for table, columns in html_fields(cr):
        convert_html_columns(cr, table, columns, html_conv, where_column=where_column, **kwargs)

    if hasattr(converter_callback, "for_qweb"):
        qweb_conv = converter_callback.for_qweb()
    else:
        _logger.log(NEARLYWARN, "Cannot adapt converter callback %r for qweb; using it directly", converter_callback)
        qweb_conv = converter_callback

    convert_html_columns(
        cr,
        "ir_ui_view",
        ["arch_db"],
        qweb_conv,
        where_column=where_column,
        **dict(kwargs, extra_where="type = 'qweb'"),
    )
def dbuuid(cr):
    """Return the uuid of the database (the origin uuid when one is recorded)."""
    return _dbuuids(cr)[-1]


@_cached
def _dbuuids(cr):
    """Return the database uuids, any neutralization prefix stripped."""
    cr.execute(
        """
        SELECT REPLACE(value, 'upg-neuter-', '')
          FROM ir_config_parameter
         WHERE key IN ('database.uuid', 'origin.database.uuid')
         ORDER BY key
        """
    )
    return [row[0] for row in cr.fetchall()]
def dispatch_by_dbuuid(cr, version, callbacks):
    """
    Execute a migration script against specific databases only, identified by dbuuid.

    Only the first uuid among (icp["database.uuid"], icp["origin.database.uuid"])
    found in `callbacks` is executed; registering a noop callback for a
    descendant's uuid prevents an ancestor's callback from running on it.

    .. example::
        .. code-block:: python
            def db_yellowbird(cr, version):
                cr.execute("DELETE FROM ir_ui_view WHERE id=837")

            util.dispatch_by_dbuuid(cr, version, {
                "ef81c07aa90936a89f4e7878e2ebc634a24fcd66": db_yellowbird,
            })

    :param str version: Odoo version
    :param dict[str, function] callbacks: mapping dbuuids to the functions to run against matching dbs
    """
    for uuid in _dbuuids(cr):
        func = callbacks.get(uuid)
        if func is not None:
            _logger.info("calling dbuuid-specific function `%s`", func.__name__)
            func(cr, version)
            break
def rename_custom_column(cr, table_name, col_name, new_col_name, custom_module=None, report_details=""):
    """Rename a custom column and record the rename in the migration report."""
    _validate_table(table_name)
    if not column_exists(cr, table_name, col_name):
        _logger.warning("Column %r not found on table %r: skip renaming", col_name, table_name)
        return

    cr.execute('ALTER TABLE "{}" RENAME COLUMN "{}" TO "{}"'.format(table_name, col_name, new_col_name))

    module_details = " from module '{}'".format(custom_module) if custom_module else ""
    add_to_migration_reports(
        category="Custom tables/columns",
        message=(
            "The custom column '{col_name}' of the table '{table_name}'{module_details}"
            " was renamed to '{new_col_name}'. {report_details}"
        ).format(
            col_name=col_name,
            table_name=table_name,
            module_details=module_details,
            new_col_name=new_col_name,
            report_details=report_details,
        ),
    )
def reset_cowed_views(cr, xmlid, key=None):
    """
    Reset COWed (website-specific) copies of the view identified by `xmlid`.

    Each matching website view gets its current arch saved into `arch_prev`
    and replaced by the arch of the generic, module-defined view.

    :param str xmlid: fully qualified xmlid (`module.name`) of the generic view
    :param str key: view key to match; defaults to `xmlid`
    :return: ids of the views that were reset
    :rtype: set
    """
    if "." not in xmlid:
        # NOTE: the placeholders of this message were lost to markup-stripping
        # in this copy; reconstructed.
        raise ValueError("Please use fully qualified name <module>.<name>")

    module, _, name = xmlid.partition(".")
    if not key:
        key = xmlid
    cr.execute(
        """
        UPDATE ir_ui_view u
           SET arch_prev = u.arch_db,
               arch_db = v.arch_db
          FROM ir_ui_view v
          JOIN ir_model_data m
            ON m.res_id = v.id AND m.model = 'ir.ui.view'
         WHERE u.key = %s
           AND m.module = %s
           AND m.name = %s
           AND u.website_id IS NOT NULL
     RETURNING u.id
        """,
        [key, module, name],
    )
    # flatten the returned 1-tuples; avoids the quadratic `sum(..., ())` trick
    return {view_id for (view_id,) in cr.fetchall()}
def iter_commands(cr, like_all=(), like_any=()):
    """Yield the ``commands`` list of each matching spreadsheet revision.

    Generator-based protocol: the caller receives each commands list, may
    mutate it in place, and can ``send()`` back an explicit "changed" flag.
    When ``None`` is sent (or the generator is simply advanced), a sorted
    JSON round-trip comparison decides whether the revision was modified.
    Modified revisions are rewritten in database.

    Exactly one of *like_all* / *like_any* must be given: iterables of SQL
    LIKE patterns combined with ALL (resp. ANY).
    """
    if bool(like_all) == bool(like_any):
        raise ValueError("Please specify `like_all` or `like_any`, not both")

    quantifier = "ALL" if like_all else "ANY"
    cr.execute(
        """
        SELECT id,
               commands
          FROM spreadsheet_revision
         WHERE commands LIKE {}(%s::text[])
        """.format(quantifier),
        [list(like_all or like_any)],
    )
    for revision_id, raw in cr.fetchall():
        document = json.loads(raw)
        if "commands" not in document:
            continue
        before = json.dumps(document, sort_keys=True)

        changed = yield document["commands"]
        if changed is None:
            # No explicit verdict from the caller: detect in-place mutations.
            changed = json.dumps(document, sort_keys=True) != before

        if changed:
            cr.execute(
                "UPDATE spreadsheet_revision SET commands=%s WHERE id=%s", [json.dumps(document), revision_id]
            )


def process_commands(cr, callback, *args, **kwargs):
    """Apply *callback* to the commands of every matching revision.

    The callback may mutate the commands list in place and/or return a boolean
    saying whether the revision changed (returning ``None`` lets
    ``iter_commands`` detect mutations by itself).
    """
    commands = iter_commands(cr, *args, **kwargs)
    try:
        current = next(commands)
        while True:
            current = commands.send(callback(current))
    except StopIteration:
        pass
class CellErrorType:
    """Spreadsheet error literals, mirroring o-spreadsheet's CellErrorType."""

    NotAvailable = "#N/A"
    InvalidReference = "#REF"
    BadExpression = "#BAD_EXPR"
    CircularDependency = "#CYCLE"
    UnknownFunction = "#NAME?"
    DivisionByZero = "#DIV/0!"
    GenericError = "#ERROR"


DEFAULT_LOCALES = [
    {
        "name": "English (US)",
        "code": "en_US",
        "thousandsSeparator": ",",
        "decimalSeparator": ".",
        "dateFormat": "m/d/yyyy",
        "timeFormat": "hh:mm:ss a",
        "formulaArgSeparator": ",",
    }
]
DEFAULT_LOCALE = DEFAULT_LOCALES[0]

NEWLINE = "\n"


def get_formula_number_regex(decimal_separator):
    """Return a regex matching a number literal using *decimal_separator*."""
    decimal_separator = re.escape(decimal_separator)
    return re.compile(r"^-?\d+(%s?\d*(e\d+)?)?|^-?%s\d+(?!\w|!)" % (decimal_separator, decimal_separator))


def escape_regexp(string):
    return re.escape(string)


# Row-only (1:3) and column-only (A:C) range patterns.
full_row_xc = r"(\$?[A-Z]{1,3})?\$?[0-9]{1,7}\s*:\s*(\$?[A-Z]{1,3})?\$?[0-9]{1,7}\s*"
full_col_xc = r"\$?[A-Z]{1,3}(\$?[0-9]{1,7})?\s*:\s*\$?[A-Z]{1,3}(\$?[0-9]{1,7})?\s*"

cell_reference = re.compile(r"\$?([A-Z]{1,3})\$?([0-9]{1,7})", re.IGNORECASE)
# A reference, optionally prefixed by a (possibly quoted) sheet name.
range_reference = re.compile(
    r"^\s*('.+'!|[^']+!)?(%s|%s|%s)$" % (cell_reference.pattern, full_row_xc, full_col_xc), re.IGNORECASE
)

# Unicode space-like characters normalized to a regular space; CR/CRLF to LF.
white_space_special_characters = [
    "\t",
    "\f",
    "\v",
    chr(int("00a0", 16)),
    chr(int("1680", 16)),
    chr(int("2000", 16)),
    chr(int("200a", 16)),
    chr(int("2028", 16)),
    chr(int("2029", 16)),
    chr(int("202f", 16)),
    chr(int("205f", 16)),
    chr(int("3000", 16)),
    chr(int("feff", 16)),
]
white_space_regexp = re.compile("|".join(map(re.escape, white_space_special_characters)) + r"|(\r\n|\r|\n)")


def replace_special_spaces(text):
    """Normalize special whitespace to plain spaces and newlines to ``\\n``."""
    if not text:
        return ""
    if not white_space_regexp.search(text):
        return text
    # group(1) is set only for the newline alternative.
    return white_space_regexp.sub(lambda match: NEWLINE if match.group(1) else " ", text)


POSTFIX_UNARY_OPERATORS = ["%"]
OPERATORS = ["+", "-", "*", "/", ":", "=", "<>", ">=", ">", "<=", "<", "^", "&"] + POSTFIX_UNARY_OPERATORS


def tokenize(string, locale=DEFAULT_LOCALE):
    """Tokenize a spreadsheet formula into ``(type, value)`` tuples.

    Unknown characters produce ``("UNKNOWN", char)`` tokens instead of failing,
    so any input tokenizes completely.
    """
    string = replace_special_spaces(string)
    result = []
    if string:
        chars = TokenizingChars(string)

        while not chars.is_over():
            # Order matters: e.g. operators before symbols, strings before symbols.
            token = (
                tokenize_space(chars)
                or tokenize_args_separator(chars, locale)
                or tokenize_parenthesis(chars)
                or tokenize_operator(chars)
                or tokenize_string(chars)
                or tokenize_debugger(chars)
                or tokenize_invalid_range(chars)
                or tokenize_number(chars, locale)
                or tokenize_symbol(chars)
            )

            if not token:
                token = ("UNKNOWN", chars.shift())

            result.append(token)

    return result


def tokenize_debugger(chars):
    if chars.current == "?":
        chars.shift()
        return "DEBUGGER", "?"
    return None


parenthesis = {"(": ("LEFT_PAREN", "("), ")": ("RIGHT_PAREN", ")")}


def tokenize_parenthesis(chars):
    value = chars.current
    if value in parenthesis:
        chars.shift()
        return parenthesis[value]
    return None


def tokenize_args_separator(chars, locale):
    if chars.current == locale["formulaArgSeparator"]:
        value = chars.shift()
        return "ARG_SEPARATOR", value
    return None


def tokenize_operator(chars):
    # Multi-char operators ("<>", ">=", "<=") appear before their one-char
    # prefixes in OPERATORS, so the longest match wins.
    for op in OPERATORS:
        if chars.current_starts_with(op):
            chars.advance_by(len(op))
            return "OPERATOR", op
    return None


FIRST_POSSIBLE_NUMBER_CHARS = set("0123456789")


def tokenize_number(chars, locale):
    if chars.current not in FIRST_POSSIBLE_NUMBER_CHARS and chars.current != locale["decimalSeparator"]:
        return None
    match = re.match(get_formula_number_regex(locale["decimalSeparator"]), chars.remaining())
    if match:
        chars.advance_by(len(match.group(0)))
        return "NUMBER", match.group(0)
    return None


def tokenize_string(chars):
    """Consume a double-quoted string; backslash escapes the closing quote."""
    if chars.current == '"':
        start_char = chars.shift()
        letters = start_char
        while chars.current and (chars.current != start_char or letters[-1] == "\\"):
            letters += chars.shift()
        if chars.current == '"':
            letters += chars.shift()
        # NOTE: an unterminated string is still returned as STRING (no closing quote).
        return "STRING", letters
    return None


separator_regexp = re.compile(r"^[\w\.!\$]+")


def tokenize_symbol(chars):
    """Consume a symbol: function name, cell/range reference, or quoted sheet name."""
    result = ""
    if chars.current == "'":
        # Quoted sheet name: consume up to the closing quote; '' is an escaped quote.
        last_char = chars.shift()
        result += last_char
        while chars.current:
            last_char = chars.shift()
            result += last_char
            if last_char == "'":
                if chars.current and chars.current == "'":
                    last_char = chars.shift()
                    result += last_char
                else:
                    break
        if last_char != "'":
            return "UNKNOWN", result
    match = separator_regexp.match(chars.remaining())
    if match:
        value = match.group(0)
        result += value
        chars.advance_by(len(value))
    if result:
        value = result
        is_reference = range_reference.match(value)
        if is_reference:
            return "REFERENCE", value
        return "SYMBOL", value
    return None


def tokenize_space(chars):
    length = 0
    while chars.current == NEWLINE:
        length += 1
        chars.shift()
    if length:
        return "SPACE", NEWLINE * length

    while chars.current == " ":
        length += 1
        chars.shift()

    if length:
        return "SPACE", " " * length
    return None


def tokenize_invalid_range(chars):
    # BUGFIX: `chars.current` is a single character, so
    # `chars.current.startswith("#REF")` could never be true and the
    # INVALID_REFERENCE token was never produced. Use the multi-char
    # lookahead instead (as the o-spreadsheet JS original does).
    if chars.current_starts_with(CellErrorType.InvalidReference):
        chars.advance_by(len(CellErrorType.InvalidReference))
        return "INVALID_REFERENCE", CellErrorType.InvalidReference
    return None


class TokenizingChars:
    """Character cursor over the input: O(1) access to the current char."""

    def __init__(self, text):
        self.text = text
        self.current_index = 0
        # BUGFIX (robustness): the original indexed text[0] unconditionally,
        # raising IndexError on empty input.
        self.current = text[0] if text else None

    def shift(self):
        current = self.current
        self.current_index += 1
        self.current = self.text[self.current_index] if self.current_index < len(self.text) else None
        return current

    def advance_by(self, length):
        self.current_index += length
        self.current = self.text[self.current_index] if self.current_index < len(self.text) else None

    def is_over(self):
        return self.current_index >= len(self.text)

    def remaining(self):
        return self.text[self.current_index :]

    def current_starts_with(self, string):
        # BUGFIX: the previous per-index comparison raised IndexError when
        # `string` extended past the end of the text (e.g. tokenize("<")
        # probing for the "<>" operator). `str.startswith` with a start
        # offset handles that case by returning False.
        return self.text.startswith(string, self.current_index)
#!/usr/bin/env python3
# ruff: noqa: T201
# Pre-commit helper: byte-compile the staged .py files with python2 and/or
# python3 to catch syntax errors, since parts of this repo must stay
# python2-compatible while others are python3-only.
import subprocess
import sys
from pathlib import PurePath
from shutil import which

# Glob patterns of files that only need to compile under a single major version.
py2_only_patterns = []
py2_files = []

py3_only_patterns = [
    "tools/*.py",
    # tests are only run from version 12. python2 compatibility is not needed.
    "src/testing.py",
    "src/util/jinja_to_qweb.py",
    "src/util/snippets.py",
    "src/util/convert_bootstrap.py",
    "src/*/tests/*.py",
    "src/*/17.0.*/*.py",
]
py3_files = []

# Exit code: flipped to 1 on any failure, reported at the very end.
rc = 0

# Dispatch each file given on the command line into the py2/py3 buckets.
for filename in sys.argv[1:]:
    p = PurePath(filename)
    if p.suffix != ".py":
        continue

    # Enforce lowercase file names (upgrade scripts are looked up by name).
    if not filename.islower():
        print(f"filename {filename!r} is not lowercase")
        rc = 1

    if any(p.match(pattern) for pattern in py2_only_patterns):
        py2_files.append(filename)
    elif any(p.match(pattern) for pattern in py3_only_patterns):
        py3_files.append(filename)
    else:
        # not an explicit match to a python version. Test against both versions.
        py2_files.append(filename)
        py3_files.append(filename)


if py2_files:
    if which("python2"):
        s = subprocess.run(["python2", "-m", "compileall", "-f", "-q", *py2_files], check=False)
        if s.returncode:
            rc = 1
    else:
        # python2 unavailable: warn loudly (framed banner) but do not fail.
        lines = [
            "WARNING: `python2` hasn't been found in $PATH",
            "You must ensure the following files are compatible with python2:",
            *[f" - {f}" for f in py2_files],
        ]
        width = max(map(len, lines))
        message = "\n".join(f"@ {line: <{width}s} @" for line in lines)
        extra = "@" * (width + 4)
        print(f"{extra}\n{message}\n{extra}", file=sys.stderr)

if py3_files:
    s = subprocess.run(["python3", "-m", "compileall", "-f", "-q", *py3_files], check=False)
    if s.returncode:
        rc = 1

sys.exit(rc)
notes page for version {version}") 38 | 39 | root = etree.fromstring(html.text, parser=etree.HTMLParser()) 40 | iframe = root.xpath("//main//iframe[contains(@src, 'youtube.com') or contains(@src, 'youtube-nocookie.com')]") 41 | if not iframe: 42 | sys.exit(f"Cannot find youtube video in {html.url}") 43 | 44 | yt_link = httpx.URL(iframe[0].attrib["src"]) 45 | video_id = yt_link.path.removeprefix("/embed/") 46 | 47 | 48 | report_py = Path(__file__).parent.parent / "src" / "util" / "report.py" 49 | 50 | source_tree = cst.parse_module(report_py.read_bytes()) 51 | 52 | 53 | class Transformer(cst.CSTTransformer): 54 | def __init__(self): 55 | self.video_dict = None 56 | self.key_found = False 57 | super().__init__() 58 | 59 | def visit_Assign(self, node): 60 | match node: 61 | case cst.Assign( 62 | targets=[cst.AssignTarget(target=cst.Name(value="ODOO_SHOWCASE_VIDEOS"))], 63 | value=video_dict, 64 | ): 65 | self.video_dict = video_dict 66 | return True 67 | return False 68 | 69 | def visit_Dict(self, node): 70 | return node is self.video_dict 71 | 72 | def leave_DictElement(self, original_node, updated_node): 73 | if original_node.key.raw_value == full_version: 74 | self.key_found = True 75 | if original_node.value.raw_value != video_id: 76 | updated_node = updated_node.with_changes(value=cst.SimpleString(f'"{video_id}"')) 77 | return updated_node 78 | 79 | def leave_Dict(self, original_node, updated_node): 80 | if original_node is self.video_dict: 81 | if self.key_found: 82 | elements = updated_node.elements 83 | else: 84 | new_elem = updated_node.elements[0].with_changes( 85 | key=cst.SimpleString(f'"{full_version}"'), value=cst.SimpleString(f'"{video_id}"') 86 | ) 87 | elements = [new_elem, *updated_node.elements] 88 | 89 | elements = sorted(elements, reverse=True, key=lambda e: VERSION_RE.match(e.key.raw_value).groups("0")) 90 | updated_node = updated_node.with_changes(elements=elements) 91 | return updated_node 92 | 93 | 94 | modified_tree = 
source_tree.visit(Transformer()) 95 | 96 | report_py.write_text(modified_tree.code) 97 | -------------------------------------------------------------------------------- /tools/generate-inherit.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # ruff: noqa: ERA001 3 | 4 | import io 5 | import itertools 6 | import logging 7 | import subprocess 8 | import sys 9 | import tokenize 10 | from argparse import ArgumentParser, Namespace 11 | from ast import literal_eval 12 | from collections import defaultdict 13 | from dataclasses import dataclass, field 14 | from functools import total_ordering 15 | from pathlib import Path 16 | from typing import Dict, List, NamedTuple, Optional, Set, Tuple 17 | 18 | import black 19 | import tomli 20 | 21 | try: 22 | from black.nodes import Visitor 23 | except ImportError: 24 | # old black version 25 | from black import Visitor 26 | 27 | 28 | logging.basicConfig( 29 | level=logging.INFO, stream=sys.stderr, format="%(asctime)s %(levelname)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S" 30 | ) 31 | if sys.stderr.isatty(): 32 | logging.addLevelName(logging.INFO, "\033[1;32m\033[1;49mINFO\033[0m") 33 | logging.addLevelName(logging.CRITICAL, "\033[1;37m\033[1;41mCRITICAL\033[0m") 34 | 35 | logger = logging.getLogger(__name__) 36 | 37 | if int(black.__version__.split(".")[0]) >= 22: 38 | logger.critical("Too recent version of `black`. 
# All the spellings under which an Odoo model base class can appear
# (bare, or qualified through openerp/odoo and osv/models).
MODELS = ["osv", "osv_memory", "Model", "TransientModel", "AbstractModel"]
MODELS += [".".join(x).lstrip(".") for x in itertools.product(["openerp", "odoo", ""], ["osv", "models"], MODELS)]


class Repo(NamedTuple):
    """A github.com/odoo repository."""

    name: str

    @property
    def remote(self):
        return "git@github.com:odoo/{}.git".format(self.name)


REPOSITORIES = [
    Repo("odoo"),
    Repo("enterprise"),
    Repo("design-themes"),
]


@total_ordering
@dataclass(eq=False)
class Version:
    """An Odoo version (branch) name, e.g. ``12.0`` or ``saas-13.1``.

    Ordering is by the ``ints`` tuple; equality and hashing by name only.
    """

    name: str

    @property
    def fqn(self):
        """Fully qualified name: ``saas~13.1``, or ``8.saas~6`` for old standalone saas."""
        if "." in self.name:
            return self.name.replace("-", "~")
        major, minor = self.ints
        return "{}.saas~{}".format(major, minor)

    def __repr__(self):
        # This is hacky.
        # It will only be used when outputting the generated file.
        return "parse_version({!r})".format(self.fqn)

    @property
    def ints(self):
        """``(major, minor)`` tuple used for ordering."""
        parts = [int(p) for p in self.name.replace("saas-", "").split(".")]
        if len(parts) > 1:
            return tuple(parts)
        # < 11.0: standalone saas numbers map onto their major release
        saas = parts[0]
        for saas_range, major in ((range(1, 6), 7), (range(6, 7), 8), (range(7, 14), 9), (range(14, 19), 10)):
            if saas in saas_range:
                return (major, saas)
        raise ValueError(self.name)

    def __eq__(self, other):
        return self.name == other.name

    def __lt__(self, other):
        return self.ints < other.ints

    def __hash__(self):
        return hash(self.name)  # Only name is relevant

    @classmethod
    def parse(cls, name):
        """Build a Version from a user/branch spelling (``saas~X.Y`` accepted)."""
        normalized = name.replace("~", "-")
        if normalized.startswith("saas-") or normalized.endswith(".0"):
            return cls(normalized)
        return cls(normalized.split(".", 1)[-1])

    @property
    def python_target(self):
        """The black TargetVersion matching the python level of this Odoo version."""
        thresholds = (
            ((16, 3), black.mode.TargetVersion.PY310),
            ((15, 0), black.mode.TargetVersion.PY37),
            ((10, 17), black.mode.TargetVersion.PY36),
            ((7, 0), black.mode.TargetVersion.PY27),
        )
        mine = self.ints
        for bound, target in thresholds:
            if bound <= mine:
                return target
        raise RuntimeError(f"Cannot determine python target for {self.name}")


@dataclass(order=True)
class Inherit:
    """One inheritance edge, valid on the version interval [born, dead)."""

    model: str
    born: Version  # inclusive
    dead: Optional[Version] = None  # non-inclusive
    via: Optional[str] = None  # Many2one field to parent in case of `_inherits`

    def apply_on(self, version: Version) -> bool:
        return self.born <= version and (self.dead is None or version < self.dead)


_LAST_MAJOR = 17
_VERSIONS = {Version(f"{major}.0") for major in range(7, _LAST_MAJOR + 1)}
_VERSIONS |= {Version(f"saas-{saas}") for saas in range(1, 19)}
_VERSIONS |= {Version(f"saas-{major}.{minor}") for major in range(11, _LAST_MAJOR) for minor in range(1, 6)}

VERSIONS = sorted(_VERSIONS)

IGNORED_FILES = [
    # defines `_name = LITERAL % CONSTANT`
    # does not have _inherit(s)
    "odoo/addons/google_calendar/google_calendar.py",
    "odoo/addons/google_calendar/models/google_calendar.py",
    "odoo/addons/website_version/models/google_management.py",
    "enterprise/website_version/models/google_management.py",
]
# Sometimes, new modules are added during a version lifetime and not forward-ported
# to dead saas~* versions. These versions being dead, and no upgrade to them being
# made, we can consider they contain these models anyway. Without these virtual
# entries we would end up with holes in the inherit tree.
# Mapping: parent model -> inheritance edges injected for the versions they apply on.
VIRTUAL_INHERITS = {
    "account.avatax": [
        Inherit(model="account.move", born=Version("14.0"), dead=Version("15.0"), via=None),
        Inherit(model="sale.order", born=Version("14.0"), dead=Version("15.0"), via=None),
        Inherit(model="sale.subscription", born=Version("14.0"), dead=Version("15.0"), via=None),
    ],
    "account.avatax.unique.code": [
        Inherit(model="account.avatax", born=Version("14.0"), dead=Version("15.0"), via=None),
        Inherit(model="res.partner", born=Version("14.0"), dead=Version("15.0"), via=None),
    ],
    "account.edi.common": [
        Inherit("account.edi.xml.cii", born=Version("14.0"), dead=Version("15.0")),
        Inherit("account.edi.xml.ubl_20", born=Version("14.0"), dead=Version("15.0")),
    ],
    "account.edi.xml.ubl_20": [
        Inherit("account.edi.xml.ubl_21", born=Version("14.0"), dead=Version("15.0")),
        Inherit("account.edi.xml.ubl_efff", born=Version("14.0"), dead=Version("15.0")),
    ],
    "account.edi.xml.ubl_21": [
        Inherit("account.edi.xml.ubl_bis3", born=Version("14.0"), dead=Version("15.0")),
    ],
    "account.edi.xml.ubl_bis3": [
        Inherit("account.edi.xml.ubl_de", born=Version("14.0"), dead=Version("15.0")),
    ],
    "account.report": [
        Inherit("account.cash.flow.report", born=Version("saas-11.1"), dead=Version("saas-12.5")),
        Inherit("l10n.lu.report.partner.vat.intra", born=Version("saas-13.1"), dead=Version("saas-13.2")),
    ],
    "l10n_cl.edi.util": [
        Inherit("stock.picking", born=Version("14.0"), dead=Version("saas-14.2")),
        Inherit(model="l10n_cl.daily.sales.book", born=Version("14.0"), dead=Version("saas-14.3"), via=None),
    ],
    "l10n_es.sii.account.tax.mixin": [
        Inherit(model="account.tax", born=Version("14.0"), dead=Version("saas-14.4"), via=None),
        Inherit(model="account.tax.template", born=Version("14.0"), dead=Version("saas-14.4"), via=None),
    ],
    "l10n_mx.trial.report": [
        Inherit("l10n_mx.trial.closing.report", born=Version("saas-11.1"), dead=Version("saas-12.2")),
    ],
    "l10n_mx_edi.pac.sw.mixin": [
        Inherit("account.invoice", born=Version("saas-11.1"), dead=Version("saas-12.5")),
        Inherit("account.payment", born=Version("saas-11.1"), dead=Version("saas-12.2")),
    ],
    "mail.activity.mixin": [
        Inherit("l10n_lu.yearly.tax.report.manual", born=Version("13.0"), dead=Version("15.0")),
        # NOTE: born saas-15 predates 12.0 in the ordering (saas-15 maps to 10.15)
        Inherit("l10n_uk.vat.obligation", born=Version("saas-15"), dead=Version("12.0")),
    ],
    "mail.thread": [
        Inherit("account.online.link", born=Version("12.0"), dead=Version("14.0")),
        Inherit(model="l10n_cl.daily.sales.book", born=Version("14.0"), dead=Version("saas-14.3"), via=None),
        Inherit("l10n_lu.yearly.tax.report.manual", born=Version("13.0"), dead=Version("15.0")),
        Inherit("l10n_uk.vat.obligation", born=Version("saas-15"), dead=Version("12.0")),
    ],
    "microsoft.outlook.mixin": [
        Inherit(model="fetchmail.server", born=Version("12.0"), dead=Version("saas-15.3"), via=None),
        Inherit(model="ir.mail_server", born=Version("12.0"), dead=Version("saas-15.3"), via=None),
    ],
    "pos.order.line": [
        Inherit(model="pos.order_line_pro_forma_be", born=Version("14.0"), dead=Version("saas-16.1")),
    ],
    "studio.mixin": [
        Inherit(model="ir.default", born=Version("14.0"), dead=Version("saas-14.2")),
    ],
    "google.gmail.mixin": [
        Inherit(model="fetchmail.server", born=Version("12.0"), dead=Version("15.0"), via=None),
        Inherit(model="ir.mail_server", born=Version("12.0"), dead=Version("15.0"), via=None),
    ],
}
class BootstrapVisitor(Visitor):
    """Recover the inheritance mapping from a previously generated `_inherit.py`.

    Visits the lib2to3 tree produced by black and evaluates the literal dict
    assigned to `inheritance_data`, rebuilding real `Inherit`/`Version` objects.
    """

    # Result of the bootstrap: {model_name: [Inherit, ...]}.
    # NOTE: class-level attribute, reassigned per-visit in visit_dictsetmaker.
    result: Dict[str, List[Inherit]] = {}

    def to_str(self, node):
        # Flatten a lib2to3 node back into its source text.
        if isinstance(node, black.Node):
            return "".join(self.to_str(c) for c in node.children)
        return node.value

    def visit_dictsetmaker(self, node):
        # Names available while evaluating the dict literal: the generated file
        # uses `Inherit(...)` entries and `parse_version(...)` keys/versions.
        eval_context = {
            "Inherit": Inherit,
            "parse_version": Version.parse,
        }

        # eval() is acceptable here: the input is a file this tool generated
        # itself, not untrusted data.
        self.result = eval(f"{{ {self.to_str(node)} }}", eval_context)
        return []
def init_repos(path: Path) -> None:
    """Ensure all REPOSITORIES are present under *path*, cloned and up to date.

    Missing repositories are cloned from their github remote; existing ones
    are only fetched (checkout of the relevant branch happens later).
    """
    path.mkdir(parents=True, exist_ok=True)

    for repo in REPOSITORIES:
        repo_dir = path / repo.name
        if repo_dir.exists():
            subprocess.run(["git", "fetch", "-q"], cwd=str(repo_dir), check=True)
        else:
            clone_cmd = ["git", "clone", repo.remote, repo.name]
            subprocess.run(clone_cmd, cwd=str(path), check=True)
def bootstrap(from_file: Path):
    """Load a previously generated `_inherit.py` as the starting mapping.

    Returns a ``defaultdict(list)`` of {model: [Inherit, ...]} so `main` can
    extend it incrementally.
    """
    logger.info("📂 Bootstrapping from %s", from_file)
    visitor = BootstrapVisitor()

    code, _ = _read_python_source(from_file)
    node = black.lib2to3_parse(code)

    list(visitor.visit(node))
    return defaultdict(list, visitor.result)


def main(options: Namespace):
    """Scan every requested Odoo branch and print the regenerated `_inherit.py`.

    For each version: collect inheritance edges from all repositories (plus
    VIRTUAL_INHERITS), mark vanished edges as dead, and record new ones.
    """
    wd = options.working_dir
    logger.info("⚙️ Initialize repositories into %s", wd)
    init_repos(wd)

    # Start from the bootstrap file when given, otherwise from scratch.
    result = bootstrap(options.bootstrap_file) if options.bootstrap_file else defaultdict(list)

    for version in VERSIONS:
        if not (options.from_branch <= version <= options.to_branch):
            logger.info("⏭ Skip version %s", version.name)
            continue

        visitor = OdooVisitor()

        # Seed edges for models living only in dead saas versions.
        for model, virtuals in VIRTUAL_INHERITS.items():
            for virtual in virtuals:
                if virtual.apply_on(version):
                    visitor.inh[model].add((virtual.model, virtual.via))

        any_repo = False
        for repo in REPOSITORIES:
            if not checkout(wd, repo, version):
                continue
            any_repo = True
            logger.info("🔎 Process %s at version %s", repo.name, version.name)
            r = wd / repo.name
            for pyfile in r.glob("**/*.py"):
                fname = str(pyfile.relative_to(wd))
                # Skip known unparseable files and anything test-related.
                if fname in IGNORED_FILES or "test" in fname:
                    continue
                code, _ = _read_python_source(pyfile)
                # Parse with the python grammar matching this Odoo version.
                node = black.lib2to3_parse(code, [version.python_target])
                try:
                    list(visitor.visit(node))
                except Exception:
                    logger.critical("💥 Cannot parse %s (%s %s)", pyfile, repo.name, version.name)
                    raise

        if not any_repo:
            # branch not found in any repo, don't store any inherits, even virtual ones
            continue

        if not visitor.inh:
            continue

        # Edges present so far but absent from this version: close their interval.
        for model, children in result.items():
            for child in children:
                if (child.model, child.via) not in visitor.inh[model] and not child.dead and child.born < version:
                    child.dead = version

        # Edges seen in this version: record them unless an open (or still
        # overlapping) interval already covers them.
        for model, children in visitor.inh.items():
            for child, via in children:
                for inh in result[model]:
                    if inh.model == child and inh.via == via and (not inh.dead or inh.dead >= version):
                        break
                else:
                    result[model].append(Inherit(model=child, born=version, via=via))

    # Deterministic output: sort models and their edge lists.
    result = {m: sorted(result[m]) for m in sorted(result)}
    me = Path(sys.argv[0])
    pyproject = Path(black.find_pyproject_toml((str(me.parent),)))

    # Template of the generated module; `{result!r}` relies on Version.__repr__
    # emitting `parse_version(...)` calls.
    output = f"""\
# This file is auto-generated by `{me.resolve().relative_to(pyproject.parent)}`. Edits will be lost.

from collections import namedtuple

try:
    from odoo.tools.misc import frozendict
    from odoo.tools.parse_version import parse_version
except ImportError:
    from openerp.tools.parse_version import parse_version
    try:
        from openerp.tools.misc import frozendict
    except ImportError:
        # frozendict only appears with new api in 8.0
        frozendict = dict

Inherit = namedtuple("Inherit", "model born dead via")  # NOTE: dead is non-inclusive

inheritance_data = frozendict({result!r})
"""

    # Format the generated module with the project's configured line length;
    # PY27 target since the generated file must stay python2-compatible.
    with open(pyproject, "rb") as fp:
        line_length = tomli.load(fp)["tool"]["ruff"]["line-length"]
    mode = black.FileMode(target_versions={black.TargetVersion.PY27}, line_length=line_length)
    print(black.format_str(output, mode=mode), end="")  # noqa: T201
#!/usr/bin/env -S uv run --script --quiet
# ruff: noqa: RET503

# /// script
# dependencies = [
#     "pygal[png]",
# ]
# ///

import argparse
import os
import re
import sys

import pygal  # also need cairosvg for png output

# Compiled once at module level instead of on every log line.
TIMESTAMP_RE = re.compile(r"(\d{4}-\d{2}-\d{2} \d{2}:\d{2}):\d{2},\d{3} ")
MODULE_TIMING_RE = re.compile(r"Module ([a-zA-Z0-9_]+) loaded in (\d+\.\d\d)s, \d+ queries")


def process(options):
    """Build a pie chart of per-module upgrade times read from stdin.

    Reads Odoo upgrade log lines; modules loading in no more than
    ``options.min_time`` seconds are merged into a single "Other modules"
    slice. Returns the rendered chart as bytes (png) or str (svg).
    """
    pie = pygal.Pie()
    dt = None  # timestamp of the first matching log line, used in the title
    others = 0.0  # cumulated time of modules below the threshold
    # Stream stdin instead of materializing the whole log with readlines().
    for line in sys.stdin:
        if dt is None:
            match = TIMESTAMP_RE.match(line)
            if match:
                dt = match.group(1)

        match = MODULE_TIMING_RE.search(line)
        if match:
            time = float(match.group(2))
            if time > options.min_time:
                pie.add(match.group(1), time)
            else:
                others += time

    if options.min_time and others:
        pie.add("Other modules", others)

    title = f"{dt}"
    if options.min_time:
        title = f"{title} • Modules loaded in more than {options.min_time} seconds"
    pie.title = title

    # `choices` in the argument parser guarantees one of these branches is taken.
    if options.format == "png":
        return pie.render_to_png()
    elif options.format == "svg":
        return pie.render()


def main():
    # cat migration-14.0-latest.log | python3 graph-upgrade-timing.py -m 15 > graph.svg
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--format", type=str, choices=["svg", "png"], default="svg")
    parser.add_argument("-m", "--min-time", dest="min_time", type=float, default=0.0)

    options = parser.parse_args()
    # Write bytes straight to stdout's file descriptor (svg str vs png bytes
    # are both handled by the binary stream).
    with os.fdopen(sys.stdout.fileno(), "wb") as fp:
        fp.write(process(options))

    return 0


if __name__ == "__main__":
    sys.exit(main())
--------------------------------------------------------------------------------