├── .gitignore ├── LICENSE ├── README.md ├── ai_chat ├── README.md ├── __init__.py ├── __manifest__.py ├── data │ ├── ai-odoo-avatar.png │ ├── ai_chat_data.xml │ └── ai_completion_data.xml ├── i18n │ └── fr.po ├── models │ ├── __init__.py │ ├── mail_ai_bot.py │ ├── mail_channel.py │ ├── mail_thread.py │ ├── res_partner.py │ └── res_users.py ├── security │ ├── ir.model.access.csv │ └── security.xml └── static │ ├── description │ ├── icon.png │ ├── index.html │ └── logo.png │ ├── img │ ├── chat.png │ ├── clear_chat.png │ ├── discuss.png │ └── open_chat.png │ └── src │ └── models │ └── messaging_initializer.js ├── ai_connector ├── README.md ├── __init__.py ├── __manifest__.py ├── data │ └── ai_tool_data.xml ├── i18n │ └── fr.po ├── models │ ├── __init__.py │ ├── ai_completion.py │ ├── ai_completion_result.py │ ├── ai_fine_tuning.py │ ├── ai_mixin.py │ ├── ai_model.py │ ├── ai_provider.py │ ├── ai_question_answer.py │ ├── ai_question_answer_tag.py │ ├── ai_result_mixin.py │ ├── ai_tool.py │ └── ai_tool_property.py ├── security │ ├── ir.model.access.csv │ └── security.xml ├── static │ ├── description │ │ ├── icon.png │ │ ├── index.html │ │ └── logo.png │ ├── img │ │ ├── completion_params.png │ │ ├── mistral_ai.png │ │ ├── prompt.png │ │ ├── settings.png │ │ └── tests.png │ └── src │ │ ├── js │ │ ├── completion_action │ │ │ ├── completion_action.xml │ │ │ └── form_list_view_action.js │ │ └── fine_tuning_graph │ │ │ ├── fine_tuning_graph_field.js │ │ │ └── fine_tuning_graph_field.xml │ │ └── scss │ │ └── style.scss ├── views │ ├── ai_completion_result_views.xml │ ├── ai_completion_views.xml │ ├── ai_connector_views.xml │ ├── ai_fine_tuning_views.xml │ ├── ai_model_views.xml │ ├── ai_provider_views.xml │ ├── ai_question_answer_views.xml │ └── ai_tool_views.xml └── wizards │ ├── __init__.py │ ├── create_question_answer_wizard.py │ ├── create_question_answer_wizard.xml │ ├── question_answer_dump_wizard.py │ └── question_answer_dump_wizard.xml ├── ai_connector_anthropic ├── 
README.md ├── __init__.py ├── __manifest__.py ├── data │ └── ai_provider_data.xml ├── i18n │ └── fr.po ├── models │ ├── __init__.py │ ├── ai_completion.py │ ├── ai_provider.py │ └── ai_tool.py ├── security │ ├── ir.model.access.csv │ └── security.xml └── static │ ├── description │ ├── icon.png │ ├── index.html │ └── logo.png │ ├── img │ └── anthropic_ai_logo.png │ └── src │ └── scss │ └── style.scss ├── ai_connector_mistralai ├── README.md ├── __init__.py ├── __manifest__.py ├── data │ └── ai_provider_data.xml ├── i18n │ └── fr.po ├── models │ ├── __init__.py │ ├── ai_completion.py │ ├── ai_fine_tuning.py │ └── ai_provider.py ├── security │ ├── ir.model.access.csv │ └── security.xml └── static │ ├── description │ ├── icon.png │ ├── index.html │ └── logo.png │ ├── img │ ├── completion_params.png │ ├── mistral_ai.png │ ├── prompt.png │ ├── settings.png │ └── tests.png │ └── src │ └── scss │ └── style.scss ├── ai_connector_openai ├── README.md ├── __init__.py ├── __manifest__.py ├── data │ └── ai_provider_data.xml ├── i18n │ └── fr.po ├── models │ ├── __init__.py │ ├── ai_completion.py │ ├── ai_fine_tuning.py │ └── ai_provider.py ├── security │ ├── ir.model.access.csv │ └── security.xml ├── static │ ├── description │ │ ├── icon.png │ │ ├── index.html │ │ └── logo.png │ ├── img │ │ ├── completion_params.png │ │ ├── openai_logo.svg │ │ ├── openai_params.png │ │ ├── prompt.png │ │ ├── settings.png │ │ └── tests.png │ └── src │ │ └── scss │ │ └── style.scss └── views │ └── ai_completion_views.xml └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | 
.installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | #.idea/ 161 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 
5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 
46 | 47 | 2. Conveying Modified Versions. 48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 
80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 
115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 150 | 151 | Each version is given a distinguishing version number. 
If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 166 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # odoo-ai 2 | 3 | Addons to use AI (OpenAI, MistralAI) in Odoo 4 | -------------------------------------------------------------------------------- /ai_chat/README.md: -------------------------------------------------------------------------------- 1 | [![License: LGPL-3](https://img.shields.io/badge/licence-LGPL--3-blue.png)](http://www.gnu.org/licenses/lgpl-3.0-standalone.html) 2 | 3 | AI Chat 4 | =================== 5 | 6 | [AI Connector Logo](https://github.com/myrrkel/odoo-ai) 7 | 8 | This module adds an AI Bot user to chat with. 
9 | 10 | ## Usage 11 | 12 | Open a chat and start talking to AI Bot: 13 | 14 | ![image](./static/img/open_chat.png) 15 | 16 | ![image](./static/img/chat.png) 17 | 18 | Or go in **Discuss**: 19 | 20 | ![image](./static/img/discuss.png) 21 | 22 | 23 | ### Clear the chat 24 | 25 | Use the command **/clear** to clear the chat: 26 | 27 | ![image](./static/img/clear_chat.png) 28 | 29 | ### Boost AI Bot answer 30 | 31 | By default, AI Bot answer length is limited. To get a longer answer, start your prompt with an exclamation mark, so AI Bot will use a maximum of tokens to answer. 32 | 33 | 34 | ## Requirements 35 | 36 | **ai_connector** is required. 37 | 38 | 39 | ## Maintainer 40 | 41 | * This module is maintained by [Michel Perrocheau](https://github.com/myrrkel). 42 | * Contact me on [LinkedIn](https://www.linkedin.com/in/michel-perrocheau-ba17a4122). 43 | 44 | [](https://github.com/myrrkel) 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /ai_chat/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | 4 | from . import models 5 | -------------------------------------------------------------------------------- /ai_chat/__manifest__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 
3 | { 4 | 'name': 'AI Chat', 5 | 'version': '16.0.0.0.1', 6 | 'author': 'Michel Perrocheau', 7 | 'website': 'https://github.com/myrrkel', 8 | 'summary': "Add a AI Bot user to chat with", 9 | 'sequence': 0, 10 | 'certificate': '', 11 | 'license': 'LGPL-3', 12 | 'depends': [ 13 | 'ai_connector', 14 | 'mail', 15 | 'bus', 16 | ], 17 | 'category': 'AI', 18 | 'complexity': 'easy', 19 | 'qweb': [ 20 | ], 21 | 'demo': [ 22 | ], 23 | 'images': [ 24 | ], 25 | 'data': [ 26 | 'security/ir.model.access.csv', 27 | 'security/security.xml', 28 | 'data/ai_chat_data.xml', 29 | 'data/ai_completion_data.xml', 30 | ], 31 | 'assets': { 32 | 'mail.assets_messaging': [ 33 | 'ai_chat/static/src/models/messaging_initializer.js', 34 | ], 35 | }, 36 | 'auto_install': False, 37 | 'installable': True, 38 | 'application': False, 39 | } 40 | -------------------------------------------------------------------------------- /ai_chat/data/ai-odoo-avatar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/data/ai-odoo-avatar.png -------------------------------------------------------------------------------- /ai_chat/data/ai_chat_data.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | AI Bot 6 | 7 | ai@example.com 8 | 9 | 10 | 11 | 12 | 13 | 14 | ai 15 | 16 | 17 | 18 | --
19 | AI]]>
20 | 21 |
22 |
23 |
-------------------------------------------------------------------------------- /ai_chat/data/ai_completion_data.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | AI Chat 6 | 7 | [] 8 | 4096 9 | 1 10 | 1 11 | Your name is Odoo AI Bot. You are an expert of Odoo. You help Odoo users and developers. Your answers are precise and technical. 12 | 13 | 14 | -------------------------------------------------------------------------------- /ai_chat/i18n/fr.po: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/i18n/fr.po -------------------------------------------------------------------------------- /ai_chat/models/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/algpl.html). 3 | 4 | from . import res_users 5 | from . import res_partner 6 | from . import mail_thread 7 | from . import mail_ai_bot 8 | from . import mail_channel 9 | -------------------------------------------------------------------------------- /ai_chat/models/mail_ai_bot.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 
3 | 4 | from odoo import models, _ 5 | from odoo.tools import plaintext2html, html2plaintext 6 | from odoo.exceptions import UserError 7 | 8 | import logging 9 | 10 | _logger = logging.getLogger(__name__) 11 | 12 | 13 | class MailBot(models.AbstractModel): 14 | _name = 'mail.ai.bot' 15 | _description = 'Mail AI Bot' 16 | 17 | def _answer_to_message(self, record, values): 18 | ai_bot_id = self.env['ir.model.data']._xmlid_to_res_id('ai_chat.partner_ai') 19 | if len(record) != 1 or values.get('author_id') == ai_bot_id or values.get('message_type') != 'comment': 20 | return 21 | if self._is_bot_in_private_channel(record): 22 | if values.get('body', '').startswith('!'): 23 | answer_type = 'important' 24 | else: 25 | answer_type = 'chat' 26 | 27 | try: 28 | answer = self._get_answer(record, answer_type) 29 | except Exception as err: 30 | _logger.error(err) 31 | raise UserError(err) 32 | 33 | if answer: 34 | message_type = 'comment' 35 | subtype_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment') 36 | record = record.with_context(mail_create_nosubscribe=True).sudo() 37 | record.message_post(body=answer, author_id=ai_bot_id, message_type=message_type, subtype_id=subtype_id) 38 | 39 | def get_chat_messages(self, record, header, only_human=False): 40 | partner_ai_id = self.env.ref('ai_chat.partner_ai') 41 | previous_message_ids = record.message_ids.filtered(lambda m: m.body != '') 42 | if only_human: 43 | previous_message_ids = previous_message_ids.filtered(lambda m: m.author_id != partner_ai_id) 44 | 45 | chat_messages = [{'role': 'system', 'content': header}] if header else [] 46 | for message_id in previous_message_ids.sorted('date'): 47 | role = 'assistant' if message_id.author_id == partner_ai_id else 'user' 48 | content = html2plaintext(message_id.body) 49 | chat_message = {'role': role, 'content': content} 50 | chat_messages.append(chat_message) 51 | return chat_messages 52 | 53 | def _get_answer(self, record, answer_type='chat'): 54 | completion_id = 
self.env.ref('ai_chat.completion_chat') 55 | header = completion_id.get_system_prompt(record.id) 56 | if answer_type == 'chat': 57 | messages = self.get_chat_messages(record, header) 58 | res = completion_id.create_completion(0, messages=messages) 59 | elif answer_type == 'important': 60 | messages = self.get_chat_messages(record, header, only_human=True) 61 | res = completion_id.create_completion(0, messages, 62 | max_tokens=16000) 63 | else: 64 | return 65 | if res: 66 | return res[0].replace('\n\n', '
').replace('\n', '
') 67 | 68 | def _is_bot_in_private_channel(self, record): 69 | ai_bot_id = self.env['ir.model.data']._xmlid_to_res_id('ai_chat.partner_ai') 70 | if record._name == 'mail.channel' and record.channel_type == 'chat': 71 | return ai_bot_id in record.with_context(active_test=False).channel_partner_ids.ids 72 | return False 73 | -------------------------------------------------------------------------------- /ai_chat/models/mail_channel.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | 4 | from odoo import models, _ 5 | 6 | 7 | class MailChannel(models.Model): 8 | _inherit = 'mail.channel' 9 | 10 | def execute_command_clear_ai_chat(self, **kwargs): 11 | partner = self.env.user.partner_id 12 | key = kwargs['body'] 13 | if key.lower().strip() == '/clear': 14 | ai_bot_id = self.env['ir.model.data']._xmlid_to_res_id('ai_chat.partner_ai') 15 | ai_chat_member_ids = {ai_bot_id, partner.id} 16 | if ai_chat_member_ids == set(self.channel_member_ids.mapped('partner_id.id')): 17 | self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.message/delete', 18 | {'message_ids': self.message_ids.ids}) 19 | self.message_ids.unlink() 20 | -------------------------------------------------------------------------------- /ai_chat/models/mail_thread.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 
3 | 4 | from odoo import models 5 | 6 | 7 | class MailThread(models.AbstractModel): 8 | _inherit = 'mail.thread' 9 | 10 | def _message_post_after_hook(self, message, msg_vals): 11 | res = super(MailThread, self)._message_post_after_hook(message, msg_vals) 12 | self.env['mail.ai.bot']._answer_to_message(self, msg_vals) 13 | return res 14 | -------------------------------------------------------------------------------- /ai_chat/models/res_partner.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | 4 | from odoo import models, fields, api, _ 5 | import logging 6 | 7 | _logger = logging.getLogger(__name__) 8 | 9 | 10 | class ResPartner(models.Model): 11 | _inherit = 'res.partner' 12 | 13 | is_ai_bot = fields.Boolean('Is AI Bot') 14 | 15 | def _compute_im_status(self): 16 | super(ResPartner, self)._compute_im_status() 17 | ai_bot_user_id = self.env['ir.model.data']._xmlid_to_res_id('ai_chat.partner_ai') 18 | for user in self.filtered(lambda u: u.id == ai_bot_user_id): 19 | user.im_status = 'online' 20 | -------------------------------------------------------------------------------- /ai_chat/models/res_users.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 
3 | 4 | from odoo import models, fields, api, _ 5 | import logging 6 | 7 | _logger = logging.getLogger(__name__) 8 | 9 | 10 | class ResUsers(models.Model): 11 | _inherit = 'res.users' 12 | 13 | def _init_messaging(self): 14 | if self._is_internal(): 15 | self._init_ai_bot() 16 | return super()._init_messaging() 17 | 18 | def _init_ai_bot(self): 19 | self.ensure_one() 20 | ai_bot_partner_id = self.env['ir.model.data']._xmlid_to_res_id('ai_chat.partner_ai') 21 | channel_info = self.env['mail.channel'].channel_get([ai_bot_partner_id, self.partner_id.id]) 22 | channel = self.env['mail.channel'].browse(channel_info['id']) 23 | return channel 24 | -------------------------------------------------------------------------------- /ai_chat/security/ir.model.access.csv: -------------------------------------------------------------------------------- 1 | "id","name","model_id:id","group_id:id","perm_read","perm_write","perm_create","perm_unlink" 2 | -------------------------------------------------------------------------------- /ai_chat/security/security.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /ai_chat/static/description/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/static/description/icon.png -------------------------------------------------------------------------------- /ai_chat/static/description/index.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

AI Chat

4 |

ai_chat

5 |
6 |

7 | This module adds an AI Bot user to chat with. 8 |

9 |
10 |
11 | 12 | 13 |
14 |
15 |
16 | 17 |
18 |
19 | Contact me 20 |
21 |
22 |
23 | -------------------------------------------------------------------------------- /ai_chat/static/description/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/static/description/logo.png -------------------------------------------------------------------------------- /ai_chat/static/img/chat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/static/img/chat.png -------------------------------------------------------------------------------- /ai_chat/static/img/clear_chat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/static/img/clear_chat.png -------------------------------------------------------------------------------- /ai_chat/static/img/discuss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/static/img/discuss.png -------------------------------------------------------------------------------- /ai_chat/static/img/open_chat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_chat/static/img/open_chat.png -------------------------------------------------------------------------------- /ai_chat/static/src/models/messaging_initializer.js: -------------------------------------------------------------------------------- 1 | /** @odoo-module **/ 2 | 3 | import { registerPatch } from '@mail/model/model_core'; 4 | import { insert } from '@mail/model/model_field_command'; 5 | 6 | registerPatch({ 7 | 
name: 'MessagingInitializer', 8 | recordMethods: { 9 | /** 10 | * @override 11 | */ 12 | _initCommands() { 13 | this._super(); 14 | this.messaging.update({ 15 | commands: insert({ 16 | help: this.env._t("Clear chat with AI Bot"), 17 | methodName: 'execute_command_clear_ai_chat', 18 | name: "clear", 19 | }), 20 | }); 21 | }, 22 | }, 23 | }); 24 | -------------------------------------------------------------------------------- /ai_connector/README.md: -------------------------------------------------------------------------------- 1 | [![License: LGPL-3](https://img.shields.io/badge/licence-LGPL--3-blue.png)](http://www.gnu.org/licenses/lgpl-3.0-standalone.html) 2 | 3 | AI Connector 4 | ============ 5 | 6 | [AI Connector Logo](https://github.com/myrrkel/odoo-ai) 7 | 8 | This technical module provides a connector for the AI platforms. 9 | 10 | It can be used as a playground to test AI tools in Odoo but does not have standalone functionality. 11 | The module is intended to be inherited by other modules for specific use cases. 12 | 13 | ## Configuration 14 | 15 | In **Settings**, fill the **API Key** field with your generated key. 16 | 17 | ![image](./static/img/settings.png) 18 | 19 | ## Usage 20 | 21 | ### AI Completion 22 | 23 | To create a new **AI Completion**, go to **Settings**, **Technical**, **AI Completion** and create a new record. 24 | 25 | ![image](./static/img/completion_params.png) 26 | 27 | **Model**: The model on which the completion will be applied. 28 | 29 | **Target Field**: The field where the generated value will be saved. 30 | 31 | **Domain**: The domain to select the records on which the completion will be run. 32 | 33 | For Completion results go to **Settings**, **Technical**, **AI Completion Results** 34 | 35 | ### Prompt template 36 | 37 | Write a prompt template in Qweb. 
38 | 39 | Available functions in prompt template: 40 | - object : Current record 41 | - answer_lang : Function returning the language name 42 | - html2plaintext : Function to convert html to text 43 | 44 | ![image](./static/img/prompt.png) 45 | 46 | ### Tests 47 | 48 | Test action will use the first record found with the domain set for the model. 49 | 50 | Test first your prompt to adjust your template, then test the result of the Completion to adjust AI parameters. 51 | 52 | ![image](./static/img/tests.png) 53 | 54 | ### Tools (Function calling) 55 | 56 | Many LLM (OpenAI, Mistral AI) allows to provide tools to AI model so the AI assistant will be able to get datas from Odoo to generate the correct answer. 57 | By default, only one tool is available: 58 | 59 | `search_question_answer` : When this tool is added to the completion, the AI will try when it's necessary to find by keywords the most relevant answers in the `AI Question Answers` table. Then the AI model can return a completion according to information in these answers. 60 | 61 | It's possible to add more tools in **Settings**, **Technical**, **AI Tools** 62 | 63 | 64 | ### Question-Answers 65 | 66 | To add a newQuestion-Answer go to **Settings**, **Technical**, **AI Question Answers** and create a new record. 67 | 68 | It's also possible to generate a set of question-answer with a completion. 69 | In the prompt ask AI to return a JSON list of question-answer dictionary like [{'question': '...', 'answer': '...'}] and 70 | select the post-process function 'JSON to questions' to create the questions and answers records. 71 | 72 | 73 | ## Requirements 74 | 75 | No requirements 76 | 77 | ## Maintainer 78 | 79 | * This module is maintained by [Michel Perrocheau](https://github.com/myrrkel). 80 | * Contact me on [LinkedIn](https://www.linkedin.com/in/michel-perrocheau-ba17a4122). 
81 | 82 | [](https://github.com/myrrkel) 83 | 84 | 85 | -------------------------------------------------------------------------------- /ai_connector/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | 4 | from . import models 5 | from . import wizards 6 | -------------------------------------------------------------------------------- /ai_connector/__manifest__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | { 4 | 'name': 'AI Connector', 5 | 'version': '16.0.0.0', 6 | 'author': 'Michel Perrocheau', 7 | 'website': 'https://github.com/myrrkel', 8 | 'summary': "Connector for AI platforms", 9 | 'sequence': 0, 10 | 'certificate': '', 11 | 'license': 'LGPL-3', 12 | 'depends': [ 13 | 'base', 14 | 'mail', 15 | ], 16 | 'category': 'AI', 17 | 'complexity': 'easy', 18 | 'qweb': [ 19 | ], 20 | 'demo': [ 21 | ], 22 | 'images': [ 23 | ], 24 | 'data': [ 25 | 'security/ir.model.access.csv', 26 | 'security/security.xml', 27 | 'data/ai_tool_data.xml', 28 | 'views/ai_provider_views.xml', 29 | 'views/ai_model_views.xml', 30 | 'views/ai_completion_views.xml', 31 | 'views/ai_completion_result_views.xml', 32 | 'views/ai_question_answer_views.xml', 33 | 'views/ai_tool_views.xml', 34 | 'views/ai_fine_tuning_views.xml', 35 | 'views/ai_connector_views.xml', 36 | 'wizards/create_question_answer_wizard.xml', 37 | 'wizards/question_answer_dump_wizard.xml', 38 | ], 39 | 'assets': { 40 | 'web.assets_backend': [ 41 | 'ai_connector/static/src/scss/style.scss', 42 | 'ai_connector/static/src/js/**/*', 43 | ], 44 | }, 45 | 'auto_install': False, 46 | 'installable': True, 47 | 'application': False, 48 | } 49 | 
-------------------------------------------------------------------------------- /ai_connector/data/ai_tool_data.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | search_question_answer 5 | Search by keywords in the frequently asked questions database. Returns a list of questions with their answers 6 | function 7 | 8 | 9 | 10 | keywords 11 | The keywords to search for in the frequently asked questions database as a list of comma separated keywords. 12 | string 13 | true 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /ai_connector/i18n/fr.po: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/i18n/fr.po -------------------------------------------------------------------------------- /ai_connector/models/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | 4 | from . import ai_provider 5 | from . import ai_model 6 | from . import ai_mixin 7 | from . import ai_result_mixin 8 | from . import ai_completion 9 | from . import ai_completion_result 10 | from . import ai_tool 11 | from . import ai_tool_property 12 | from . import ai_question_answer 13 | from . import ai_question_answer_tag 14 | from . import ai_fine_tuning 15 | -------------------------------------------------------------------------------- /ai_connector/models/ai_completion.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 
3 | import json 4 | from odoo import models, fields, api, _ 5 | from odoo.tools import html2plaintext 6 | import base64 7 | 8 | import logging 9 | 10 | _logger = logging.getLogger(__name__) 11 | 12 | 13 | def _extract_json(content): 14 | start_pos = content.find('{') 15 | end_post = content.rfind('}') + 1 16 | res = content[start_pos:end_post] 17 | try: 18 | json_res = json.loads(res) 19 | except json.JSONDecodeError as err: 20 | if '\\_' in res: 21 | res = res.replace('\\_', '_') 22 | return _extract_json(res) 23 | else: 24 | msg = 'Invalid JSON: %s in %s' % (err, res) 25 | _logger.warning(msg, exc_info=True) 26 | return {} 27 | return res 28 | 29 | 30 | class AICompletion(models.Model): 31 | _name = 'ai.completion' 32 | _description = 'AI Completion' 33 | _inherit = ['ai.mixin'] 34 | 35 | def _get_post_process_list(self): 36 | return [('list_to_many2many', _('List to Many2many')), 37 | ('json_to_questions', _('JSON to questions'))] 38 | 39 | def _get_response_format_list(self): 40 | return [('text', _('Text')), 41 | ('json_object', _('JSON Object')), 42 | ] 43 | 44 | system_template = fields.Text() 45 | system_template_id = fields.Many2one('ir.ui.view', string='System Template View') 46 | temperature = fields.Float(default=1) 47 | max_tokens = fields.Integer(default=10000) 48 | top_p = fields.Float(default=1) 49 | test_answer = fields.Text(readonly=True) 50 | post_process = fields.Selection(selection='_get_post_process_list') 51 | response_format = fields.Selection(selection='_get_response_format_list', default='text') 52 | tool_ids = fields.Many2many('ai.tool', string='Tools', copy=True) 53 | add_completion_action_menu = fields.Boolean() 54 | vision = fields.Boolean() 55 | image_source = fields.Selection([('main_attachment', _('Main Attachment')), 56 | ('binary_field', _('Binary Field'))]) 57 | image_field_id = fields.Many2one('ir.model.fields', string='Image Field') 58 | 59 | def prepare_message(self, message, rec_id=0): 60 | _logger.info(f"Prepare message: 
{message}") 61 | if self.vision: 62 | message = self.prepare_message_image(message, rec_id) 63 | return message 64 | 65 | def prepare_message_image(self, message, rec_id=0): 66 | image_binary = None 67 | rec = self.get_record(rec_id) 68 | if self.image_source == 'main_attachment': 69 | if rec and hasattr(rec, 'message_main_attachment_id'): 70 | if rec.message_main_attachment_id: 71 | attachment_id = rec.message_main_attachment_id 72 | image_binary = attachment_id.with_context(bin_size=False).datas.decode('utf-8') 73 | # image_binary = base64.b64decode(attachment_id.with_context(bin_size=False).datas) 74 | elif self.image_source == 'binary_field': 75 | if rec and hasattr(rec, self.image_field_id.name): 76 | image_binary = base64.b64decode(rec[self.image_field_id.name]) 77 | if image_binary: 78 | image_content = self.prepare_message_image_content(image_binary) 79 | content = [{'type': 'text', 'text': message['content']}, image_content] 80 | message['content'] = content 81 | return message 82 | 83 | def prepare_message_image_content(self, image_binary): 84 | image_content = { 85 | 'type': 'image_url', 86 | 'image_url': f'data:image/jpeg;base64,{image_binary}' 87 | } 88 | return image_content 89 | 90 | def prepare_messages(self, messages, rec_id=0): 91 | return [self.prepare_message(message, rec_id) for message in messages] 92 | 93 | def create_completion(self, rec_id=0, messages=None, prompt='', **kwargs): 94 | response_format = kwargs.get('response_format', self.response_format) or 'text' 95 | if not messages: 96 | messages = [] 97 | system_prompt = self.get_system_prompt(rec_id) 98 | if system_prompt: 99 | messages.append({'role': 'system', 'content': system_prompt}) 100 | 101 | if not prompt: 102 | prompt = self.get_prompt(rec_id) 103 | messages.append({'role': 'user', 'content': prompt}) 104 | messages = self.prepare_messages(messages, rec_id) 105 | if not rec_id and self.env.context.get('completion'): 106 | rec_id = 
    def get_completion_params(self, messages, kwargs):
        """Assemble the keyword arguments for the provider chat-completion call.

        :param messages: list of message dicts to send to the provider.
        :param kwargs: optional caller overrides (``model``, ``temperature``,
            ``top_p``, ``max_tokens``).
        :return: dict of completion parameters, including ``tools`` when the
            completion has tools configured.
        """
        # NOTE(review): precedence is inconsistent here — for model,
        # temperature and top_p the record value wins over kwargs, while for
        # max_tokens the kwargs value wins. Confirm which behaviour is
        # intended before relying on caller overrides.
        model = self.ai_model_id.name or kwargs.get('model', '')
        temperature = self.temperature or kwargs.get('temperature', 0)
        top_p = self.top_p or kwargs.get('top_p', 0)
        max_tokens = kwargs.get('max_tokens', self.max_tokens or 10000)
        completion_params = {
            'model': model,
            'messages': messages,
            'max_tokens': max_tokens,
            'temperature': temperature,
            'top_p': top_p,
        }
        if self.tool_ids:
            completion_params.update(self.get_tools_params())
        return completion_params
ai_client.chat.complete(**completion_params) 157 | 158 | # def log_messages(self, messages): 159 | # for message in messages: 160 | # if isinstance(message, dict): 161 | # content = message.get('content') 162 | # _logger.info(f"Create completion: {}") 163 | # _logger.info(f"Create completion: {message}") 164 | 165 | def get_completion_results(self, rec_id, messages, **kwargs): 166 | completion_params = self.get_completion_params(messages, kwargs) 167 | res = self.get_completion(completion_params) 168 | for choice in res.choices: 169 | if choice.finish_reason == 'tool_calls': 170 | for tool_call in choice.message.tool_calls: 171 | messages.append(choice.message) 172 | messages.append(self.prepare_message(self.run_tool_call(tool_call))) 173 | return self.get_completion_results(rec_id, messages, **kwargs) 174 | choices = [choice.message.content for choice in res.choices] 175 | return choices, res.usage.prompt_tokens, res.usage.completion_tokens, res.usage.total_tokens 176 | 177 | def get_result_content(self, response_format, choices): 178 | if self.response_format == 'json_object' or response_format == 'json_object': 179 | return [_extract_json(choice) for choice in choices] 180 | return choices 181 | 182 | def ai_create(self, rec_id, method=False): 183 | return self.create_completion(rec_id) 184 | 185 | def create_result(self, rec_id, prompt, answer, prompt_tokens, completion_tokens, total_tokens): 186 | model_id = self.model_id.id 187 | if self.env.context.get('completion'): 188 | model = self.env.context.get('completion').get('model', '') 189 | if model: 190 | model_id = self.env['ir.model'].search([('model', '=', model)]).id 191 | 192 | values = {'completion_id': self.id, 193 | 'ai_provider_id': self.ai_provider_id.id, 194 | 'ai_model_id': self.ai_model_id.id, 195 | 'model_id': model_id, 196 | 'target_field_id': self.target_field_id.id, 197 | 'res_id': rec_id, 198 | 'prompt': prompt, 199 | 'answer': answer, 200 | 'prompt_tokens': prompt_tokens, 201 | 
    def run_tool_call(self, tool_call):
        """Execute one tool call requested by the model and build the reply.

        The tool function is looked up first on the tool's configured model
        (or, failing that, the completion's model), then on ``ai.tool`` as a
        generic fallback.

        :param tool_call: provider tool-call object (exposes ``.id`` and
            ``.function``).
        :return: a ``{'role': 'tool', ...}`` message dict; ``content`` stays
            empty when the tool is unknown or cannot be resolved.
        """
        tool_call_values = self.get_tool_call_values(tool_call)
        tool_name = tool_call_values.get('function', '')
        if not tool_name:
            return {}
        res_dict = {'role': 'tool',
                    "tool_call_id": tool_call.id,
                    'content': '',
                    'name': tool_name}
        # Only tools explicitly linked to this completion may be executed.
        tool_id = self.tool_ids.filtered(lambda t: t.name == tool_name)
        if not tool_id:
            return res_dict
        model_name = tool_id.model or self.model_id.model
        model = self.env[model_name]

        if hasattr(model, tool_name):
            function = getattr(model, tool_name)
        else:
            # Fallback: generic tools implemented directly on ai.tool.
            model = self.env['ai.tool']
            if hasattr(model, tool_name):
                function = getattr(model, tool_name)
            else:
                return res_dict

        arguments = tool_call_values.get('arguments')
        if arguments:
            if isinstance(arguments, str):
                # Providers send tool arguments as a JSON-encoded string.
                arguments = json.loads(arguments)
            _logger.info(f'Run tool: {tool_name}({arguments})')
            res = function(**arguments)
        else:
            res = function()
            _logger.info(f'Run tool: {tool_name}()')

        res_dict['content'] = str(res)
        return res_dict
return 261 | self.test_prompt = self.get_prompt(rec_id) 262 | res = self.create_completion(rec_id) 263 | if res and isinstance(res, list): 264 | self.test_answer = res[0].answer 265 | else: 266 | self.test_answer = res 267 | 268 | @api.model 269 | def get_model_completions(self, model): 270 | res = self.sudo().search([('model_id', '=', model), ('add_completion_action_menu', '=', True)]) 271 | return [{'id': r.id, 'name': r.name} for r in res] 272 | 273 | @api.model 274 | def run_completion(self, completion_id, active_ids): 275 | completion = self.browse(completion_id) 276 | for res_id in active_ids: 277 | completion.create_completion(res_id) 278 | self.browse(completion_id).create_completion(res_id) 279 | -------------------------------------------------------------------------------- /ai_connector/models/ai_completion_result.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | 4 | from odoo import models, fields, api, _ 5 | import ast 6 | import logging 7 | import re 8 | 9 | _logger = logging.getLogger(__name__) 10 | 11 | 12 | def clean_list_element(el): 13 | if '.' 
in el: 14 | el = el.split('.')[1] 15 | if '-' in el: 16 | el = el.split('-')[1] 17 | el = re.sub(r'[^\w\s,-]', '', el.strip()) 18 | return el 19 | 20 | 21 | class AICompletionResult(models.Model): 22 | _name = 'ai.completion.result' 23 | _description = 'AI Completion Result' 24 | _inherit = ['ai.result.mixin'] 25 | 26 | completion_id = fields.Many2one('ai.completion', string='Completion', readonly=True, ondelete='cascade') 27 | answer = fields.Text(readonly=False) 28 | origin_answer = fields.Text(readonly=True) 29 | prompt_tokens = fields.Integer(readonly=True) 30 | completion_tokens = fields.Integer(readonly=True) 31 | total_tokens = fields.Integer(readonly=True) 32 | 33 | def _compute_name(self): 34 | for rec in self: 35 | if hasattr(rec.resource_ref, 'name'): 36 | rec.name = f'{rec.completion_id.name} - {rec.resource_ref.name}' 37 | elif hasattr(rec.resource_ref, 'display_name'): 38 | rec.name = f'{rec.completion_id.name} - {rec.resource_ref.display_name}' 39 | else: 40 | rec.name = f'{rec.completion_id.name} - {rec.model_id.name} ({rec.res_id})' 41 | 42 | def write(self, vals): 43 | if self.answer and vals.get('answer') and not self.origin_answer: 44 | vals['origin_answer'] = self.answer 45 | return super(AICompletionResult, self).write(vals) 46 | 47 | def json_to_questions(self, val): 48 | values = ast.literal_eval(val) 49 | questions = values.get('questions', []) 50 | for question in questions: 51 | create_vals = {'name': question, 52 | 'model_id': self.model_id.id, 53 | 'res_id': self.res_id, 54 | } 55 | self.env['ai.question.answer'].create(create_vals) 56 | 57 | def list_to_many2many(self, val): 58 | """ 59 | :param val: a string representing a python list or a comma separated list 60 | e.g: "test = ['val1', 'val2']" or " val1, val2, " 61 | :return: a many2many update list. 
62 | e.g: [(5, 0, 0), (0, 0, {'name': 'new tag'})] 63 | """ 64 | 65 | res = [(5, 0, 0)] 66 | if '=' in val: 67 | val = val.split('=')[1] 68 | val = val.strip() 69 | if val[0] == '[': 70 | # Eval Python list string 71 | val_list = ast.literal_eval(val) 72 | else: 73 | # Split the string 74 | if '\n' in val: 75 | separator = '\n' 76 | else: 77 | separator = ',' 78 | val_list = val.split(separator) 79 | 80 | val_list = [clean_list_element(el) for el in val_list] 81 | if not val_list: 82 | return False 83 | 84 | # Create many2many update list 85 | target_model = self.target_field_id.relation 86 | for el in val_list: 87 | if not el: 88 | continue 89 | rec_el = self.env[target_model].search([('name', '=', el)]) 90 | if not rec_el: 91 | res.append((0, 0, {'name': el})) 92 | else: 93 | res.append((4, rec_el.id)) 94 | return res 95 | 96 | def get_completion_answer(self, answer_type): 97 | if answer_type == 'answer': 98 | return self.answer 99 | if answer_type == 'original': 100 | return self.origin_answer or self.answer 101 | 102 | def create_question_answer(self, answer_type, tag_ids=None): 103 | answer = self.get_completion_answer(answer_type) 104 | create_vals = {'name': self.prompt, 105 | 'answer': answer, 106 | 'answer_completion_id': self.completion_id.id, 107 | 'model_id': self.model_id.id, 108 | 'res_id': self.res_id, 109 | } 110 | if tag_ids: 111 | create_vals['tag_ids'] = [(6, 0, tag_ids.ids)] 112 | self.env['ai.question.answer'].create(create_vals) 113 | -------------------------------------------------------------------------------- /ai_connector/models/ai_fine_tuning.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html). 
3 | 4 | import json 5 | from odoo import models, fields, api, _ 6 | from odoo.tools.safe_eval import safe_eval 7 | from odoo.addons.base.models.ir_model import SAFE_EVAL_BASE 8 | 9 | import logging 10 | 11 | _logger = logging.getLogger(__name__) 12 | 13 | 14 | class AIFineTuning(models.Model): 15 | _name = 'ai.fine.tuning' 16 | _description = 'AI Fine-Tuning' 17 | 18 | name = fields.Char() 19 | ai_provider_id = fields.Many2one('ai.provider', string='AI Provider', required=True, ondelete='cascade', 20 | default=lambda self: self.env['ai.provider'].search([], limit=1)) 21 | ai_model_id = fields.Many2one('ai.model', string='AI Model', required=True, ondelete='cascade', 22 | default=lambda self: self.env['ai.model'].search([], limit=1)) 23 | ai_provider = fields.Selection(string='AI Provider Code', related='ai_provider_id.code') 24 | training_steps = fields.Integer(default=10) 25 | learning_rate = fields.Float(default=0.0001, digits=(10, 7)) 26 | training_file_id = fields.Char('Training File ID', readonly=True, copy=False) 27 | fine_tuning_job_id = fields.Char('Fine-Tuning Job ID', readonly=True, copy=False) 28 | fine_tuned_model = fields.Char('Fine-Tuned Model', readonly=True, copy=False) 29 | question_answer_domain = fields.Char() 30 | question_answer_tag_ids = fields.Many2many('ai.question.answer.tag', string='Tags') 31 | question_answer_ids = fields.Many2many('ai.question.answer', string='Questions - Answers', 32 | compute='_compute_question_answers', 33 | store=False) 34 | training_question_answer_ids = fields.Many2many('ai.question.answer', 35 | string='Training Questions - Answers', 36 | store=True, readonly=True, copy=False) 37 | system_role_content = fields.Char() 38 | job_status = fields.Char(readonly=True, copy=False) 39 | fine_tuning_checkpoints = fields.Json(copy=False) 40 | graph_checkpoints = fields.Json(compute='_compute_graph_checkpoints', store=False) 41 | 42 | def get_ai_client(self): 43 | return self.ai_provider_id.get_ai_client() 44 | 45 | def 
get_fine_tuning_job_client(self): 46 | client = self.get_ai_client() 47 | return client.jobs 48 | 49 | @api.onchange('question_answer_tag_ids', 'question_answer_domain') 50 | def _compute_question_answers(self): 51 | for rec in self: 52 | domain = safe_eval(rec.question_answer_domain, 53 | SAFE_EVAL_BASE, 54 | {'self': rec}) if rec.question_answer_domain else [] 55 | question_answer_ids = self.env['ai.question.answer'].search(domain) 56 | question_answer_ids = question_answer_ids.filtered(lambda x: x.tag_ids & rec.question_answer_tag_ids) 57 | rec.question_answer_ids = question_answer_ids 58 | 59 | @api.depends('fine_tuning_checkpoints') 60 | def _compute_graph_checkpoints(self): 61 | for rec in self: 62 | checkpoints = self.fine_tuning_checkpoints 63 | if not checkpoints: 64 | rec.graph_checkpoints = [] 65 | continue 66 | checkpoints.sort(key=lambda x: x['created_at']) 67 | train_loss_vals = [x['metrics']['train_loss'] for x in checkpoints] 68 | valid_loss_vals = [x['metrics']['valid_loss'] for x in checkpoints] 69 | valid_mean_token_accuracy_vals = [x['metrics']['valid_mean_token_accuracy'] for x in checkpoints] 70 | graph_values = {'labels': [x['step_number'] for x in checkpoints], 71 | 'train_loss': train_loss_vals, 72 | 'valid_loss': valid_loss_vals, 73 | 'valid_mean_token_accuracy': valid_mean_token_accuracy_vals} if checkpoints else [] 74 | rec.graph_checkpoints = graph_values 75 | 76 | def get_training_content(self): 77 | content = '' 78 | for question_answer_id in self.question_answer_ids: 79 | messages = [] 80 | if self.system_role_content: 81 | messages.append({'role': 'system', 'content': self.system_role_content}) 82 | 83 | messages.extend(question_answer_id.get_training_content()) 84 | 85 | values = {'messages': messages} 86 | content += json.dumps(values) + '\n' 87 | return bytes(content, 'utf-8') 88 | 89 | def create_training_file(self): 90 | client = self.get_ai_client() 91 | file = ('training_%s' % self.id, self.get_training_content()) 92 | res = 
client.files.create(file=file, purpose='fine-tune') 93 | self.training_file_id = res.id 94 | self.training_question_answer_ids = self.question_answer_ids 95 | 96 | def get_create_fine_tuning_job_params(self): 97 | return { 98 | 'training_file': self.training_file_id, 99 | 'model': self.ai_model_id.name 100 | } 101 | 102 | def create_fine_tuning(self): 103 | jobs = self.get_fine_tuning_job_client() 104 | params = self.get_create_fine_tuning_job_params() 105 | res = jobs.create(**params) 106 | self.fine_tuning_job_id = res.id 107 | _logger.info(res) 108 | 109 | def update_fine_tuned_model(self): 110 | jobs = self.get_fine_tuning_job_client() 111 | res = jobs.retrieve(self.fine_tuning_job_id) 112 | # checkpoints = [ 113 | # { 114 | # "metrics": { 115 | # "train_loss": 0.816135, 116 | # "valid_loss": 0.819697, 117 | # "valid_mean_token_accuracy": 1.765035 118 | # }, 119 | # "step_number": 100, 120 | # "created_at": 1717173470 121 | # }, 122 | # { 123 | # "metrics": { 124 | # "train_loss": 0.84643, 125 | # "valid_loss": 0.819768, 126 | # "valid_mean_token_accuracy": 1.765122 127 | # }, 128 | # "step_number": 90, 129 | # "created_at": 1717173388 130 | # }, 131 | # { 132 | # "metrics": { 133 | # "train_loss": 0.816602, 134 | # "valid_loss": 0.820234, 135 | # "valid_mean_token_accuracy": 1.765692 136 | # }, 137 | # "step_number": 80, 138 | # "created_at": 1717173303 139 | # }, 140 | # { 141 | # "metrics": { 142 | # "train_loss": 0.775537, 143 | # "valid_loss": 0.821105, 144 | # "valid_mean_token_accuracy": 1.766759 145 | # }, 146 | # "step_number": 70, 147 | # "created_at": 1717173217 148 | # }, 149 | # { 150 | # "metrics": { 151 | # "train_loss": 0.840297, 152 | # "valid_loss": 0.822249, 153 | # "valid_mean_token_accuracy": 1.76816 154 | # }, 155 | # "step_number": 60, 156 | # "created_at": 1717173131 157 | # }, 158 | # { 159 | # "metrics": { 160 | # "train_loss": 0.823884, 161 | # "valid_loss": 0.824598, 162 | # "valid_mean_token_accuracy": 1.771041 163 | # }, 164 | # 
"step_number": 50, 165 | # "created_at": 1717173045 166 | # }, 167 | # { 168 | # "metrics": { 169 | # "train_loss": 0.786473, 170 | # "valid_loss": 0.827982, 171 | # "valid_mean_token_accuracy": 1.775201 172 | # }, 173 | # "step_number": 40, 174 | # "created_at": 1717172960 175 | # }, 176 | # { 177 | # "metrics": { 178 | # "train_loss": 0.8704, 179 | # "valid_loss": 0.835169, 180 | # "valid_mean_token_accuracy": 1.784066 181 | # }, 182 | # "step_number": 30, 183 | # "created_at": 1717172874 184 | # }, 185 | # { 186 | # "metrics": { 187 | # "train_loss": 0.880803, 188 | # "valid_loss": 0.852521, 189 | # "valid_mean_token_accuracy": 1.805653 190 | # }, 191 | # "step_number": 20, 192 | # "created_at": 1717172788 193 | # }, 194 | # { 195 | # "metrics": { 196 | # "train_loss": 0.803578, 197 | # "valid_loss": 0.914257, 198 | # "valid_mean_token_accuracy": 1.884598 199 | # }, 200 | # "step_number": 10, 201 | # "created_at": 1717172702 202 | # } 203 | # ] 204 | 205 | self.fine_tuning_checkpoints = [{'step_number': c.step_number, 'created_at': c.created_at, 'metrics': { 206 | "train_loss": c.metrics.train_loss, 207 | "valid_loss": c.metrics.valid_loss, 208 | "valid_mean_token_accuracy": c.metrics.valid_mean_token_accuracy 209 | }} for c in res.checkpoints] 210 | if res.status != self.job_status: 211 | self.job_status = res.status 212 | if res.status == 'SUCCESS': 213 | self.fine_tuned_model = res.fine_tuned_model 214 | self.env['ai.model'].create({'name': self.fine_tuned_model, 215 | 'display_name': 'Fine-Tuned - %s' % self.name, 216 | 'ai_provider_id': self.ai_provider_id.id}) 217 | _logger.info(res) 218 | 219 | def action_create_training_file(self): 220 | for rec in self: 221 | rec.create_training_file() 222 | 223 | def action_create_fine_tuning(self): 224 | for rec in self: 225 | if not rec.training_file_id: 226 | rec.create_training_file() 227 | rec.create_fine_tuning() 228 | 229 | def action_update_fine_tuned_model(self): 230 | for rec in self: 231 | 
rec.update_fine_tuned_model() 232 | -------------------------------------------------------------------------------- /ai_connector/models/ai_mixin.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2024 - Michel Perrocheau (https://github.com/myrrkel). 2 | # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). 3 | 4 | from odoo import models, fields, api, _ 5 | from odoo.exceptions import UserError 6 | from odoo.tools.safe_eval import safe_eval 7 | from odoo.addons.base.models.ir_model import SAFE_EVAL_BASE 8 | from odoo.tools import html2plaintext 9 | import logging 10 | 11 | _logger = logging.getLogger(__name__) 12 | 13 | 14 | class AIMixin(models.AbstractModel): 15 | _name = 'ai.mixin' 16 | _description = 'AI Mixin' 17 | _inherit = ['mail.render.mixin'] 18 | 19 | name = fields.Char() 20 | active = fields.Boolean(default=True) 21 | ai_provider_id = fields.Many2one('ai.provider', string='AI Provider', required=False, ondelete='cascade', 22 | default=lambda self: self.env['ai.provider'].search([], limit=1)) 23 | ai_model_id = fields.Many2one('ai.model', string='AI Model', ondelete='cascade', 24 | default=lambda self: self.env['ai.model'].search([], limit=1)) 25 | ai_provider = fields.Selection(string='AI Provider Code', related='ai_provider_id.code') 26 | model_id = fields.Many2one('ir.model', string='Model', required=False, ondelete='cascade') 27 | domain = fields.Char() 28 | save_on_target_field = fields.Boolean() 29 | save_answer = fields.Boolean() 30 | target_field_id = fields.Many2one('ir.model.fields', string='Target Field') 31 | prompt_template = fields.Text() 32 | prompt_template_id = fields.Many2one('ir.ui.view', string='Prompt Template View') 33 | answer_lang_id = fields.Many2one('res.lang', string='Answer Language', context={'active_test': False}) 34 | test_prompt = fields.Text(readonly=True) 35 | 36 | def get_ai_model_list(self): 37 | if self.ai_provider_id: 38 | return 
    def _get_prompt(self, rec_id, prompt_template_id, prompt_template, context=None):
        """Render the prompt for one record from a QWeb view or inline template.

        :param rec_id: id of the record made available to the template.
        :param prompt_template_id: optional ``ir.ui.view`` QWeb template; it
            takes precedence over the inline template when both are set.
        :param prompt_template: optional inline QWeb template source.
        :param context: extra rendering context (e.g. ``html2plaintext``,
            ``answer_lang``).
        :return: the rendered prompt stripped of surrounding whitespace, or
            an empty string when no template is configured.
        """
        if not context:
            context = {}
        if prompt_template_id:
            prompt = self._render_template_qweb_view(prompt_template_id.xml_id, self.model_id.model, [rec_id],
                                                     add_context=context)
        elif prompt_template:
            prompt = self._render_template_qweb(prompt_template, self.model_id.model, [rec_id],
                                                add_context=context)
        else:
            return ''

        # The rendering helpers return a dict keyed by record id.
        return prompt[rec_id].strip()
class AIModel(models.Model):
    """One AI model exposed by a provider (e.g. a chat/completion model name)."""

    _name = 'ai.model'
    _description = 'AI Model'
    _order = 'ai_provider_sequence, default desc, sequence'
    _rec_name = 'display_name'

    active = fields.Boolean(default=True)
    default = fields.Boolean()
    sequence = fields.Integer()
    name = fields.Char(required=True)
    label = fields.Char()
    display_name = fields.Char(compute='_compute_display_name')
    ai_provider_id = fields.Many2one('ai.provider', string='AI Provider', required=True, index=True, ondelete='cascade')
    ai_provider_sequence = fields.Integer(related='ai_provider_id.sequence', string='AI Provider Sequence',
                                          store=True, index=True)
    vision = fields.Boolean()

    @api.depends('name', 'label')
    def _compute_display_name(self):
        """Show the human-friendly label when set, else the technical name."""
        for rec in self:
            rec.display_name = rec.label if rec.label else rec.name
class AIProvider(models.Model):
    """An AI platform account (OpenAI, Mistral, Anthropic, ...).

    Concrete provider modules extend ``code`` and override the hooks below.
    """

    _name = 'ai.provider'
    _description = 'AI Provider'
    _order = 'sequence'

    active = fields.Boolean(default=True)
    sequence = fields.Integer()
    name = fields.Char(required=True)
    # Selection is empty here on purpose: each connector module extends it.
    code = fields.Selection(selection=[], required=True)
    api_key = fields.Char()
    organization_id = fields.Char(string='Organization ID')
    ai_model_ids = fields.One2many('ai.model', 'ai_provider_id', string='AI Models', readonly=True)
    base_url = fields.Char()

    def get_ai_model_list(self):
        """Hook: return the provider's live models as [(name, label), ...]."""
        return []

    def get_ai_client(self):
        """Hook: return the provider's API client. Base class has none."""
        raise UserError(_('No AI provider found'))

    def action_load_ai_models(self):
        """Synchronize ai.model rows with the provider's live model list.

        Creates missing models, reactivates archived ones that reappear and
        archives models no longer offered by the provider.
        """
        ai_models = self.get_ai_model_list()
        # active_test=False: otherwise an archived model is invisible to the
        # search and gets re-created as a duplicate (unique names collide).
        AIModel = self.env['ai.model'].with_context(active_test=False)
        for model_name, model_label in ai_models:
            ai_model_id = AIModel.search([('name', '=', model_name),
                                          ('ai_provider_id', '=', self.id)], limit=1)
            if not ai_model_id:
                # Write the stored 'label' field: 'display_name' is computed
                # from it and has no inverse, so assigning it was a no-op.
                AIModel.create({'name': model_name,
                                'label': model_label,
                                'ai_provider_id': self.id})
            elif not ai_model_id.active:
                ai_model_id.active = True
        deprecated_model_ids = AIModel.search([('name', 'not in', [m[0] for m in ai_models]),
                                               ('ai_provider_id', '=', self.id)])
        deprecated_model_ids.write({'active': False})
class AIQuestionAnswer(models.Model):
    """A question/answer pair, optionally linked to a source record.

    Used both as a knowledge base searchable by keyword and as training
    material for fine-tuning (see get_training_content()).
    """

    _name = 'ai.question.answer'
    _description = 'AI Question Answer'
    _order = 'create_date desc'

    name = fields.Text('Question')
    answer = fields.Text('Answer')
    model_id = fields.Many2one('ir.model', string='Model', ondelete='cascade')
    model = fields.Char(related='model_id.model', string='Model Name', readonly=True, store=True)
    res_id = fields.Integer('Resource ID', readonly=True)
    resource_ref = fields.Reference(string='Record', selection='_selection_target_model',
                                    compute='_compute_resource_ref', inverse='_set_resource_ref')
    answer_completion_id = fields.Many2one('ai.completion', string='Answer Completion')
    content_length = fields.Integer(compute='_compute_content_length')
    tag_ids = fields.Many2many('ai.question.answer.tag', string='Tags')

    @api.model
    def _selection_target_model(self):
        """Selection values for resource_ref: every model in the registry."""
        model_ids = self.env['ir.model'].search([])
        return [(model.model, model.name) for model in model_ids]

    def _compute_content_length(self):
        """Total length of question + answer (used to pick the shortest hit)."""
        for res in self:
            # Text fields read as False when empty: coalesce to '' so len()
            # does not raise TypeError on partially-filled records.
            res.content_length = len(res.name or '') + len(res.answer or '')

    @api.depends('model_id', 'res_id')
    def _compute_resource_ref(self):
        """Build the 'model,id' reference from model_id/res_id."""
        for rec in self:
            if rec.model_id and rec.res_id:
                # res_id is already the integer id; the previous browse-based
                # "existence check" was dead code and could call .id on int 0.
                rec.resource_ref = '%s,%s' % (rec.model_id.model, rec.res_id)
            else:
                rec.resource_ref = False

    @api.onchange('resource_ref')
    def _set_resource_ref(self):
        """Inverse: persist model_id/res_id from the chosen reference."""
        for rec in self:
            if rec.resource_ref:
                rec.model_id = self.env['ir.model']._get(rec.resource_ref._name)
                rec.res_id = rec.resource_ref.id

    def get_training_content(self):
        """Return this pair as chat messages for fine-tuning datasets."""
        return [{'role': 'user', 'content': self.name},
                {'role': 'assistant', 'content': self.answer}]

    def action_answer_question(self):
        """(Re)generate the answer using the configured completion."""
        for rec in self:
            res = rec.answer_completion_id.create_completion(rec.id, prompt=rec.name)
            # Guard: the completion may legitimately return no result record.
            if res:
                rec.answer = res[0].answer

    def get_score(self, keyword_list):
        """Relevance score: a keyword found in the question counts 2 points,
        in the answer 1 point."""
        score = 0
        # Coalesce to '' so empty Text fields (False) do not raise.
        question = (self.name or '').lower()
        answer = (self.answer or '').lower()
        for keyword in keyword_list:
            keyword = keyword.lower()
            if keyword in question:
                score += 2
            if keyword in answer:
                score += 1
        return score

    @api.model
    def search_question_answer(self, keywords):
        """Return the best Q/A match for a free-text keyword string as JSON.

        Among records containing any keyword, keep those with the highest
        score and return the shortest one; returns an advisory string when
        nothing matches.
        """
        # Split on spaces/semicolons/commas; drop empty tokens that would
        # otherwise produce a match-everything '%%' pattern.
        keyword_list = [k for k in keywords.replace(' ', ',').replace(';', ',').split(',') if k]
        domain = []
        for keyword in keyword_list:
            domain = expression.OR([domain, [('name', '=ilike', f'%{keyword}%')]])
            domain = expression.OR([domain, [('answer', '=ilike', f'%{keyword}%')]])
        question_answer_ids = self.search(domain)
        if not question_answer_ids:
            return 'No result found. Suggest to user to reformulate his question or to provide more relevant keywords.'
        res = [{'question': q.name,
                'answer': q.answer,
                'score': q.get_score(keyword_list),
                'length': q.content_length,
                }
               for q in question_answer_ids]
        res = sorted(res, key=lambda x: x['score'], reverse=True)
        max_score = res[0]['score']
        res = list(filter(lambda x: x['score'] == max_score, res))
        res = sorted(res, key=lambda x: x['length'])
        return json.dumps(res[0])
class AIQuestionAnswerTag(models.Model):
    """Tag used to classify ai.question.answer records (unique by name)."""

    _name = 'ai.question.answer.tag'
    _description = "AI Question Answer Tag"

    _sql_constraints = [
        ('name_uniq', 'unique (name)', "Tag name already exists !"),
    ]

    def _get_default_color(self):
        """Pick a random kanban color index for newly created tags."""
        return randint(1, 11)

    name = fields.Char(required=True)
    color = fields.Integer('Color', default=_get_default_color)
class AIResultMixin(models.AbstractModel):
    """Shared fields/behavior for AI result records.

    Concrete result models (e.g. ai.completion.result) are expected to define
    an ``answer`` field read by get_answer_value().
    """

    _name = 'ai.result.mixin'
    _description = 'AI result Mixin'

    name = fields.Char(compute='_compute_name')
    ai_provider_id = fields.Many2one('ai.provider', string='AI Provider', required=True, ondelete='cascade')
    ai_model_id = fields.Many2one('ai.model', string='AI Model', required=True, ondelete='cascade')
    model_id = fields.Many2one('ir.model', string='Model', readonly=True, ondelete='cascade')
    model = fields.Char(related='model_id.model', string='Model Name', readonly=True, store=True)
    target_field_id = fields.Many2one('ir.model.fields', string='Target Field', readonly=True)
    res_id = fields.Integer('Resource ID', readonly=True)
    resource_ref = fields.Reference(string='Record', selection='_selection_target_model',
                                    compute='_compute_resource_ref', inverse='_set_resource_ref', readonly=True)
    prompt = fields.Text(readonly=True)
    test_result = fields.Boolean()

    @api.model
    def _selection_target_model(self):
        """Selection values for resource_ref: every model in the registry."""
        model_ids = self.env['ir.model'].search([])
        return [(model.model, model.name) for model in model_ids]

    @api.depends('model_id', 'res_id')
    def _compute_resource_ref(self):
        """Build the 'model,id' reference from model_id/res_id."""
        for rec in self:
            if rec.model_id and rec.res_id:
                # res_id is already the integer id; the previous browse-based
                # "existence check" was dead code (browse of one id is never
                # empty) and its fallback could call .id on int 0.
                rec.resource_ref = '%s,%s' % (rec.model_id.model, rec.res_id)
            else:
                rec.resource_ref = False

    @api.onchange('resource_ref')
    def _set_resource_ref(self):
        """Inverse: persist res_id from the chosen reference."""
        for rec in self:
            if rec.resource_ref:
                rec.res_id = rec.resource_ref.id

    def get_answer_value(self):
        # 'answer' is defined on the concrete result model, not on this mixin.
        return self.answer

    def save_result_on_target_field(self):
        """Write the answer into the configured field of the source record."""
        record = self.env[self.model_id.model].browse(self.res_id)
        answer_value = self.get_answer_value()
        if answer_value and self.target_field_id:
            record.write({self.target_field_id.name: answer_value})

    def action_apply(self):
        """Button: apply this result to its source record."""
        self.save_result_on_target_field()
class AITool(models.Model):
    """A callable tool (function) advertised to the AI model, with its
    JSON-schema parameter description."""

    _name = 'ai.tool'
    _description = 'AI Tool'

    def _get_tool_type_list(self):
        """Selection values for the tool type (only functions for now)."""
        return [('function', _('Function'))]

    name = fields.Char()
    description = fields.Text()
    model_id = fields.Many2one('ir.model', string='Model', ondelete='cascade')
    model = fields.Char(related='model_id.model', string='Model Name', readonly=True, store=True)
    type = fields.Selection(selection=_get_tool_type_list)
    property_ids = fields.One2many('ai.tool.property', 'tool_id', string='Properties', copy=True)
    required_property_ids = fields.One2many('ai.tool.property', 'tool_id', string='Required Properties',
                                            domain=[('required', '=', True)], readonly=True)

    def get_tool_dict(self, tool_format='default'):
        """Serialize this tool as a function-calling payload.

        Only the 'default' format is implemented; any other format yields {}.
        """
        if tool_format != 'default':
            return {}
        res = {'type': 'function',
               'function': {
                   'name': self.name,
                   'description': self.description}}
        properties = {prop.name: {'type': prop.type,
                                  'description': prop.description}
                      for prop in self.property_ids}
        if properties:
            parameters = {'type': 'object',
                          'properties': properties}
            required_names = [prop.name for prop in self.required_property_ids]
            if required_names:
                parameters['required'] = required_names
            res['function']['parameters'] = parameters
        return res
class AIToolProperty(models.Model):
    """One parameter of an AI tool: name, JSON-schema type and description."""

    _name = 'ai.tool.property'
    _description = 'AI Tool Property'

    def _get_tool_property_type_list(self):
        """Selection values for the JSON-schema property type."""
        return [
            ('string', _('String')),
            ('integer', _('Integer')),
        ]

    name = fields.Char()
    tool_id = fields.Many2one('ai.tool', invisible=True)
    type = fields.Selection(selection=_get_tool_property_type_list)
    description = fields.Text()
    required = fields.Boolean()
user,ai_connector.model_ai_tool_property,base.group_user,1,0,0,0 12 | access_ai_tool_property_admin,access_ai_tool_property admin,ai_connector.model_ai_tool_property,base.group_erp_manager,1,1,1,1 13 | access_ai_question_answer_user,access_ai_question_answer user,ai_connector.model_ai_question_answer,base.group_user,1,1,1,1 14 | access_ai_question_answer_tag_user,access_ai_question_answer_tag user,ai_connector.model_ai_question_answer_tag,base.group_user,1,0,0,0 15 | access_ai_question_answer_tag_admin,access_ai_question_answer_tag admin,ai_connector.model_ai_question_answer_tag,base.group_erp_manager,1,1,1,1 16 | access_create_question_answer_wizard_admin,access_create_question_answer_wizard admin,ai_connector.model_create_question_answer_wizard,base.group_erp_manager,1,1,1,0 17 | access_question_answer_dump_wizard_admin,access_question_answer_dump_wizard admin,ai_connector.model_question_answer_dump_wizard,base.group_erp_manager,1,1,1,0 18 | access_ai_fine_tuning_user,access_ai_fine_tuning user,ai_connector.model_ai_fine_tuning,base.group_user,1,0,0,0 19 | access_ai_fine_tuning_admin,access_ai_fine_tuning admin,ai_connector.model_ai_fine_tuning,base.group_erp_manager,1,1,1,1 -------------------------------------------------------------------------------- /ai_connector/security/security.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | OdooAI : Users allowed to use AI completions 6 | 7 | 8 | -------------------------------------------------------------------------------- /ai_connector/static/description/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/static/description/icon.png -------------------------------------------------------------------------------- /ai_connector/static/description/index.html: 
-------------------------------------------------------------------------------- 1 |
2 |
3 |

AI Connector

4 |

ai_connector

5 |
6 |

7 | This module adds a connector for AI platforms 8 |

9 |
10 |
11 | 12 | 13 |
14 |
15 |
16 | 17 |
18 |
19 | Contact me 20 |
21 |
22 |
23 | -------------------------------------------------------------------------------- /ai_connector/static/description/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/static/description/logo.png -------------------------------------------------------------------------------- /ai_connector/static/img/completion_params.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/static/img/completion_params.png -------------------------------------------------------------------------------- /ai_connector/static/img/mistral_ai.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/static/img/mistral_ai.png -------------------------------------------------------------------------------- /ai_connector/static/img/prompt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/static/img/prompt.png -------------------------------------------------------------------------------- /ai_connector/static/img/settings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/static/img/settings.png -------------------------------------------------------------------------------- /ai_connector/static/img/tests.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/myrrkel/odoo-ai/d5b0cf31eed3f09c13ae76dfabaeef5c6e12a357/ai_connector/static/img/tests.png 
/** @odoo-module */
import { _t } from "@web/core/l10n/translation";
import { patch } from "@web/core/utils/patch";
import { DropdownItem } from "@web/core/dropdown/dropdown_item";
import { ActionMenus, ACTIONS_GROUP_NUMBER } from "@web/search/action_menus/action_menus";
import { useService } from "@web/core/utils/hooks";
import { Component } from "@odoo/owl";


// Dropdown entry rendered inside the list/form ActionMenus: one item per
// ai.completion configured for the current model. Clicking it runs the
// completion server-side on the selected records, then soft-reloads the view.
export class RunCompletion extends Component {
    static template = "ai_connector.RunCompletion";
    static props = ["title", "completion_id", "menu"];

    static components = { DropdownItem };

    setup() {
        this.action = useService("action");
        this.orm = useService("orm");
    }

    async runCompletion() {
        // menu.props.getActiveIds() yields the records currently selected in
        // the parent view (list selection or current form record).
        await this.orm.call("ai.completion", "run_completion",
            [this.props.completion_id, this.props.menu.props.getActiveIds()]);

        // Refresh the view so freshly-written target fields become visible.
        this.action.doAction({
            type: "ir.actions.client",
            tag: "soft_reload"
        });
    }
}

// NOTE(review): redundant with the `static template` declaration above —
// presumably kept for compatibility; confirm which Odoo version is targeted.
RunCompletion.template = 'ai_connector.RunCompletion';

// String-named patch with this._super(): Odoo 16-era patch() signature.
patch(ActionMenus.prototype, 'ai_connector.ActionMenus', {
    setup() {
        this._super();
        this.user = useService("user");
    },

    // Append one RunCompletion item per completion defined for props.resModel.
    async setActionItems(props) {

        const items = await this._super(...arguments);
        // Skip contexts where no record selection is available.
        if ('registryItems' in props) {
            return items;
        }
        if (!('getActiveIds' in props)) {
            return items;
        }

        try {
            // Only members of the AI-users group see completion actions.
            if (!await this.user.hasGroup("ai_connector.group_ai_user")) {
                return items;
            }
            const results = await this.orm.call("ai.completion", "get_model_completions", [this.props.resModel]);
            for (const i in results) {
                const res = results[i];
                items.push({
                    RunCompletion,
                    Component: RunCompletion,
                    groupNumber: ACTIONS_GROUP_NUMBER,
                    key: `run-completion-${res['id']}`,
                    // NOTE(review): _t() on a runtime value is a no-op for
                    // extraction — translation of completion names would need
                    // server-side translated values.
                    description: _t(res['name']),
                    props: {
                        menu: this,
                        title: _t(res['name']),
                        completion_id: res['id'],
                    },
                });
            }
            return items;
        } catch (error) {
            // Best-effort: any RPC/permission failure falls back to the
            // unmodified menu rather than breaking the view.
            return items;
        }
    },

})
/** @odoo-module **/

import { _t } from "@web/core/l10n/translation";
import { loadJS } from "@web/core/assets";
import { registry } from "@web/core/registry";
import { formatFloat } from "@web/views/fields/formatters";
import { standardFieldProps } from "@web/views/fields/standard_field_props";

import { Component, onWillStart, useEffect, useRef } from "@odoo/owl";

// Field widget plotting fine-tuning metrics (train/valid loss, token
// accuracy) as a Chart.js line chart. The field value is expected to be an
// object with 'labels', 'train_loss', 'valid_loss' and
// 'valid_mean_token_accuracy' arrays — TODO confirm against the server-side
// field that feeds this widget.
export class FineTuningGraphField extends Component {
    setup() {
        this.chart = null;
        this.canvasRef = useRef("canvas");

        // Chart.js is lazy-loaded from Odoo's bundled assets.
        onWillStart(() => loadJS("/web/static/lib/Chart/Chart.js"));

        useEffect(() => {
            this.renderChart();
            // Destroy on teardown/re-render so canvases are not leaked.
            return () => {
                if (this.chart) {
                    this.chart.destroy();
                }
            };
        });
    }

    get formattedValue() {
        // NOTE(review): props.value is chart data (an object) everywhere else
        // in this widget, but formatFloat expects a number — verify whether
        // this getter is actually used by the template.
        return formatFloat(this.props.value, { humanReadable: true, decimals: 1 });
    }

    renderChart() {
        const data = this.props.value;
        // Chart.js v2-style config (xAxes/yAxes arrays) matching the version
        // bundled with Odoo's web assets.
        const config = {
            type: "line",
            data: {
                labels: data['labels'],
                datasets: [
                    {
                        data: data['train_loss'],
                        backgroundColor: "rgba(52,156,227,0.68)",
                        borderColor: "rgba(52,156,227,0.68)",
                        label: 'Train Loss',
                        fill: false,
                        lineTension: 0,
                    },
                    {
                        data: data['valid_loss'],
                        backgroundColor: "rgba(56,189,103,0.62)",
                        borderColor: "rgba(56,189,103,0.62)",
                        label: 'Valid Loss',
                        fill: false,
                        lineTension: 0,
                    },
                    {
                        data: data['valid_mean_token_accuracy'],
                        backgroundColor: "rgba(154,44,197,0.57)",
                        borderColor: "rgba(154,44,197,0.57)",
                        label: 'Valid Mean Token Accuracy',
                        fill: false,
                        lineTension: 0,
                    },
                ],
            },
            options: {
                responsive: true,
                plugins: {
                    legend: {
                        position: 'top',
                    },
                },
                title: {
                    display: true,
                    text: this.props.title,
                    padding: 4,
                },
                scales: {
                    xAxes: [
                        {
                            display: true,
                            scaleLabel: {
                                display: true,
                                labelString: _t("Steps"),
                                fontStyle: 'bold',
                            },
                        }
                    ],
                    yAxes: [
                        {
                            display: true,
                            ticks: {
                                beginAtZero: true,
                            },
                        }
                    ],
                },
                layout: {
                    padding: {
                        bottom: 5,
                    },
                },
            },
        };
        this.chart = new Chart(this.canvasRef.el, config);
    }
}

// NOTE(review): template registered under the "web." namespace although the
// widget lives in ai_connector — confirm the XML t-name matches.
FineTuningGraphField.template = "web.FineTuningGraphField";
FineTuningGraphField.props = {
    ...standardFieldProps,
    title: { type: String },
};

// Widget options: <field options="{'title': ...}"/> overrides the field label.
FineTuningGraphField.extractProps = ({ attrs, field }) => {
    return {
        title: attrs.options.title || field.string,
    };
};

registry.category("fields").add("fine_tuning_graph", FineTuningGraphField);
6 | 7 |
8 |
9 | 10 |
11 | -------------------------------------------------------------------------------- /ai_connector/static/src/scss/style.scss: -------------------------------------------------------------------------------- 1 | .btn-center { 2 | width: 30%; 3 | display: block; 4 | margin-left: auto; 5 | margin-right: auto; 6 | } 7 | 8 | .alert-info-center { 9 | width: max-content; 10 | margin-left: auto; 11 | margin-right: auto; 12 | } 13 | .image-transparent-background img { 14 | background: 15 | repeating-conic-gradient(#b8b8b8 0% 25%, transparent 0% 50%) 16 | 50% / 15px 15px 17 | } -------------------------------------------------------------------------------- /ai_connector/views/ai_completion_result_views.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | ai.completion.result.view.form 5 | ai.completion.result 6 | 7 |
8 | 9 |
10 |
13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 |
35 |
36 |
37 |
38 | 39 | 40 | ai.completion.result.view.tree 41 | ai.completion.result 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | ai.completion.result.simple.view.tree 54 | ai.completion.result 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 |