├── .gitignore ├── README.md └── functions ├── .gitignore ├── main.py ├── requirements.txt ├── schema.py └── schema_example.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into 
this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | #.idea/
161 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Custom GPT with Third-Party API Integration
2 | 
3 | ## Overview
4 | 
5 | This repository demonstrates the process of integrating a production-ready third-party API with a custom GPT (Generative Pre-trained Transformer). Unlike using a playground environment, this project focuses on deploying a serverless API using Firebase functions for scalability and efficiency. The primary use-case involves connecting Google Analytics Data API to a custom GPT for insightful analytics. This setup can be adapted for various APIs, making it a versatile solution for custom GPT applications.
6 | 
7 | ## Setup and Installation
8 | 
9 | 1. **Initial Setup:**
10 |    - Create and clone a new GitHub repository.
11 |    - Install Firebase CLI using npm: `npm -g install firebase-tools`.
12 |    - Initialize Firebase functions in your project folder: `firebase init functions`.
13 | 
14 | 2. **Service Account Setup:**
15 |    - Generate a service account key from Google Cloud Console.
16 |    - Securely store the key in the functions directory (do not commit to GitHub).
17 | 
18 | 3. **Function Development:**
19 |    - Modify the example function in [main.py](https://github.com/VRSEN/custom-gpt-api-tutorial/functions/main.py) to suit your application.
20 |    - Implement simple hardcoded token authentication for internal use.
21 |    - Initialize the Google client (if needed) using the `from_service_account_file` function.
22 | 
23 | 4. **Schema Definition:**
24 |    - Define the OpenAI function schema using the instructor library.
25 |    - Implement models such as `GA4QueryParams`, `DimensionSchema`, `MetricSchema`, and `DateRangeSchema`.
26 | 
27 | 5. 
**API Endpoint Creation:**
28 |    - Construct the API request body and define the logic for the endpoint.
29 |    - Deploy the function using `firebase deploy --only functions`.
30 | 
31 | 6. **OpenAPI Schema Generation:**
32 |    - Run the schema definition file to generate the OpenAPI schema.
33 |    - Update the schema with the deployed function's endpoint URL.
34 | 
35 | ## Testing and Deployment
36 | 
37 | - Test the integration by creating a custom GPT in the GPT builder.
38 | - Use the OpenAPI schema to define the GPT's functionality.
39 | - Configure authentication using the API Key (Bearer auth type).
40 | 
41 | ## Custom Function Integration
42 | 
43 | - Follow similar steps to integrate additional custom functions.
44 | - Define new schemas and update the endpoint logic in main.py.
45 | - Redeploy and update the OpenAPI schema as needed.
46 | 
47 | ## Conclusion
48 | 
49 | Custom GPTs offer significant potential, especially when tailored for specific business needs. This repository provides a foundational approach for integrating third-party APIs with GPT, allowing for the creation of powerful and efficient AI-driven solutions.
50 | 
51 | ## Feedback and Contribution
52 | 
53 | Your feedback is valuable. Please feel free to contribute to this project or share your thoughts in the comments section. For any queries or suggestions, open an issue in the repository.
54 | 
55 | ---
56 | 
57 | Remember to subscribe to the [YouTube channel](https://youtube.com/@vrsen?si=MoNJ0OcxqjsUccQj) for more tutorials and insights into AI and GPT integrations.
-------------------------------------------------------------------------------- /functions/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VRSEN/custom-gpt-api-tutorial/c1dcbca7bc2216e843b9245d16317859a9d3a652/functions/.gitignore -------------------------------------------------------------------------------- /functions/main.py: -------------------------------------------------------------------------------- 1 | # Welcome to Cloud Functions for Firebase for Python! 2 | # To get started, simply uncomment the below code or create your own. 3 | # Deploy with `firebase deploy` 4 | 5 | from firebase_admin import initialize_app 6 | from firebase_functions import https_fn 7 | from google.analytics.data_v1beta import BetaAnalyticsDataClient, OrderBy 8 | from google.analytics.data_v1beta.types import DateRange, Dimension, Metric 9 | 10 | from schema import GA4QueryParams 11 | from schema_example import Add2Numbers 12 | 13 | initialize_app() 14 | 15 | db_token = "" 16 | 17 | 18 | @https_fn.on_request() 19 | def query_ga4_data(req: https_fn.Request): 20 | token = req.headers.get("Authorization").split("Bearer ")[1] 21 | 22 | if token != db_token: 23 | return https_fn.Response("Unauthorized", status=401) 24 | 25 | analytics = BetaAnalyticsDataClient.from_service_account_file( 26 | "./custom-gpts-tutorial-c06142bfc81a.json" 27 | ) 28 | 29 | property_id = 406502200 30 | 31 | print("Request", req.get_json()) 32 | 33 | try: 34 | data: GA4QueryParams = GA4QueryParams(**req.get_json()) 35 | 36 | request_body = { 37 | 'property': f"properties/{property_id}", 38 | 'date_ranges': [DateRange(**date_range.model_dump()) for date_range in data.date_ranges], 39 | 'dimensions': [Dimension(**dimension.model_dump()) for dimension in data.dimensions], 40 | 'metrics': [Metric(**metric.model_dump()) for metric in data.metrics], 41 | 'order_bys': [OrderBy(**order_by.model_dump()) for order_by in data.order_bys], 42 | 
'limit': data.limit, 43 | } 44 | 45 | report = analytics.run_report(request=request_body) 46 | 47 | return { 48 | "data": str(report.rows), 49 | } 50 | except Exception as e: 51 | print("Analytics error: ", e) 52 | return { 53 | "error": str(e) 54 | } 55 | 56 | @https_fn.on_request() 57 | def add_2_numbers(req: https_fn.Request): 58 | token = req.headers.get("Authorization").split("Bearer ")[1] 59 | 60 | if token != db_token: 61 | return https_fn.Response("Unauthorized", status=401) 62 | 63 | try: 64 | data = Add2Numbers(**req.get_json()) 65 | 66 | return { 67 | "data": data['number1'] + data['number2'] 68 | } 69 | except Exception as e: 70 | print("Add 2 numbers error: ", e) 71 | return { 72 | "error": str(e) 73 | } 74 | -------------------------------------------------------------------------------- /functions/requirements.txt: -------------------------------------------------------------------------------- 1 | firebase_functions~=0.1.0 2 | instructor 3 | google-analytics-data -------------------------------------------------------------------------------- /functions/schema.py: -------------------------------------------------------------------------------- 1 | from pydantic import Field 2 | from typing import List, Optional, Literal 3 | from instructor import OpenAISchema 4 | 5 | 6 | class OrderBySchema(OpenAISchema): 7 | """ 8 | Represents an order by condition for the GA4 query. 9 | """ 10 | dimension_name: Optional[str] = Field(..., description="Dimension name to order by. Can either be a metric or a dimension.") 11 | metric_name: Optional[str] = Field(..., description="Metric name to order by. Can either be a metric or a dimension.") 12 | desc: bool = Field(True, description="Whether to order by descending or ascending.") 13 | 14 | class DateRangeSchema(OpenAISchema): 15 | """ 16 | Represents a date range for the GA4 query. 
17 | """ 18 | start_date: str = Field(..., description="Start date of the query.") 19 | end_date: str = Field(..., description="End date of the query.") 20 | 21 | class MetricSchema(OpenAISchema): 22 | """ 23 | Represents a metric for the GA4 query. 24 | """ 25 | name: str = Field(..., description="Name of the metric.") 26 | 27 | class DimensionSchema(OpenAISchema): 28 | """ 29 | Represents a dimension for the GA4 query. 30 | """ 31 | name: str = Field(..., description="Name of the dimension.") 32 | 33 | class GA4QueryParams(OpenAISchema): 34 | """ 35 | Parameters for querying the Google Analytics 4 API runReport endpoint. 36 | """ 37 | date_ranges: List[DateRangeSchema] = Field(..., description="List of date ranges to query.") 38 | metrics: List[MetricSchema] = Field(..., description="List of metric names to query.") 39 | dimensions: Optional[List[DimensionSchema]] = Field([], description="List of dimension names to query.") 40 | order_bys: Optional[List[OrderBySchema]] = Field([], description="List of order bys to query.") 41 | limit: int = Field(5, description="Limit of the query. 
Defaults to 5.")
42 | 
43 | 
44 | if __name__ == '__main__':
45 |     import json
46 | 
47 |     openai_schema = GA4QueryParams.openai_schema
48 |     defs = {}  # default for models whose generated schema has no nested '$defs'
49 |     if '$defs' in openai_schema['parameters']:
50 |         defs = openai_schema['parameters']['$defs']
51 |         del openai_schema['parameters']['$defs']
52 |     schema = {
53 |         "openapi": "3.1.0",
54 |         "info": {
55 |             "title": "Query GA4 Data",
56 |             "description": "Google Analytics 4 API",
57 |             "version": "v1.0.0"
58 |         },
59 |         "servers": [
60 |             {
61 |                 "url": ""  # enter your url here
62 |             }
63 |         ],
64 |         "paths": {
65 |             "/": {
66 |                 "post": {
67 |                     "description": openai_schema['description'],
68 |                     "operationId": "runReport",
69 |                     "parameters": [],
70 |                     "requestBody": {
71 |                         "content": {
72 |                             "application/json": {
73 |                                 "schema": {
74 |                                     "$ref": "#/components/schemas/RunReportParams"
75 |                                 }
76 |                             }
77 |                         },
78 |                         "required": True,
79 |                     },
80 |                     "deprecated": False,
81 |                     "security": [
82 |                         {
83 |                             "apiKey": []
84 |                         }
85 |                     ]
86 |                 }
87 |             },
88 |         },
89 |         "components": {
90 |             "schemas": {
91 |                 "RunReportParams": openai_schema['parameters'],
92 |                 **defs,
93 |             },
94 |             "securitySchemes": {
95 |                 "apiKey": {
96 |                     "type": "apiKey"  # NOTE(review): OpenAPI apiKey schemes normally also require "name" and "in" — confirm the GPT builder accepts this
97 |                 }
98 |             }
99 |         },
100 |     }
101 |     print(json.dumps(schema, indent=2).replace("#/$defs/", "#/components/schemas/"))  # re-root pydantic's local $defs refs under components/schemas
102 | 
--------------------------------------------------------------------------------
/functions/schema_example.py:
--------------------------------------------------------------------------------
1 | from instructor import OpenAISchema
2 | from pydantic import Field
3 | 
4 | 
5 | class Add2Numbers(OpenAISchema):
6 |     """
7 |     This function adds two numbers.
8 | """ 9 | number1: int = Field(..., description="First number.") 10 | number2: int = Field(..., description="Second number.") 11 | 12 | 13 | 14 | if __name__ == '__main__': 15 | import json 16 | 17 | openai_schema = Add2Numbers.openai_schema 18 | if '$defs' in openai_schema['parameters']: 19 | defs = openai_schema['parameters']['$defs'] 20 | del openai_schema['parameters']['$defs'] 21 | schema = { 22 | "openapi": "3.1.0", 23 | "info": { 24 | "title": "Query GA4 Data", 25 | "description": "Google Analytics 4 API", 26 | "version": "v1.0.0" 27 | }, 28 | "servers": [ 29 | { 30 | "url": "" # enter your url here 31 | } 32 | ], 33 | "paths": { 34 | "/": { 35 | "post": { 36 | "description": openai_schema['description'], 37 | "operationId": "runReport", 38 | "parameters": [], 39 | "requestBody": { 40 | "content": { 41 | "application/json": { 42 | "schema": { 43 | "$ref": "#/components/schemas/RunReportParams" 44 | } 45 | } 46 | }, 47 | "required": True, 48 | }, 49 | "deprecated": False, 50 | "security": [ 51 | { 52 | "apiKey": [] 53 | } 54 | ] 55 | } 56 | }, 57 | }, 58 | "components": { 59 | "schemas": { 60 | "RunReportParams": openai_schema['parameters'], 61 | **defs, 62 | }, 63 | "securitySchemes": { 64 | "apiKey": { 65 | "type": "apiKey" 66 | } 67 | } 68 | }, 69 | } 70 | print(json.dumps(schema, indent=2).replace("#/$defs/", "#/components/schemas/")) 71 | --------------------------------------------------------------------------------