├── key.py
├── .gitattributes
├── README.md
├── examples
│   ├── weather_api_python.py
│   └── sort_element_in_list.py
├── main.py
├── .gitignore
└── gpt.py

/key.py:
--------------------------------------------------------------------------------
key = "XXXXXXXXXXXXXXXXXXXXXXXXXX"
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
# Auto detect text files and perform LF normalization
* text=auto
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# AI Generates Code Using Python and GPT-3
A small experiment that primes OpenAI's GPT-3 with a few code and arithmetic examples (see `main.py`), then asks it to generate code and answers from natural-language prompts.
--------------------------------------------------------------------------------
/examples/weather_api_python.py:
--------------------------------------------------------------------------------
from pprint import pprint
import requests

# Replace the placeholder with a valid OpenWeatherMap API key.
APIKEY = "XXXXXXXXXXXXXXXXXXXXXXXXXX"

# Query the current weather for London and pretty-print the JSON response.
r = requests.get(f'http://api.openweathermap.org/data/2.5/weather?q=London&APPID={APIKEY}')
pprint(r.json())
--------------------------------------------------------------------------------
/examples/sort_element_in_list.py:
--------------------------------------------------------------------------------
def fun(l):
    """Sorts a list in place with repeated adjacent swaps (a recursive bubble sort)."""
    list_len = len(l)
    for i in range(list_len):
        if i < list_len - 1:
            if l[i] > l[i + 1]:
                # Swap the out-of-order pair, then re-run the pass over the whole list.
                l[i], l[i + 1] = l[i + 1], l[i]
                fun(l)
    return l
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
from key import key
import glob
import openai
from gpt import GPT
from gpt import Example


openai.api_key = key
gpt = GPT(engine="davinci",
          temperature=0.5,
          output_prefix="Output: \n\n",
          max_tokens=100)

# add some code examples
for file in glob.glob("examples/*"):
    title = file.replace("_", " ")
    with open(file, "r") as f:
        code = f.read()
    gpt.add_example(Example(title, code))

# add some calculation examples
gpt.add_example(Example("add 3+5", "8"))
gpt.add_example(Example("add 8+5", "13"))
gpt.add_example(Example("add 50+25", "75"))

# Inferences
prompt = "sort list in python"
output = gpt.get_top_reply(prompt)
print(prompt, ":", output)
print("----------------------------------------")

prompt = "Code weather api in python"
output = gpt.get_top_reply(prompt)
print(prompt, ":", output)
print("----------------------------------------")

prompt = "What is 876+89"
output = gpt.get_top_reply(prompt)
print(prompt, ":", output)
print("----------------------------------------")
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
--------------------------------------------------------------------------------
/gpt.py:
--------------------------------------------------------------------------------
"""Creates the Example and GPT classes for a user to interface with the OpenAI
API."""

import openai
import uuid


def set_openai_key(key):
    """Sets OpenAI key."""
    openai.api_key = key


class Example:
    """Stores an input, output pair and formats it to prime the model."""
    def __init__(self, inp, out):
        self.input = inp
        self.output = out
        self.id = uuid.uuid4().hex

    def get_input(self):
        """Returns the input of the example."""
        return self.input

    def get_output(self):
        """Returns the intended output of the example."""
        return self.output

    def get_id(self):
        """Returns the unique ID of the example."""
        return self.id

    def as_dict(self):
        return {
            "input": self.get_input(),
            "output": self.get_output(),
            "id": self.get_id(),
        }


class GPT:
    """The main class for a user to interface with the OpenAI API.
    A user can add examples and set parameters of the API request.
    """
    def __init__(self,
                 engine='davinci',
                 temperature=0.5,
                 max_tokens=100,
                 input_prefix="input: ",
                 input_suffix="\n",
                 output_prefix="output: ",
                 output_suffix="\n\n",
                 append_output_prefix_to_query=False):
        self.examples = {}
        self.engine = engine
        self.temperature = temperature
        self.max_tokens = max_tokens
        self.input_prefix = input_prefix
        self.input_suffix = input_suffix
        self.output_prefix = output_prefix
        self.output_suffix = output_suffix
        self.append_output_prefix_to_query = append_output_prefix_to_query
        self.stop = (output_suffix + input_prefix).strip()

    def add_example(self, ex):
        """Adds an example to the object.
        Example must be an instance of the Example class.
        """
        assert isinstance(ex, Example), "Please create an Example object."
        self.examples[ex.get_id()] = ex

    def delete_example(self, id):
        """Delete example with the specific id."""
        if id in self.examples:
            del self.examples[id]

    def get_example(self, id):
        """Get a single example."""
        return self.examples.get(id, None)

    def get_all_examples(self):
        """Returns all examples as a dict of dicts keyed by example ID."""
        return {k: v.as_dict() for k, v in self.examples.items()}

    def get_prime_text(self):
        """Formats all examples to prime the model."""
        return "".join(
            [self.format_example(ex) for ex in self.examples.values()])

    def get_engine(self):
        """Returns the engine specified for the API."""
        return self.engine

    def get_temperature(self):
        """Returns the temperature specified for the API."""
        return self.temperature

    def get_max_tokens(self):
        """Returns the max tokens specified for the API."""
        return self.max_tokens

    def craft_query(self, prompt):
        """Creates the query for the API request."""
        q = self.get_prime_text() + self.input_prefix + prompt + self.input_suffix
        if self.append_output_prefix_to_query:
            q = q + self.output_prefix

        return q

    def submit_request(self, prompt):
        """Calls the OpenAI API with the specified parameters."""
        response = openai.Completion.create(engine=self.get_engine(),
                                            prompt=self.craft_query(prompt),
                                            max_tokens=self.get_max_tokens(),
                                            temperature=self.get_temperature(),
                                            top_p=1,
                                            n=1,
                                            stream=False,
                                            stop=self.stop)
        return response

    def get_top_reply(self, prompt):
        """Obtains the best result as returned by the API."""
        response = self.submit_request(prompt)
        return response['choices'][0]['text']

    def format_example(self, ex):
        """Formats the input, output pair."""
        return (self.input_prefix + ex.get_input() + self.input_suffix +
                self.output_prefix + ex.get_output() + self.output_suffix)
--------------------------------------------------------------------------------
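
Usage sketch (not a file in the repository): the following minimal example illustrates how the GPT class in gpt.py assembles its priming text and the final query. It assumes gpt.py and the openai package are importable from the current directory; no request is sent, so no API key is needed here.

from gpt import GPT, Example

# Build a GPT helper and register one priming example. Nothing is sent to
# the OpenAI API in this sketch.
gpt = GPT(engine="davinci", temperature=0.5, max_tokens=100)
gpt.add_example(Example("add 3+5", "8"))

# craft_query() prepends every stored example, formatted with the
# input/output prefixes, to the new prompt. The printed text is:
#
#   input: add 3+5
#   output: 8
#
#   input: add 8+5
print(gpt.craft_query("add 8+5"))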