├── .gitignore
├── src
│   ├── internal.rs
│   ├── internal
│   │   ├── utils.rs
│   │   ├── auto_impl.rs
│   │   └── auto_test.rs
│   └── lib.rs
├── tests
│   ├── tests.rs
│   ├── auto_test_fn.rs
│   └── auto_impl_fn.rs
├── .github
│   ├── workflows
│   │   ├── check.yml
│   │   └── auto-approve.yml
│   └── dependabot.yml
├── Cargo.toml
├── LICENSE
└── README.md

/.gitignore:
--------------------------------------------------------------------------------
/target
/Cargo.lock
--------------------------------------------------------------------------------
/src/internal.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

pub mod auto_impl;
pub mod auto_test;
mod utils;
--------------------------------------------------------------------------------
/tests/tests.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

#[test]
fn tests() {
    let t = trybuild::TestCases::new();
    t.pass("tests/auto_test_fn.rs");
    t.pass("tests/auto_impl_fn.rs");
}
--------------------------------------------------------------------------------
/.github/workflows/check.yml:
--------------------------------------------------------------------------------
name: Check gpt-macro
on: [pull_request]

env:
  RUSTFLAGS: -D warnings

jobs:
  Check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: dtolnay/rust-toolchain@stable
      - run: rustup component add rust-src clippy rustfmt
      - run: cargo clippy --all-features
      - run: cargo fmt --all -- --check
--------------------------------------------------------------------------------
/tests/auto_test_fn.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

use gpt_macro::auto_test;

#[auto_test(test_valid, test_div_by_zero)]
fn div_u32(a: u32, b: u32) -> u32 {
    if b == 0 {
        panic!("attempt to divide by zero");
    }
    a / b
}

#[auto_test]
fn collatz(n: u32) -> u32 {
    if n % 2 == 0 {
        n / 2
    } else {
        3 * n + 1
    }
}

fn main() {}
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "cargo" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "gpt-macro"
version = "0.1.0"
edition = "2021"

[lib]
proc-macro = true

[[test]]
name = "tests"
path = "tests/tests.rs"

[features]
default = []

[dev-dependencies]
trybuild = { version = "1.0", features = ["diff"] }

[dependencies]
syn = { version = "2.0", features = ["full", "extra-traits", "parsing"] }
proc-macro2 = { version = "1.0", features = ["nightly"] }
quote = "1.0"
async-openai = "0.30.0"
tokio = { version = "1.0", features = ["rt-multi-thread"] }
--------------------------------------------------------------------------------
/tests/auto_impl_fn.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

use gpt_macro::auto_impl;

auto_impl! {
    "Return fizz if the number is divisible by 3, buzz if the number is divisible by 5, and fizzbuzz if the number is divisible by both 3 and 5."
    fn fizzbuzz(n: u32) -> String {
    }

    #[test]
    fn test_fizzbuzz() {
        assert_eq!(fizzbuzz(3), "fizz");
        assert_eq!(fizzbuzz(5), "buzz");
        assert_eq!(fizzbuzz(15), "fizzbuzz");
        assert_eq!(fizzbuzz(1), "1");
    }
}

fn main() {}
--------------------------------------------------------------------------------
/.github/workflows/auto-approve.yml:
--------------------------------------------------------------------------------
name: Dependabot auto-approve
on: pull_request

permissions:
  pull-requests: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: github.actor == 'dependabot[bot]'
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v1
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Approve a PR
        run: gh pr review --approve "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GH_TOKEN: ${{secrets.GITHUB_TOKEN}}
--------------------------------------------------------------------------------
/src/internal/utils.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

use async_openai::types::CreateChatCompletionResponse;

pub fn extract_code(
    response: &CreateChatCompletionResponse,
) -> Result<String, Box<dyn std::error::Error>> {
    let content = response.choices[0]
        .message
        .content
        .clone()
        .expect("No content found.");

    // Keep only the code inside the ```rust fence; the fences themselves and
    // any surrounding explanation text are dropped.
    let code_block = content
        .split("```rust")
        .nth(1)
        .ok_or(format!("No code block start found: {content}"))?
        .split("```")
        .next()
        .ok_or(format!("No code block end found: {content}"))?
        .trim()
        .to_string();

    Ok(code_block)
}
--------------------------------------------------------------------------------
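
For reference, the extraction above boils down to plain string splitting on the Markdown fences. A minimal standalone sketch of the same steps (the sample reply string and the `answer` function are made up; this snippet is not part of the repository):

```rust
fn main() {
    // Build the Markdown fence marker at runtime so this sample stays easy to read.
    let fence = "`".repeat(3);
    // A typical ChatGPT-style reply: prose, one fenced Rust block, more prose.
    let content = format!(
        "Sure, here is the code:\n{fence}rust\nfn answer() -> u32 {{ 42 }}\n{fence}\nHope that helps!"
    );
    // Same steps as `extract_code`: keep only what sits between the fences.
    let code = content
        .split(&format!("{fence}rust"))
        .nth(1)
        .and_then(|rest| rest.split(fence.as_str()).next())
        .map(|code| code.trim().to_string());
    assert_eq!(code.as_deref(), Some("fn answer() -> u32 { 42 }"));
    println!("extracted: {code:?}");
}
```
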
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2023 Akira Moroo

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

use internal::auto_impl::auto_impl_impl;
use internal::auto_test::auto_test_impl;
use proc_macro::TokenStream;

mod internal;

/// Attribute macro for automatically generating tests for functions.
///
/// # Example
///
/// ```
/// use r#gpt_macro::auto_test;
///
/// #[auto_test(test_valid, test_div_by_zero)]
/// fn div_u32(a: u32, b: u32) -> u32 {
///     a / b
/// }
///
/// #[auto_test]
/// fn collatz(n: u32) -> u32 {
///     if n % 2 == 0 {
///         n / 2
///     } else {
///         3 * n + 1
///     }
/// }
/// ```
#[proc_macro_attribute]
pub fn auto_test(args: TokenStream, input: TokenStream) -> TokenStream {
    auto_test_impl(args, input)
}

/// Macro for automatically generating implementations for functions.
///
/// # Example
///
/// ```
/// use r#gpt_macro::auto_impl;
///
/// auto_impl! {
///     "Return fizz if the number is divisible by 3, buzz if the number is divisible by 5, and fizzbuzz if the number is divisible by both 3 and 5."
///     fn fizzbuzz(n: u32) -> String {
///     }
///
///     #[test]
///     fn test_fizzbuzz() {
///         assert_eq!(fizzbuzz(3), "fizz");
///         assert_eq!(fizzbuzz(5), "buzz");
///         assert_eq!(fizzbuzz(15), "fizzbuzz");
///         assert_eq!(fizzbuzz(1), "1");
///     }
/// }
/// ```
#[proc_macro]
pub fn auto_impl(input: TokenStream) -> TokenStream {
    auto_impl_impl(input)
}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# gpt-macro

ChatGPT-powered Rust proc macro that generates code at compile time.

## Implemented Macros

* `auto_impl!{}`
* `#[auto_test(...)]`

## Usage

Get a ChatGPT API key and set it in the `OPENAI_API_KEY` environment variable before building.

### `auto_impl!{}`

Syntax:

```rust
auto_impl! {
    $STR_LIT
    $TOKEN_STREAM
}
```

where `$STR_LIT` is a prompt string literal and `$TOKEN_STREAM` is the target code.

Example:

```rust
use gpt_macro::auto_impl;

auto_impl! {
    "Return fizz if the number is divisible by 3, buzz if the number is divisible by 5, and fizzbuzz if the number is divisible by both 3 and 5."
    fn fizzbuzz(n: u32) -> String {
    }

    #[test]
    fn test_fizzbuzz() {
        assert_eq!(fizzbuzz(3), "fizz");
        assert_eq!(fizzbuzz(5), "buzz");
        assert_eq!(fizzbuzz(15), "fizzbuzz");
        assert_eq!(fizzbuzz(1), "1");
    }
}
```

The `fizzbuzz()` implementation is incomplete, so the build would fail without `auto_impl!{}`. The macro parses the given prompt and target code, asks ChatGPT to fill in the code when the macro is expanded, and replaces the target with the code extracted from the ChatGPT response. The Rust compiler then compiles the generated code as usual.

Response Example:

```rust
fn fizzbuzz(n: u32) -> String {
    if n % 3 == 0 && n % 5 == 0 {
        return String::from("fizzbuzz");
    } else if n % 3 == 0 {
        return String::from("fizz");
    } else if n % 5 == 0 {
        return String::from("buzz");
    } else {
        return n.to_string();
    }
}

#[test]
fn test_fizzbuzz() {
    assert_eq!(fizzbuzz(3), "fizz");
    assert_eq!(fizzbuzz(5), "buzz");
    assert_eq!(fizzbuzz(15), "fizzbuzz");
    assert_eq!(fizzbuzz(1), "1");
}
```

### `#[auto_test]`

See this example:

```rust
use gpt_macro::auto_test;

#[auto_test(test_valid, test_div_by_zero)]
fn div_u32(a: u32, b: u32) -> u32 {
    if b == 0 {
        panic!("attempt to divide by zero");
    }
    a / b
}
```
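
The named tests are generated by ChatGPT and appended after the original function. The exact output depends on the model's response, but an expansion might look like this (illustrative only, not a recorded response):

```rust
fn div_u32(a: u32, b: u32) -> u32 {
    if b == 0 {
        panic!("attempt to divide by zero");
    }
    a / b
}

#[test]
fn test_valid() {
    assert_eq!(div_u32(10, 2), 5);
}

#[test]
#[should_panic(expected = "attempt to divide by zero")]
fn test_div_by_zero() {
    div_u32(1, 0);
}
```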

## License

gpt-macro is released under the MIT license.
--------------------------------------------------------------------------------
/src/internal/auto_impl.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

use async_openai::{
    types::{
        ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs,
        CreateChatCompletionRequestArgs,
    },
    Client,
};
use proc_macro::TokenStream;
use quote::quote;
use syn::{
    parse::{Parse, ParseStream},
    parse_macro_input, parse_str, LitStr,
};
use tokio::runtime::Runtime;

use super::utils;

/// Parses the following syntax:
///
/// auto_impl! {
///     $STR_LIT
///     $TOKEN_STREAM
/// }
struct AutoImpl {
    doc: String,
    token_stream: proc_macro2::TokenStream,
}

impl Parse for AutoImpl {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let doc = input.parse::<LitStr>()?.value();
        let token_stream = input.parse::<proc_macro2::TokenStream>()?;
        Ok(AutoImpl { doc, token_stream })
    }
}

impl AutoImpl {
    async fn completion(&mut self) -> Result<TokenStream, Box<dyn std::error::Error>> {
        let request = CreateChatCompletionRequestArgs::default()
            .model("gpt-3.5-turbo")
            .messages([
                ChatCompletionRequestSystemMessageArgs::default()
                    .content("You are a Rust expert who can implement the given function.")
                    .build()?.into(),
                ChatCompletionRequestUserMessageArgs::default()
                    .content(format!(
                        "Read this incomplete Rust code:\n```rust\n{}\n```",
                        self.token_stream
                    ))
                    .build()?.into(),
                ChatCompletionRequestUserMessageArgs::default()
                    .content(format!(
                        "Complete the Rust code that follows this instruction: '{}'. Your response must start with code block '```rust'.",
                        self.doc
                    ))
                    .build()?.into(),
            ])
            .build()?;

        let client = Client::new();
        let response = client.chat().create(request).await?;

        self.parse_str(&utils::extract_code(&response)?)
    }

    fn parse_str(&self, s: &str) -> Result<TokenStream, Box<dyn std::error::Error>> {
        let expanded = if let Ok(code) = parse_str::<proc_macro2::TokenStream>(s) {
            quote! {
                #code
            }
        } else {
            return Err(format!("Failed to parse the response as Rust code:\n{s}\n").into());
        };

        Ok(TokenStream::from(expanded))
    }
}

pub fn auto_impl_impl(input: TokenStream) -> TokenStream {
    let mut auto_impl = parse_macro_input!(input as AutoImpl);

    let rt = Runtime::new().expect("Failed to create a runtime.");
    rt.block_on(auto_impl.completion())
        .unwrap_or_else(|e| panic!("{}", e))
}
--------------------------------------------------------------------------------
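
For context, the `auto_impl_impl` entry point above has to be synchronous (proc macros cannot be async), so it spins up a Tokio runtime and blocks on the async completion call. A stripped-down sketch of that same pattern outside the proc-macro setting (the `ask_model` function is a made-up stand-in for the real chat-completion request; this snippet is not part of the repository):

```rust
use tokio::runtime::Runtime;

// Made-up stand-in for the async OpenAI request; it only echoes the prompt.
async fn ask_model(prompt: &str) -> Result<String, Box<dyn std::error::Error>> {
    Ok(format!("// generated for: {prompt}"))
}

// Synchronous wrapper, mirroring how auto_impl_impl drives the async call.
fn expand_sync(prompt: &str) -> String {
    let rt = Runtime::new().expect("Failed to create a runtime.");
    rt.block_on(ask_model(prompt)).unwrap_or_else(|e| panic!("{}", e))
}

fn main() {
    println!("{}", expand_sync("implement fizzbuzz"));
}
```
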
/src/internal/auto_test.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: MIT
// Akira Moroo 2023

use async_openai::{
    types::{
        ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs,
        CreateChatCompletionRequestArgs,
    },
    Client,
};
use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use std::collections::HashSet;
use syn::{
    parse::{Parse, ParseStream},
    parse_macro_input, parse_str, Ident, Token,
};
use tokio::runtime::Runtime;

use super::utils;

/// Parses a list of test function names separated by commas:
///
/// test_valid, test_div_by_zero
///
/// The parsed names are used as the names of the generated test functions.
struct Args {
    test_names: HashSet<Ident>,
}

impl Parse for Args {
    fn parse(input: ParseStream) -> syn::parse::Result<Self> {
        let test_names = input.parse_terminated(Ident::parse, Token![,])?;
        Ok(Args {
            test_names: test_names.into_iter().collect(),
        })
    }
}

struct AutoTest {
    token_stream: proc_macro2::TokenStream,
}

impl AutoTest {
    fn new(token_stream: proc_macro2::TokenStream) -> Self {
        Self { token_stream }
    }

    async fn completion(&mut self, args: Args) -> Result<TokenStream, Box<dyn std::error::Error>> {
        let mut output = self.token_stream.clone();

        let mut messages = vec![
            ChatCompletionRequestSystemMessageArgs::default()
                .content(
                    "You are a Rust expert who can generate perfect tests for the given function.",
                )
                .build()?.into(),
            ChatCompletionRequestUserMessageArgs::default()
                .content(format!(
                    "Read this Rust function:\n```rust\n{}\n```",
                    self.token_stream
                ))
                .build()?.into(),
        ];

        if args.test_names.is_empty() {
            messages.push(
                ChatCompletionRequestUserMessageArgs::default()
                    .content(
                        "Write a test case for the function as much as possible in Markdown code snippet style. Your response must start with code block '```rust'.",
                    )
                    .build()?.into(),
            );
        } else {
            for test_name in args.test_names {
                messages.push(
                    ChatCompletionRequestUserMessageArgs::default()
                        .content(format!(
                            "Write a test case `{test_name}` for the function in Markdown code snippet style. Your response must start with code block '```rust'."
                        ))
                        .build()?.into(),
                );
            }
        }

        let request = CreateChatCompletionRequestArgs::default()
            .model("gpt-3.5-turbo")
            .messages(messages)
            .build()?;

        let client = Client::new();
        let response = client.chat().create(request).await?;

        let test_case = self.parse_str(&utils::extract_code(&response)?)?;
        test_case.to_tokens(&mut output);

        Ok(TokenStream::from(output))
    }

    fn parse_str(&self, s: &str) -> Result<proc_macro2::TokenStream, Box<dyn std::error::Error>> {
        let expanded = if let Ok(test_case) = parse_str::<proc_macro2::TokenStream>(s) {
            quote! {
                #test_case
            }
        } else {
            return Err(format!("Failed to parse the response as Rust code:\n{s}\n").into());
        };

        Ok(expanded)
    }
}

pub fn auto_test_impl(args: TokenStream, input: TokenStream) -> TokenStream {
    // Parse the list of test function names that should be generated.
    let args = parse_macro_input!(args as Args);

    let mut auto_test = AutoTest::new(input.into());

    let rt = Runtime::new().expect("Failed to create a runtime.");
    rt.block_on(auto_test.completion(args))
        .unwrap_or_else(|e| panic!("{}", e))
}
--------------------------------------------------------------------------------