├── .github └── workflows │ ├── .gitignore │ ├── github_actions │ ├── actions │ │ ├── github_create_pull_request_action.rb │ │ ├── github_create_branch_action.rb │ │ ├── github_get_diff_action.rb │ │ ├── get_context_action.rb │ │ └── github_add_or_modify_file_action.rb │ ├── generators │ │ ├── doc_update_necessity_generator.rb │ │ ├── doc_update_generator.rb │ │ └── doc_update_suggestion_generator.rb │ └── providers │ │ └── gemini_15_pro.rb │ ├── ruby.yml │ └── update_docs.yml ├── lib ├── sublayer │ ├── cli │ │ ├── templates │ │ │ ├── cli │ │ │ │ ├── log │ │ │ │ │ └── .keep │ │ │ │ ├── spec │ │ │ │ │ └── .keep │ │ │ │ ├── lib │ │ │ │ │ ├── %project_name% │ │ │ │ │ │ ├── config │ │ │ │ │ │ │ └── .keep │ │ │ │ │ │ ├── version.rb.tt │ │ │ │ │ │ ├── actions │ │ │ │ │ │ │ └── example_action.rb.tt │ │ │ │ │ │ ├── commands │ │ │ │ │ │ │ ├── example_command.rb.tt │ │ │ │ │ │ │ └── base_command.rb.tt │ │ │ │ │ │ ├── cli.rb.tt │ │ │ │ │ │ ├── agents │ │ │ │ │ │ │ └── example_agent.rb.tt │ │ │ │ │ │ ├── generators │ │ │ │ │ │ │ └── example_generator.rb.tt │ │ │ │ │ │ └── config.rb.tt │ │ │ │ │ └── %project_name%.rb.tt │ │ │ │ ├── Gemfile │ │ │ │ ├── bin │ │ │ │ │ └── %project_name%.tt │ │ │ │ ├── .gitignore.tt │ │ │ │ ├── README.md.tt │ │ │ │ └── %project_name%.gemspec.tt │ │ │ ├── github_action │ │ │ │ ├── %project_name% │ │ │ │ │ ├── agents │ │ │ │ │ │ └── .keep │ │ │ │ │ ├── actions │ │ │ │ │ │ └── .keep │ │ │ │ │ ├── generators │ │ │ │ │ │ └── .keep │ │ │ │ │ └── %project_name%.rb.tt │ │ │ │ └── %project_name%.yml.tt │ │ │ ├── quick_script │ │ │ │ ├── actions │ │ │ │ │ └── example_action.rb │ │ │ │ ├── %project_name%.rb │ │ │ │ ├── README.md.tt │ │ │ │ ├── agents │ │ │ │ │ └── example_agent.rb │ │ │ │ └── generators │ │ │ │ │ └── example_generator.rb │ │ │ └── utilities │ │ │ │ └── cli │ │ │ │ └── command.rb.tt │ │ └── commands │ │ │ ├── generators │ │ │ ├── example_action_file_manipulation.rb │ │ │ ├── example_action_api_call.rb │ │ │ ├── example_generator.rb │ │ │ ├── example_agent.rb │ │ │ ├── sublayer_command_generator.rb │ │ │ ├── sublayer_action_generator.rb │ │ │ └── sublayer_agent_generator.rb │ │ │ ├── subcommand_base.rb │ │ │ ├── new_project.rb │ │ │ ├── github_action_project.rb │ │ │ ├── action.rb │ │ │ ├── cli_project.rb │ │ │ ├── quick_script_project.rb │ │ │ └── agent.rb │ ├── actions │ │ └── base.rb │ ├── tasks │ │ └── base.rb │ ├── version.rb │ ├── logging │ │ ├── null_logger.rb │ │ ├── base.rb │ │ ├── debug_logger.rb │ │ └── json_logger.rb │ ├── triggers │ │ ├── base.rb │ │ └── file_change.rb │ ├── components │ │ ├── output_adapters │ │ │ ├── single_string.rb │ │ │ ├── single_integer.rb │ │ │ ├── list_of_strings.rb │ │ │ ├── string_selection_from_list.rb │ │ │ ├── named_strings.rb │ │ │ ├── list_of_named_strings.rb │ │ │ └── formattable.rb │ │ └── output_adapters.rb │ ├── generators │ │ └── base.rb │ ├── agents │ │ └── base.rb │ ├── providers │ │ ├── gemini.rb │ │ ├── open_ai.rb │ │ └── claude.rb │ └── cli.rb └── sublayer.rb ├── .env.example ├── bin ├── sublayer └── setup ├── Gemfile ├── Rakefile ├── .gitignore ├── .contextignore ├── spec ├── logging │ ├── null_logger_spec.rb │ ├── debug_logger_spec.rb │ └── json_logger_spec.rb ├── generators │ ├── examples │ │ ├── four_digit_passcode_generator.rb │ │ ├── invalid_to_valid_json_generator.rb │ │ ├── description_from_code_generator.rb │ │ ├── sentiment_from_text_generator.rb │ │ ├── blog_post_keyword_suggestions_generator.rb │ │ ├── code_from_description_generator.rb │ │ ├── route_selection_from_user_intent_generator.rb │ │ ├── 
code_from_blueprint_generator.rb │ │ ├── task_steps_generator.rb │ │ ├── imaginary_movie_review_generator.rb │ │ └── product_description_generator.rb │ ├── four_digit_passcode_generator_spec.rb │ ├── blog_post_keyword_suggestions_generator_spec.rb │ ├── invalid_to_valid_json_generator_spec.rb │ ├── sentiment_from_text_generator_spec.rb │ ├── route_selection_from_user_intent_generator_spec.rb │ ├── product_description_generator_spec.rb │ ├── task_steps_generator_spec.rb │ ├── code_from_description_generator_spec.rb │ └── imaginary_movie_review_generator_spec.rb ├── triggers │ ├── base_spec.rb │ └── file_change_spec.rb ├── agents │ ├── examples │ │ └── rspec_agent.rb │ └── base_spec.rb ├── spec_helper.rb ├── components │ └── output_adapters │ │ ├── single_string_spec.rb │ │ ├── single_integer_spec.rb │ │ ├── list_of_strings_spec.rb │ │ ├── named_strings_spec.rb │ │ └── list_of_named_strings_spec.rb ├── providers │ ├── gemini_spec.rb │ └── claude_spec.rb ├── integration │ ├── quick_script_creation_spec.rb │ ├── cli_project_creation_spec.rb │ └── github_action_creation_spec.rb ├── cli │ └── generators │ │ ├── action_generator_spec.rb │ │ ├── generator_generator_spec.rb │ │ └── command_generator_spec.rb └── vcr_cassettes │ ├── claude │ ├── no_function.yml │ ├── generators │ │ ├── invalid_to_valid_json_generator │ │ │ ├── valid_json.yml │ │ │ └── invalid_json.yml │ │ ├── four_digit_passcode_generator │ │ │ └── find_number.yml │ │ ├── sentiment_from_text_generator │ │ │ └── positive.yml │ │ ├── route_selection_from_user_intent_generator │ │ │ └── route.yml │ │ ├── code_from_description_generator │ │ │ └── hello_world.yml │ │ └── blog_post_keyword_suggestions_generator │ │ │ └── ai_in_healthcare.yml │ ├── max_tokens.yml │ └── 42.yml │ ├── gemini │ ├── 42.yml │ └── generators │ │ ├── four_digit_passcode_generator │ │ └── find_number.yml │ │ ├── sentiment_from_text_generator │ │ └── positive.yml │ │ ├── route_selection_from_user_intent_generator │ │ └── route.yml │ │ └── code_from_description_generator │ │ └── hello_world.yml │ └── openai │ └── no_function.yml ├── LICENSE ├── sublayer.gemspec ├── Gemfile.lock └── CLAUDE.md /.github/workflows/.gitignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/log/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/spec/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/config/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/github_action/%project_name%/agents/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/github_action/%project_name%/actions/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/github_action/%project_name%/generators/.keep: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | GEMINI_API_KEY=example 2 | ANTHROPIC_API_KEY=example 3 | OPENAI_API_KEY=example 4 | -------------------------------------------------------------------------------- /lib/sublayer/actions/base.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Actions 3 | class Base 4 | end 5 | end 6 | end 7 | -------------------------------------------------------------------------------- /lib/sublayer/tasks/base.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Tasks 3 | class Base 4 | end 5 | end 6 | end 7 | -------------------------------------------------------------------------------- /lib/sublayer/version.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sublayer 4 | VERSION = "0.2.9" 5 | end 6 | -------------------------------------------------------------------------------- /bin/sublayer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require_relative "../lib/sublayer/cli" 4 | 5 | Sublayer::CLI.start(ARGV) 6 | -------------------------------------------------------------------------------- /bin/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | IFS=$'\n\t' 4 | set -vx 5 | 6 | bundle install 7 | cp .env.example .env 8 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source "https://rubygems.org" 4 | 5 | # Specify your gem's dependencies in sublayer_ruby.gemspec 6 | gemspec 7 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/Gemfile: -------------------------------------------------------------------------------- 1 | source "https://rubygems.org" 2 | 3 | gemspec 4 | 5 | group :development, :test do 6 | gem "rspec", "~> 3.10" 7 | end 8 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/version.rb.tt: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= project_name.camelize %> 4 | VERSION = "0.0.1" 5 | end 6 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "bundler/gem_tasks" 4 | require "rspec/core/rake_task" 5 | 6 | RSpec::Core::RakeTask.new(:spec) 7 | 8 | task default: :spec 9 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/bin/%project_name%.tt: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require_relative "../lib/<%= project_name %>" 4 | 5 | <%= project_name.camelize %>::CLI.start(ARGV) 6 | -------------------------------------------------------------------------------- 
/lib/sublayer/cli/templates/cli/.gitignore.tt: -------------------------------------------------------------------------------- 1 | /.bundle/ 2 | /.yardoc 3 | /_yardoc/ 4 | /coverage/ 5 | /doc/ 6 | /pkg/ 7 | /spec/reports/ 8 | /tmp/ 9 | /log/ 10 | 11 | .rspec_status 12 | 13 | .DS_Store 14 | .DS_Store? 15 | -------------------------------------------------------------------------------- /lib/sublayer/logging/null_logger.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Logging 3 | class NullLogger < Base 4 | def log(level, message, data = {}) 5 | # do nothing 6 | end 7 | end 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.bundle/ 2 | /.yardoc 3 | /_yardoc/ 4 | /coverage/ 5 | /doc/ 6 | /pkg/ 7 | /spec/reports/ 8 | /tmp/ 9 | 10 | /.sublayer/ 11 | 12 | .rspec_status 13 | .idea/ 14 | 15 | *.log 16 | *.gem 17 | 18 | /TAGS 19 | 20 | *.DS_Store 21 | .env 22 | -------------------------------------------------------------------------------- /lib/sublayer/logging/base.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Logging 3 | class Base 4 | def log(level, message, data = {}) 5 | raise NotImplementedError, "Subclasses must implement log method" 6 | end 7 | end 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /.contextignore: -------------------------------------------------------------------------------- 1 | .git/**/* 2 | .github/**/* 3 | .ruby-lsp/**/* 4 | .contextignore 5 | .cursorignore 6 | .gitignore 7 | .rspec_status 8 | 9 | bin/**/* 10 | Gemfile.lock 11 | LICENSE 12 | Rakefile 13 | sublayer.gemspec 14 | spec/vcr_cassettes 15 | spec/spec_helper.rb 16 | -------------------------------------------------------------------------------- /lib/sublayer/logging/debug_logger.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Logging 3 | class DebugLogger < Base 4 | def log(level, message, data = {}) 5 | puts "[#{Time.now.iso8601}] #{level.upcase}: #{message}" 6 | pp data unless data.empty? 
7 | end 8 | end 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/quick_script/actions/example_action.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class ExampleAction < Sublayer::Actions::Base 4 | def initialize(input:) 5 | @input = input 6 | end 7 | 8 | def call 9 | puts "Performing action with input: #{@input}" 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /lib/sublayer/triggers/base.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Triggers 3 | class Base 4 | def setup(agent) 5 | raise NotImplementedError, "Subclasses must implement setup method" 6 | end 7 | 8 | def activate(agent) 9 | agent.send(:take_step) 10 | end 11 | end 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/quick_script/%project_name%.rb: -------------------------------------------------------------------------------- 1 | require "yaml" 2 | require "sublayer" 3 | 4 | # Load any Actions, Generators, and Agents 5 | Dir[File.join(__dir__, "actions", "*.rb")].each { |file| require file } 6 | Dir[File.join(__dir__, "generators", "*.rb")].each { |file| require file } 7 | Dir[File.join(__dir__, "agents", "*.rb")].each { |file| require file } 8 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/utilities/cli/command.rb.tt: -------------------------------------------------------------------------------- 1 | module <%= @project_name.camelize %> 2 | module Commands 3 | class <%= @command_class_name %> < BaseCommand 4 | def self.description 5 | "<%= @command_description %>" 6 | end 7 | 8 | def execute(*args) 9 | <%= @command_execute_body %> 10 | end 11 | end 12 | end 13 | end -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/generators/example_action_file_manipulation.rb: -------------------------------------------------------------------------------- 1 | class WriteFileAction < Sublayer::Actions::Base 2 | def initialize(file_contents:, file_path:) 3 | @file_contents = file_contents 4 | @file_path = file_path 5 | end 6 | 7 | def call 8 | File.open(@file_path, 'wb') do |file| 9 | file.write(@file_contents) 10 | end 11 | end 12 | end 13 | -------------------------------------------------------------------------------- /spec/logging/null_logger_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Logging::NullLogger do 4 | let(:message) { "Test message" } 5 | let(:data) { { key: "value" } } 6 | let(:logger) { described_class.new } 7 | 8 | it "does not produce any output" do 9 | expect { 10 | logger.log(:info, message, data) 11 | }.not_to output.to_stdout 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/README.md.tt: -------------------------------------------------------------------------------- 1 | # <%= project_name %> 2 | 3 | Welcome to your new Sublayer CLI project! 
4 | 5 | ## Installation 6 | 7 | Execute: 8 | 9 | $ bundle install 10 | 11 | ## Usage 12 | 13 | To run your CLI application: 14 | 15 | ``` 16 | $ bin/<%= project_name %> 17 | ``` 18 | 19 | Available commands: 20 | - `example`: Run the example generator 21 | - `help`: Display the help message 22 | 23 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/actions/example_action.rb.tt: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= project_name.camelize %> 4 | module Actions 5 | class ExampleAction < Sublayer::Actions::Base 6 | def initialize(input:) 7 | @input = input 8 | end 9 | 10 | def call 11 | puts "Performing action with input: #{@input}" 12 | end 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/quick_script/README.md.tt: -------------------------------------------------------------------------------- 1 | # <%= project_name.camelize %> 2 | 3 | Welcome to your new Sublayer quick script project! 4 | 5 | There are example Agents, Generators, and Actions in the respective folders. 6 | 7 | ## Usage 8 | 9 | Create your own custom agents, generators, and actions and use them in 10 | `<%= project_name %>.rb` 11 | 12 | Run your script: 13 | 14 | ``` 15 | $ ruby <%= project_name %>.rb 16 | ``` 17 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/quick_script/agents/example_agent.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class ExampleAgent < Sublayer::Agents::Base 4 | trigger_on_files_changed { ["example_file.txt"] } 5 | 6 | goal_condition { @goal_reached } 7 | 8 | check_status do 9 | @status_checked = true 10 | end 11 | 12 | step do 13 | @step_taken = true 14 | @goal_reached = true 15 | puts "Example agent step executed" 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/subcommand_base.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Commands 3 | class SubCommandBase < Thor 4 | def self.banner(command, namespace = nil, subcommand = false) 5 | "#{basename} #{subcommand_prefix} #{command.usage}" 6 | end 7 | 8 | def self.subcommand_prefix 9 | self.name.gsub(%r{.*::}, '').gsub(%r{^[A-Z]}) { |match| match[0].downcase }.gsub(%r{[A-Z]}) { |match| "-#{match[0].downcase}" } 10 | end 11 | end 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/actions/github_create_pull_request_action.rb: -------------------------------------------------------------------------------- 1 | class GithubCreatePullRequestAction < Sublayer::Actions::Base 2 | def initialize(repo:, base:, head:, title:, body:) 3 | @client = Octokit::Client.new(access_token: ENV["ACCESS_TOKEN"]) 4 | @repo = repo 5 | @base = base 6 | @head = head 7 | @title = title 8 | @body = body 9 | end 10 | 11 | def call 12 | @client.create_pull_request(@repo, @base, @head, @title, @body) 13 | end 14 | end 15 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/commands/example_command.rb.tt: 
-------------------------------------------------------------------------------- 1 | module <%= project_name.camelize %> 2 | module Commands 3 | class ExampleCommand < BaseCommand 4 | def self.description 5 | "An example command that generates a story based on the command line arguments." 6 | end 7 | 8 | def execute(*args) 9 | puts <%= project_name.camelize %>::Generators::ExampleGenerator.new(input: args.join(" ")).generate 10 | end 11 | end 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/actions/github_create_branch_action.rb: -------------------------------------------------------------------------------- 1 | class GithubCreateBranchAction < Sublayer::Actions::Base 2 | def initialize(repo:, base_branch:, new_branch:) 3 | @client = Octokit::Client.new(access_token: ENV['ACCESS_TOKEN']) 4 | @repo = repo 5 | @base_branch = base_branch 6 | @new_branch = new_branch 7 | end 8 | 9 | def call 10 | ref = @client.ref(@repo, "heads/#{@base_branch}") 11 | @client.create_ref(@repo, "refs/heads/#{@new_branch}", ref.object.sha) 12 | end 13 | end -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters/single_string.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | class SingleString 5 | attr_reader :name, :description 6 | 7 | def initialize(options) 8 | @name = options[:name] 9 | @description = options[:description] 10 | end 11 | 12 | def properties 13 | [OpenStruct.new(name: @name, type: 'string', description: @description, required: true)] 14 | end 15 | end 16 | end 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/quick_script/generators/example_generator.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class ExampleGenerator < Sublayer::Generators::Base 4 | llm_output_adapter type: :single_string, 5 | name: "generated_text", 6 | description: "A simple generated text" 7 | 8 | def initialize(input:) 9 | @input = input 10 | end 11 | 12 | def generate 13 | super 14 | end 15 | 16 | def prompt 17 | <<-PROMPT 18 | Generate a simple story based on this input: #{@input} 19 | PROMPT 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/actions/github_get_diff_action.rb: -------------------------------------------------------------------------------- 1 | class GithubGetDiffAction < Sublayer::Actions::Base 2 | def initialize(repo:, pr_number:) 3 | @client = Octokit::Client.new(access_token: ENV['ACCESS_TOKEN']) 4 | @repo = repo 5 | @pr_number = pr_number 6 | end 7 | 8 | def call 9 | pr_files = @client.pull_request_files(@repo, @pr_number) 10 | pr_files.map do |file| 11 | { 12 | filename: file.filename, 13 | status: file.status, 14 | patch: file.patch 15 | } 16 | end 17 | end 18 | end -------------------------------------------------------------------------------- /lib/sublayer/logging/json_logger.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Logging 3 | class JsonLogger < Base 4 | def initialize(log_file = "./tmp/sublayer.log") 5 | @log_file = log_file 6 | end 7 | 8 | def log(level, message, data = {}) 9 | File.open(@log_file, "a") do |f| 10 | 
f.puts JSON.generate({ 11 | timestamp: Time.now.iso8601, 12 | level: level, 13 | message: message, 14 | data: data 15 | }) 16 | end 17 | end 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/cli.rb.tt: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= project_name.camelize %> 4 | class CLI < Thor 5 | <%= project_name.camelize %>::Commands.constants.reject{ |command_class| command_class == :BaseCommand }.each do |command_class| 6 | command = <%= project_name.camelize %>::Commands.const_get(command_class) 7 | desc command.command_name, command.description 8 | define_method(command.command_name) do |*args| 9 | command.new(options).execute(*args) 10 | end 11 | end 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /spec/generators/examples/four_digit_passcode_generator.rb: -------------------------------------------------------------------------------- 1 | class FourDigitPasscodeGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :single_integer, 3 | name: "four_digit_passcode", 4 | description: "an uncommon and difficult to guess four digit passcode" 5 | 6 | def initialize 7 | end 8 | 9 | def generate 10 | super 11 | end 12 | 13 | def prompt 14 | <<-PROMPT 15 | You are an expert of common four digit passcodes 16 | 17 | Provide a four digit passcode that is uncommon and hard to guess 18 | PROMPT 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /spec/triggers/base_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Triggers::Base do 4 | let(:test_trigger) { described_class.new } 5 | let(:test_agent) { double("Agent") } 6 | 7 | describe "#setup" do 8 | it "raises NotImplementedError" do 9 | expect { test_trigger.setup(test_agent) }.to raise_error(NotImplementedError) 10 | end 11 | end 12 | 13 | describe "#activate" do 14 | it "calls take_step on the agent" do 15 | expect(test_agent).to receive(:take_step) 16 | test_trigger.activate(test_agent) 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/agents/example_agent.rb.tt: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= project_name.camelize %> 4 | module Agents 5 | class ExampleAgent < Sublayer::Agents::Base 6 | trigger_on_files_changed { ["example_file.txt"] } 7 | 8 | goal_condition { @goal_reached } 9 | 10 | check_status do 11 | @status_checked = true 12 | end 13 | 14 | step do 15 | @step_taken = true 16 | @goal_reached = true 17 | puts "Example agent step executed" 18 | end 19 | end 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/commands/base_command.rb.tt: -------------------------------------------------------------------------------- 1 | module <%= project_name.camelize %> 2 | module Commands 3 | class BaseCommand 4 | def self.command_name 5 | name.split("::").last.gsub(/Command$/, '').downcase 6 | end 7 | 8 | def self.description 9 | "Description for #{command_name}" 10 | end 11 | 12 | def initialize(options) 13 | @options = 
options 14 | end 15 | 16 | def execute(*args) 17 | raise NotImplementedError, "#{self.class} must implement #execute" 18 | end 19 | end 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /spec/generators/examples/invalid_to_valid_json_generator.rb: -------------------------------------------------------------------------------- 1 | class InvalidToValidJsonGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :single_string, 3 | name: "valid_json", 4 | description: "The valid JSON string" 5 | 6 | def initialize(invalid_json:) 7 | @invalid_json = invalid_json 8 | end 9 | 10 | def generate 11 | super 12 | end 13 | 14 | def prompt 15 | <<-PROMPT 16 | You are an expert in JSON parsing. 17 | 18 | The given string is not a valid JSON: #{@invalid_json} 19 | 20 | Please fix this and produce a valid JSON. 21 | PROMPT 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/github_action/%project_name%/%project_name%.rb.tt: -------------------------------------------------------------------------------- 1 | require "base64" 2 | 3 | require "sublayer" 4 | require "octokit" 5 | 6 | # Load all Sublayer Actions, Generators, and Agents 7 | Dir[File.join(__dir__, "actions", "*.rb")].each { |file| require file } 8 | Dir[File.join(__dir__, "generators", "*.rb")].each { |file| require file } 9 | Dir[File.join(__dir__, "agents", "*.rb")].each { |file| require file } 10 | 11 | Sublayer.configuration.ai_provider = Sublayer::Providers::<%= @ai_provider %> 12 | Sublayer.configuration.ai_model = "<%= @ai_model %>" 13 | 14 | # Add custom Github Action code below: 15 | 16 | -------------------------------------------------------------------------------- /lib/sublayer/triggers/file_change.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Triggers 3 | class FileChange < Base 4 | def initialize(&block) 5 | @block = block 6 | end 7 | 8 | def setup(agent) 9 | files_to_watch = agent.instance_eval(&@block) 10 | folders = files_to_watch.map { |file| File.dirname(File.expand_path(file)) }.uniq 11 | 12 | Listen.to(*folders) do |modified, added, removed| 13 | if files_to_watch.any? 
{ |file| modified.include?(File.expand_path(file)) } 14 | activate(agent) 15 | end 16 | end.start 17 | end 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/generators/example_action_api_call.rb: -------------------------------------------------------------------------------- 1 | class TextToSpeechAction < Sublayer::Actions::Base 2 | def initialize(text) 3 | @text = text 4 | end 5 | 6 | def call 7 | speech = HTTParty.post( 8 | "https://api.openai.com/v1/audio/speech", 9 | headers: { 10 | "Authorization" => "Bearer #{ENV["OPENAI_API_KEY"]}", 11 | "Content-Type" => "application/json", 12 | }, 13 | body: { 14 | "model": "tts-1", 15 | "input": @text, 16 | "voice": "nova", 17 | "response_format": "wav" 18 | }.to_json 19 | ) 20 | 21 | speech 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters/single_integer.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | class SingleInteger 5 | attr_reader :name, :description 6 | 7 | def initialize(options) 8 | @name = options[:name] 9 | @description = options[:description] 10 | end 11 | 12 | def properties 13 | [ 14 | OpenStruct.new( 15 | name: @name, 16 | type: 'integer', 17 | description: @description, 18 | required: true 19 | ) 20 | ] 21 | end 22 | end 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/generators/example_generator.rb.tt: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= project_name.camelize %> 4 | module Generators 5 | class ExampleGenerator < Sublayer::Generators::Base 6 | llm_output_adapter type: :single_string, 7 | name: "generated_text", 8 | description: "A simple generated text" 9 | 10 | def initialize(input:) 11 | @input = input 12 | end 13 | 14 | def generate 15 | super 16 | end 17 | 18 | def prompt 19 | <<-PROMPT 20 | Generate a simple story based on this input: #{@input} 21 | PROMPT 22 | end 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /spec/logging/debug_logger_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Logging::DebugLogger do 4 | let(:message) { "Test message" } 5 | let(:data) { { key: "value" } } 6 | let(:logger) { described_class.new } 7 | 8 | it "outputs messages to stdout" do 9 | expect { 10 | logger.log(:info, message, data) 11 | }.to output(/\[.*\] INFO: #{message}/).to_stdout 12 | end 13 | 14 | it "pretty prints the data" do 15 | expected = if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new("3.4") 16 | /\{key: "value"\}/ 17 | else 18 | /\{:key=>"value"\}/ 19 | end 20 | 21 | expect { 22 | logger.log(:info, message, data) 23 | }.to output(expected).to_stdout 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters/list_of_strings.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | class ListOfStrings 5 | attr_reader :name, :description 6 | 7 | def initialize(options) 8 | @name = 
options[:name] 9 | @description = options[:description] 10 | end 11 | 12 | def properties 13 | [ 14 | OpenStruct.new( 15 | name: @name, 16 | type: 'array', 17 | description: @description, 18 | required: true, 19 | items: { 20 | type: 'string' 21 | } 22 | ) 23 | ] 24 | end 25 | end 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%/config.rb.tt: -------------------------------------------------------------------------------- 1 | module <%= project_name.camelize %> 2 | module Config 3 | def self.load 4 | config_path = File.join(File.dirname(__FILE__), "config", "sublayer.yml") 5 | 6 | if File.exist?(config_path) 7 | config = YAML.load_file(config_path) 8 | 9 | Sublayer.configure do |c| 10 | c.ai_provider = Object.const_get("Sublayer::Providers::#{config[:ai_provider]}") 11 | c.ai_model = config[:ai_model] 12 | c.logger = Sublayer::Logging::JsonLogger.new(File.join(Dir.pwd, 'log', 'sublayer.log')) 13 | end 14 | else 15 | puts "Warning: config/sublayer.yml not found. Using default configuration." 16 | end 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /spec/generators/examples/description_from_code_generator.rb: -------------------------------------------------------------------------------- 1 | class DescriptionFromCodeGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :single_string, 3 | name: "code_description", 4 | description: "A description of what the code does, its purpose, functionality, and any noteworthy details" 5 | 6 | def initialize(code:) 7 | @code = code 8 | end 9 | 10 | def generate 11 | super 12 | end 13 | 14 | def prompt 15 | <<-PROMPT 16 | You are an experienced software engineer. Below is a chunk of code: 17 | 18 | #{@code} 19 | 20 | Please read the code carefully and provide a high-level description of what this code does, including its purpose, functionalities, and any noteworthy details. 21 | PROMPT 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /spec/generators/examples/sentiment_from_text_generator.rb: -------------------------------------------------------------------------------- 1 | class SentimentFromTextGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :string_selection_from_list, 3 | name: "sentiment_value", 4 | description: "A sentiment value from the list", 5 | options: -> { @sentiment_options } 6 | 7 | def initialize(text:, sentiment_options:) 8 | @text = text 9 | @sentiment_options = sentiment_options 10 | end 11 | 12 | def generate 13 | super 14 | end 15 | 16 | def prompt 17 | <<-PROMPT 18 | You are an expert at determining sentiment from text. 19 | 20 | You are tasked with analyzing the following text and determining its sentiment value.
21 | 22 | The text is: 23 | #{@text} 24 | PROMPT 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /spec/logging/json_logger_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "tempfile" 3 | 4 | RSpec.describe Sublayer::Logging::JsonLogger do 5 | let(:message) { "Test message" } 6 | let(:data) { { key: "value" } } 7 | let(:log_file) { Tempfile.new("sublayer_test_log") } 8 | let(:logger) { described_class.new(log_file.path) } 9 | 10 | after { log_file.unlink } 11 | 12 | it "logs messages in JSON format" do 13 | logger.log(:info, message, data) 14 | 15 | log_content = File.read(log_file.path) 16 | log_entry = JSON.parse(log_content) 17 | 18 | expect(log_entry["level"]).to eq("info") 19 | expect(log_entry["message"]).to eq(message) 20 | expect(log_entry["data"]).to eq(data.stringify_keys) 21 | expect(log_entry["timestamp"]).to be_a(String) 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /spec/generators/examples/blog_post_keyword_suggestions_generator.rb: -------------------------------------------------------------------------------- 1 | class BlogPostKeywordSuggestionGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :list_of_strings, 3 | name: "suggestions", 4 | description: "List of keyword suggestions" 5 | 6 | def initialize(topic:, num_keywords: 5) 7 | @topic = topic 8 | @num_keywords = num_keywords 9 | end 10 | 11 | def generate 12 | super 13 | end 14 | 15 | def prompt 16 | <<-PROMPT 17 | You are an SEO expert tasked with suggesting keywords for a blog post. 18 | 19 | The blog post topic is: #{@topic} 20 | 21 | Please suggest #{@num_keywords} relevant keywords or key phrases for this post's topic. 22 | Each keyword or phrase should be concise and directly related to the topic. 23 | 24 | Provide your suggestions as a list of strings. 25 | PROMPT 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/generators/example_generator.rb: -------------------------------------------------------------------------------- 1 | class CodeFromDescriptionGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :single_string, 3 | name: "generated_code", 4 | description: "The generated code in the requested language" 5 | 6 | def initialize(description:, technologies:) 7 | @description = description 8 | @technologies = technologies 9 | end 10 | 11 | def generate 12 | super 13 | end 14 | 15 | def prompt 16 | <<-PROMPT 17 | You are an expert programmer in #{@technologies.join(", ")}. 18 | 19 | You are tasked with writing code using the following technologies: #{@technologies.join(", ")}. 20 | 21 | The description of the task is #{@description} 22 | 23 | Take a deep breath and think step by step before you start coding.
24 | PROMPT 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /spec/generators/examples/code_from_description_generator.rb: -------------------------------------------------------------------------------- 1 | class CodeFromDescriptionGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :single_string, 3 | name: "generated_code", 4 | description: "The generated code in the requested language" 5 | 6 | def initialize(description:, technologies:) 7 | @description = description 8 | @technologies = technologies 9 | end 10 | 11 | def generate 12 | super 13 | end 14 | 15 | def prompt 16 | <<-PROMPT 17 | You are an expert programmer in #{@technologies.join(", ")}. 18 | 19 | You are tasked with writing code using the following technologies: #{@technologies.join(", ")}. 20 | 21 | The description of the task is #{@description} 22 | 23 | Take a deep breath and think step by step before you start coding. 24 | PROMPT 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /lib/sublayer/generators/base.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Generators 3 | class Base 4 | attr_reader :results 5 | 6 | def self.llm_output_adapter(options) 7 | output_adapter = Sublayer::Components::OutputAdapters.create(options).extend(Sublayer::Components::OutputAdapters::Formattable) 8 | const_set(:OUTPUT_ADAPTER, output_adapter) 9 | end 10 | 11 | def generate 12 | self.class::OUTPUT_ADAPTER.load_instance_data(self) if self.class::OUTPUT_ADAPTER.respond_to?(:load_instance_data) 13 | 14 | raw_results = Sublayer.configuration.ai_provider.call(prompt: prompt, output_adapter: self.class::OUTPUT_ADAPTER) 15 | 16 | @results = self.class::OUTPUT_ADAPTER.respond_to?(:materialize_result) ? self.class::OUTPUT_ADAPTER.materialize_result(raw_results) : raw_results 17 | end 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /spec/generators/examples/route_selection_from_user_intent_generator.rb: -------------------------------------------------------------------------------- 1 | class RouteSelectionFromUserIntentGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :string_selection_from_list, 3 | name: "route", 4 | description: "A route selected from the list", 5 | options: :available_routes 6 | 7 | def initialize(user_intent:) 8 | @user_intent = user_intent 9 | end 10 | 11 | def generate 12 | super 13 | end 14 | 15 | def available_routes 16 | ["GET /", "GET /users", "GET /users/:id", "POST /users", "PUT /users/:id", "DELETE /users/:id"] 17 | end 18 | 19 | def prompt 20 | <<-PROMPT 21 | You are skilled at selecting routes based on user intent. 
22 | 23 | Your task is to choose a route based on the following intent: 24 | 25 | The user's intent is: 26 | #{@user_intent} 27 | PROMPT 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | attr_reader :name 5 | 6 | def self.create(options) 7 | klass = if options.has_key?(:class) 8 | klass = options[:class] 9 | if klass.is_a?(String) 10 | klass.constantize 11 | elsif klass.is_a?(Class) 12 | klass 13 | else 14 | raise "Invalid :class option" 15 | end 16 | elsif (type = options[:type]) 17 | "Sublayer::Components::OutputAdapters::#{type.to_s.camelize}".constantize 18 | else 19 | raise "Output adapter must be specified with :class or :type" 20 | end 21 | 22 | options[:name] = options[:name].to_s if options[:name].is_a?(Symbol) 23 | 24 | klass.new(options) 25 | end 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters/string_selection_from_list.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | class StringSelectionFromList 5 | attr_reader :name, :description, :options 6 | 7 | def initialize(options) 8 | @name = options[:name] 9 | @description = options[:description] 10 | @list = options[:options] 11 | end 12 | 13 | def properties 14 | [OpenStruct.new(name: @name, type: 'string', description: @description, required: true, enum: @list)] 15 | end 16 | 17 | def load_instance_data(generator) 18 | case @list 19 | when Proc 20 | @list = generator.instance_exec(&@list) 21 | when Symbol 22 | @list = generator.send(@list) 23 | else 24 | @list 25 | end 26 | end 27 | end 28 | end 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/lib/%project_name%.rb.tt: -------------------------------------------------------------------------------- 1 | require "yaml" 2 | require "thor" 3 | require "sublayer" 4 | require_relative "<%= project_name.gsub("-", "_") %>/version" 5 | require_relative "<%= project_name.gsub("-", "_") %>/config" 6 | 7 | Dir[File.join(__dir__, "<%= project_name.gsub("-", "_") %>", "commands", "*.rb")].each { |file| require file } 8 | Dir[File.join(__dir__, "<%= project_name.gsub("-", "_") %>", "generators", "*.rb")].each { |file| require file } 9 | Dir[File.join(__dir__, "<%= project_name.gsub("-", "_") %>", "actions", "*.rb")].each { |file| require file } 10 | Dir[File.join(__dir__, "<%= project_name.gsub("-", "_") %>", "agents", "*.rb")].each { |file| require file } 11 | 12 | require_relative "<%= project_name.gsub("-", "_") %>/cli" 13 | 14 | module <%= project_name.camelize %> 15 | class Error < StandardError; end 16 | Config.load 17 | 18 | def self.root 19 | File.dirname __dir__ 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/github_action/%project_name%.yml.tt: -------------------------------------------------------------------------------- 1 | name: <%= project_name %> 2 | 3 | 4 | # Add a definition of when you want this workflow to trigger here 5 | # 6 | # For example, for it to run on every pull request that contains changes to .rb files, use: 7 | # 8 | # on: 9 | # 
pull_request: 10 | # paths: 11 | # - "**/*.rb" 12 | 13 | jobs: 14 | <%= project_name.gsub("-", "_") %>: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v4.2.0 18 | - name: Set up Ruby 19 | uses: ruby/setup-ruby@v1 20 | with: 21 | ruby-version: 3.2 22 | - name: Install dependencies 23 | run: | 24 | gem install sublayer octokit 25 | - name: Run <%= project_name.gsub("-", "_") %> 26 | env: 27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 28 | <%= ai_provider_key %>: ${{ secrets.<%= ai_provider_key %> }} 29 | 30 | run: ruby .github/workflows/<%= project_name %>/<%= project_name %>.rb 31 | -------------------------------------------------------------------------------- /lib/sublayer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "zeitwerk" 4 | require 'active_support' 5 | require 'active_support/core_ext/hash/indifferent_access' 6 | require 'active_support/inflector' 7 | require 'ostruct' 8 | require "httparty" 9 | require "openai" 10 | require "listen" 11 | require "securerandom" 12 | require "time" 13 | 14 | require_relative "sublayer/version" 15 | 16 | loader = Zeitwerk::Loader.for_gem 17 | loader.inflector.inflect('open_ai' => 'OpenAI') 18 | loader.inflector.inflect("cli" => "CLI") 19 | loader.ignore("#{__dir__}/sublayer/cli") 20 | loader.ignore("#{__dir__}/sublayer/cli.rb") 21 | loader.setup 22 | 23 | module Sublayer 24 | class Error < StandardError; end 25 | 26 | def self.configuration 27 | @configuration ||= OpenStruct.new( 28 | ai_provider: Sublayer::Providers::OpenAI, 29 | ai_model: "gpt-4o", 30 | logger: Sublayer::Logging::NullLogger.new 31 | ) 32 | end 33 | 34 | def self.configure 35 | yield(configuration) if block_given? 36 | end 37 | end 38 | 39 | loader.eager_load 40 | -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters/named_strings.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | class NamedStrings 5 | attr_reader :name, :description, :attributes 6 | 7 | def initialize(options) 8 | @name = options[:name] 9 | @description = options[:description] 10 | @attributes = options[:attributes] 11 | end 12 | 13 | def properties 14 | [ 15 | OpenStruct.new( 16 | name: @name, 17 | type: "object", 18 | description: @description, 19 | required: true, 20 | properties: @attributes.map { |attribute| 21 | OpenStruct.new(type: "string", description: attribute[:description], required: true, name: attribute[:name]) 22 | } 23 | ) 24 | ] 25 | end 26 | 27 | def materialize_result(raw_result) 28 | OpenStruct.new( @attributes.map { |attribute| [attribute[:name], raw_result[attribute[:name]]] }.to_h) 29 | end 30 | end 31 | end 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /spec/generators/examples/code_from_blueprint_generator.rb: -------------------------------------------------------------------------------- 1 | class CodeFromBlueprintGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :single_string, 3 | name: "generated_code", 4 | description: "The generated code for the description" 5 | 6 | def initialize(blueprint_description:, blueprint_code:, description:) 7 | @blueprint_description = blueprint_description 8 | @blueprint_code = blueprint_code 9 | @description = description 10 | end 11 | 12 | def generate 13 | super 14 | end 15 | 16 | def prompt 
17 | <<-PROMPT 18 | You are an expert programmer and are great at looking at and understanding existing patterns and applying them to new situations. 19 | 20 | The blueprint we're working with is: #{@blueprint_description}. 21 | The code for that blueprint is: 22 | #{@blueprint_code} 23 | 24 | You need to use the blueprint above and modify it so that it satisfies the following description: 25 | #{@description} 26 | 27 | Take a deep breath and think step by step before you start coding. 28 | PROMPT 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Sublayer 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /spec/agents/examples/rspec_agent.rb: -------------------------------------------------------------------------------- 1 | class RSpecAgent < Sublayer::Agents::Base 2 | def initialize(implementation_file_path:, test_file_path:) 3 | @implementation_file_path = implementation_file_path 4 | @test_file_path = test_file_path 5 | @tests_passing = false 6 | end 7 | 8 | trigger_on_files_changed { [@implementation_file_path, @test_file_path] } 9 | 10 | goal_condition { @tests_passing == true } 11 | 12 | check_status do 13 | stdout, stderr, status = Sublayer::Actions::RunTestCommandAction.new( 14 | test_command: "rspec #{@test_file_path}" 15 | ).call 16 | 17 | @test_output = stdout 18 | @tests_passing = (status.exitstatus == 0) 19 | end 20 | 21 | step do 22 | modified_implementation = Sublayer::Generators::ModifiedImplementationToPassTestsGenerator.new( 23 | implementation_file_contents: File.read(@implementation_file_path), 24 | test_file_contents: File.read(@test_file_path), 25 | test_output: @test_output 26 | ).generate 27 | 28 | Sublayer::Actions::WriteFileAction.new( 29 | file_contents: modified_implementation, 30 | file_path: @implementation_file_path 31 | ).call 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/actions/get_context_action.rb: -------------------------------------------------------------------------------- 1 | class GetContextAction < Sublayer::Actions::Base 2 | def initialize(path:) 3 | @path = path 4 | end 5 | 6 | def call 7 | ignored_patterns = load_contextignore 8 | files = get_files(ignored_patterns) 9 | concatenate_files(files) 10 | end 11 | 12 | private 13 | 14 | def load_contextignore 15 | contextignore_path = File.join(@path, '.contextignore') 16 | return [] unless File.exist?(contextignore_path) 17 | 18 | File.readlines(contextignore_path).map(&:strip).reject do |line| 19 | line.empty? || line.start_with?('#') 20 | end 21 | end 22 | 23 | def get_files(ignored_patterns) 24 | Dir.chdir(@path) do 25 | all_files = `git ls-files`.split("\n") 26 | all_files.reject do |file| 27 | ignored_patterns.any? 
do |pattern| 28 | File.fnmatch?(pattern, file) || 29 | file.start_with?(pattern.chomp('/')) 30 | end 31 | end 32 | end 33 | end 34 | 35 | def concatenate_files(files) 36 | files.map do |file| 37 | content = File.read(File.join(@path, file)) 38 | "File: #{file}\n#{content}\n\n" 39 | end.join 40 | end 41 | end -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/generators/example_agent.rb: -------------------------------------------------------------------------------- 1 | class RSpecAgent < Sublayer::Agents::Base 2 | def initialize(implementation_file_path:, test_file_path:) 3 | @implementation_file_path = implementation_file_path 4 | @test_file_path = test_file_path 5 | @tests_passing = false 6 | end 7 | 8 | trigger_on_files_changed { [@implementation_file_path, @test_file_path] } 9 | 10 | goal_condition { @tests_passing == true } 11 | 12 | check_status do 13 | stdout, stderr, status = Sublayer::Actions::RunTestCommandAction.new( 14 | test_command: "rspec #{@test_file_path}" 15 | ).call 16 | 17 | @test_output = stdout 18 | @tests_passing = (status.exitstatus == 0) 19 | end 20 | 21 | step do 22 | modified_implementation = Sublayer::Generators::ModifiedImplementationToPassTestsGenerator.new( 23 | implementation_file_contents: File.read(@implementation_file_path), 24 | test_file_contents: File.read(@test_file_path), 25 | test_output: @test_output 26 | ).generate 27 | 28 | Sublayer::Actions::WriteFileAction.new( 29 | file_contents: modified_implementation, 30 | file_path: @implementation_file_path 31 | ).call 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /spec/generators/examples/task_steps_generator.rb: -------------------------------------------------------------------------------- 1 | class TaskStepsGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :list_of_named_strings, 3 | name: "steps", 4 | description: "List of steps to complete a task", 5 | item_name: "step", 6 | attributes: [ { name: "description", description: "A brief description of the step" }, { name: "command", description: "The command to execute for this step" } ] 7 | 8 | def initialize(task:) 9 | @task = task 10 | end 11 | 12 | def generate 13 | super 14 | end 15 | 16 | def prompt 17 | <<-PROMPT 18 | You are an expert at breaking down tasks into step-by-step instructions with associated commands. 19 | Please generate a list of steps to complete the following task: 20 | 21 | #{@task} 22 | 23 | For each step, provide: 24 | - description: A brief description of what the step accomplishes 25 | - command: The exact command to execute for this step (if applicable; use "N/A" if no command is needed) 26 | 27 | Provide your response as a list of objects, each containing the above attributes. 28 | Ensure the steps are in the correct order to complete the task successfully. 
29 | PROMPT 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /spec/triggers/file_change_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Triggers::FileChange do 4 | let(:test_agent) { double("Agent") } 5 | let(:block) { -> { ["file1.txt", "file2.txt"] } } 6 | let(:trigger) { described_class.new(&block) } 7 | 8 | describe "#initialize" do 9 | it "stores the provided block" do 10 | expect(trigger.instance_variable_get(:@block)).to eq(block) 11 | end 12 | end 13 | 14 | describe "#setup" do 15 | let(:listen_mock) { double("Listen") } 16 | let(:listener_mock) { double("Listener") } 17 | 18 | before do 19 | allow(Listen).to receive(:to).and_return(listen_mock) 20 | allow(listen_mock).to receive(:start).and_return(listener_mock) 21 | allow(test_agent).to receive(:instance_eval).and_return(["file1.txt", "file2.txt"]) 22 | allow(File).to receive(:dirname).and_return("/test/path") 23 | allow(File).to receive(:expand_path) { |file| "/test/path/#{file}" } 24 | end 25 | 26 | it "sets up a listener for the specified files" do 27 | expect(Listen).to receive(:to).with("/test/path").and_return(listen_mock) 28 | expect(listen_mock).to receive(:start) 29 | 30 | trigger.setup(test_agent) 31 | end 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /spec/generators/examples/imaginary_movie_review_generator.rb: -------------------------------------------------------------------------------- 1 | class ImaginaryMovieReviewGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :list_of_named_strings, 3 | name: "review_summaries", 4 | description: "List of movie reviews", 5 | item_name: "review", 6 | attributes: [ 7 | { name: "movie_title", description: "The title of the movie" }, 8 | { name: "reviewer_name", description: "The name of the reviewer" }, 9 | { name: "rating", description: "The rating given by the reviewer (out of 5 stars)" }, 10 | { name: "brief_comment", description: "A brief summary of the movie" } 11 | ] 12 | 13 | def initialize(num_reviews:) 14 | @num_reviews = num_reviews 15 | end 16 | 17 | def generate 18 | super 19 | end 20 | 21 | def prompt 22 | <<-PROMPT 23 | You are a movie review summarizer. 24 | Please generate #{@num_reviews} movie review summaries with the following details for each: 25 | - movie_title: The title of a movie (real or fictional) 26 | - reviewer_name: A plausible name for a movie critic 27 | - rating: A rating out of 5 stars (format: X.X) 28 | - brief_comment: A concise summary of the review (1-2 sentences) 29 | 30 | Provide your response as a list of objects, each containing the above attributes. 31 | PROMPT 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "dotenv/load" 4 | 5 | unless ENV["OPENAI_API_KEY"] && ENV["ANTHROPIC_API_KEY"] && ENV["GEMINI_API_KEY"] 6 | puts <<~EOS 7 | Some API keys are missing from the environment. 8 | You can run `bin/setup` to configure dummy API keys. 9 | If you need to add or update any VCR cassettes then you will need to use real keys. 
10 | EOS 11 | exit(1) 12 | end 13 | 14 | require "sublayer" 15 | require "pry" 16 | require "vcr" 17 | require_relative "../lib/sublayer/cli" 18 | 19 | VCR.configure do |config| 20 | config.cassette_library_dir = "spec/vcr_cassettes" 21 | config.hook_into :webmock 22 | config.filter_sensitive_data("") { ENV.fetch("OPENAI_API_KEY") } 23 | config.filter_sensitive_data("") { ENV.fetch("ANTHROPIC_API_KEY") } 24 | config.filter_sensitive_data("") { ENV.fetch("GEMINI_API_KEY") } 25 | end 26 | 27 | RSpec.configure do |config| 28 | # Enable flags like --only-failures and --next-failure 29 | config.example_status_persistence_file_path = ".rspec_status" 30 | 31 | # Disable RSpec exposing methods globally on `Module` and `main` 32 | config.disable_monkey_patching! 33 | 34 | config.expect_with :rspec do |c| 35 | c.syntax = :expect 36 | end 37 | 38 | config.after(:suite) do 39 | FileUtils.rm_rf(File.expand_path('../tmp', __dir__)) 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /.github/workflows/ruby.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | # This workflow will download a prebuilt Ruby version, install dependencies and run tests with Rake 6 | # For more information see: https://github.com/marketplace/actions/setup-ruby-jruby-and-truffleruby 7 | 8 | name: Ruby 9 | 10 | on: 11 | push: 12 | branches: [ "main" ] 13 | pull_request: 14 | branches: 15 | - "**" 16 | 17 | permissions: 18 | contents: read 19 | 20 | jobs: 21 | test: 22 | 23 | runs-on: ubuntu-latest 24 | strategy: 25 | matrix: 26 | ruby-version: ['3.2', '3.4'] 27 | 28 | steps: 29 | - uses: actions/checkout@v4 30 | - name: Set up Ruby 31 | # To automatically get bug fixes and new Ruby versions for ruby/setup-ruby, 32 | # change this to (see https://github.com/ruby/setup-ruby#versioning): 33 | uses: ruby/setup-ruby@v1 34 | with: 35 | ruby-version: ${{ matrix.ruby-version }} 36 | bundler-cache: true # runs 'bundle install' and caches installed gems automatically 37 | - name: Run tests 38 | run: bundle exec rspec 39 | env: 40 | OPENAI_API_KEY: dummy_openai_api_key 41 | ANTHROPIC_API_KEY: dummy_anthropic_api_key 42 | GEMINI_API_KEY: dummy_gemini_api_key 43 | -------------------------------------------------------------------------------- /spec/generators/examples/product_description_generator.rb: -------------------------------------------------------------------------------- 1 | class ProductDescriptionGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :named_strings, 3 | name: "product_description", 4 | description: "Generate product descriptions", 5 | attributes: [ 6 | { name: "short_description", description: "A brief one-sentence description of the product" }, 7 | { name: "long_description", description: "A detailed paragraph describing the product" }, 8 | { name: "key_features", description: "A comma-separated list of key product features" }, 9 | { name: "target_audience", description: "A brief description of the target audience for this product" } 10 | ] 11 | 12 | def initialize(product_name:, product_category:) 13 | @product_name = product_name 14 | @product_category = product_category 15 | end 16 | 17 | def generate 18 | super 19 | end 20 | 21 | def prompt 22 | <<-PROMPT 23 | You are a skilled product copywriter. 
Create compelling product descriptions for the following: 24 | 25 | Product Name: #{@product_name} 26 | Product Category: #{@product_category} 27 | 28 | Please provide the following: 29 | 1. A brief one-sentence description of the product 30 | 2. A detailed paragraph describing the product 31 | 3. A comma-separated list of key product features 32 | 4. A brief description of the target audience for this product 33 | PROMPT 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /.github/workflows/update_docs.yml: -------------------------------------------------------------------------------- 1 | name: Doc Updater 2 | on: 3 | pull_request: 4 | types: [closed] 5 | 6 | jobs: 7 | update-docs: 8 | if: github.event.pull_request.merged == true && github.event.pull_request.base.ref == 'main' 9 | runs-on: ubuntu-latest 10 | permissions: 11 | contents: write 12 | pull-requests: write 13 | steps: 14 | - name: Checkout Code Repository 15 | uses: actions/checkout@v3 16 | with: 17 | repository: sublayerapp/sublayer 18 | path: sublayer 19 | fetch-depth: 0 20 | token: ${{ secrets.ACCESS_TOKEN }} 21 | - name: Checkout Documentation Repository 22 | uses: actions/checkout@v3 23 | with: 24 | repository: sublayerapp/sublayer_documentation 25 | path: sublayer_documentation 26 | ref: main 27 | fetch-depth: 0 28 | token: ${{ secrets.ACCESS_TOKEN }} 29 | - name: Set up Ruby 30 | uses: ruby/setup-ruby@v1 31 | with: 32 | ruby-version: 3.2 33 | - name: Install dependencies 34 | run: | 35 | gem install sublayer octokit 36 | - name: Update the docs 37 | env: 38 | ACCESS_TOKEN: ${{ secrets.ACCESS_TOKEN }} 39 | GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }} 40 | PR_NUMBER: ${{ github.event.pull_request.number }} 41 | run: ruby sublayer/.github/workflows/github_actions/update_docs.rb 42 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/actions/github_add_or_modify_file_action.rb: -------------------------------------------------------------------------------- 1 | class GithubAddOrModifyFileAction < Sublayer::Actions::Base 2 | def initialize(repo:, branch:, file_path:, file_content:) 3 | @client = Octokit::Client.new(access_token: ENV['ACCESS_TOKEN']) 4 | @repo = repo 5 | @branch = branch 6 | @file_path = file_path 7 | @file_content = file_content 8 | end 9 | 10 | def call 11 | begin 12 | # Try to fetch the file contents to get its SHA 13 | content = @client.contents(@repo, path: @file_path, ref: @branch) 14 | 15 | # If the file exists, update it 16 | @client.update_contents( 17 | @repo, 18 | @file_path, 19 | "Updating #{@file_path}", 20 | content.sha, 21 | @file_content, 22 | branch: @branch 23 | ) 24 | puts "File updated: #{@file_path}" 25 | rescue Octokit::NotFound 26 | # If the file does not exist, create it instead 27 | @client.create_contents( 28 | @repo, 29 | @file_path, 30 | "Creating #{@file_path}", 31 | @file_content, 32 | branch: @branch 33 | ) 34 | puts "File created: #{@file_path}" 35 | end 36 | end 37 | end -------------------------------------------------------------------------------- /spec/components/output_adapters/single_string_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper"
2 | 3 | RSpec.describe Sublayer::Components::OutputAdapters::SingleString do 4 | let(:name) { 'output_adapter_name' } 5 | let(:description) { 'output_adapter_description' } 6 | let(:output_adapter) { Sublayer::Components::OutputAdapters::SingleString.new(name: name, description: description) } 7 | 8 | describe '#name' do 9 | it 'returns the name' do 10 | expect(output_adapter.name).to eq(name) 11 | end 12 | end 13 | 14 | describe '#description' do 15 | it 'returns the description' do 16 | expect(output_adapter.description).to eq(description) 17 | end 18 | end 19 | 20 | describe '#properties' do 21 | it 'returns an array with a single property' do 22 | expect(output_adapter.properties).to be_an(Array) 23 | expect(output_adapter.properties.length).to eq(1) 24 | expect(output_adapter.properties.first).to be_an(OpenStruct) 25 | end 26 | 27 | describe 'the first property' do 28 | let(:property) { output_adapter.properties.first } 29 | 30 | it 'has a name' do 31 | expect(property.name).to eq(name) 32 | end 33 | 34 | it 'has a type' do 35 | expect(property.type).to eq('string') 36 | end 37 | 38 | it 'has a description' do 39 | expect(property.description).to eq(description) 40 | end 41 | 42 | it 'is required' do 43 | expect(property.required).to eq(true) 44 | end 45 | end 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /spec/components/output_adapters/single_integer_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Components::OutputAdapters::SingleInteger do 4 | let(:name) { 'output_adapter_name' } 5 | let(:description) { 'output_adapter_description' } 6 | let(:output_adapter) { Sublayer::Components::OutputAdapters::SingleInteger.new(name: name, description: description) } 7 | 8 | describe '#name' do 9 | it 'returns the name' do 10 | expect(output_adapter.name).to eq(name) 11 | end 12 | end 13 | 14 | describe '#description' do 15 | it 'returns the description' do 16 | expect(output_adapter.description).to eq(description) 17 | end 18 | end 19 | 20 | describe '#properties' do 21 | it 'returns an array with a single property' do 22 | expect(output_adapter.properties).to be_an(Array) 23 | expect(output_adapter.properties.length).to eq(1) 24 | expect(output_adapter.properties.first).to be_an(OpenStruct) 25 | end 26 | 27 | describe 'the first property' do 28 | let(:property) { output_adapter.properties.first } 29 | 30 | it 'has a name' do 31 | expect(property.name).to eq(name) 32 | end 33 | 34 | it 'has a type' do 35 | expect(property.type).to eq('integer') 36 | end 37 | 38 | it 'has a description' do 39 | expect(property.description).to eq(description) 40 | end 41 | 42 | it 'is required' do 43 | expect(property.required).to eq(true) 44 | end 45 | end 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /lib/sublayer/agents/base.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Agents 3 | class Base 4 | class << self 5 | attr_reader :triggers, :goal_condition_block, :check_status_block, :step_block, :listeners 6 | 7 | def trigger(trigger_instance = nil) 8 | @triggers ||= [] 9 | 10 | if trigger_instance 11 | @triggers << trigger_instance 12 | else 13 | raise ArgumentError, "Either a trigger instance or a block must be provided" 14 | end 15 | end 16 | 17 | def trigger_on_files_changed(&block) 18 | 
trigger(Triggers::FileChange.new(&block)) 19 | end 20 | 21 | def goal_condition(&block) 22 | @goal_condition_block = block 23 | end 24 | 25 | def check_status(&block) 26 | @check_status_block = block 27 | end 28 | 29 | def step(&block) 30 | @step_block = block 31 | end 32 | end 33 | 34 | def run 35 | setup_triggers 36 | take_step 37 | sleep 38 | end 39 | 40 | private 41 | 42 | def setup_triggers 43 | @listeners = [] 44 | 45 | self.class.triggers.each do |trigger| 46 | listener = trigger.setup(self) 47 | @listeners << listener if listener 48 | end 49 | end 50 | 51 | def take_step 52 | instance_eval(&self.class.check_status_block) 53 | instance_eval(&self.class.step_block) unless instance_eval(&self.class.goal_condition_block) 54 | end 55 | end 56 | end 57 | end 58 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/generators/doc_update_necessity_generator.rb: -------------------------------------------------------------------------------- 1 | class DocUpdateNecessityGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :named_strings, 3 | name: "doc_update_score", 4 | description: "boolean and confidence score indicating if documentation changes are needed", 5 | attributes: [ 6 | { name: "reasoning", description: "brief explanation for whether or not doc changes are needed" }, 7 | { name: "confidence", description: "number from 0 to 1 indicating confidence in the decision" }, 8 | { name: "needs_update", description: "boolean indicating if updates are needed" } 9 | ] 10 | 11 | def initialize(doc_context:, code_context:, diff:) 12 | @doc_context = doc_context 13 | @code_context = code_context 14 | @diff = diff 15 | end 16 | 17 | def generate 18 | super 19 | end 20 | 21 | def prompt 22 | <<~PROMPT 23 | Code diff: 24 | #{@diff} 25 | 26 | Code context: 27 | #{@code_context} 28 | 29 | Documentation context: 30 | #{@doc_context} 31 | 32 | Given the above code diff and context, determine if documentation updates are necessary. 33 | Consider the following factors: 34 | 1. The significance of the changes 35 | 2. Whether the changes affect public APIs or user-facing features 36 | 3. If the changes introduce new concepts or modify existing ones 37 | 4. Whether the current documentation accurately reflects the changes 38 | 39 | Based on this information, are documentation updates necessary? 40 | PROMPT 41 | end 42 | end 43 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/new_project.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Commands 3 | class NewProject < Thor::Group 4 | include Thor::Actions 5 | 6 | argument :project_name, type: :string, desc: "The name of your project" 7 | 8 | class_option :template, type: :string, desc: "Type of project (CLI, GithubAction, QuickScript)", aliases: :t 9 | class_option :provider, type: :string, desc: "AI provider (OpenAI, Claude, or Gemini)", aliases: :p 10 | class_option :model, type: :string, desc: "AI model name to use (e.g. 
gpt-4o, claude-3-haiku-20240307, gemini-1.5-flash-latest)", aliases: :m 11 | 12 | def sublayer_version 13 | Sublayer::VERSION 14 | end 15 | 16 | def self.source_root 17 | File.dirname(__FILE__) 18 | end 19 | 20 | def self.banner 21 | "sublayer new PROJECT_NAME" 22 | end 23 | 24 | def create_project 25 | @project_template = options[:template] || ask("Select a project template:", default: "CLI", limited_to: %w[CLI GithubAction QuickScript]) 26 | 27 | case @project_template.downcase 28 | when 'cli' 29 | invoke Commands::CLIProject, [project_name], options 30 | when 'githubaction', 'github_action' 31 | invoke Commands::GithubActionProject, [project_name], options 32 | when 'quickscript', 'quick_script' 33 | invoke Commands::QuickScriptProject, [project_name], options 34 | else 35 | say "Unknown project template: #{@project_template}", :red 36 | exit 1 37 | end 38 | end 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters/list_of_named_strings.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | class ListOfNamedStrings 5 | attr_reader :name, :description, :attributes, :item_name 6 | 7 | def initialize(options) 8 | @name = options[:name] 9 | @item_name = options[:item_name] 10 | @description = options[:description] 11 | @attributes = options[:attributes] 12 | end 13 | 14 | def properties 15 | [ 16 | OpenStruct.new( 17 | name: @name, 18 | type: "array", 19 | description: @description, 20 | required: true, 21 | items: OpenStruct.new( 22 | type: "object", 23 | description: "a single #{@item_name}", 24 | name: @item_name, 25 | properties: @attributes.map do |attribute| 26 | OpenStruct.new( 27 | type: "string", 28 | name: attribute[:name], 29 | description: attribute[:description], 30 | required: true 31 | ) 32 | end 33 | ) 34 | ) 35 | ] 36 | end 37 | 38 | def materialize_result(raw_results) 39 | raw_results.map do |raw_result| 40 | OpenStruct.new( 41 | @attributes.map { |attribute| [attribute[:name], raw_result[attribute[:name]]] }.to_h 42 | ) 43 | end 44 | end 45 | end 46 | end 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /lib/sublayer/cli/templates/cli/%project_name%.gemspec.tt: -------------------------------------------------------------------------------- 1 | require_relative "lib/<%= project_name.gsub("-", "_") %>/version" 2 | 3 | Gem::Specification.new do |spec| 4 | spec.name = "<%= project_name %>" 5 | spec.version = <%= project_name.camelize %>::VERSION 6 | spec.authors = ["Your Name"] 7 | spec.email = ["your.email@example.com"] 8 | 9 | spec.summary = "Summary of your project" 10 | spec.description = "Longer description of your project" 11 | spec.homepage = "https://github.com/yourusername/<%= project_name %>" 12 | spec.license = "MIT" 13 | spec.required_ruby_version = ">= 2.6.0" 14 | 15 | spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'" 16 | 17 | spec.metadata["homepage_uri"] = spec.homepage 18 | spec.metadata["source_code_uri"] = "https://github.com/yourusername/<%= project_name %>" 19 | spec.metadata["changelog_uri"] = "https://github.com/yourusername/<%= project_name %>/blob/master/CHANGELOG.md" 20 | 21 | # Specify which files should be added to the gem when it is released. 22 | # The `git ls-files -z` loads the files in the RubyGem that have been added into git. 
23 | spec.files = Dir.chdir(File.expand_path(__dir__)) do 24 | `git ls-files -z`.split("\x0").reject do |f| 25 | (f == __FILE__) || f.match(%r{\A(?:(?:test|spec|features)/|\.(?:git|travis|circleci)|appveyor)}) 26 | end 27 | end 28 | spec.bindir = "bin" 29 | spec.executables = spec.files.grep(%r{\Abin/}) { |f| File.basename(f) } 30 | spec.require_paths = ["lib"] 31 | 32 | # Add dependencies here 33 | spec.add_dependency "sublayer", "~> <%= sublayer_version %>" 34 | spec.add_dependency "thor", "~> 1.2" 35 | end 36 | -------------------------------------------------------------------------------- /spec/components/output_adapters/list_of_strings_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Components::OutputAdapters::ListOfStrings do 4 | describe "#initialize" do 5 | it "sets the name and description" do 6 | adapter = Sublayer::Components::OutputAdapters::ListOfStrings.new(name: "test_list", description: "A test list of strings") 7 | 8 | expect(adapter.name).to eq("test_list") 9 | expect(adapter.description).to eq("A test list of strings") 10 | end 11 | end 12 | 13 | describe "#properties" do 14 | it "returns an array with one item" do 15 | adapter = Sublayer::Components::OutputAdapters::ListOfStrings.new(name: "test_list", description: "A test list of strings") 16 | 17 | expect(adapter.properties).to be_an(Array) 18 | expect(adapter.properties.size).to eq(1) 19 | end 20 | 21 | it "returns an OpenStruct object" do 22 | adapter = Sublayer::Components::OutputAdapters::ListOfStrings.new(name: "test_list", description: "A test list of strings") 23 | 24 | expect(adapter.properties.first).to be_an(OpenStruct) 25 | end 26 | 27 | it "has the correct attributes" do 28 | adapter = Sublayer::Components::OutputAdapters::ListOfStrings.new(name: "test_list", description: "A test list of strings") 29 | 30 | expect(adapter.properties.first.name).to eq("test_list") 31 | expect(adapter.properties.first.type).to eq("array") 32 | expect(adapter.properties.first.description).to eq("A test list of strings") 33 | expect(adapter.properties.first.required).to eq(true) 34 | expect(adapter.properties.first.items).to eq( {type: "string"} ) 35 | end 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /spec/generators/four_digit_passcode_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/four_digit_passcode_generator" 4 | 5 | RSpec.describe FourDigitPasscodeGenerator do 6 | def generate 7 | described_class.new.generate 8 | end 9 | 10 | context "OpenAI" do 11 | before do 12 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 13 | Sublayer.configuration.ai_model = "gpt-4o-2024-08-06" 14 | end 15 | 16 | it "generates an integer" do 17 | VCR.use_cassette("openai/generators/four_digit_passcode_generator/find_number") do 18 | result = generate 19 | expect(result).to be_an_instance_of(Integer) 20 | end 21 | end 22 | end 23 | 24 | context "Claude" do 25 | before do 26 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 27 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 28 | end 29 | 30 | it "generates an integer" do 31 | VCR.use_cassette("claude/generators/four_digit_passcode_generator/find_number") do 32 | result = generate 33 | expect(result).to be_an_instance_of(Integer) 34 | end 35 | end 36 |
end 37 | 38 | context "Gemini" do 39 | before do 40 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 41 | Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 42 | end 43 | 44 | it "generates an integer" do 45 | VCR.use_cassette("gemini/generators/four_digit_passcode_generator/find_number") do 46 | result = generate 47 | expect(result).to be_an_instance_of(Integer) 48 | end 49 | end 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /lib/sublayer/components/output_adapters/formattable.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Components 3 | module OutputAdapters 4 | module Formattable 5 | def format_properties 6 | build_json_schema(self.properties) 7 | end 8 | 9 | def build_json_schema(props) 10 | formatted_properties = {} 11 | 12 | props.map do |prop| 13 | formatted_property = format_property(prop) 14 | formatted_properties[prop.name.to_sym] = formatted_property 15 | end 16 | 17 | formatted_properties 18 | end 19 | 20 | def format_property(property) 21 | result = { 22 | type: property.type, 23 | description: property.description 24 | } 25 | 26 | result[:enum] = property.enum if property.respond_to?(:enum) && property.enum 27 | result[:default] = property.default if property.respond_to?(:default) && !property.default.nil? 28 | result[:minimum] = property.minimum if property.respond_to?(:minimum) && !property.minimum.nil? 29 | result[:maximum] = property.maximum if property.respond_to?(:maximum) && !property.maximum.nil? 30 | 31 | case property.type 32 | when 'array' 33 | result[:items] = property.items.is_a?(OpenStruct) ? format_property(property.items) : property.items 34 | when 'object' 35 | result[:properties] = build_json_schema(property.properties) if property.properties 36 | result[:required] = property.properties.select(&:required).map(&:name) if property.properties 37 | end 38 | 39 | result 40 | end 41 | 42 | def format_required 43 | self.properties.select(&:required).map(&:name) 44 | end 45 | end 46 | end 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /spec/providers/gemini_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Providers::Gemini do 4 | let(:basic_output_adapter) { 5 | Sublayer::Components::OutputAdapters.create( 6 | type: :single_string, 7 | name: "the_answer", 8 | description: "The answer to the given question" 9 | ).extend(Sublayer::Components::OutputAdapters::Formattable) 10 | } 11 | 12 | before do 13 | Sublayer.configuration.ai_provider = described_class 14 | Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 15 | end 16 | 17 | describe "#call" do 18 | it "calls the Gemini API" do 19 | VCR.use_cassette("gemini/42") do 20 | response = described_class.call( 21 | prompt: "What is the meaning of life, the universe, and everything?", 22 | output_adapter: basic_output_adapter 23 | ) 24 | 25 | expect(response).to be_a(String) 26 | expect(response.length).to be > 0 27 | end 28 | end 29 | 30 | context "logging" do 31 | let(:mock_logger) { instance_double(Sublayer::Logging::Base) } 32 | 33 | before do 34 | Sublayer.configuration.logger = mock_logger 35 | end 36 | 37 | after do 38 | Sublayer.configuration.logger = Sublayer::Logging::NullLogger.new 39 | end 40 | 41 | it "logs the request and response" do 42 | expect(mock_logger).to
receive(:log).with(:info, "Gemini API request", hash_including(:model, :prompt)) 43 | expect(mock_logger).to receive(:log).with(:info, "Gemini API response", instance_of(Hash)) 44 | 45 | VCR.use_cassette("gemini/42") do 46 | described_class.call( 47 | prompt: "What is the meaning of life, the universe, and everything?", 48 | output_adapter: basic_output_adapter 49 | ) 50 | end 51 | end 52 | end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /spec/generators/blog_post_keyword_suggestions_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/blog_post_keyword_suggestions_generator" 4 | 5 | RSpec.describe BlogPostKeywordSuggestionGenerator do 6 | let(:topic) { "Artificial Intelligence in Healthcare" } 7 | let(:num_keywords) { 5 } 8 | 9 | subject { described_class.new(topic: topic, num_keywords: num_keywords) } 10 | 11 | context "claude" do 12 | before do 13 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 14 | Sublayer.configuration.ai_model = "claude-3-5-sonnet-20240620" 15 | end 16 | 17 | it "generates keyword suggestions for a blog post" do 18 | VCR.use_cassette("claude/generators/blog_post_keyword_suggestions_generator/ai_in_healthcare") do 19 | keywords = subject.generate 20 | expect(keywords).to be_an_instance_of(Array) 21 | end 22 | end 23 | end 24 | 25 | context "openai" do 26 | before do 27 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 28 | Sublayer.configuration.ai_model = "gpt-4o" 29 | end 30 | 31 | it "generates keyword suggestions for a blog post" do 32 | VCR.use_cassette("openai/generators/blog_post_keyword_suggestions_generator/ai_in_healthcare") do 33 | keywords = subject.generate 34 | expect(keywords).to be_an_instance_of(Array) 35 | end 36 | end 37 | end 38 | 39 | context "gemini" do 40 | before do 41 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 42 | Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 43 | end 44 | 45 | it "generates keyword suggestions for a blog post" do 46 | VCR.use_cassette("gemini/generators/blog_post_keyword_suggestions_generator/ai_in_healthcare") do 47 | keywords = subject.generate 48 | expect(keywords).to be_an_instance_of(Array) 49 | end 50 | end 51 | end 52 | end 53 | -------------------------------------------------------------------------------- /spec/generators/invalid_to_valid_json_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/invalid_to_valid_json_generator" 4 | 5 | RSpec.describe InvalidToValidJsonGenerator do 6 | def generate(json) 7 | described_class.new(invalid_json: json).generate 8 | end 9 | 10 | context "Claude" do 11 | before do 12 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 13 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 14 | end 15 | 16 | it "does nothing to valid JSON" do 17 | VCR.use_cassette("claude/generators/invalid_to_valid_json_generator/valid_json") do 18 | json = %q({"valid": "json"}) 19 | valid_json = generate(json) 20 | expect(valid_json).to eq json 21 | end 22 | end 23 | 24 | it "converts invalid JSON to valid JSON" do 25 | VCR.use_cassette("claude/generators/invalid_to_valid_json_generator/invalid_json") do 26 | invalid_json = %q({invalid: "json"}) 27 | valid_json = generate(invalid_json) 28 | 29 | expect{ JSON.parse(valid_json) 
}.to_not raise_error 30 | end 31 | end 32 | end 33 | 34 | context "OpenAI" do 35 | before do 36 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 37 | Sublayer.configuration.ai_model = "gpt-4-turbo" 38 | end 39 | 40 | it "does nothing to valid JSON" do 41 | VCR.use_cassette("openai/generators/invalid_to_valid_json_generator/valid_json") do 42 | json = %q({"valid": "json"}) 43 | valid_json = generate(json) 44 | expect(valid_json).to eq json 45 | end 46 | end 47 | 48 | it "converts invalid JSON to valid JSON" do 49 | VCR.use_cassette("openai/generators/invalid_to_valid_json_generator/invalid_json") do 50 | invalid_json = %q({invalid: "json"}) 51 | valid_json = generate(invalid_json) 52 | expect(valid_json).to eq %q({"invalid": "json"}) 53 | end 54 | end 55 | 56 | end 57 | end 58 | -------------------------------------------------------------------------------- /spec/agents/base_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Agents::Base do 4 | let(:test_agent_class) do 5 | Class.new(described_class) do 6 | trigger_on_files_changed { ["test_file.txt"] } 7 | goal_condition { @goal_reached } 8 | check_status { @status_checked = true } 9 | step { @step_taken = true } 10 | 11 | attr_accessor :goal_reached, :status_checked, :step_taken 12 | end 13 | end 14 | 15 | let(:agent) { test_agent_class.new } 16 | 17 | describe ".trigger" do 18 | it "adds triggers to the agent class" do 19 | expect(test_agent_class.triggers.size).to eq(1) 20 | end 21 | 22 | it "accepts custom trigger instances" do 23 | custom_trigger = Sublayer::Triggers::Base.new 24 | 25 | test_agent_class.trigger(custom_trigger) 26 | expect(test_agent_class.triggers.last).to eq(custom_trigger) 27 | end 28 | end 29 | 30 | describe "#run" do 31 | before do 32 | allow(Listen).to receive(:to).and_return(double(start: true)) 33 | allow(agent).to receive(:sleep) 34 | end 35 | 36 | it "sets up triggers and takes a step" do 37 | expect(agent).to receive(:setup_triggers) 38 | expect(agent).to receive(:take_step) 39 | expect(agent).to receive(:sleep) 40 | 41 | agent.run 42 | end 43 | end 44 | 45 | describe "#take_step" do 46 | context "when goal is not reached" do 47 | before { agent.goal_reached = false } 48 | 49 | it "checks status and takes a step" do 50 | agent.send(:take_step) 51 | expect(agent.status_checked).to be true 52 | expect(agent.step_taken).to be true 53 | end 54 | end 55 | 56 | context "when the goal is reached" do 57 | before { agent.goal_reached = true } 58 | 59 | it "checks status but does not take a step" do 60 | agent.send(:take_step) 61 | expect(agent.status_checked).to be true 62 | expect(agent.step_taken).to be_nil 63 | end 64 | end 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /spec/generators/sentiment_from_text_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/sentiment_from_text_generator" 4 | 5 | RSpec.describe SentimentFromTextGenerator do 6 | def generate(text) 7 | described_class.new(text: text, sentiment_options: %w[positive negative neutral]).generate 8 | end 9 | 10 | context "OpenAI" do 11 | before do 12 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 13 | Sublayer.configuration.ai_model = "gpt-4-turbo" 14 | end 15 | 16 | it "generates a sentiment value from the text" do 17 | 
VCR.use_cassette("openai/generators/sentiment_from_text_generator/positive") do 18 | text = "Matz is nice so we are nice" 19 | sentiment_value = generate(text) 20 | expect(["positive", "negative", "neutral"]).to include(sentiment_value) 21 | end 22 | end 23 | end 24 | 25 | context "Claude" do 26 | before do 27 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 28 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 29 | end 30 | 31 | it "generates a sentiment value from the text" do 32 | VCR.use_cassette("claude/generators/sentiment_from_text_generator/positive") do 33 | text = "Matz is nice so we are nice" 34 | sentiment_value = generate(text) 35 | expect(["positive", "negative", "neutral"]).to include(sentiment_value) 36 | end 37 | end 38 | end 39 | 40 | context "Gemini" do 41 | before do 42 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 43 | Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 44 | end 45 | 46 | it "generates a sentiment value from the text" do 47 | VCR.use_cassette("gemini/generators/sentiment_from_text_generator/positive") do 48 | text = "Matz is nice so we are nice" 49 | sentiment_value = generate(text) 50 | expect(["positive", "negative", "neutral"]).to include(sentiment_value) 51 | end 52 | end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /sublayer.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "lib/sublayer/version" 4 | 5 | Gem::Specification.new do |spec| 6 | spec.name = "sublayer" 7 | spec.version = Sublayer::VERSION 8 | spec.authors = ["Scott Werner"] 9 | spec.email = ["scott@sublayer.com"] 10 | spec.license = "MIT" 11 | 12 | spec.summary = "A model-agnostic Ruby GenerativeAI DSL and Framework" 13 | spec.description = "A DSL and framework for building AI powered applications through the use of Generators, Actions, Tasks, and Agents" 14 | spec.homepage = "https://docs.sublayer.com" 15 | spec.required_ruby_version = ">= 3.2.0" 16 | 17 | spec.metadata["homepage_uri"] = "https://docs.sublayer.com" 18 | spec.metadata["documentation_uri"] = "https://docs.sublayer.com" 19 | spec.metadata["bug_tracker_uri"] = "https://github.com/sublayerapp/sublayer/issues" 20 | spec.metadata["source_code_uri"] = "https://github.com/sublayerapp/sublayer" 21 | 22 | # Specify which files should be added to the gem when it is released. 23 | # The `git ls-files -z` loads the files in the RubyGem that have been added into git. 
24 | spec.files = Dir.chdir(__dir__) do 25 | `git ls-files -z`.split("\x0").reject do |f| 26 | (File.expand_path(f) == __FILE__) || 27 | f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor Gemfile]) 28 | end 29 | end 30 | 31 | spec.require_paths = ["lib"] 32 | spec.bindir = "bin" 33 | spec.executables = ["sublayer"] 34 | 35 | spec.add_dependency "ruby-openai" 36 | spec.add_dependency "activesupport" 37 | spec.add_dependency "zeitwerk" 38 | spec.add_dependency "httparty" 39 | spec.add_dependency "listen" 40 | spec.add_dependency "ostruct" 41 | spec.add_dependency "thor" 42 | 43 | spec.add_development_dependency "dotenv", "~> 3.1" 44 | spec.add_development_dependency "rake" 45 | spec.add_development_dependency "rspec", "~> 3.12" 46 | spec.add_development_dependency "pry", "~> 0.14" 47 | spec.add_development_dependency "vcr", "~> 6.0" 48 | spec.add_development_dependency "webmock", "~> 3" 49 | end 50 | -------------------------------------------------------------------------------- /spec/generators/route_selection_from_user_intent_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/route_selection_from_user_intent_generator" 4 | 5 | RSpec.describe RouteSelectionFromUserIntentGenerator do 6 | def generate(text) 7 | described_class.new(user_intent: text).generate 8 | end 9 | 10 | def available_routes 11 | described_class.new(user_intent: "").available_routes 12 | end 13 | 14 | context "OpenAI" do 15 | before do 16 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 17 | Sublayer.configuration.ai_model = "gpt-4-turbo" 18 | end 19 | 20 | it "selects a route based on the user's intent" do 21 | VCR.use_cassette("openai/generators/route_selection_from_user_intent_generator/route") do 22 | user_intent = "I want to get all the users" 23 | route = generate(user_intent) 24 | expect(available_routes).to include(route) 25 | end 26 | end 27 | end 28 | 29 | context "Claude" do 30 | before do 31 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 32 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 33 | end 34 | 35 | it "selects a route based on the user's intent" do 36 | VCR.use_cassette("claude/generators/route_selection_from_user_intent_generator/route") do 37 | user_intent = "I want to get all the users" 38 | route = generate(user_intent) 39 | expect(available_routes).to include(route) 40 | end 41 | end 42 | end 43 | 44 | context "Gemini" do 45 | before do 46 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 47 | Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 48 | end 49 | 50 | it "selects a route based on the user's intent" do 51 | VCR.use_cassette("gemini/generators/route_selection_from_user_intent_generator/route") do 52 | user_intent = "I want to get all the users" 53 | route = generate(user_intent) 54 | expect(available_routes).to include(route) 55 | end 56 | end 57 | end 58 | end 59 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/generators/sublayer_command_generator.rb: -------------------------------------------------------------------------------- 1 | class SublayerCommandGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :named_strings, 3 | name: "sublayer_command", 4 | description: "The new command code based on the generator", 5 | attributes: [ 6 | { name: "class_name", description: "The class name of the command" }, 7 
| { name: "description", description: "The description of the command" }, 8 | { name: "execute_body", description: "The code inside the execute method" }, 9 | { name: "filename", description: "The filename of the command, snake_cased with a .rb extension" } 10 | ] 11 | 12 | def initialize(generator_code:) 13 | @generator_code = generator_code 14 | end 15 | 16 | def generate 17 | super 18 | end 19 | 20 | def prompt 21 | <<-PROMPT 22 | You are an expert Ruby developer. 23 | 24 | Given the following Sublayer generator code: 25 | 26 | #{@generator_code} 27 | 28 | Please generate a Thor command class that interacts with this generator. The command should: 29 | 30 | - Be a subclass of `BaseCommand`. 31 | - Include a descriptive class name. 32 | - Provide a description for the command. 33 | - Implement an `execute` method that accepts appropriate arguments and invokes the generator. 34 | 35 | Provide the class name, description, execute method body, and filename for the command. 36 | 37 | These parameters will be used in a template to create the command file. The template is: 38 | module <%= project_name.camelize %> 39 | module Commands 40 | class <%= command_class_name %> < BaseCommand 41 | def self.description 42 | "<%= command_description %>" 43 | end 44 | 45 | def execute(*args) 46 | <%= command_execute_body %> 47 | end 48 | end 49 | end 50 | end 51 | 52 | Take into account any parameters the generator requires and map them to command-line arguments. 53 | PROMPT 54 | end 55 | end -------------------------------------------------------------------------------- /spec/components/output_adapters/named_strings_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Components::OutputAdapters::NamedStrings do 4 | let(:name) { 'test_adapter' } 5 | let(:description) { 'Test adapter description' } 6 | 7 | let(:attributes) do 8 | [ 9 | { name: "field1", description: "Description of field1" }, 10 | { name: "field2", description: "Description of field2" } 11 | ] 12 | end 13 | 14 | let(:output_adapter) { described_class.new(name: name, description: description, attributes: attributes) } 15 | 16 | describe "#initialize" do 17 | it "sets the name, description, and attributes" do 18 | expect(output_adapter.name).to eq(name) 19 | expect(output_adapter.description).to eq(description) 20 | expect(output_adapter.attributes).to eq(attributes) 21 | end 22 | end 23 | 24 | describe "#properties" do 25 | it "returns an array with one OpenStruct object" do 26 | properties = output_adapter.properties 27 | expect(properties).to be_an(Array) 28 | expect(properties.size).to eq(1) 29 | expect(properties.first).to be_an(OpenStruct) 30 | end 31 | 32 | it "sets the correct attributes for the main property" do 33 | property = output_adapter.properties.first 34 | expect(property.name).to eq(name) 35 | expect(property.description).to eq(description) 36 | expect(property.required).to eq(true) 37 | expect(property.type).to eq("object") 38 | end 39 | 40 | it "sets the correct nested properties" do 41 | nested_properties = output_adapter.properties.first.properties 42 | expect(nested_properties[0].name).to eq("field1") 43 | expect(nested_properties[0].type).to eq("string") 44 | expect(nested_properties[0].description).to eq("Description of field1") 45 | expect(nested_properties[0].required).to eq(true) 46 | expect(nested_properties[1].name).to eq("field2") 47 | expect(nested_properties[1].type).to eq("string") 48 | 
expect(nested_properties[1].description).to eq("Description of field2") 49 | expect(nested_properties[1].required).to eq(true) 50 | end 51 | end 52 | end 53 | -------------------------------------------------------------------------------- /spec/integration/quick_script_creation_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "fileutils" 3 | require "open3" 4 | require "tmpdir" 5 | 6 | TMP_DIR = ENV['RUNNER_TEMP'] || Dir.tmpdir 7 | 8 | RSpec.describe "Quick Script Project Creation" do 9 | let(:project_name) { "test_project" } 10 | let(:project_path) { File.join(TMP_DIR, project_name) } 11 | 12 | before(:all) do 13 | FileUtils.mkdir_p(TMP_DIR) 14 | end 15 | 16 | after(:all) do 17 | FileUtils.rm_rf(TMP_DIR) 18 | end 19 | 20 | after(:each) do 21 | FileUtils.rm_rf(project_path) 22 | end 23 | 24 | it "creates a new project with all the expected files and structures" do 25 | command = "ruby -I lib #{File.dirname(__FILE__)}/../../bin/sublayer new #{project_name} --template quick_script" 26 | input = "OpenAI\ngpt-4o\nn\n\n" 27 | 28 | output, status = Open3.capture2e(command, chdir: TMP_DIR, stdin_data: input) 29 | 30 | expect(status.success?).to be true 31 | expect(output).to include("Sublayer project '#{project_name}' created successfully!") 32 | 33 | expect(Dir.exist?(project_path)).to be true 34 | 35 | %w[ 36 | test_project.rb 37 | agents/example_agent.rb 38 | generators/example_generator.rb 39 | actions/example_action.rb 40 | README.md 41 | ].each do |file| 42 | expect(File.exist?(File.join(project_path, file))).to be true 43 | end 44 | end 45 | 46 | it "correctly configures AI provider and model in the generated files" do 47 | command = "ruby -I lib #{File.dirname(__FILE__)}/../../bin/sublayer new #{project_name} --template quick_script" 48 | input = "OpenAI\ngpt-4o\nn\n\n" 49 | 50 | output, status = Open3.capture2e(command, chdir: TMP_DIR, stdin_data: input) 51 | expect(status.success?).to be true 52 | 53 | # Check the Ruby file has the correct AI configuration 54 | rb_file_path = File.join(project_path, "#{project_name}.rb") 55 | expect(File.exist?(rb_file_path)).to be true 56 | 57 | file_content = File.read(rb_file_path) 58 | expect(file_content).to include('Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI') 59 | expect(file_content).to include('Sublayer.configuration.ai_model = "gpt-4o"') 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/generators/doc_update_generator.rb: -------------------------------------------------------------------------------- 1 | class DocUpdateGenerator < Sublayer::Generators::Base 2 | TEMPLATE_CONTENT = File.read(File.join(__dir__, 'just_the_docs_template.md')) 3 | 4 | llm_output_adapter type: :list_of_named_strings, 5 | name: "files_and_contents", 6 | description: "A list of files to update along with their corresponding updated contents", 7 | item_name: "file_update", 8 | attributes: [ 9 | { name: "explanation", description: "Brief explanation for how a change to a specified file makes progress towards the suggested update." 
}, 10 | { name: "file_path", description: "The path of the file to update" }, 11 | { name: "file_content", description: "The updated content for the file" } 12 | ] 13 | 14 | def initialize(suggestions:, doc_context:, code_context:, context_ignore_list:) 15 | @suggestions = suggestions 16 | @doc_context = doc_context 17 | @code_context = code_context 18 | @context_ignore_list = context_ignore_list 19 | end 20 | 21 | def generate 22 | super 23 | end 24 | 25 | def prompt 26 | <<~PROMPT 27 | You are tasked to make changes in the documentation repository based on suggestions. 28 | 29 | Use the following information to guide both tasks: 30 | 31 | 1. Code repository structure: 32 | #{@code_context} 33 | 34 | 2. Documentation repository structure: 35 | #{@doc_context} 36 | 37 | 3. Documentation update suggestions: 38 | #{@suggestions} 39 | 40 | 4. Files excluded from updates (do not modify these files): 41 | #{@context_ignore_list} 42 | 43 | 5. Example of doc format: 44 | #{TEMPLATE_CONTENT} 45 | 46 | Guidelines: 47 | 1. Do not make updates to any files excluded from updates 48 | 2. Follow the format given in the example as a template for the structure of your file 49 | 3. If a new page is added make sure to add them to the navigation as well 50 | 3. If a link is added make sure it leads to an existing page, or create the new page being referenced 51 | 52 | Your task: 53 | Generate the full updated content for each file in the documentation repository that should be changed according to the suggestions. 54 | PROMPT 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /lib/sublayer/providers/gemini.rb: -------------------------------------------------------------------------------- 1 | # Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 2 | # Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 3 | 4 | module Sublayer 5 | module Providers 6 | class Gemini 7 | def self.call(prompt:, output_adapter:) 8 | 9 | request_id = SecureRandom.uuid 10 | before_request = Time.now 11 | Sublayer.configuration.logger.log(:info, "Gemini API request", { 12 | model: Sublayer.configuration.ai_model, 13 | prompt: prompt, 14 | request_id: request_id 15 | }) 16 | 17 | response = HTTParty.post( 18 | "https://generativelanguage.googleapis.com/v1beta/models/#{Sublayer.configuration.ai_model}:generateContent?key=#{ENV['GEMINI_API_KEY']}", 19 | body: { 20 | contents: { 21 | role: "user", 22 | parts: { 23 | text: "#{prompt}" 24 | }, 25 | }, 26 | generationConfig: { 27 | responseMimeType: "application/json", 28 | responseSchema: { 29 | type: "OBJECT", 30 | properties: output_adapter.format_properties, 31 | required: output_adapter.format_required 32 | } 33 | } 34 | }.to_json, 35 | headers: { 36 | "Content-Type" => "application/json" 37 | } 38 | ) 39 | 40 | after_request = Time.now 41 | response_time = after_request - before_request 42 | 43 | raise "Error generating with Gemini, error: #{response.body}" unless response.success? 
44 | 45 | Sublayer.configuration.logger.log(:info, "Gemini API response", { 46 | request_id: request_id, 47 | response_time: response_time, 48 | usage: { 49 | input_tokens: response["usageMetadata"]["promptTokenCount"], 50 | output_tokens: response["usageMetadata"]["candidatesTokenCount"], 51 | total_tokens: response["usageMetadata"]["totalTokenCount"] 52 | } 53 | }) 54 | 55 | output = response.dig("candidates", 0, "content", "parts", 0, "text") 56 | 57 | parsed_output = JSON.parse(output)[output_adapter.name] 58 | end 59 | end 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /spec/generators/product_description_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/product_description_generator" 4 | 5 | RSpec.describe ProductDescriptionGenerator do 6 | def generate 7 | described_class.new(product_name: "Super Gadget", product_category: "Electronics").generate 8 | end 9 | 10 | context "OpenAI" do 11 | before do 12 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 13 | Sublayer.configuration.ai_model = "gpt-4o" 14 | end 15 | 16 | it "generates an object with the correct keys" do 17 | VCR.use_cassette("openai/generators/product_description_generator/super_gadget") do 18 | result = generate 19 | expect(result).to respond_to(:short_description) 20 | expect(result).to respond_to(:long_description) 21 | expect(result).to respond_to(:key_features) 22 | expect(result).to respond_to(:target_audience) 23 | end 24 | end 25 | end 26 | 27 | context "Claude" do 28 | before do 29 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 30 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 31 | end 32 | 33 | it "generates an object with the correct keys" do 34 | VCR.use_cassette("claude/generators/product_description_generator/super_gadget") do 35 | result = generate 36 | expect(result).to respond_to(:short_description) 37 | expect(result).to respond_to(:long_description) 38 | expect(result).to respond_to(:key_features) 39 | expect(result).to respond_to(:target_audience) 40 | end 41 | end 42 | 43 | end 44 | 45 | context "Gemini" do 46 | before do 47 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 48 | Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 49 | end 50 | 51 | it "generates an object with the correct keys" do 52 | VCR.use_cassette("gemini/generators/product_description_generator/super_gadget") do 53 | result = generate 54 | expect(result).to respond_to(:short_description) 55 | expect(result).to respond_to(:long_description) 56 | expect(result).to respond_to(:key_features) 57 | expect(result).to respond_to(:target_audience) 58 | end 59 | end 60 | 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /lib/sublayer/cli.rb: -------------------------------------------------------------------------------- 1 | require "thor" 2 | 3 | require "sublayer" 4 | require "sublayer/version" 5 | require "yaml" 6 | require "fileutils" 7 | require "active_support/inflector" 8 | 9 | require_relative "cli/commands/subcommand_base" 10 | require_relative "cli/commands/new_project" 11 | require_relative "cli/commands/generator" 12 | require_relative "cli/commands/agent" 13 | require_relative "cli/commands/action" 14 | require_relative "cli/commands/cli_project" 15 | require_relative "cli/commands/github_action_project" 16 | require_relative
"cli/commands/quick_script_project" 17 | 18 | module Sublayer 19 | class CLI < Thor 20 | 21 | register(Sublayer::Commands::NewProject, "new", "new PROJECT_NAME", "Creates a new Sublayer project") 22 | 23 | register(Sublayer::Commands::Generator, "generate:generator", "generate:generator", "Generates a new Sublayer::Generator subclass for your project") 24 | register(Sublayer::Commands::Agent, "generate:agent", "generate:agent", "Generates a new Sublayer::Agent subclass for your project") 25 | register(Sublayer::Commands::Action, "generate:action", "generate:action", "Generates a new Sublayer::Action subclass for your project") 26 | 27 | desc "version", "Prints the Sublayer version" 28 | def version 29 | puts Sublayer::VERSION 30 | end 31 | 32 | desc "help [COMMAND]", "Describe available commands or one specific command" 33 | def help(command = nil, subcommand = false) 34 | if command.nil? 35 | puts "Sublayer CLI" 36 | puts 37 | puts "Usage:" 38 | puts " sublayer COMMAND [OPTIONS]" 39 | puts 40 | puts "Commands:" 41 | print_commands(self.class.commands.reject { |name, _| name == "help" || name == "version" }) 42 | puts 43 | print_commands(self.class.commands.select { |name, _| name == "help" }) 44 | print_commands(self.class.commands.select { |name, _| name == "version" }) 45 | puts 46 | puts "Run 'sublayer COMMAND --help' for more information on a command." 47 | else 48 | super 49 | end 50 | end 51 | 52 | default_command :help 53 | 54 | private 55 | 56 | def print_commands(commands) 57 | commands.each do |name, command| 58 | puts " #{name.ljust(15)} # #{command.description}" 59 | end 60 | end 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /spec/cli/generators/action_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require_relative "../../../lib/sublayer/cli/commands/generators/sublayer_action_generator" 4 | 5 | RSpec.describe SublayerActionGenerator do 6 | def generate(description) 7 | described_class.new(description: description).generate 8 | end 9 | 10 | context "Claude" do 11 | before do 12 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 13 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 14 | end 15 | 16 | it "generates the sublayer action code and filename" do 17 | VCR.use_cassette("claude/cli/generators/sublayer_action_generator") do 18 | results = generate("a sublayer action that sends a notification to a particular discord channel") 19 | 20 | expect(results.filename).to be_a(String) 21 | expect(results.filename.length).to be > 0 22 | expect(results.code).to be_a(String) 23 | expect(results.code.length).to be > 0 24 | end 25 | end 26 | end 27 | 28 | context "OpenAI" do 29 | before do 30 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 31 | Sublayer.configuration.ai_model = "gpt-4o-mini" 32 | end 33 | 34 | it "generates the sublayer action code and filename" do 35 | VCR.use_cassette("openai/cli/generators/sublayer_action_generator") do 36 | results = generate("a sublayer action that sends a notification to a particular discord channel") 37 | 38 | expect(results.filename).to be_a(String) 39 | expect(results.filename.length).to be > 0 40 | expect(results.code).to be_a(String) 41 | expect(results.code.length).to be > 0 42 | end 43 | end 44 | end 45 | 46 | context "Gemini" do 47 | before do 48 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 49 | Sublayer.configuration.ai_model = 
"gemini-1.5-flash-latest" 50 | end 51 | 52 | it "generates the sublayer action code and filename" do 53 | VCR.use_cassette("gemini/cli/generators/sublayer_action_generator") do 54 | results = generate("a sublayer action that sends a notification to a particular discord channel") 55 | 56 | expect(results.filename).to be_a(String) 57 | expect(results.filename.length).to be > 0 58 | expect(results.code).to be_a(String) 59 | expect(results.code.length).to be > 0 60 | end 61 | end 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/providers/gemini_15_pro.rb: -------------------------------------------------------------------------------- 1 | # Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 2 | # Sublayer.configuration.ai_model = "gemini-1.5-pro-latest" 3 | 4 | module Sublayer 5 | module Providers 6 | class GeminiInternalServiceError < StandardError; end 7 | 8 | class Gemini15Pro 9 | def self.call(prompt:, output_adapter:) 10 | response = HTTParty.post( 11 | "https://generativelanguage.googleapis.com/v1beta/models/#{Sublayer.configuration.ai_model}:generateContent?key=#{ENV['GEMINI_API_KEY']}", 12 | body: { 13 | contents: [ 14 | { 15 | role: "user", 16 | parts: { 17 | text: "#{prompt}\n#{system_prompt}" 18 | } 19 | } 20 | ], 21 | generationConfig: { 22 | responseMimeType: "application/json", 23 | responseSchema: { 24 | type: "OBJECT", 25 | properties: output_adapter.format_properties, 26 | required: output_adapter.format_required 27 | } 28 | } 29 | }.to_json, 30 | headers: { 31 | "Content-Type" => "application/json" 32 | }, 33 | timeout: 600 34 | ) 35 | 36 | raise GeminiInternalServiceError, "Error generating with Gemini, error: #{response.body}" if response.code == 500 37 | 38 | raise "Error generating with Gemini, error: #{response.body}" unless response.success? 39 | 40 | output = response.dig("candidates", 0, "content", "parts", 0, "text") 41 | 42 | parsed_output = JSON.parse(output)[output_adapter.name] 43 | end 44 | 45 | def self.system_prompt 46 | <<-SYSTEM_PROMPT 47 | CRITICAL INSTRUCTIONS: 48 | **Backticks**: Do not use triple backticks in any form. 
49 | - Instead of: 50 | ``` 51 | pwd 52 | ``` 53 | ```ruby 54 | puts "hello world" 55 | ``` 56 | ```bash 57 | rails generate new my_app 58 | ``` 59 | - Use: 60 | %%% 61 | pwd 62 | %%% 63 | %%%ruby 64 | puts "hello world" 65 | %%% 66 | %%%bash 67 | rails generate new my_app 68 | %%% 69 | SYSTEM_PROMPT 70 | end 71 | end 72 | end 73 | end 74 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/no_function.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"the_answer","description":"The 9 | answer to the given question","input_schema":{"type":"object","properties":{"the_answer":{"type":"string","description":"The 10 | answer to the given question"}},"required":["the_answer"]}}],"messages":[{"role":"user","content":"What 11 | is the meaning of life, the universe, and everything?"}]}' 12 | headers: 13 | X-Api-Key: 14 | - "" 15 | Anthropic-Version: 16 | - '2023-06-01' 17 | Content-Type: 18 | - application/json 19 | Anthropic-Beta: 20 | - tools-2024-04-04 21 | Accept-Encoding: 22 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 23 | Accept: 24 | - "*/*" 25 | User-Agent: 26 | - Ruby 27 | response: 28 | status: 29 | code: 200 30 | message: OK 31 | headers: 32 | Date: 33 | - Sun, 04 Aug 2024 18:16:36 GMT 34 | Content-Type: 35 | - application/json 36 | Transfer-Encoding: 37 | - chunked 38 | Connection: 39 | - keep-alive 40 | Anthropic-Ratelimit-Requests-Limit: 41 | - '1000' 42 | Anthropic-Ratelimit-Requests-Remaining: 43 | - '999' 44 | Anthropic-Ratelimit-Requests-Reset: 45 | - '2024-08-04T18:17:34Z' 46 | Anthropic-Ratelimit-Tokens-Limit: 47 | - '100000' 48 | Anthropic-Ratelimit-Tokens-Remaining: 49 | - '100000' 50 | Anthropic-Ratelimit-Tokens-Reset: 51 | - '2024-08-04T18:16:36Z' 52 | Request-Id: 53 | - req_01Qf2LEGKN3ULRNVq9sJfb2U 54 | X-Cloud-Trace-Context: 55 | - 2d986f920b449e58f24aa7050bd07001 56 | Via: 57 | - 1.1 google 58 | Cf-Cache-Status: 59 | - DYNAMIC 60 | Server: 61 | - cloudflare 62 | Cf-Ray: 63 | - 8ae08a105a298ccd-EWR 64 | body: 65 | encoding: ASCII-8BIT 66 | string: '{"id":"msg_01JgumH23YHwKfeNL4usg4bv","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"text","text":"the answer to the meaning of life, the universe and everything is 42."}],"stop_reason":"end_turn","stop_sequence":null,"usage":{"input_tokens":353,"output_tokens":191}}' 67 | recorded_at: Sun, 04 Aug 2024 18:16:36 GMT 68 | recorded_with: VCR 6.2.0 69 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/generators/sublayer_action_generator.rb: -------------------------------------------------------------------------------- 1 | class SublayerActionGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :named_strings, 3 | name: "sublayer_action", 4 | description: "The new sublayer action based on the description and supporting information", 5 | attributes: [ 6 | { name: "code", description: "The code of the generated Sublayer action" }, 7 | { name: "filename", description: "The filename of the generated sublayer action snake cased with a .rb extension" } 8 | ] 9 | 10 | def initialize(description:) 11 | @description = description 12 | end 13 | 14 | def generate 15 | super 16 | end 17 | 18 | def prompt 19 | 
<<-PROMPT 20 | You are an expert Ruby programmer and are great at repurposing code examples for new situations. 21 | 22 | A Sublayer Action is an example of a command pattern that is used in the Sublayer AI framework to perform actions in the outside world, such as manipulating files or making API calls. 23 | 24 | The Sublayer framework also has a component called a Generator that takes data in, sends it to an LLM and gets structured data out. 25 | Sublayer::Actions are used both to retrieve data for use in a generator and to perform actions based on the output of the generator. 26 | This both aids in generating new composable bits of functionality and eases testing. 27 | 28 | A Sublayer Action is initialized with the data it needs and then exposes a `call` method which is used to perform the action. 29 | 30 | An example of an action used to save a file to the file system, for example after generating its contents, is: 31 | 32 | #{example_filesystem_action} 33 | 34 | 35 | An example of an action used to make a call to an external API, such as OpenAI's text-to-speech API, is: 36 | 37 | #{example_api_action} 38 | 39 | 40 | Your task is to generate a new Sublayer::Actions::Base subclass that performs an action based on the description provided. 41 | 42 | #{@description} 43 | 44 | PROMPT 45 | end 46 | 47 | private 48 | def example_filesystem_action 49 | File.read(File.join(File.dirname(__FILE__), 'example_action_file_manipulation.rb')) 50 | end 51 | 52 | def example_api_action 53 | File.read(File.join(File.dirname(__FILE__), 'example_action_api_call.rb')) 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /spec/generators/task_steps_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/task_steps_generator" 4 | 5 | RSpec.describe TaskStepsGenerator do 6 | subject { described_class.new(task: "Set up a new Ruby on Rails project") } 7 | 8 | context "OpenAI" do 9 | before do 10 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 11 | Sublayer.configuration.ai_model = "gpt-4o" 12 | end 13 | 14 | it "generates a list of steps with descriptions and commands" do 15 | VCR.use_cassette("openai/generators/task_steps_generator/ruby_on_rails_project") do 16 | steps = subject.generate 17 | 18 | expect(steps).to be_an(Array) 19 | expect(steps.size).to be > 0 20 | expect(steps.first).to respond_to(:description) 21 | expect(steps.first).to respond_to(:command) 22 | expect(steps.first.description).to be_a(String) 23 | expect(steps.first.command).to be_a(String) 24 | end 25 | end 26 | end 27 | 28 | context "Claude" do 29 | before do 30 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 31 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 32 | end 33 | 34 | it "generates a list of steps with descriptions and commands" do 35 | VCR.use_cassette("claude/generators/task_steps_generator/ruby_on_rails_project") do 36 | steps = subject.generate 37 | 38 | expect(steps).to be_an(Array) 39 | expect(steps.size).to be > 0 40 | expect(steps.first).to respond_to(:description) 41 | expect(steps.first).to respond_to(:command) 42 | expect(steps.first.description).to be_a(String) 43 | expect(steps.first.command).to be_a(String) 44 | end 45 | end 46 | end 47 | 48 | context "Gemini" do 49 | before do 50 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 51 | Sublayer.configuration.ai_model =
"gemini-1.5-pro-latest" 52 | end 53 | 54 | it "generates a list of steps with descriptions and commands" do 55 | VCR.use_cassette("gemini/generators/task_steps_generator/ruby_on_rails_project") do 56 | steps = subject.generate 57 | 58 | expect(steps).to be_an(Array) 59 | expect(steps.size).to be > 0 60 | expect(steps.first).to respond_to(:description) 61 | expect(steps.first).to respond_to(:command) 62 | expect(steps.first.description).to be_a(String) 63 | expect(steps.first.command).to be_a(String) 64 | end 65 | end 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /spec/cli/generators/generator_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require_relative "../../../lib/sublayer/cli/commands/generators/sublayer_generator_generator" 4 | 5 | RSpec.describe SublayerGeneratorGenerator do 6 | def generate(description) 7 | described_class.new(description: description).generate 8 | end 9 | 10 | context "Claude" do 11 | before do 12 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 13 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 14 | end 15 | 16 | it "generates the Sublayer Generator code and filename" do 17 | VCR.use_cassette("claude/cli/generators/sublayer_generator_generator") do 18 | results = generate("a sublayer generator that takes in code and converts it to the users requested programming language") 19 | 20 | expect(results.filename).to be_a(String) 21 | expect(results.filename.length).to be > 0 22 | expect(results.code).to be_a(String) 23 | expect(results.code.length).to be > 0 24 | end 25 | end 26 | end 27 | 28 | context "OpenAI" do 29 | before do 30 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 31 | Sublayer.configuration.ai_model = "gpt-4o-mini" 32 | end 33 | 34 | it "generates the Sublayer Generator code and filename" do 35 | VCR.use_cassette("openai/cli/generators/sublayer_generator_generator") do 36 | results = generate("a sublayer generator that takes in code and converts it to the users requested programming language") 37 | 38 | expect(results.filename).to be_a(String) 39 | expect(results.filename.length).to be > 0 40 | expect(results.code).to be_a(String) 41 | expect(results.code.length).to be > 0 42 | end 43 | end 44 | 45 | end 46 | 47 | context "Gemini" do 48 | before do 49 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 50 | Sublayer.configuration.ai_model = "gemini-1.5-pro-latest" 51 | end 52 | 53 | it "generates the Sublayer Generator code and filename" do 54 | VCR.use_cassette("gemini/cli/generators/sublayer_generator_generator") do 55 | results = generate("a sublayer generator that takes in code and converts it to the users requested programming language") 56 | 57 | expect(results.filename).to be_a(String) 58 | expect(results.filename.length).to be > 0 59 | expect(results.code).to be_a(String) 60 | expect(results.code.length).to be > 0 61 | end 62 | end 63 | 64 | end 65 | end 66 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/github_action_project.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Commands 3 | class GithubActionProject < Thor::Group 4 | attr_reader :ai_provider_key 5 | 6 | include Thor::Actions 7 | 8 | argument :project_name 9 | 10 | class_option :provider, type: :string, desc: "AI provider (OpenAI, Claude, or 
Gemini)", aliases: :p 11 | class_option :model, type: :string, desc: "AI model name to use (e.g. gpt-4o, claude-3-haiku-20240307, gemini-1.5-flash-latest)", aliases: :m 12 | 13 | def self.source_root 14 | File.dirname(__FILE__) 15 | end 16 | 17 | def sublayer_version 18 | Sublayer::VERSION 19 | end 20 | 21 | def ask_for_project_details 22 | @ai_provider = options[:provider] || ask("Select an AI provider:", default: "OpenAI", limited_to: %w[OpenAI Claude Gemini]) 23 | @ai_model = options[:model] || select_ai_model 24 | end 25 | 26 | def create_project_directory 27 | say "Creating project directory", :green 28 | 29 | empty_directory ".github/workflows" 30 | end 31 | 32 | def copy_template_files 33 | say "Copying template files", :green 34 | 35 | directory "../templates/github_action", ".github/workflows" 36 | end 37 | 38 | 39 | def generate_configuration 40 | 41 | end 42 | 43 | def finalize_project 44 | 45 | end 46 | 47 | def print_next_steps 48 | say "\nSublayer Github Action Project '#{project_name}' created successfully!", :green 49 | say "To get started: " 50 | say "Create some Sublayer Actions and Sublayer generators within '.github/workflows/#{project_name}/{actions/,generators/}'" 51 | say "And edit the file at '.github/workflows/#{project_name}.yml' to set up what you want the action triggered by" 52 | end 53 | 54 | private 55 | def select_ai_model 56 | case @ai_provider 57 | when "OpenAI" 58 | @ai_provider_key = "OPENAI_API_KEY" 59 | ask("Which OpenAI model would you like to use?", default: "gpt-4o", limited_to: %w[gpt-4o gpt-4o-mini gpt-4-turbo gpt-3.5-turbo]) 60 | when "Claude" 61 | @ai_provider_key = "ANTHROPIC_API_KEY" 62 | ask("Which Anthropic model would you like to use?", default: "claude-3-5-sonnet-20240620", limited_to: %w[claude-3-5-sonnet-20240620 claude-3-opus-20240620 claude-3-haiku-20240307]) 63 | when "Gemini" 64 | @ai_provider_key = "GEMINI_API_KEY" 65 | ask("Which Google model would you like to use?", default: "gemini-1.5-flash-latest", limited_to: %w[gemini-1.5-flash-latest gemini-1.5-pro-latest]) 66 | end 67 | end 68 | end 69 | end 70 | end 71 | -------------------------------------------------------------------------------- /spec/generators/code_from_description_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/code_from_description_generator" 4 | 5 | RSpec.describe CodeFromDescriptionGenerator do 6 | def generate(description:, technologies: ["ruby"]) 7 | described_class.new( 8 | description: description, 9 | technologies: technologies 10 | ).generate 11 | end 12 | 13 | context "claude" do 14 | before do 15 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 16 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 17 | end 18 | 19 | it "generates code from description" do 20 | VCR.use_cassette("claude/generators/code_from_description_generator/hello_world") do 21 | code = generate(description: "a hello world app where I pass --who argument to set the 'world' value using optparser") 22 | expect(code.strip).to include("require 'optparse'") 23 | expect(code.strip).to include("OptionParser.new") 24 | expect(code.strip).to include("puts \"Hello, \#{") 25 | end 26 | end 27 | end 28 | 29 | context "openai" do 30 | before do 31 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 32 | Sublayer.configuration.ai_model = "gpt-4-turbo" 33 | end 34 | 35 | it "generates code from description" do 36 | 
VCR.use_cassette("openai/generators/code_from_description_generator/hello_world") do 37 | code = generate(description: "a hello world app where I pass --who argument to set the 'world' value using optparser") 38 | expect(code.strip).to eq %q(require 'optparse' 39 | 40 | # Define the options 41 | options = {} 42 | OptionParser.new do |parser| 43 | parser.banner = "Usage: example.rb [options]" 44 | 45 | parser.on("-w", "--who WHO", "Who to greet") do |v| 46 | options[:who] = v 47 | end 48 | end.parse! 49 | 50 | # Greeting 51 | who_to_greet = options[:who] || "World" 52 | puts "Hello, #{who_to_greet}!") 53 | end 54 | end 55 | 56 | end 57 | 58 | context "Gemini" do 59 | before do 60 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 61 | Sublayer.configuration.ai_model = "gemini-1.5-flash-latest" 62 | end 63 | 64 | it "generates code from description" do 65 | VCR.use_cassette("gemini/generators/code_from_description_generator/hello_world") do 66 | code = generate(description: "a hello world app where I pass --who argument to set the 'world' value using optparser") 67 | expect(code.strip).to include("require 'optparse'") 68 | expect(code.strip).to include("OptionParser.new") 69 | expect(code.strip).to include("puts \"Hello, \#{") 70 | end 71 | end 72 | end 73 | end 74 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/generators/invalid_to_valid_json_generator/valid_json.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"valid_json","description":"The 9 | valid JSON string","input_schema":{"type":"object","properties":{"valid_json":{"type":"string","description":"The 10 | valid JSON string"}},"required":["valid_json"]}}],"tool_choice":{"type":"tool","name":"valid_json"},"messages":[{"role":"user","content":" You 11 | are an expert in JSON parsing.\n\n The given string is not a valid JSON: 12 | {\"valid\": \"json\"}\n\n Please fix this and produce a valid JSON.\n"}]}' 13 | headers: 14 | X-Api-Key: 15 | - "" 16 | Anthropic-Version: 17 | - '2023-06-01' 18 | Content-Type: 19 | - application/json 20 | Anthropic-Beta: 21 | - tools-2024-04-04 22 | Accept-Encoding: 23 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 24 | Accept: 25 | - "*/*" 26 | User-Agent: 27 | - Ruby 28 | response: 29 | status: 30 | code: 200 31 | message: OK 32 | headers: 33 | Date: 34 | - Sun, 04 Aug 2024 19:33:12 GMT 35 | Content-Type: 36 | - application/json 37 | Transfer-Encoding: 38 | - chunked 39 | Connection: 40 | - keep-alive 41 | Anthropic-Ratelimit-Requests-Limit: 42 | - '1000' 43 | Anthropic-Ratelimit-Requests-Remaining: 44 | - '999' 45 | Anthropic-Ratelimit-Requests-Reset: 46 | - '2024-08-04T19:33:34Z' 47 | Anthropic-Ratelimit-Tokens-Limit: 48 | - '100000' 49 | Anthropic-Ratelimit-Tokens-Remaining: 50 | - '100000' 51 | Anthropic-Ratelimit-Tokens-Reset: 52 | - '2024-08-04T19:33:12Z' 53 | Request-Id: 54 | - req_016U8jpXTmyyXze34czHkBwh 55 | X-Cloud-Trace-Context: 56 | - ae7566ad6f46d8aa83f5ceb769697dfe 57 | Via: 58 | - 1.1 google 59 | Cf-Cache-Status: 60 | - DYNAMIC 61 | Server: 62 | - cloudflare 63 | Cf-Ray: 64 | - 8ae0fa4faa669e17-EWR 65 | body: 66 | encoding: ASCII-8BIT 67 | string: 
'{"id":"msg_015xcKWYvcWkYzKz7Wab9beq","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01JiTBA6AVvFCA2aKYps2EKw","name":"valid_json","input":{"valid_json":"{\"valid\": 68 | \"json\"}"}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":469,"output_tokens":40}}' 69 | recorded_at: Sun, 04 Aug 2024 19:33:12 GMT 70 | recorded_with: VCR 6.2.0 71 | -------------------------------------------------------------------------------- /spec/integration/cli_project_creation_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "fileutils" 3 | require "open3" 4 | require "tmpdir" 5 | 6 | TMP_DIR = ENV['RUNNER_TEMP'] || Dir.tmpdir 7 | 8 | RSpec.describe "CLI Project Creation" do 9 | 10 | let(:project_name) { "test_project" } 11 | let(:project_path) { File.join(TMP_DIR, project_name) } 12 | 13 | before(:all) do 14 | FileUtils.mkdir_p(TMP_DIR) 15 | end 16 | 17 | after(:all) do 18 | FileUtils.rm_rf(TMP_DIR) 19 | end 20 | 21 | after(:each) do 22 | FileUtils.rm_rf(project_path) 23 | end 24 | 25 | it "creates a new project with all the expected files and structures" do 26 | command = "ruby -I lib #{File.dirname(__FILE__)}/../../bin/sublayer new #{project_name}" 27 | input = "CLI\nOpenAI\ngpt-4o\nn\n\n" 28 | 29 | output, status = Open3.capture2e(command, chdir: TMP_DIR, stdin_data: input) 30 | 31 | expect(status.success?).to be true 32 | expect(output).to include("Sublayer project '#{project_name}' created successfully!") 33 | 34 | expect(Dir.exist?(project_path)).to be true 35 | 36 | %w[bin lib spec log].each do |dir| 37 | expect(Dir.exist?(File.join(project_path, dir))).to be true 38 | end 39 | 40 | %w[ 41 | bin/test_project 42 | lib/test_project.rb 43 | lib/test_project/version.rb 44 | lib/test_project/cli.rb 45 | lib/test_project/config.rb 46 | lib/test_project/commands/example_command.rb 47 | lib/test_project/commands/base_command.rb 48 | lib/test_project/actions/example_action.rb 49 | lib/test_project/agents/example_agent.rb 50 | lib/test_project/generators/example_generator.rb 51 | Gemfile 52 | test_project.gemspec 53 | README.md 54 | ].each do |file| 55 | expect(File.exist?(File.join(project_path, file))).to be true 56 | end 57 | end 58 | 59 | it "properly sets config values for ai_provider and ai_model" do 60 | command = "ruby -I lib #{File.dirname(__FILE__)}/../../bin/sublayer new #{project_name}" 61 | input = "CLI\nOpenAI\ngpt-4o\nn\n\n" 62 | 63 | output, status = Open3.capture2e(command, chdir: TMP_DIR, stdin_data: input) 64 | expect(status.success?).to be true 65 | 66 | # Check the config file was created 67 | config_file_path = File.join(project_path, "lib", project_name, "config", "sublayer.yml") 68 | expect(File.exist?(config_file_path)).to be true 69 | 70 | # Parse the YAML config and check values 71 | config = YAML.load_file(config_file_path) 72 | expect(config[:ai_provider]).to eq("OpenAI") 73 | expect(config[:ai_model]).to eq("gpt-4o") 74 | expect(config[:project_name]).to eq(project_name) 75 | expect(config[:project_template]).to eq("CLI") 76 | end 77 | end 78 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/generators/invalid_to_valid_json_generator/invalid_json.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | 
encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"valid_json","description":"The 9 | valid JSON string","input_schema":{"type":"object","properties":{"valid_json":{"type":"string","description":"The 10 | valid JSON string"}},"required":["valid_json"]}}],"tool_choice":{"type":"tool","name":"valid_json"},"messages":[{"role":"user","content":" You 11 | are an expert in JSON parsing.\n\n The given string is not a valid JSON: 12 | {invalid: \"json\"}\n\n Please fix this and produce a valid JSON.\n"}]}' 13 | headers: 14 | X-Api-Key: 15 | - "" 16 | Anthropic-Version: 17 | - '2023-06-01' 18 | Content-Type: 19 | - application/json 20 | Anthropic-Beta: 21 | - tools-2024-04-04 22 | Accept-Encoding: 23 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 24 | Accept: 25 | - "*/*" 26 | User-Agent: 27 | - Ruby 28 | response: 29 | status: 30 | code: 200 31 | message: OK 32 | headers: 33 | Date: 34 | - Sun, 04 Aug 2024 19:34:48 GMT 35 | Content-Type: 36 | - application/json 37 | Transfer-Encoding: 38 | - chunked 39 | Connection: 40 | - keep-alive 41 | Anthropic-Ratelimit-Requests-Limit: 42 | - '1000' 43 | Anthropic-Ratelimit-Requests-Remaining: 44 | - '999' 45 | Anthropic-Ratelimit-Requests-Reset: 46 | - '2024-08-04T19:35:34Z' 47 | Anthropic-Ratelimit-Tokens-Limit: 48 | - '100000' 49 | Anthropic-Ratelimit-Tokens-Remaining: 50 | - '100000' 51 | Anthropic-Ratelimit-Tokens-Reset: 52 | - '2024-08-04T19:34:48Z' 53 | Request-Id: 54 | - req_019ew67UySmbMxaVTyQh71kM 55 | X-Cloud-Trace-Context: 56 | - db7d396f7d49a3013a9aa0e41045b9ee 57 | Via: 58 | - 1.1 google 59 | Cf-Cache-Status: 60 | - DYNAMIC 61 | Server: 62 | - cloudflare 63 | Cf-Ray: 64 | - 8ae0fca66b82182d-EWR 65 | body: 66 | encoding: ASCII-8BIT 67 | string: '{"id":"msg_01DjjkAmRYhxKdJzbzo1MKjx","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01PQJ4Qvu2YG2UnhcG9gbtyx","name":"valid_json","input":{"valid_json":"{\n \"invalid\": 68 | \"json\"\n}"}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":469,"output_tokens":44}}' 69 | recorded_at: Sun, 04 Aug 2024 19:34:48 GMT 70 | recorded_with: VCR 6.2.0 71 | -------------------------------------------------------------------------------- /spec/integration/github_action_creation_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "fileutils" 3 | require "open3" 4 | require "tmpdir" 5 | 6 | TMP_DIR = ENV['RUNNER_TEMP'] || Dir.tmpdir 7 | 8 | RSpec.describe "Github Action Project Creation" do 9 | let(:project_name) { "test_action" } 10 | let(:project_path) { File.join(TMP_DIR, ".github", "workflows") } 11 | 12 | before(:all) do 13 | FileUtils.mkdir_p(TMP_DIR) 14 | end 15 | 16 | after(:all) do 17 | FileUtils.rm_rf(TMP_DIR) 18 | end 19 | 20 | after(:each) do 21 | FileUtils.rm_rf(project_path) 22 | end 23 | 24 | it "creates a new github action with all the expected files and structures" do 25 | command = "ruby -I lib #{File.dirname(__FILE__)}/../../bin/sublayer new #{project_name}" 26 | input = "GithubAction\nOpenAI\ngpt-4o\nn\n\n" 27 | 28 | output, status = Open3.capture2e(command, chdir: TMP_DIR, stdin_data: input) 29 | 30 | expect(status.success?).to be true 31 | expect(output).to include("Sublayer Github Action Project '#{project_name}' created successfully!") 32 | expect(Dir.exist?(project_path)).to be true 33 | 34 | [ 35 | project_name, 36 | "#{project_name}/actions", 37 | "#{project_name}/generators", 38 | 
"#{project_name}/agents" 39 | ].each do |dir| 40 | expect(Dir.exist?(File.join(project_path, dir))).to be true 41 | end 42 | 43 | [ 44 | "#{project_name}.yml", 45 | "#{project_name}/#{project_name}.rb", 46 | ].each do |file| 47 | expect(File.exist?(File.join(project_path, file))).to be true 48 | end 49 | end 50 | 51 | it "correctly configures AI provider and model in the generated files" do 52 | command = "ruby -I lib #{File.dirname(__FILE__)}/../../bin/sublayer new #{project_name}" 53 | input = "GithubAction\nOpenAI\ngpt-4o\nn\n\n" 54 | 55 | output, status = Open3.capture2e(command, chdir: TMP_DIR, stdin_data: input) 56 | expect(status.success?).to be true 57 | 58 | # Check the Ruby file has the correct AI configuration 59 | rb_file_path = File.join(project_path, project_name, "#{project_name}.rb") 60 | expect(File.exist?(rb_file_path)).to be true 61 | 62 | file_content = File.read(rb_file_path) 63 | expect(file_content).to include('Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI') 64 | expect(file_content).to include('Sublayer.configuration.ai_model = "gpt-4o"') 65 | 66 | # Check the YAML file has the correct API key reference 67 | yml_file_path = File.join(project_path, "#{project_name}.yml") 68 | expect(File.exist?(yml_file_path)).to be true 69 | 70 | yml_content = File.read(yml_file_path) 71 | expect(yml_content).to include('OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}') 72 | end 73 | end 74 | -------------------------------------------------------------------------------- /.github/workflows/github_actions/generators/doc_update_suggestion_generator.rb: -------------------------------------------------------------------------------- 1 | class DocUpdateSuggestionGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :list_of_named_strings, 3 | name: "doc_update_suggestions", 4 | description: "List of doc update suggestions with usefulness scores", 5 | item_name: "suggestion", 6 | attributes: [ 7 | { name: "suggestion", description: "description of the doc update suggestion and its reasoning" }, 8 | { name: "file_changes", description: "description of the files and their respective changes" }, 9 | { name: "usefulness_score", description: "A score from 1-10 indicating the usefulness of the suggestion" }, #unused 10 | { name: "title", description: "doc update suggestion title" } #unused 11 | ] 12 | 13 | def initialize(code_context:, doc_context:, context_ignore_list:, diff:) 14 | @code_context = code_context 15 | @doc_context = doc_context 16 | @context_ignore_list = context_ignore_list 17 | @diff = diff 18 | end 19 | 20 | def generate 21 | super 22 | end 23 | 24 | def prompt 25 | <<~PROMPT 26 | As an expert in documentation with a focus on concise and hierarchical organization. Consider the following: 27 | 28 | 1. Newest changes: 29 | #{@diff} 30 | 31 | 1. Documentation repository context: 32 | #{@doc_context} 33 | 34 | 2. Code repository context: 35 | #{@code_context} 36 | 37 | 3. Files excluded from updates (do not modify these files): 38 | #{@context_ignore_list} 39 | 40 | You are tasked with generating detailed and specific suggestions for updating a documentation repository based on the newest changes to the code repository. 41 | 42 | Generate documentation update suggestions, considering: 43 | 1. The appropriate level in the documentation hierarchy for each change (high-level concepts vs. specific details) 44 | 2. The impact of the changes on existing documentation (updates, additions, or removals) 45 | 3. 
The importance of each change for user understanding and API use 46 | 4. The need for examples or clarifications of new or modified functionality 47 | 48 | For each suggestion 49 | - Describe the suggestion and the reasoning behind it. Be specific. 50 | - Meticulously describe the files and the changes that should be made in them. 51 | - Indicate its usefulness, 10 being most useful and 1 being least, as a way of prioritizing which suggestion should be done first 52 | - A succinct title that encapsulates the spirit of the suggestion 53 | 54 | Guidelines: 55 | 1. Do not suggest changes to any files excluded from updates 56 | 2. Make the fewest number of suggestions possible to achieve the desired outcome 57 | PROMPT 58 | end 59 | end -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/max_tokens.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"the_answer","description":"The 9 | answer to the given question","input_schema":{"type":"object","properties":{"the_answer":{"type":"string","description":"The 10 | answer to the given question"}},"required":["the_answer"]}}],"tool_choice":{"type":"tool","name":"the_answer"},"messages":[{"role":"user","content":"What 11 | is the meaning of life, the universe, and everything?"}]}' 12 | headers: 13 | X-Api-Key: 14 | - "" 15 | Anthropic-Version: 16 | - '2023-06-01' 17 | Content-Type: 18 | - application/json 19 | Anthropic-Beta: 20 | - tools-2024-04-04 21 | Accept-Encoding: 22 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 23 | Accept: 24 | - "*/*" 25 | User-Agent: 26 | - Ruby 27 | response: 28 | status: 29 | code: 200 30 | message: OK 31 | headers: 32 | Date: 33 | - Mon, 12 Aug 2024 21:20:20 GMT 34 | Content-Type: 35 | - application/json 36 | Transfer-Encoding: 37 | - chunked 38 | Connection: 39 | - keep-alive 40 | Anthropic-Ratelimit-Requests-Limit: 41 | - '1000' 42 | Anthropic-Ratelimit-Requests-Remaining: 43 | - '999' 44 | Anthropic-Ratelimit-Requests-Reset: 45 | - '2024-08-12T21:20:34Z' 46 | Anthropic-Ratelimit-Tokens-Limit: 47 | - '100000' 48 | Anthropic-Ratelimit-Tokens-Remaining: 49 | - '100000' 50 | Anthropic-Ratelimit-Tokens-Reset: 51 | - '2024-08-12T21:20:20Z' 52 | Request-Id: 53 | - req_01DDrCniEL4s6oyNX3tEbx4e 54 | X-Cloud-Trace-Context: 55 | - 52d47d3738be9c40c4f88ba1f02fad7b 56 | Via: 57 | - 1.1 google 58 | Cf-Cache-Status: 59 | - DYNAMIC 60 | X-Robots-Tag: 61 | - none 62 | Server: 63 | - cloudflare 64 | Cf-Ray: 65 | - 8b23823a5ee50c76-EWR 66 | body: 67 | encoding: ASCII-8BIT 68 | string: '{"id":"msg_01DgBXa2zSaR4DYk2hzAE93y","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01HeXRSdR7PmciZvpHs1n91s","name":"the_answer","input":{"the_answer":"According 69 | to the classic novel The Hitchhiker''s Guide to the Galaxy by Douglas Adams, 70 | the answer to \"the meaning of life, the universe, and everything\" is 42."}}],"stop_reason":"max_tokens","stop_sequence":null,"usage":{"input_tokens":449,"output_tokens":71}}' 71 | recorded_at: Mon, 12 Aug 2024 21:20:20 GMT 72 | recorded_with: VCR 6.2.0 73 | -------------------------------------------------------------------------------- /lib/sublayer/providers/open_ai.rb: 
-------------------------------------------------------------------------------- 1 | # Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 2 | # Sublayer.configuration.ai_model = "gpt-4o" 3 | 4 | module Sublayer 5 | module Providers 6 | class OpenAI 7 | def self.call(prompt:, output_adapter:) 8 | client = ::OpenAI::Client.new(access_token: ENV.fetch("OPENAI_API_KEY")) 9 | 10 | request_id = SecureRandom.uuid 11 | 12 | Sublayer.configuration.logger.log(:info, "OpenAI API request", { 13 | model: Sublayer.configuration.ai_model, 14 | prompt: prompt, 15 | request_id: request_id, 16 | }) 17 | 18 | before_request = Time.now 19 | 20 | response = client.chat( 21 | parameters: { 22 | model: Sublayer.configuration.ai_model, 23 | messages: [ 24 | { 25 | "role": "user", 26 | "content": prompt 27 | } 28 | ], 29 | tool_choice: { type: "function", function: { name: output_adapter.name }}, 30 | tools: [ 31 | { 32 | type: "function", 33 | function: { 34 | name: output_adapter.name, 35 | description: output_adapter.description, 36 | parameters: { 37 | type: "object", 38 | properties: output_adapter.format_properties, 39 | required: output_adapter.format_required 40 | } 41 | } 42 | } 43 | ] 44 | }) 45 | 46 | after_request = Time.now 47 | response_time = after_request - before_request 48 | 49 | Sublayer.configuration.logger.log(:info, "OpenAI API response", { 50 | request_id: request_id, 51 | response_time: response_time, 52 | usage: { 53 | input_tokens: response["usage"]["prompt_tokens"], 54 | output_tokens: response["usage"]["completion_tokens"], 55 | total_tokens: response["usage"]["total_tokens"] 56 | } 57 | }) 58 | 59 | message = response.dig("choices", 0, "message") 60 | 61 | raise "No function called" unless message["tool_calls"] 62 | 63 | function_body = message.dig("tool_calls", 0, "function", "arguments") 64 | 65 | raise "Error generating with OpenAI. Empty response. Try rewording your output adapter params to be from the perspective of the model. Full Response: #{response}" if function_body == "{}" 66 | raise "Error generating with OpenAI. Error: Max tokens exceeded. Try breaking your problem up into smaller pieces."
if response["choices"][0]["finish_reason"] == "length" 67 | 68 | results = JSON.parse(function_body)[output_adapter.name] 69 | end 70 | end 71 | end 72 | end 73 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/generators/four_digit_passcode_generator/find_number.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"four_digit_passcode","description":"an 9 | uncommon and difficult to guess four digit passcode","input_schema":{"type":"object","properties":{"four_digit_passcode":{"type":"integer","description":"an 10 | uncommon and difficult to guess four digit passcode"}},"required":["four_digit_passcode"]}}],"tool_choice":{"type":"tool","name":"four_digit_passcode"},"messages":[{"role":"user","content":" You 11 | are an expert of common four digit passcodes\n\n Provide a four digit passcode 12 | that is uncommon and hard to guess\n"}]}' 13 | headers: 14 | X-Api-Key: 15 | - "" 16 | Anthropic-Version: 17 | - '2023-06-01' 18 | Content-Type: 19 | - application/json 20 | Anthropic-Beta: 21 | - tools-2024-04-04 22 | Accept-Encoding: 23 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 24 | Accept: 25 | - "*/*" 26 | User-Agent: 27 | - Ruby 28 | response: 29 | status: 30 | code: 200 31 | message: OK 32 | headers: 33 | Date: 34 | - Mon, 19 Aug 2024 02:26:09 GMT 35 | Content-Type: 36 | - application/json 37 | Transfer-Encoding: 38 | - chunked 39 | Connection: 40 | - keep-alive 41 | Anthropic-Ratelimit-Requests-Limit: 42 | - '1000' 43 | Anthropic-Ratelimit-Requests-Remaining: 44 | - '999' 45 | Anthropic-Ratelimit-Requests-Reset: 46 | - '2024-08-19T02:26:34Z' 47 | Anthropic-Ratelimit-Tokens-Limit: 48 | - '100000' 49 | Anthropic-Ratelimit-Tokens-Remaining: 50 | - '100000' 51 | Anthropic-Ratelimit-Tokens-Reset: 52 | - '2024-08-19T02:26:09Z' 53 | Request-Id: 54 | - req_01Xfgqydteeh1AzezsZkYJcQ 55 | X-Cloud-Trace-Context: 56 | - ca5cdb3d740ed1f8f159f483699dc98d 57 | Via: 58 | - 1.1 google 59 | Cf-Cache-Status: 60 | - DYNAMIC 61 | X-Robots-Tag: 62 | - none 63 | Server: 64 | - cloudflare 65 | Cf-Ray: 66 | - 8b56b2725850aa77-ICN 67 | body: 68 | encoding: ASCII-8BIT 69 | string: '{"id":"msg_01RKpJ671gEykktxdUoDFH5o","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01CvHK1cqHPFTp9EVo14LxZK","name":"four_digit_passcode","input":{"four_digit_passcode":5287}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":487,"output_tokens":39}}' 70 | recorded_at: Mon, 19 Aug 2024 02:26:09 GMT 71 | recorded_with: VCR 6.2.0 72 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/generators/sentiment_from_text_generator/positive.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"sentiment_value","description":"A 9 | sentiment value from the list","input_schema":{"type":"object","properties":{"sentiment_value":{"type":"string","description":"A 10 | sentiment value from the 
list","enum":["positive","negative","neutral"]}},"required":["sentiment_value"]}}],"tool_choice":{"type":"tool","name":"sentiment_value"},"messages":[{"role":"user","content":" You 11 | are an expert at determining sentiment from text.\n\n You are tasked 12 | with analyzing the following text and determining its sentiment value.\n\n The 13 | text is:\n Matz is nice so we are nice\n"}]}' 14 | headers: 15 | X-Api-Key: 16 | - "" 17 | Anthropic-Version: 18 | - '2023-06-01' 19 | Content-Type: 20 | - application/json 21 | Anthropic-Beta: 22 | - tools-2024-04-04 23 | Accept-Encoding: 24 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 25 | Accept: 26 | - "*/*" 27 | User-Agent: 28 | - Ruby 29 | response: 30 | status: 31 | code: 200 32 | message: OK 33 | headers: 34 | Date: 35 | - Sun, 04 Aug 2024 19:33:12 GMT 36 | Content-Type: 37 | - application/json 38 | Transfer-Encoding: 39 | - chunked 40 | Connection: 41 | - keep-alive 42 | Anthropic-Ratelimit-Requests-Limit: 43 | - '1000' 44 | Anthropic-Ratelimit-Requests-Remaining: 45 | - '999' 46 | Anthropic-Ratelimit-Requests-Reset: 47 | - '2024-08-04T19:33:34Z' 48 | Anthropic-Ratelimit-Tokens-Limit: 49 | - '100000' 50 | Anthropic-Ratelimit-Tokens-Remaining: 51 | - '100000' 52 | Anthropic-Ratelimit-Tokens-Reset: 53 | - '2024-08-04T19:33:12Z' 54 | Request-Id: 55 | - req_012Dd8vLjHzN5SscB6PCGude 56 | X-Cloud-Trace-Context: 57 | - fb56c4a63b941b7d499afd552c933e7e 58 | Via: 59 | - 1.1 google 60 | Cf-Cache-Status: 61 | - DYNAMIC 62 | Server: 63 | - cloudflare 64 | Cf-Ray: 65 | - 8ae0fa4b3f360c8a-EWR 66 | body: 67 | encoding: ASCII-8BIT 68 | string: '{"id":"msg_016mrB54uiPEAwMBy9CF8d1k","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01FFcNKh8AdZe1pZhbMV9qXw","name":"sentiment_value","input":{"sentiment_value":"positive"}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":493,"output_tokens":35}}' 69 | recorded_at: Sun, 04 Aug 2024 19:33:12 GMT 70 | recorded_with: VCR 6.2.0 71 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/generators/route_selection_from_user_intent_generator/route.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"route","description":"A 9 | route selected from the list","input_schema":{"type":"object","properties":{"route":{"type":"string","description":"A 10 | route selected from the list","enum":["GET /","GET /users","GET /users/:id","POST 11 | /users","PUT /users/:id","DELETE /users/:id"]}},"required":["route"]}}],"tool_choice":{"type":"tool","name":"route"},"messages":[{"role":"user","content":" You 12 | are skilled at selecting routes based on user intent.\n\n Your task 13 | is to choose a route based on the following intent:\n\n The user''s 14 | intent is:\n I want to get all the users\n"}]}' 15 | headers: 16 | X-Api-Key: 17 | - "" 18 | Anthropic-Version: 19 | - '2023-06-01' 20 | Content-Type: 21 | - application/json 22 | Anthropic-Beta: 23 | - tools-2024-04-04 24 | Accept-Encoding: 25 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 26 | Accept: 27 | - "*/*" 28 | User-Agent: 29 | - Ruby 30 | response: 31 | status: 32 | code: 200 33 | message: OK 34 | headers: 35 | Date: 36 | - Sun, 04 Aug 2024 19:33:11 GMT 37 | Content-Type: 38 | - 
application/json 39 | Transfer-Encoding: 40 | - chunked 41 | Connection: 42 | - keep-alive 43 | Anthropic-Ratelimit-Requests-Limit: 44 | - '1000' 45 | Anthropic-Ratelimit-Requests-Remaining: 46 | - '999' 47 | Anthropic-Ratelimit-Requests-Reset: 48 | - '2024-08-04T19:33:34Z' 49 | Anthropic-Ratelimit-Tokens-Limit: 50 | - '100000' 51 | Anthropic-Ratelimit-Tokens-Remaining: 52 | - '100000' 53 | Anthropic-Ratelimit-Tokens-Reset: 54 | - '2024-08-04T19:33:11Z' 55 | Request-Id: 56 | - req_01EThhttprxZh9WPigwWig7g 57 | X-Cloud-Trace-Context: 58 | - 766a22ee2c95f66d863210e312996731 59 | Via: 60 | - 1.1 google 61 | Cf-Cache-Status: 62 | - DYNAMIC 63 | Server: 64 | - cloudflare 65 | Cf-Ray: 66 | - 8ae0fa46d9888c0f-EWR 67 | body: 68 | encoding: ASCII-8BIT 69 | string: '{"id":"msg_011Dby6895APkMh2kQcS631X","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01Trx3ZKz59u8ufpDPc2GbvG","name":"route","input":{"route":"GET 70 | /users"}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":510,"output_tokens":35}}' 71 | recorded_at: Sun, 04 Aug 2024 19:33:11 GMT 72 | recorded_with: VCR 6.2.0 73 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/action.rb: -------------------------------------------------------------------------------- 1 | require_relative "./generators/sublayer_action_generator" 2 | 3 | module Sublayer 4 | module Commands 5 | class Action < Thor::Group 6 | include Thor::Actions 7 | 8 | class_option :description, type: :string, desc: "Description of the action you want to generate", aliases: :d 9 | class_option :provider, type: :string, desc: "AI provider (OpenAI, Claude, or Gemini)", aliases: :p 10 | class_option :model, type: :string, desc: "AI model name to use (e.g. gpt-4o, claude-3-haiku-20240307, gemini-1.5-flash-latest)", aliases: :m 11 | 12 | def self.banner 13 | "sublayer generate:action" 14 | end 15 | 16 | def confirm_usage_of_ai_api 17 | puts "You are about to generate a new agent that uses an AI API to generate content." 18 | puts "Please ensure you have the necessary API keys and that you are aware of the costs associated with using the API." 19 | exit unless yes?("Do you want to continue?") 20 | end 21 | 22 | def determine_available_providers 23 | @available_providers = [] 24 | 25 | @available_providers << "OpenAI" if ENV["OPENAI_API_KEY"] 26 | @available_providers << "Claude" if ENV["ANTHROPIC_API_KEY"] 27 | @available_providers << "Gemini" if ENV["GEMINI_API_KEY"] 28 | end 29 | 30 | def ask_for_action_details 31 | @ai_provider = options[:provider] || ask("Select an AI provider:", default: "OpenAI", limited_to: @available_providers) 32 | @ai_model = options[:model] || select_ai_model 33 | 34 | @description = options[:description] || ask("Enter a description for the Sublayer Action you'd like to create:") 35 | end 36 | 37 | def generate_action 38 | @results = SublayerActionGenerator.new(description: @description).generate 39 | end 40 | 41 | def determine_destination_folder 42 | @destination_folder = if File.directory?("./actions") 43 | "./actions" 44 | elsif Dir.glob("./lib/**/actions").any? 
45 | Dir.glob("./lib/**/actions").first 46 | else 47 | "./" 48 | end 49 | end 50 | 51 | def save_action_to_destination_folder 52 | create_file File.join(@destination_folder, @results.filename), @results.code 53 | end 54 | 55 | private 56 | 57 | def select_ai_model 58 | case @ai_provider 59 | when "OpenAI" 60 | ask("Which OpenAI model would you like to use?", default: "gpt-4o") 61 | when "Claude" 62 | ask("Which Anthropic model would you like to use?", default: "claude-3-5-sonnet-20240620") 63 | when "Gemini" 64 | ask("Which Google model would you like to use?", default: "gemini-1.5-flash-latest") 65 | end 66 | end 67 | end 68 | end 69 | end 70 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/cli_project.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Commands 3 | class CLIProject < Thor::Group 4 | include Thor::Actions 5 | 6 | argument :project_name 7 | 8 | class_option :provider, type: :string, desc: "AI provider (OpenAI, Claude, or Gemini)", aliases: :p 9 | class_option :model, type: :string, desc: "AI model name to use (e.g. gpt-4o, claude-3-haiku-20240307, gemini-1.5-flash-latest)", aliases: :m 10 | 11 | def self.source_root 12 | File.dirname(__FILE__) 13 | end 14 | 15 | def sublayer_version 16 | Sublayer::VERSION 17 | end 18 | 19 | def ask_for_project_details 20 | @project_name = project_name 21 | @ai_provider = options[:provider] || ask("Select an AI provider:", default: "OpenAI", limited_to: %w[OpenAI Claude Gemini]) 22 | @ai_model = options[:model] || select_ai_model 23 | end 24 | 25 | def create_project_directory 26 | say "Creating project directory", :green 27 | 28 | empty_directory project_name 29 | end 30 | 31 | def copy_template_files 32 | say "Copying template files", :green 33 | 34 | directory "../templates/cli", project_name 35 | end 36 | 37 | def generate_configuration 38 | say "Generating configuration", :green 39 | 40 | config = { 41 | project_name: @project_name, 42 | project_template: "CLI", 43 | ai_provider: @ai_provider, 44 | ai_model: @ai_model 45 | } 46 | 47 | create_file File.join(project_name, "lib", project_name, "config", "sublayer.yml"), YAML.dump(config) 48 | end 49 | 50 | def finalize_project 51 | say "Finalizing project", :green 52 | 53 | inside(project_name) do 54 | chmod("bin/#{project_name}", "+x") 55 | run("git init") if yes?("Initialize a git repository?") 56 | run("bundle install") if yes?("Install gems?") 57 | end 58 | end 59 | 60 | def print_next_steps 61 | say "\nSublayer project '#{project_name}' created successfully!", :green 62 | say "To get started, run:" 63 | say " cd #{project_name}" 64 | say " ./bin/#{project_name}" 65 | end 66 | 67 | private 68 | def select_ai_model 69 | case @ai_provider 70 | when "OpenAI" 71 | ask("Which OpenAI model would you like to use?", default: "gpt-4o", limited_to: %w[gpt-4o gpt-4o-mini gpt-4-turbo gpt-3.5-turbo]) 72 | when "Claude" 73 | ask("Which Anthropic model would you like to use?", default: "claude-3-5-sonnet-20240620", limited_to: %w[claude-3-5-sonnet-20240620 claude-3-opus-20240620 claude-3-haiku-20240307]) 74 | when "Gemini" 75 | ask("Which Google model would you like to use?", default: "gemini-1.5-flash-latest", limited_to: %w[gemini-1.5-flash-latest gemini-1.5-pro-latest]) 76 | end 77 | end 78 | end 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /spec/cli/generators/command_generator_spec.rb: 
-------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require_relative "../../../lib/sublayer/cli/commands/generators/sublayer_command_generator" 4 | 5 | RSpec.describe SublayerCommandGenerator do 6 | def generate 7 | described_class.new(generator_code: File.read("spec/generators/examples/task_steps_generator.rb")).generate 8 | end 9 | 10 | context "OpenAI" do 11 | before do 12 | Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 13 | Sublayer.configuration.ai_model = "gpt-4o-mini" 14 | end 15 | 16 | it "generates the Sublayer Command code and filename" do 17 | VCR.use_cassette("openai/cli/generators/sublayer_command_generator") do 18 | results = generate 19 | 20 | expect(results.filename).to be_a(String) 21 | expect(results.filename.length).to be > 0 22 | expect(results.class_name).to be_a(String) 23 | expect(results.class_name.length).to be > 0 24 | expect(results.description).to be_a(String) 25 | expect(results.description.length).to be > 0 26 | expect(results.execute_body).to be_a(String) 27 | expect(results.execute_body.length).to be > 0 28 | end 29 | end 30 | end 31 | 32 | context "Gemini" do 33 | before do 34 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 35 | Sublayer.configuration.ai_model = "gemini-1.5-flash" 36 | end 37 | 38 | it "generates the Sublayer Command code and filename" do 39 | VCR.use_cassette("gemini/cli/generators/sublayer_command_generator") do 40 | results = generate 41 | 42 | expect(results.filename).to be_a(String) 43 | expect(results.filename.length).to be > 0 44 | expect(results.class_name).to be_a(String) 45 | expect(results.class_name.length).to be > 0 46 | expect(results.description).to be_a(String) 47 | expect(results.description.length).to be > 0 48 | expect(results.execute_body).to be_a(String) 49 | expect(results.execute_body.length).to be > 0 50 | end 51 | end 52 | end 53 | 54 | context "Claude" do 55 | before do 56 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 57 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 58 | end 59 | 60 | it "generates the Sublayer Command code and filename" do 61 | VCR.use_cassette("claude/cli/generators/sublayer_command_generator") do 62 | results = generate 63 | 64 | expect(results.filename).to be_a(String) 65 | expect(results.filename.length).to be > 0 66 | expect(results.class_name).to be_a(String) 67 | expect(results.class_name.length).to be > 0 68 | expect(results.description).to be_a(String) 69 | expect(results.description.length).to be > 0 70 | expect(results.execute_body).to be_a(String) 71 | expect(results.execute_body.length).to be > 0 72 | end 73 | end 74 | end 75 | end -------------------------------------------------------------------------------- /spec/vcr_cassettes/gemini/42.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent?key= 6 | body: 7 | encoding: UTF-8 8 | string: '{"contents":{"role":"user","parts":{"text":"What is the meaning of 9 | life, the universe, and everything?"}},"generationConfig":{"responseMimeType":"application/json","responseSchema":{"type":"OBJECT","properties":{"the_answer":{"type":"string","description":"The 10 | answer to the given question"}},"required":["the_answer"]}}}' 11 | headers: 12 | Content-Type: 13 | - application/json 14 | Accept-Encoding: 15 | - 
gzip;q=1.0,deflate;q=0.6,identity;q=0.3 16 | Accept: 17 | - "*/*" 18 | User-Agent: 19 | - Ruby 20 | response: 21 | status: 22 | code: 200 23 | message: OK 24 | headers: 25 | Content-Type: 26 | - application/json; charset=UTF-8 27 | Vary: 28 | - Origin 29 | - Referer 30 | - X-Origin 31 | Date: 32 | - Mon, 26 Aug 2024 19:19:05 GMT 33 | Server: 34 | - scaffolding on HTTPServer2 35 | Cache-Control: 36 | - private 37 | X-Xss-Protection: 38 | - '0' 39 | X-Frame-Options: 40 | - SAMEORIGIN 41 | X-Content-Type-Options: 42 | - nosniff 43 | Server-Timing: 44 | - gfet4t7; dur=451 45 | Alt-Svc: 46 | - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 47 | Transfer-Encoding: 48 | - chunked 49 | body: 50 | encoding: ASCII-8BIT 51 | string: | 52 | { 53 | "candidates": [ 54 | { 55 | "content": { 56 | "parts": [ 57 | { 58 | "text": "{\"the_answer\": \"42\"}\n" 59 | } 60 | ], 61 | "role": "model" 62 | }, 63 | "finishReason": "STOP", 64 | "index": 0, 65 | "safetyRatings": [ 66 | { 67 | "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", 68 | "probability": "NEGLIGIBLE" 69 | }, 70 | { 71 | "category": "HARM_CATEGORY_HATE_SPEECH", 72 | "probability": "NEGLIGIBLE" 73 | }, 74 | { 75 | "category": "HARM_CATEGORY_HARASSMENT", 76 | "probability": "NEGLIGIBLE" 77 | }, 78 | { 79 | "category": "HARM_CATEGORY_DANGEROUS_CONTENT", 80 | "probability": "NEGLIGIBLE" 81 | } 82 | ] 83 | } 84 | ], 85 | "usageMetadata": { 86 | "promptTokenCount": 14, 87 | "candidatesTokenCount": 9, 88 | "totalTokenCount": 23 89 | } 90 | } 91 | recorded_at: Mon, 26 Aug 2024 19:19:05 GMT 92 | recorded_with: VCR 6.2.0 93 | -------------------------------------------------------------------------------- /lib/sublayer/providers/claude.rb: -------------------------------------------------------------------------------- 1 | # Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 2 | # Sublayer.configuration.ai_model ="claude-3-5-sonnet-20240620" 3 | 4 | module Sublayer 5 | module Providers 6 | class Claude 7 | def self.call(prompt:, output_adapter:) 8 | request_id = SecureRandom.uuid 9 | Sublayer.configuration.logger.log(:info, "Claude API request", { 10 | model: Sublayer.configuration.ai_model, 11 | prompt: prompt, 12 | request_id: request_id 13 | }); 14 | 15 | before_request = Time.now 16 | 17 | response = HTTParty.post( 18 | "https://api.anthropic.com/v1/messages", 19 | headers: { 20 | "x-api-key": ENV.fetch("ANTHROPIC_API_KEY"), 21 | "anthropic-version": "2023-06-01", 22 | "content-type": "application/json", 23 | "anthropic-beta": "tools-2024-04-04" 24 | }, 25 | body: { 26 | model: Sublayer.configuration.ai_model, 27 | max_tokens: 4096, 28 | tools: [ 29 | { 30 | name: output_adapter.name, 31 | description: output_adapter.description, 32 | input_schema: { 33 | type: "object", 34 | properties: output_adapter.format_properties, 35 | required: output_adapter.format_required 36 | } 37 | } 38 | ], 39 | tool_choice: { type: "tool", name: output_adapter.name }, 40 | messages: [{ "role": "user", "content": prompt }] 41 | }.to_json 42 | ) 43 | 44 | raise "Error generating with Claude, error: #{response.body}" unless response.code == 200 45 | 46 | after_request = Time.now 47 | response_time = after_request - before_request 48 | 49 | json_response = JSON.parse(response.body) 50 | 51 | Sublayer.configuration.logger.log(:info, "Claude API response", { 52 | request_id: request_id, 53 | response_time: response_time, 54 | usage: { 55 | input_tokens: json_response.dig("usage", "input_tokens"), 56 | output_tokens: json_response.dig("usage", "output_tokens"), 57 | 
total_tokens: json_response.dig("usage", "input_tokens") + json_response.dig("usage", "output_tokens") 58 | } 59 | }) 60 | 61 | tool_use = json_response.dig("content").find { |content| content['type'] == 'tool_use' && content['name'] == output_adapter.name } 62 | 63 | raise "Error generating with Claude, error: No function called. If the answer is in the response, try rewording your prompt or output adapter name to be from the perspective of the model. Response: #{response.body}" unless tool_use 64 | raise "Error generating with Claude, error: Max tokens exceeded. Try breaking your problem up into smaller pieces." if json_response.dig("stop_reason") == "max_tokens" 65 | 66 | tool_use.dig("input")[output_adapter.name] 67 | end 68 | end 69 | end 70 | end 71 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/quick_script_project.rb: -------------------------------------------------------------------------------- 1 | module Sublayer 2 | module Commands 3 | class QuickScriptProject < Thor::Group 4 | include Thor::Actions 5 | 6 | argument :project_name 7 | 8 | class_option :provider, type: :string, desc: "AI provider (OpenAI, Claude, or Gemini)", aliases: :p 9 | class_option :model, type: :string, desc: "AI model name to use (e.g. gpt-4o, claude-3-haiku-20240307, gemini-1.5-flash-latest)", aliases: :m 10 | 11 | def self.source_root 12 | File.dirname(__FILE__) 13 | end 14 | 15 | def sublayer_version 16 | Sublayer::VERSION 17 | end 18 | 19 | def ask_for_project_details 20 | @ai_provider = options[:provider] || ask("Select an AI provider:", default: "OpenAI", limited_to: %w[OpenAI Claude Gemini]) 21 | @ai_model = options[:model] || select_ai_model 22 | end 23 | 24 | def create_project_directory 25 | say "Creating project directory", :green 26 | 27 | empty_directory project_name 28 | end 29 | 30 | def copy_template_files 31 | say "Copying template files", :green 32 | 33 | directory "../templates/quick_script", project_name 34 | end 35 | 36 | def generate_configuration 37 | append_to_file File.join(project_name, "#{project_name}.rb") do 38 | <<~CONFIG 39 | Sublayer.configuration.ai_provider = Sublayer::Providers::#{@ai_provider} 40 | Sublayer.configuration.ai_model = "#{@ai_model}" 41 | CONFIG 42 | end 43 | end 44 | 45 | def finalize_project 46 | inside(project_name) do 47 | append_to_file "#{project_name}.rb" do 48 | <<~INSTRUCTIONS 49 | puts "Welcome to your quick Sublayer script!" 
50 | puts "To get started, create some generators, actions, or agents in their respective directories and call them here" 51 | puts "For more information, visit https://docs.sublayer.com" 52 | INSTRUCTIONS 53 | end 54 | 55 | run("git init") if yes?("Initialize a git repository?") 56 | end 57 | end 58 | 59 | def print_next_steps 60 | say "\nSublayer project '#{project_name}' created successfully!", :green 61 | say "To get started, run:" 62 | say " cd #{project_name}" 63 | say " ruby #{project_name}.rb" 64 | end 65 | 66 | private 67 | def select_ai_model 68 | case @ai_provider 69 | when "OpenAI" 70 | ask("Which OpenAI model would you like to use?", default: "gpt-4o", limited_to: %w[gpt-4o gpt-4o-mini gpt-4-turbo gpt-3.5-turbo]) 71 | when "Claude" 72 | ask("Which Anthropic model would you like to use?", default: "claude-3-5-sonnet-20240620", limited_to: %w[claude-3-5-sonnet-20240620 claude-3-opus-20240620 claude-3-haiku-20240307]) 73 | when "Gemini" 74 | ask("Which Google model would you like to use?", default: "gemini-1.5-flash-latest", limited_to: %w[gemini-1.5-flash-latest gemini-1.5-pro-latest]) 75 | end 76 | end 77 | end 78 | end 79 | end 80 | -------------------------------------------------------------------------------- /spec/components/output_adapters/list_of_named_strings_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Components::OutputAdapters::ListOfNamedStrings do 4 | let(:name) { 'test_adapter' } 5 | let(:description) { 'Test adapter description' } 6 | let(:attributes) do 7 | [ 8 | { name: "field1", description: "Description of field1" }, 9 | { name: "field2", description: "Description of field2" }, 10 | ] 11 | end 12 | 13 | let(:output_adapter) { described_class.new(name: name, description: description, attributes: attributes) } 14 | 15 | describe "#initialize" do 16 | it "sets the name, description, and attributes" do 17 | expect(output_adapter.name).to eq(name) 18 | expect(output_adapter.description).to eq(description) 19 | expect(output_adapter.attributes).to eq(attributes) 20 | end 21 | end 22 | 23 | describe "#properties" do 24 | it "returns an array with one OpenStruct object" do 25 | properties = output_adapter.properties 26 | expect(properties).to be_an(Array) 27 | expect(properties.size).to eq(1) 28 | expect(properties.first).to be_an(OpenStruct) 29 | end 30 | 31 | it "sets the correct attributes for the main property" do 32 | property = output_adapter.properties.first 33 | expect(property.name).to eq(name) 34 | expect(property.description).to eq(description) 35 | expect(property.required).to eq(true) 36 | expect(property.type).to eq("array") 37 | end 38 | 39 | it "sets the correct nested properties for items" do 40 | items = output_adapter.properties.first.items 41 | expect(items).to be_a(OpenStruct) 42 | expect(items.type).to eq("object") 43 | expect(items.properties).to be_an(Array) 44 | expect(items.properties.size).to eq(2) 45 | expect(items.properties.first).to be_an(OpenStruct) 46 | expect(items.properties.first.name).to eq("field1") 47 | expect(items.properties.first.type).to eq("string") 48 | expect(items.properties.first.description).to eq("Description of field1") 49 | expect(items.properties.first.required).to eq(true) 50 | expect(items.properties.last.name).to eq("field2") 51 | expect(items.properties.last.type).to eq("string") 52 | expect(items.properties.last.description).to eq("Description of field2") 53 | expect(items.properties.last.required).to 
eq(true) 54 | end 55 | end 56 | 57 | describe "#materialize_result" do 58 | it "converts the raw result to an array of OpenStruct objects" do 59 | raw_result = [ 60 | { "field1" => "value1", "field2" => "value2" }, 61 | { "field1" => "value3", "field2" => "value4" } 62 | ] 63 | 64 | result = output_adapter.materialize_result(raw_result) 65 | expect(result).to be_an(Array) 66 | expect(result.size).to eq(2) 67 | expect(result.all? { |item| item.is_a?(OpenStruct) }).to be true 68 | expect(result.first.field1).to eq("value1") 69 | expect(result.first.field2).to eq("value2") 70 | expect(result.last.field1).to eq("value3") 71 | expect(result.last.field2).to eq("value4") 72 | end 73 | end 74 | end 75 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/gemini/generators/four_digit_passcode_generator/find_number.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent?key= 6 | body: 7 | encoding: UTF-8 8 | string: '{"contents":{"role":"user","parts":{"text":" You are an expert of 9 | common four digit passcodes\n\n Provide a four digit passcode that is uncommon 10 | and hard to guess\n"}},"generationConfig":{"responseMimeType":"application/json","responseSchema":{"type":"OBJECT","properties":{"four_digit_passcode":{"type":"integer","description":"an 11 | uncommon and difficult to guess four digit passcode"}},"required":["four_digit_passcode"]}}}' 12 | headers: 13 | Content-Type: 14 | - application/json 15 | Accept-Encoding: 16 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 17 | Accept: 18 | - "*/*" 19 | User-Agent: 20 | - Ruby 21 | response: 22 | status: 23 | code: 200 24 | message: OK 25 | headers: 26 | Content-Type: 27 | - application/json; charset=UTF-8 28 | Vary: 29 | - Origin 30 | - Referer 31 | - X-Origin 32 | Date: 33 | - Mon, 26 Aug 2024 20:23:48 GMT 34 | Server: 35 | - scaffolding on HTTPServer2 36 | Cache-Control: 37 | - private 38 | X-Xss-Protection: 39 | - '0' 40 | X-Frame-Options: 41 | - SAMEORIGIN 42 | X-Content-Type-Options: 43 | - nosniff 44 | Server-Timing: 45 | - gfet4t7; dur=391 46 | Alt-Svc: 47 | - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 48 | Transfer-Encoding: 49 | - chunked 50 | body: 51 | encoding: ASCII-8BIT 52 | string: | 53 | { 54 | "candidates": [ 55 | { 56 | "content": { 57 | "parts": [ 58 | { 59 | "text": "{\"four_digit_passcode\": 8901}\n" 60 | } 61 | ], 62 | "role": "model" 63 | }, 64 | "finishReason": "STOP", 65 | "index": 0, 66 | "safetyRatings": [ 67 | { 68 | "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", 69 | "probability": "NEGLIGIBLE" 70 | }, 71 | { 72 | "category": "HARM_CATEGORY_HATE_SPEECH", 73 | "probability": "NEGLIGIBLE" 74 | }, 75 | { 76 | "category": "HARM_CATEGORY_HARASSMENT", 77 | "probability": "NEGLIGIBLE" 78 | }, 79 | { 80 | "category": "HARM_CATEGORY_DANGEROUS_CONTENT", 81 | "probability": "LOW" 82 | } 83 | ] 84 | } 85 | ], 86 | "usageMetadata": { 87 | "promptTokenCount": 28, 88 | "candidatesTokenCount": 14, 89 | "totalTokenCount": 42 90 | } 91 | } 92 | recorded_at: Mon, 26 Aug 2024 20:23:48 GMT 93 | recorded_with: VCR 6.2.0 94 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | PATH 2 | remote: . 
3 | specs: 4 | sublayer (0.2.9) 5 | activesupport 6 | httparty 7 | listen 8 | ostruct 9 | ruby-openai 10 | thor 11 | zeitwerk 12 | 13 | GEM 14 | remote: https://rubygems.org/ 15 | specs: 16 | activesupport (7.1.1) 17 | base64 18 | bigdecimal 19 | concurrent-ruby (~> 1.0, >= 1.0.2) 20 | connection_pool (>= 2.2.5) 21 | drb 22 | i18n (>= 1.6, < 2) 23 | minitest (>= 5.1) 24 | mutex_m 25 | tzinfo (~> 2.0) 26 | addressable (2.8.7) 27 | public_suffix (>= 2.0.2, < 7.0) 28 | base64 (0.1.1) 29 | bigdecimal (3.1.8) 30 | coderay (1.1.3) 31 | concurrent-ruby (1.2.2) 32 | connection_pool (2.4.1) 33 | crack (1.0.0) 34 | bigdecimal 35 | rexml 36 | csv (3.3.5) 37 | diff-lcs (1.5.0) 38 | dotenv (3.1.8) 39 | drb (2.1.1) 40 | ruby2_keywords 41 | event_stream_parser (0.3.0) 42 | faraday (2.7.11) 43 | base64 44 | faraday-net_http (>= 2.0, < 3.1) 45 | ruby2_keywords (>= 0.0.4) 46 | faraday-multipart (1.0.4) 47 | multipart-post (~> 2) 48 | faraday-net_http (3.0.2) 49 | ffi (1.16.3) 50 | hashdiff (1.1.0) 51 | httparty (0.23.1) 52 | csv 53 | mini_mime (>= 1.0.0) 54 | multi_xml (>= 0.5.2) 55 | i18n (1.14.1) 56 | concurrent-ruby (~> 1.0) 57 | listen (3.9.0) 58 | rb-fsevent (~> 0.10, >= 0.10.3) 59 | rb-inotify (~> 0.9, >= 0.9.10) 60 | method_source (1.0.0) 61 | mini_mime (1.1.5) 62 | minitest (5.20.0) 63 | multi_xml (0.7.2) 64 | bigdecimal (~> 3.1) 65 | multipart-post (2.3.0) 66 | mutex_m (0.1.2) 67 | ostruct (0.6.1) 68 | pry (0.14.1) 69 | coderay (~> 1.1) 70 | method_source (~> 1.0) 71 | public_suffix (6.0.0) 72 | rake (13.3.0) 73 | rb-fsevent (0.11.2) 74 | rb-inotify (0.10.1) 75 | ffi (~> 1.0) 76 | rexml (3.3.4) 77 | strscan 78 | rspec (3.12.0) 79 | rspec-core (~> 3.12.0) 80 | rspec-expectations (~> 3.12.0) 81 | rspec-mocks (~> 3.12.0) 82 | rspec-core (3.12.2) 83 | rspec-support (~> 3.12.0) 84 | rspec-expectations (3.12.3) 85 | diff-lcs (>= 1.2.0, < 2.0) 86 | rspec-support (~> 3.12.0) 87 | rspec-mocks (3.12.6) 88 | diff-lcs (>= 1.2.0, < 2.0) 89 | rspec-support (~> 3.12.0) 90 | rspec-support (3.12.1) 91 | ruby-openai (6.3.1) 92 | event_stream_parser (>= 0.3.0, < 2.0.0) 93 | faraday (>= 1) 94 | faraday-multipart (>= 1) 95 | ruby2_keywords (0.0.5) 96 | strscan (3.1.0) 97 | thor (1.3.1) 98 | tzinfo (2.0.6) 99 | concurrent-ruby (~> 1.0) 100 | vcr (6.2.0) 101 | webmock (3.23.1) 102 | addressable (>= 2.8.0) 103 | crack (>= 0.3.2) 104 | hashdiff (>= 0.4.0, < 2.0.0) 105 | zeitwerk (2.6.13) 106 | 107 | PLATFORMS 108 | arm64-darwin-22 109 | arm64-darwin-23 110 | arm64-darwin-24 111 | x86_64-linux 112 | 113 | DEPENDENCIES 114 | dotenv (~> 3.1) 115 | pry (~> 0.14) 116 | rake 117 | rspec (~> 3.12) 118 | sublayer! 
119 | vcr (~> 6.0) 120 | webmock (~> 3) 121 | 122 | BUNDLED WITH 123 | 2.6.9 124 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/42.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"the_answer","description":"The 9 | answer to the given question","input_schema":{"type":"object","properties":{"the_answer":{"type":"string","description":"The 10 | answer to the given question"}},"required":["the_answer"]}}],"tool_choice":{"type":"tool","name":"the_answer"},"messages":[{"role":"user","content":"What 11 | is the meaning of life, the universe, and everything?"}]}' 12 | headers: 13 | X-Api-Key: 14 | - "" 15 | Anthropic-Version: 16 | - '2023-06-01' 17 | Content-Type: 18 | - application/json 19 | Anthropic-Beta: 20 | - tools-2024-04-04 21 | Accept-Encoding: 22 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 23 | Accept: 24 | - "*/*" 25 | User-Agent: 26 | - Ruby 27 | response: 28 | status: 29 | code: 200 30 | message: OK 31 | headers: 32 | Date: 33 | - Sun, 04 Aug 2024 19:33:15 GMT 34 | Content-Type: 35 | - application/json 36 | Transfer-Encoding: 37 | - chunked 38 | Connection: 39 | - keep-alive 40 | Anthropic-Ratelimit-Requests-Limit: 41 | - '1000' 42 | Anthropic-Ratelimit-Requests-Remaining: 43 | - '999' 44 | Anthropic-Ratelimit-Requests-Reset: 45 | - '2024-08-04T19:33:34Z' 46 | Anthropic-Ratelimit-Tokens-Limit: 47 | - '100000' 48 | Anthropic-Ratelimit-Tokens-Remaining: 49 | - '100000' 50 | Anthropic-Ratelimit-Tokens-Reset: 51 | - '2024-08-04T19:33:15Z' 52 | Request-Id: 53 | - req_01VUCrnjKtNN2kMwfiuLzT2z 54 | X-Cloud-Trace-Context: 55 | - 8fc5881086372399b7612335c855a900 56 | Via: 57 | - 1.1 google 58 | Cf-Cache-Status: 59 | - DYNAMIC 60 | Server: 61 | - cloudflare 62 | Cf-Ray: 63 | - 8ae0fa5abc5f7292-EWR 64 | body: 65 | encoding: ASCII-8BIT 66 | string: '{"id":"msg_017npncUrHeVNPbSVCDiNFL5","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01Vyi1yFkE6dCqenqvr17hQZ","name":"the_answer","input":{"the_answer":"According 67 | to the novel The Hitchhiker''s Guide to the Galaxy by Douglas Adams, the answer 68 | to \"the meaning of life, the universe, and everything\" is 42. This is the 69 | answer that is calculated by a powerful supercomputer called Deep Thought 70 | after 7.5 million years of computation. However, the meaning behind the number 71 | 42 is left somewhat ambiguous, as Deep Thought explains that the beings who 72 | requested the answer would not like or understand it. 
Ultimately, the book 73 | suggests that the meaning of life may not be a simple, knowable answer, but 74 | rather an ongoing journey of discovery, understanding, and acceptance of the 75 | inherent mysteries of existence."}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":449,"output_tokens":173}}' 76 | recorded_at: Sun, 04 Aug 2024 19:33:15 GMT 77 | recorded_with: VCR 6.2.0 78 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/gemini/generators/sentiment_from_text_generator/positive.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent?key= 6 | body: 7 | encoding: UTF-8 8 | string: '{"contents":{"role":"user","parts":{"text":" You are an expert 9 | at determining sentiment from text.\n\n You are tasked with analyzing 10 | the following text and determining its sentiment value.\n\n The text 11 | is:\n Matz is nice so we are nice\n"}},"generationConfig":{"responseMimeType":"application/json","responseSchema":{"type":"OBJECT","properties":{"sentiment_value":{"type":"string","description":"A 12 | sentiment value from the list","enum":["positive","negative","neutral"]}},"required":["sentiment_value"]}}}' 13 | headers: 14 | Content-Type: 15 | - application/json 16 | Accept-Encoding: 17 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 18 | Accept: 19 | - "*/*" 20 | User-Agent: 21 | - Ruby 22 | response: 23 | status: 24 | code: 200 25 | message: OK 26 | headers: 27 | Content-Type: 28 | - application/json; charset=UTF-8 29 | Vary: 30 | - Origin 31 | - Referer 32 | - X-Origin 33 | Date: 34 | - Mon, 26 Aug 2024 19:21:04 GMT 35 | Server: 36 | - scaffolding on HTTPServer2 37 | Cache-Control: 38 | - private 39 | X-Xss-Protection: 40 | - '0' 41 | X-Frame-Options: 42 | - SAMEORIGIN 43 | X-Content-Type-Options: 44 | - nosniff 45 | Server-Timing: 46 | - gfet4t7; dur=288 47 | Alt-Svc: 48 | - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 49 | Transfer-Encoding: 50 | - chunked 51 | body: 52 | encoding: ASCII-8BIT 53 | string: | 54 | { 55 | "candidates": [ 56 | { 57 | "content": { 58 | "parts": [ 59 | { 60 | "text": "{\"sentiment_value\": \"positive\"}\n" 61 | } 62 | ], 63 | "role": "model" 64 | }, 65 | "finishReason": "STOP", 66 | "index": 0, 67 | "safetyRatings": [ 68 | { 69 | "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", 70 | "probability": "NEGLIGIBLE" 71 | }, 72 | { 73 | "category": "HARM_CATEGORY_HATE_SPEECH", 74 | "probability": "NEGLIGIBLE" 75 | }, 76 | { 77 | "category": "HARM_CATEGORY_HARASSMENT", 78 | "probability": "NEGLIGIBLE" 79 | }, 80 | { 81 | "category": "HARM_CATEGORY_DANGEROUS_CONTENT", 82 | "probability": "NEGLIGIBLE" 83 | } 84 | ] 85 | } 86 | ], 87 | "usageMetadata": { 88 | "promptTokenCount": 45, 89 | "candidatesTokenCount": 8, 90 | "totalTokenCount": 53 91 | } 92 | } 93 | recorded_at: Mon, 26 Aug 2024 19:21:04 GMT 94 | recorded_with: VCR 6.2.0 95 | -------------------------------------------------------------------------------- /spec/generators/imaginary_movie_review_generator_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "generators/examples/imaginary_movie_review_generator" 4 | 5 | RSpec.describe ImaginaryMovieReviewGenerator do 6 | subject { described_class.new(num_reviews: 3) } 7 | 8 | context "OpenAI" do 9 | before do 10 | 
Sublayer.configuration.ai_provider = Sublayer::Providers::OpenAI 11 | Sublayer.configuration.ai_model = "gpt-4o" 12 | end 13 | 14 | it "generates a list of imaginary movie reviews" do 15 | VCR.use_cassette("openai/generators/imaginary_movie_review_generator/3_reviews") do 16 | reviews = subject.generate 17 | 18 | expect(reviews).to be_an(Array) 19 | expect(reviews.size).to eq(3) 20 | expect(reviews.first).to respond_to(:movie_title) 21 | expect(reviews.first).to respond_to(:reviewer_name) 22 | expect(reviews.first).to respond_to(:rating) 23 | expect(reviews.first).to respond_to(:brief_comment) 24 | 25 | expect(reviews.first.movie_title).to be_a(String) 26 | expect(reviews.first.reviewer_name).to be_a(String) 27 | expect(reviews.first.rating).to be_a(String) 28 | expect(reviews.first.brief_comment).to be_a(String) 29 | end 30 | end 31 | end 32 | 33 | context "Claude" do 34 | before do 35 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 36 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 37 | end 38 | 39 | it "generates a list of imaginary movie reviews" do 40 | VCR.use_cassette("claude/generators/imaginary_movie_review_generator/3_reviews") do 41 | reviews = subject.generate 42 | 43 | expect(reviews).to be_an(Array) 44 | expect(reviews.size).to eq(3) 45 | expect(reviews.first).to respond_to(:movie_title) 46 | expect(reviews.first).to respond_to(:reviewer_name) 47 | expect(reviews.first).to respond_to(:rating) 48 | expect(reviews.first).to respond_to(:brief_comment) 49 | 50 | expect(reviews.first.movie_title).to be_a(String) 51 | expect(reviews.first.reviewer_name).to be_a(String) 52 | expect(reviews.first.rating).to be_a(String) 53 | expect(reviews.first.brief_comment).to be_a(String) 54 | end 55 | end 56 | end 57 | 58 | context "Gemini" do 59 | before do 60 | Sublayer.configuration.ai_provider = Sublayer::Providers::Gemini 61 | Sublayer.configuration.ai_model = "gemini-1.5-pro-latest" 62 | end 63 | 64 | it "generates a list of imaginary movie reviews" do 65 | VCR.use_cassette("gemini/generators/imaginary_movie_review_generator/3_reviews") do 66 | reviews = subject.generate 67 | 68 | expect(reviews).to be_an(Array) 69 | expect(reviews.size).to eq(3) 70 | expect(reviews.first).to respond_to(:movie_title) 71 | expect(reviews.first).to respond_to(:reviewer_name) 72 | expect(reviews.first).to respond_to(:rating) 73 | expect(reviews.first).to respond_to(:brief_comment) 74 | 75 | expect(reviews.first.movie_title).to be_a(String) 76 | expect(reviews.first.reviewer_name).to be_a(String) 77 | expect(reviews.first.rating).to be_a(String) 78 | expect(reviews.first.brief_comment).to be_a(String) 79 | end 80 | end 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/gemini/generators/route_selection_from_user_intent_generator/route.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent?key= 6 | body: 7 | encoding: UTF-8 8 | string: '{"contents":{"role":"user","parts":{"text":"      You are skilled 9 | at selecting routes based on user intent.\n\n      Your task is to choose 10 | a route based on the following intent:\n\n      The user''s intent is:\n      I 11 | want to get all the
users\n"}},"generationConfig":{"responseMimeType":"application/json","responseSchema":{"type":"OBJECT","properties":{"route":{"type":"string","description":"A 12 | route selected from the list","enum":["GET /","GET /users","GET /users/:id","POST 13 | /users","PUT /users/:id","DELETE /users/:id"]}},"required":["route"]}}}' 14 | headers: 15 | Content-Type: 16 | - application/json 17 | Accept-Encoding: 18 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 19 | Accept: 20 | - "*/*" 21 | User-Agent: 22 | - Ruby 23 | response: 24 | status: 25 | code: 200 26 | message: OK 27 | headers: 28 | Content-Type: 29 | - application/json; charset=UTF-8 30 | Vary: 31 | - Origin 32 | - Referer 33 | - X-Origin 34 | Date: 35 | - Mon, 26 Aug 2024 19:21:06 GMT 36 | Server: 37 | - scaffolding on HTTPServer2 38 | Cache-Control: 39 | - private 40 | X-Xss-Protection: 41 | - '0' 42 | X-Frame-Options: 43 | - SAMEORIGIN 44 | X-Content-Type-Options: 45 | - nosniff 46 | Server-Timing: 47 | - gfet4t7; dur=341 48 | Alt-Svc: 49 | - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 50 | Transfer-Encoding: 51 | - chunked 52 | body: 53 | encoding: ASCII-8BIT 54 | string: | 55 | { 56 | "candidates": [ 57 | { 58 | "content": { 59 | "parts": [ 60 | { 61 | "text": "{\"route\": \"GET /users\"}\n" 62 | } 63 | ], 64 | "role": "model" 65 | }, 66 | "finishReason": "STOP", 67 | "index": 0, 68 | "safetyRatings": [ 69 | { 70 | "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", 71 | "probability": "NEGLIGIBLE" 72 | }, 73 | { 74 | "category": "HARM_CATEGORY_HATE_SPEECH", 75 | "probability": "NEGLIGIBLE" 76 | }, 77 | { 78 | "category": "HARM_CATEGORY_HARASSMENT", 79 | "probability": "NEGLIGIBLE" 80 | }, 81 | { 82 | "category": "HARM_CATEGORY_DANGEROUS_CONTENT", 83 | "probability": "NEGLIGIBLE" 84 | } 85 | ] 86 | } 87 | ], 88 | "usageMetadata": { 89 | "promptTokenCount": 47, 90 | "candidatesTokenCount": 8, 91 | "totalTokenCount": 55 92 | } 93 | } 94 | recorded_at: Mon, 26 Aug 2024 19:21:06 GMT 95 | recorded_with: VCR 6.2.0 96 | -------------------------------------------------------------------------------- /spec/providers/claude_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | RSpec.describe Sublayer::Providers::Claude do 4 | let(:basic_output_adapter) { 5 | Sublayer::Components::OutputAdapters.create( 6 | type: :single_string, 7 | name: "the_answer", 8 | description: "The answer to the given question" 9 | ).extend(Sublayer::Components::OutputAdapters::Formattable) 10 | } 11 | 12 | before do 13 | Sublayer.configuration.ai_provider = described_class 14 | Sublayer.configuration.ai_model = "claude-3-haiku-20240307" 15 | end 16 | 17 | describe "#call" do 18 | it "calls the Claude API" do 19 | VCR.use_cassette("claude/42") do 20 | response = described_class.call( 21 | prompt: "What is the meaning of life, the universe, and everything?", 22 | output_adapter: basic_output_adapter 23 | ) 24 | 25 | expect(response).to be_a(String) 26 | expect(response.length).to be > 0 27 | end 28 | end 29 | 30 | context "logging" do 31 | let(:mock_logger) { instance_double(Sublayer::Logging::Base) } 32 | 33 | before do 34 | Sublayer.configuration.logger = mock_logger 35 | end 36 | 37 | after do 38 | Sublayer.configuration.logger = Sublayer::Logging::NullLogger.new 39 | end 40 | 41 | it "logs the request and response" do 42 | expect(mock_logger).to receive(:log).with(:info, "Claude API request", hash_including(:model, :prompt)) 43 | expect(mock_logger).to receive(:log).with(:info, "Claude API response", 
instance_of(Hash)) 44 | 45 | VCR.use_cassette("claude/42") do 46 | described_class.call( 47 | prompt: "What is the meaning of life, the universe, and everything?", 48 | output_adapter: basic_output_adapter 49 | ) 50 | end 51 | end 52 | 53 | end 54 | 55 | context "when the API returns a non-200 status code" do 56 | it "raises an error" do 57 | expect(HTTParty).to receive(:post).and_return(double(code: 500, body: "Internal Server Error")) 58 | 59 | expect { 60 | described_class.call( 61 | prompt: "What is the meaning of life, the universe, and everything?", 62 | output_adapter: basic_output_adapter 63 | ) 64 | }.to raise_error("Error generating with Claude, error: Internal Server Error") 65 | end 66 | end 67 | 68 | context "when the API doesn't call a function" do 69 | it "raises a no function called exception" do 70 | VCR.use_cassette("claude/no_function") do 71 | expect { 72 | described_class.call( 73 | prompt: "What is the meaning of life, the universe, and everything?", 74 | output_adapter: basic_output_adapter 75 | ) 76 | }.to raise_error(/No function called/) 77 | end 78 | end 79 | end 80 | 81 | context "When the response is too long" do 82 | it "raises a max tokens exception" do 83 | VCR.use_cassette("claude/max_tokens") do 84 | expect { 85 | described_class.call( 86 | prompt: "What is the meaning of life, the universe, and everything?", 87 | output_adapter: basic_output_adapter 88 | ) 89 | }.to raise_error(/Max tokens/) 90 | end 91 | end 92 | end 93 | end 94 | end 95 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/generators/sublayer_agent_generator.rb: -------------------------------------------------------------------------------- 1 | class SublayerAgentGenerator < Sublayer::Generators::Base 2 | llm_output_adapter type: :named_strings, 3 | name: "sublayer_agent", 4 | description: "The new sublayer agent based on the description and supporting information", 5 | attributes: [ 6 | { name: "code", description: "The code of the generated Sublayer agent" }, 7 | { name: "filename", description: "The filename of the generated sublayer agent snake cased with a .rb extension" } 8 | ] 9 | 10 | def initialize(description:, trigger:, goal:, check_status:, step:) 11 | @description = description 12 | @trigger = trigger 13 | @goal = goal 14 | @check_status = check_status 15 | @step = step 16 | end 17 | 18 | def generate 19 | super 20 | end 21 | 22 | def prompt 23 | <<-PROMPT 24 | You are an expert ruby programmer and great at repurposing code examples to use for new situations. 25 | 26 | A Sublayer agent is a DSL for defining a feedback loop for an AI agent. The agents sit running like a daemon and are triggered to run and step toward their goal checking status along the way. 27 | 28 | One example of a Sublayer agent is this one for doing TDD with RSpec: 29 | 30 | #{example_agent} 31 | 32 | 33 | Sublayer Agents take advantage of other Sublayer components to perform their tasks. 34 | Sublayer::Actions are used to perform actions in the outside world, things like saving files, making external api calls, retrieving data, etc 35 | Sublayer::Generators are used to make calls to LLMs based on information they receive from Sublayer::Actions or other sources and return structured data for other Sublayer::Actions to use in the outside world 36 | 37 | The Sublayer Agent DSL consists of 4 main parts: 38 | 1.
trigger: What triggers the agent, currently built into the framework is the trigger_on_files_changed, but the trigger method also accepts a Sublayer::Triggers::Base subclass that defines an initialize method and a setup method that takes an agent as an argument 39 | The setup method then calls activate(agent) when the trigger condition is met 40 | 2. goal: What the agent is trying to achieve, this is a block that returns a boolean 41 | 3. check_status: A method that checks the status of the agent on its way toward the goal. Usually you update the goal condition here or can perform any Sublayer::Actions to examine the state of the outside world 42 | 4. step: A method that encapsulates the way the agent should work toward the goal. Sublayer::Actions and Sublayer::Generators are used heavily here. 43 | 44 | Your goal is to rely on the above information about how Sublayer Agents work to generate a new Sublayer agent based on the following information: 45 | 46 | Agent description: #{@description} 47 | Trigger condition: #{@trigger} 48 | Goal condition: #{@goal} 49 | Status check method: #{@check_status} 50 | Step action method: #{@step} 51 | 52 | You can assume that any Sublayer::Actions or Sublayer::Generators you need are available to you in the Sublayer framework 53 | 54 | Take a deep breath and think step by step before coding. You can do this! 55 | PROMPT 56 | end 57 | 58 | def example_agent 59 | File.read(File.join(__dir__, "example_agent.rb")) 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/generators/code_from_description_generator/hello_world.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-haiku-20240307","max_tokens":4096,"tools":[{"name":"generated_code","description":"The 9 | generated code in the requested language","input_schema":{"type":"object","properties":{"generated_code":{"type":"string","description":"The 10 | generated code in the requested language"}},"required":["generated_code"]}}],"tool_choice":{"type":"tool","name":"generated_code"},"messages":[{"role":"user","content":" You 11 | are an expert programmer in ruby.\n\n You are tasked with writing code 12 | using the following technologies: ruby.\n\n The description of the 13 | task is a hello world app where I pass --who argument to set the ''world'' 14 | value using optparser\n\n Take a deep breath and think step by step 15 | before you start coding.\n"}]}' 16 | headers: 17 | X-Api-Key: 18 | - "" 19 | Anthropic-Version: 20 | - '2023-06-01' 21 | Content-Type: 22 | - application/json 23 | Anthropic-Beta: 24 | - tools-2024-04-04 25 | Accept-Encoding: 26 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 27 | Accept: 28 | - "*/*" 29 | User-Agent: 30 | - Ruby 31 | response: 32 | status: 33 | code: 200 34 | message: OK 35 | headers: 36 | Date: 37 | - Sun, 04 Aug 2024 19:33:06 GMT 38 | Content-Type: 39 | - application/json 40 | Transfer-Encoding: 41 | - chunked 42 | Connection: 43 | - keep-alive 44 | Anthropic-Ratelimit-Requests-Limit: 45 | - '1000' 46 | Anthropic-Ratelimit-Requests-Remaining: 47 | - '999' 48 | Anthropic-Ratelimit-Requests-Reset: 49 | - '2024-08-04T19:33:34Z' 50 | Anthropic-Ratelimit-Tokens-Limit: 51 | - '100000' 52 | Anthropic-Ratelimit-Tokens-Remaining: 53 | - '100000' 54 | Anthropic-Ratelimit-Tokens-Reset: 55 | - 
'2024-08-04T19:33:06Z' 56 | Request-Id: 57 | - req_01N2NVQK2ANfGYXfw6e4MGrH 58 | X-Cloud-Trace-Context: 59 | - 625af0deb82bda23aaf86316d8e19f4b 60 | Via: 61 | - 1.1 google 62 | Cf-Cache-Status: 63 | - DYNAMIC 64 | Server: 65 | - cloudflare 66 | Cf-Ray: 67 | - 8ae0fa23c82d435d-EWR 68 | body: 69 | encoding: ASCII-8BIT 70 | string: '{"id":"msg_01GNwSQikVCoYqdatMErDw8J","type":"message","role":"assistant","model":"claude-3-haiku-20240307","content":[{"type":"tool_use","id":"toolu_01WGbAcHYKbMpoYmQQGbELcm","name":"generated_code","input":{"generated_code":"# 71 | Hello World App in Ruby with Optparser\n\nrequire ''optparse''\n\noptions 72 | = {}\nOptionParser.new do |opts|\n opts.banner = \"Usage: hello_world.rb 73 | [options]\"\n\n opts.on(\"-w\", \"--who VALUE\", \"Set the ''world'' value\") 74 | do |v|\n options[:who] = v\n end\n\n opts.on(\"-h\", \"--help\", \"Prints 75 | this help\") do\n puts opts\n exit\n end\nend.parse!\n\nwho = options[:who] 76 | || \"world\"\nputs \"Hello, #{who}!\""}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":508,"output_tokens":174}}' 77 | recorded_at: Sun, 04 Aug 2024 19:33:06 GMT 78 | recorded_with: VCR 6.2.0 79 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | Sublayer is a Ruby gem and CLI framework for building AI-powered applications. It provides base classes for Generators, Actions, Tasks, and Agents that work with multiple AI providers (OpenAI, Claude, Gemini). 8 | 9 | ## Development Commands 10 | 11 | ```bash 12 | # Setup 13 | bin/setup # Install dependencies and configure dummy API keys 14 | bundle install # Install gem dependencies 15 | 16 | # Testing 17 | rake spec # Run RSpec test suite 18 | rake # Default task (runs specs) 19 | 20 | # Building and releasing 21 | rake build # Build gem package 22 | rake install # Install gem locally 23 | rake release # Release to RubyGems 24 | 25 | # CLI usage (after install) 26 | sublayer new PROJECT_NAME --template=[CLI|GithubAction|QuickScript] --provider=[OpenAI|Claude|Gemini] 27 | sublayer generate:generator # Generate new Generator class 28 | sublayer generate:agent # Generate new Agent class 29 | sublayer generate:action # Generate new Action class 30 | ``` 31 | 32 | ## Architecture 33 | 34 | The framework is built around four core concepts: 35 | 36 | - **Generators** (`lib/sublayer/generators/base.rb`) - Generate specific outputs based on input data 37 | - **Actions** (`lib/sublayer/actions/base.rb`) - Perform operations to get inputs or use generated outputs 38 | - **Agents** (`lib/sublayer/agents/base.rb`) - Autonomous entities with triggers and goal conditions 39 | - **Tasks** (`lib/sublayer/tasks/base.rb`) - Task management components 40 | 41 | **AI Provider System**: The framework supports multiple AI providers through a unified interface: 42 | - OpenAI (default: gpt-4o) - `lib/sublayer/providers/open_ai.rb` 43 | - Claude (Anthropic) - `lib/sublayer/providers/claude.rb` 44 | - Gemini (Google, beta) - `lib/sublayer/providers/gemini.rb` 45 | 46 | Provider configuration is done via: 47 | ```ruby 48 | Sublayer.configuration.ai_provider = Sublayer::Providers::Claude 49 | Sublayer.configuration.ai_model = "claude-3-5-sonnet-20240620" 50 | ``` 51 | 52 | **CLI System**: The CLI (`lib/sublayer/cli/`) 
includes commands and project templates. Templates generate different project types (CLI apps, GitHub Actions, Quick Scripts) with appropriate scaffolding. 53 | 54 | **Trigger System**: Event-based activation system (`lib/sublayer/triggers/`) for file changes and other events. 55 | 56 | ## Testing 57 | 58 | - **Framework**: RSpec with VCR for HTTP request recording/mocking 59 | - **API Keys**: Tests use dummy keys, sensitive data is filtered from VCR cassettes 60 | - **Structure**: Tests organized by component type in `spec/` directory 61 | - **Examples**: Example implementations in `spec/generators/examples/` and `spec/agents/examples/` 62 | 63 | ## Environment Setup 64 | 65 | Required environment variables for AI providers: 66 | ```bash 67 | OPENAI_API_KEY=your_key_here 68 | ANTHROPIC_API_KEY=your_key_here 69 | GEMINI_API_KEY=your_key_here 70 | ``` 71 | 72 | ## Key Files 73 | 74 | - `lib/sublayer.rb` - Main entry point and configuration 75 | - `bin/sublayer` - CLI executable 76 | - `sublayer.gemspec` - Gem specification 77 | - `lib/sublayer/cli/templates/` - Project scaffolding templates 78 | 79 | ## Notes 80 | 81 | - Framework is pre-1.0 with breaking changes expected in minor releases 82 | - Ruby >= 3.2.0 required 83 | - Gemini provider is marked as unstable/beta -------------------------------------------------------------------------------- /spec/vcr_cassettes/claude/generators/blog_post_keyword_suggestions_generator/ai_in_healthcare.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.anthropic.com/v1/messages 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"claude-3-5-sonnet-20240620","max_tokens":4096,"tools":[{"name":"suggestions","description":"List 9 | of keyword suggestions","input_schema":{"type":"object","properties":{"suggestions":{"type":"array","description":"List 10 | of keyword suggestions","items":{"type":"string"}}},"required":["suggestions"]}}],"tool_choice":{"type":"tool","name":"suggestions"},"messages":[{"role":"user","content":" You 11 | are an SEO expect tasked with suggesting keywords for a blog post.\n\n The 12 | blog post topic is: Artificial Intelligence in Healthcare\n\n Please suggest 13 | relevant keywords or key phrases for this post''s topic.\n Each keyword 14 | or phrase should be concise and directly related to the topic.\n\n Provide 15 | your suggestions as a list of strings.\n"}]}' 16 | headers: 17 | X-Api-Key: 18 | - "" 19 | Anthropic-Version: 20 | - '2023-06-01' 21 | Content-Type: 22 | - application/json 23 | Anthropic-Beta: 24 | - tools-2024-04-04 25 | Accept-Encoding: 26 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 27 | Accept: 28 | - "*/*" 29 | User-Agent: 30 | - Ruby 31 | response: 32 | status: 33 | code: 200 34 | message: OK 35 | headers: 36 | Date: 37 | - Sun, 04 Aug 2024 19:33:10 GMT 38 | Content-Type: 39 | - application/json 40 | Transfer-Encoding: 41 | - chunked 42 | Connection: 43 | - keep-alive 44 | Anthropic-Ratelimit-Requests-Limit: 45 | - '1000' 46 | Anthropic-Ratelimit-Requests-Remaining: 47 | - '999' 48 | Anthropic-Ratelimit-Requests-Reset: 49 | - '2024-08-04T19:33:34Z' 50 | Anthropic-Ratelimit-Tokens-Limit: 51 | - '80000' 52 | Anthropic-Ratelimit-Tokens-Remaining: 53 | - '80000' 54 | Anthropic-Ratelimit-Tokens-Reset: 55 | - '2024-08-04T19:33:10Z' 56 | Request-Id: 57 | - req_01JGRHGbpF5REqcgdSpAoe2c 58 | X-Cloud-Trace-Context: 59 | - e5518c6c0c14f27fca305a19c1ccfb58 60 | Via: 61 | - 1.1 google 62 | 
Cf-Cache-Status: 63 | - DYNAMIC 64 | Server: 65 | - cloudflare 66 | Cf-Ray: 67 | - 8ae0fa2d2d6a438b-EWR 68 | body: 69 | encoding: ASCII-8BIT 70 | string: '{"id":"msg_015yAHnScvRUHJDbrfTnTHKg","type":"message","role":"assistant","model":"claude-3-5-sonnet-20240620","content":[{"type":"tool_use","id":"toolu_013bEzN6oGYm18vQEugxRNNe","name":"suggestions","input":{"suggestions":["AI 71 | in healthcare","Machine learning in medicine","Healthcare automation","Medical 72 | diagnosis AI","Predictive analytics in healthcare","AI-powered medical imaging","Personalized 73 | medicine AI","Healthcare robotics","AI drug discovery","Electronic health 74 | records AI","Telemedicine and AI","AI-assisted surgery","Healthcare data analysis","AI 75 | patient monitoring","Medical chatbots","AI in clinical trials","Healthcare 76 | cybersecurity AI","AI for disease prediction","Smart hospitals","AI-powered 77 | health wearables"]}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":430,"output_tokens":197}}' 78 | recorded_at: Sun, 04 Aug 2024 19:33:10 GMT 79 | recorded_with: VCR 6.2.0 80 | -------------------------------------------------------------------------------- /lib/sublayer/cli/commands/agent.rb: -------------------------------------------------------------------------------- 1 | require_relative "./generators/sublayer_agent_generator" 2 | 3 | module Sublayer 4 | module Commands 5 | class Agent < Thor::Group 6 | include Thor::Actions 7 | 8 | class_option :description, type: :string, desc: "Description of the agent you want to generate", aliases: :d 9 | class_option :provider, type: :string, desc: "AI provider (OpenAI, Claude, or Gemini)", aliases: :p 10 | class_option :model, type: :string, desc: "AI model name to use (e.g. gpt-4o, claude-3-haiku-20240307, gemini-1.5-flash-latest)", aliases: :m 11 | 12 | def self.banner 13 | "sublayer generate:agent" 14 | end 15 | 16 | def confirm_usage_of_ai_api 17 | puts "You are about to generate a new agent that uses an AI API to generate content." 18 | puts "Please ensure you have the necessary API keys and that you are aware of the costs associated with using the API." 19 | exit unless yes?("Do you want to continue?") 20 | end 21 | 22 | def determine_available_providers 23 | @available_providers = [] 24 | 25 | @available_providers << "OpenAI" if ENV["OPENAI_API_KEY"] 26 | @available_providers << "Claude" if ENV["ANTHROPIC_API_KEY"] 27 | @available_providers << "Gemini" if ENV["GEMINI_API_KEY"] 28 | end 29 | 30 | def ask_for_agent_details 31 | @ai_provider = options[:provider] || ask("Select an AI provider:", default: "OpenAI", limited_to: @available_providers) 32 | @ai_model = options[:model] || select_ai_model 33 | @description = options[:description] || ask("Enter a description for the Sublayer Agent you'd like to create:") 34 | @trigger = ask("What should trigger this agent to start acting?") 35 | @goal = ask("What is the agent's goal condition?") 36 | @check_status = ask("How should the agent check its status toward the goal?") 37 | @step = ask("How does the agent take a step toward its goal?") 38 | end 39 | 40 | def generate_agent 41 | Sublayer.configuration.ai_provider = Object.const_get("Sublayer::Providers::#{@ai_provider}") 42 | Sublayer.configuration.ai_model = @ai_model 43 | 44 | say "Generating Sublayer Agent..." 
45 | 46 | @results = SublayerAgentGenerator.new( 47 | description: @description, 48 | trigger: @trigger, 49 | goal: @goal, 50 | check_status: @check_status, 51 | step: @step 52 | ).generate 53 | end 54 | 55 | def determine_destination_folder 56 | @destination_folder = if File.directory?("./agents") 57 | "./agents" 58 | elsif Dir.glob("./lib/**/agents").any? 59 | Dir.glob("./lib/**/agents").first 60 | else 61 | "./" 62 | end 63 | end 64 | 65 | def save_agent_to_destination_folder 66 | create_file File.join(@destination_folder, @results.filename), @results.code 67 | end 68 | 69 | private 70 | 71 | def select_ai_model 72 | case @ai_provider 73 | when "OpenAI" 74 | ask("Which OpenAI model would you like to use?", default: "gpt-4o") 75 | when "Claude" 76 | ask("Which Anthropic model would you like to use?", default: "claude-3-5-sonnet-20240620") 77 | when "Gemini" 78 | ask("Which Google model would you like to use?", default: "gemini-1.5-flash-latest") 79 | end 80 | end 81 | end 82 | end 83 | end 84 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/gemini/generators/code_from_description_generator/hello_world.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent?key= 6 | body: 7 | encoding: UTF-8 8 | string: '{"contents":{"role":"user","parts":{"text":"      You are an expert 9 | programmer in ruby.\n\n      You are tasked with writing code using the 10 | following technologies: ruby.\n\n      The description of the task is a 11 | hello world app where I pass --who argument to set the ''world'' value using 12 | optparser\n\n      Take a deep breath and think step by step before you 13 | start coding.\n"}},"generationConfig":{"responseMimeType":"application/json","responseSchema":{"type":"OBJECT","properties":{"generated_code":{"type":"string","description":"The 14 | generated code in the requested language"}},"required":["generated_code"]}}}' 15 | headers: 16 | Content-Type: 17 | - application/json 18 | Accept-Encoding: 19 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 20 | Accept: 21 | - "*/*" 22 | User-Agent: 23 | - Ruby 24 | response: 25 | status: 26 | code: 200 27 | message: OK 28 | headers: 29 | Content-Type: 30 | - application/json; charset=UTF-8 31 | Vary: 32 | - Origin 33 | - Referer 34 | - X-Origin 35 | Date: 36 | - Mon, 26 Aug 2024 20:11:39 GMT 37 | Server: 38 | - scaffolding on HTTPServer2 39 | Cache-Control: 40 | - private 41 | X-Xss-Protection: 42 | - '0' 43 | X-Frame-Options: 44 | - SAMEORIGIN 45 | X-Content-Type-Options: 46 | - nosniff 47 | Server-Timing: 48 | - gfet4t7; dur=814 49 | Alt-Svc: 50 | - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 51 | Transfer-Encoding: 52 | - chunked 53 | body: 54 | encoding: ASCII-8BIT 55 | string: | 56 | { 57 | "candidates": [ 58 | { 59 | "content": { 60 | "parts": [ 61 | { 62 | "text": "{\"generated_code\": \"require 'optparse'\\n\\noptions = {}\\nOptionParser.new do |opts|\\n  opts.on('-w', '--who WHO', 'Who to greet') do |who|\\n    options[:who] = who\\n  end\\nend.parse!\\n\\nwho = options[:who] || 'world'\\nputs \\\"Hello, #{who}!\\\"\"}\n" 63 | } 64 | ], 65 | "role": "model" 66 | }, 67 | "finishReason": "STOP", 68 | "index": 0, 69 | "safetyRatings": [ 70 | { 71 | "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", 72 | "probability": "NEGLIGIBLE" 73 | }, 74 | { 75 | "category": "HARM_CATEGORY_HATE_SPEECH", 76
| "probability": "NEGLIGIBLE" 77 | }, 78 | { 79 | "category": "HARM_CATEGORY_HARASSMENT", 80 | "probability": "NEGLIGIBLE" 81 | }, 82 | { 83 | "category": "HARM_CATEGORY_DANGEROUS_CONTENT", 84 | "probability": "NEGLIGIBLE" 85 | } 86 | ] 87 | } 88 | ], 89 | "usageMetadata": { 90 | "promptTokenCount": 70, 91 | "candidatesTokenCount": 87, 92 | "totalTokenCount": 157 93 | } 94 | } 95 | recorded_at: Mon, 26 Aug 2024 20:11:39 GMT 96 | recorded_with: VCR 6.2.0 97 | -------------------------------------------------------------------------------- /spec/vcr_cassettes/openai/no_function.yml: -------------------------------------------------------------------------------- 1 | --- 2 | http_interactions: 3 | - request: 4 | method: post 5 | uri: https://api.openai.com/v1/chat/completions 6 | body: 7 | encoding: UTF-8 8 | string: '{"model":"gpt-4o","messages":[{"role":"user","content":"Write a description 9 | of a historical event that happened in the past on this day 2024-08-04"}],"tool_choice":{"type":"function","function":{"name":"historical_event_description"}},"tools":[{"type":"function","function":{"name":"historical_event_description","description":"the 10 | historical event on this day in the past","parameters":{"type":"object","properties":{"historical_event_description":{"type":"string","description":"the 11 | historical event on this day in the past"}}},"required":["historical_event_description"]}}]}' 12 | headers: 13 | Content-Type: 14 | - application/json 15 | Authorization: 16 | - Bearer 17 | Accept-Encoding: 18 | - gzip;q=1.0,deflate;q=0.6,identity;q=0.3 19 | Accept: 20 | - "*/*" 21 | User-Agent: 22 | - Ruby 23 | response: 24 | status: 25 | code: 200 26 | message: OK 27 | headers: 28 | Date: 29 | - Sun, 04 Aug 2024 19:07:15 GMT 30 | Content-Type: 31 | - application/json 32 | Transfer-Encoding: 33 | - chunked 34 | Connection: 35 | - keep-alive 36 | Openai-Organization: 37 | - sublayer 38 | Openai-Processing-Ms: 39 | - '370' 40 | Openai-Version: 41 | - '2020-10-01' 42 | Strict-Transport-Security: 43 | - max-age=15552000; includeSubDomains; preload 44 | X-Ratelimit-Limit-Requests: 45 | - '10000' 46 | X-Ratelimit-Limit-Tokens: 47 | - '2000000' 48 | X-Ratelimit-Remaining-Requests: 49 | - '9999' 50 | X-Ratelimit-Remaining-Tokens: 51 | - '1999960' 52 | X-Ratelimit-Reset-Requests: 53 | - 6ms 54 | X-Ratelimit-Reset-Tokens: 55 | - 1ms 56 | X-Request-Id: 57 | - req_4045ed44e22cf0a1dae2a8bc36d65fb6 58 | Cf-Cache-Status: 59 | - DYNAMIC 60 | Set-Cookie: 61 | - __cf_bm=OpsvLFN0f7WTSDYGkptMkiDzx4JTKvoaFzzGNBXuG9o-1722798435-1.0.1.1-rwKnwdNG73T8znCXNwNL201V2bAQksCE_QjPxyr8bQEJpTwOtRu29rxIFRibUcSVuLrXu4ucwe99uu2IMag3tQ; 62 | path=/; expires=Sun, 04-Aug-24 19:37:15 GMT; domain=.api.openai.com; HttpOnly; 63 | Secure; SameSite=None 64 | - _cfuvid=ylodMDWsJdtQkaNJJJ.b1FkIOYXQsx6aO3QJLEpcgnQ-1722798435653-0.0.1.1-604800000; 65 | path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None 66 | X-Content-Type-Options: 67 | - nosniff 68 | Server: 69 | - cloudflare 70 | Cf-Ray: 71 | - 8ae0d44af9e942bd-EWR 72 | Alt-Svc: 73 | - h3=":443"; ma=86400 74 | body: 75 | encoding: ASCII-8BIT 76 | string: | 77 | { 78 | "id": "chatcmpl-9sarTPHshk6O2zbu0B0hSWCXs2j7d", 79 | "object": "chat.completion", 80 | "created": 1722798435, 81 | "model": "gpt-4o-2024-05-13", 82 | "choices": [ 83 | { 84 | "index": 0, 85 | "message": { 86 | "role": "assistant", 87 | "content": "Some crazy stuff happened" 88 | }, 89 | "logprobs": null, 90 | "finish_reason": "stop" 91 | } 92 | ], 93 | "usage": { 94 | "prompt_tokens": 95, 95 | 
"completion_tokens": 1, 96 | "total_tokens": 96 97 | }, 98 | "system_fingerprint": "fp_c832e4513b" 99 | } 100 | recorded_at: Sun, 04 Aug 2024 19:07:15 GMT 101 | recorded_with: VCR 6.2.0 102 | --------------------------------------------------------------------------------