├── .rspec ├── lib ├── vsm │ ├── version.rb │ ├── generator │ │ ├── templates │ │ │ ├── bin_setup.erb │ │ │ ├── lib_version_rb.erb │ │ │ ├── Rakefile.erb │ │ │ ├── gitignore.erb │ │ │ ├── Gemfile.erb │ │ │ ├── lib_name_rb.erb │ │ │ ├── lib_ports_chat_tty_rb.erb │ │ │ ├── bin_console.erb │ │ │ ├── gemspec.erb │ │ │ ├── exe_name.erb │ │ │ ├── lib_tools_read_file_rb.erb │ │ │ ├── README_md.erb │ │ │ └── lib_organism_rb.erb │ │ └── new_project.rb │ ├── roles │ │ ├── governance.rb │ │ ├── identity.rb │ │ ├── operations.rb │ │ ├── coordination.rb │ │ └── intelligence.rb │ ├── executors │ │ ├── fiber_executor.rb │ │ └── thread_executor.rb │ ├── message.rb │ ├── tool │ │ ├── capsule.rb │ │ ├── descriptor.rb │ │ └── acts_as_tool.rb │ ├── port.rb │ ├── homeostat.rb │ ├── runtime.rb │ ├── drivers │ │ ├── family.rb │ │ ├── gemini │ │ │ └── async_driver.rb │ │ ├── openai │ │ │ └── async_driver.rb │ │ └── anthropic │ │ │ └── async_driver.rb │ ├── meta │ │ ├── snapshot_cache.rb │ │ ├── support.rb │ │ └── snapshot_builder.rb │ ├── observability │ │ └── ledger.rb │ ├── async_channel.rb │ ├── mcp │ │ ├── remote_tool_capsule.rb │ │ ├── client.rb │ │ └── jsonrpc.rb │ ├── capsule.rb │ ├── dsl_mcp.rb │ ├── lens │ │ ├── stats.rb │ │ ├── event_hub.rb │ │ ├── tui.rb │ │ └── server.rb │ ├── meta.rb │ ├── cli.rb │ ├── lens.rb │ ├── dsl.rb │ └── ports │ │ ├── mcp │ │ └── server_stdio.rb │ │ └── chat_tty.rb └── vsm.rb ├── sig └── vsm.rbs ├── bin ├── setup └── console ├── Rakefile ├── .gitignore ├── Gemfile ├── spec ├── vsm_spec.rb ├── vsm │ ├── homeostat_spec.rb │ ├── acts_as_tool_spec.rb │ ├── monitoring_spec.rb │ ├── executors_spec.rb │ ├── async_channel_spec.rb │ ├── tool_descriptor_spec.rb │ ├── capsule_dsl_spec.rb │ ├── identity_alert_spec.rb │ ├── coordination_spec.rb │ ├── operations_spec.rb │ └── integration │ │ └── tool_flow_spec.rb ├── spec_helper.rb ├── vsm_smoke_spec.rb ├── meta │ ├── snapshot_builder_spec.rb │ └── meta_tools_spec.rb └── support │ └── fakes.rb ├── .claude └── settings.local.json ├── exe └── vsm ├── .github └── workflows │ └── main.yml ├── LICENSE.txt ├── examples ├── 06_mcp_mount_reflection.rb ├── 01_echo_tool.rb ├── 02b_anthropic_streaming.rb ├── 02c_gemini_streaming.rb ├── 09_mcp_with_llm_calls.rb ├── 08_custom_chattty.rb ├── 05_mcp_server_and_chattty.rb ├── 10_meta_read_only.rb ├── 02_openai_streaming.rb ├── 03b_anthropic_tools.rb ├── 03c_gemini_tools.rb ├── 07_connect_claude_mcp.rb └── 03_openai_tools.rb ├── vsm.gemspec ├── Gemfile.lock ├── CLAUDE.md ├── mcp_update.md └── llms.txt /.rspec: -------------------------------------------------------------------------------- 1 | --format documentation 2 | --color 3 | --require spec_helper 4 | -------------------------------------------------------------------------------- /lib/vsm/version.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Vsm 4 | VERSION = "0.2.0" 5 | end 6 | -------------------------------------------------------------------------------- /sig/vsm.rbs: -------------------------------------------------------------------------------- 1 | module Vsm 2 | VERSION: String 3 | # See the writing guide of rbs: https://github.com/ruby/rbs#guides 4 | end 5 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/bin_setup.erb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | bundle install 5 | 6 | echo "OK" 7 | 8 | 
-------------------------------------------------------------------------------- /lib/vsm/generator/templates/lib_version_rb.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= module_name %> 4 | VERSION = "0.1.0" 5 | end 6 | 7 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/Rakefile.erb: -------------------------------------------------------------------------------- 1 | require "bundler/gem_tasks" 2 | task :default do 3 | sh "bundle exec rspec" if File.exist?("spec") 4 | end 5 | 6 | -------------------------------------------------------------------------------- /bin/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | IFS=$'\n\t' 4 | set -vx 5 | 6 | bundle install 7 | 8 | # Do any other automated setup that you need to do here 9 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "bundler/gem_tasks" 4 | require "rspec/core/rake_task" 5 | 6 | RSpec::Core::RakeTask.new(:spec) 7 | 8 | task default: %i[spec] 9 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/gitignore.erb: -------------------------------------------------------------------------------- 1 | .bundle/ 2 | vendor/bundle/ 3 | pkg/ 4 | .vsm.log.jsonl 5 | *.gem 6 | .DS_Store 7 | /.ruby-version 8 | /.ruby-gemset 9 | /.env 10 | 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.bundle/ 2 | /.yardoc 3 | /_yardoc/ 4 | /coverage/ 5 | /doc/ 6 | /pkg/ 7 | /spec/reports/ 8 | /tmp/ 9 | 10 | # rspec failure tracking 11 | .rspec_status 12 | *.gem 13 | .vsm* 14 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source "https://rubygems.org" 4 | 5 | # Specify your gem's dependencies in vsm.gemspec 6 | gemspec 7 | 8 | gem "irb" 9 | gem "rake", "~> 13.0" 10 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/Gemfile.erb: -------------------------------------------------------------------------------- 1 | source "https://rubygems.org" 2 | 3 | gem "vsm", "<%= vsm_version_constraint %>" 4 | 5 | group :development do 6 | gem "rake" 7 | gem "rspec" 8 | end 9 | 10 | -------------------------------------------------------------------------------- /lib/vsm/roles/governance.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | class Governance 4 | def observe(bus); end 5 | def enforce(message) 6 | yield message 7 | end 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/lib_name_rb.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "vsm" 4 | require_relative "<%= lib_name %>/organism" 5 | require_relative "<%= lib_name %>/ports/chat_tty" 6 | 7 | module <%= module_name %> 8 | end 9 | 10 | 
-------------------------------------------------------------------------------- /spec/vsm_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | RSpec.describe Vsm do 4 | it "has a version number" do 5 | expect(Vsm::VERSION).not_to be nil 6 | end 7 | 8 | it "does something useful" do 9 | expect(true).to eq(true) 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /lib/vsm/executors/fiber_executor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | module Executors 4 | module FiberExecutor 5 | def self.call(tool, args) 6 | tool.run(args) # runs in current Async task 7 | end 8 | end 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/lib_ports_chat_tty_rb.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= module_name %> 4 | module Ports 5 | class ChatTTY < VSM::Ports::ChatTTY 6 | def banner(io) 7 | io.puts "\e[96m<%= lib_name %>\e[0m — Ctrl-C to exit" 8 | end 9 | end 10 | end 11 | end 12 | 13 | -------------------------------------------------------------------------------- /bin/console: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | require "bundler/setup" 5 | require "vsm" 6 | 7 | # You can add fixtures and/or initialization code here to make experimenting 8 | # with your gem easier. You can also use a different console, if you like. 9 | 10 | require "irb" 11 | IRB.start(__FILE__) 12 | -------------------------------------------------------------------------------- /lib/vsm/roles/identity.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | class Identity 4 | def initialize(identity:, invariants: []) 5 | @identity, @invariants = identity, invariants 6 | end 7 | def observe(bus); end 8 | def handle(message, bus:, **) = false 9 | def alert(message); end 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /lib/vsm/message.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | # kind: :user, :assistant_delta, :assistant, :tool_call, :tool_result, :plan, :policy, :audit, :confirm_request, :confirm_response 4 | # path: optional addressing, e.g., [:airb, :operations, :fs] 5 | Message = Struct.new(:kind, :payload, :path, :corr_id, :meta, keyword_init: true) 6 | end 7 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/bin_console.erb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | require "bundler/setup" if File.exist?(File.expand_path("../Gemfile", __dir__)) 5 | $LOAD_PATH.unshift File.expand_path("../lib", __dir__) 6 | require "<%= lib_name %>" 7 | 8 | puts "Starting console with <%= module_name %> loaded" 9 | require 'irb' 10 | IRB.start 11 | 12 | -------------------------------------------------------------------------------- /lib/vsm/tool/capsule.rb: -------------------------------------------------------------------------------- 1 | # 
frozen_string_literal: true 2 | module VSM 3 | class ToolCapsule 4 | include ActsAsTool 5 | attr_writer :governance 6 | def governance = @governance || (raise "governance not injected") 7 | # Subclasses implement: 8 | # def run(args) ... end 9 | # Optional: 10 | # def execution_mode = :fiber | :thread 11 | end 12 | end 13 | -------------------------------------------------------------------------------- /spec/vsm/homeostat_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe VSM::Homeostat do 5 | it "flags algedonic messages" do 6 | h = described_class.new 7 | m = VSM::Message.new(kind: :user, payload: "x", meta: { severity: :algedonic }) 8 | expect(h.alarm?(m)).to be true 9 | expect(h.alarm?(VSM::Message.new(kind: :user, payload: "x"))).to be false 10 | end 11 | end 12 | 13 | -------------------------------------------------------------------------------- /.claude/settings.local.json: -------------------------------------------------------------------------------- 1 | { 2 | "permissions": { 3 | "allow": [ 4 | "Bash(bundle exec rspec:*)", 5 | "Bash(cat:*)", 6 | "Bash(gem list)", 7 | "Bash(bundle exec rake:*)", 8 | "Bash(gem install:*)", 9 | "Bash(ruby test_tool_calls:*)" 10 | ], 11 | "deny": [], 12 | "ask": [], 13 | "additionalDirectories": [ 14 | "/Users/swerner/Development/gems/airb" 15 | ] 16 | } 17 | } -------------------------------------------------------------------------------- /lib/vsm/port.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | class Port 4 | def initialize(capsule:) = (@capsule = capsule) 5 | def ingress(_event) = raise NotImplementedError 6 | def egress_subscribe = @capsule.bus.subscribe { |m| render_out(m) if should_render?(m) } 7 | def should_render?(message) = [:assistant, :tool_result].include?(message.kind) 8 | def render_out(_message) = nil 9 | end 10 | end 11 | 12 | -------------------------------------------------------------------------------- /lib/vsm/homeostat.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | class Homeostat 4 | attr_reader :limits 5 | 6 | def initialize 7 | @limits = { tokens: 8_000, time_ms: 15_000, bytes: 2_000_000 } 8 | @usage = Hash.new(0) 9 | end 10 | 11 | def usage_snapshot 12 | @usage.dup 13 | end 14 | 15 | def alarm?(message) 16 | message.meta&.dig(:severity) == :algedonic 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /lib/vsm/runtime.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "async" 3 | 4 | module VSM 5 | module Runtime 6 | def self.start(capsule, ports: []) 7 | Async do |task| 8 | capsule.run 9 | ports.each do |p| 10 | p.egress_subscribe if p.respond_to?(:egress_subscribe) 11 | task.async { p.loop } if p.respond_to?(:loop) 12 | end 13 | task.sleep 14 | end 15 | end 16 | end 17 | end 18 | 19 | -------------------------------------------------------------------------------- /lib/vsm/drivers/family.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | module Drivers 4 | module Family 5 | def self.of(driver) 6 | case driver 7 | when VSM::Drivers::OpenAI::AsyncDriver then :openai 8 | when 
VSM::Drivers::Anthropic::AsyncDriver then :anthropic 9 | when VSM::Drivers::Gemini::AsyncDriver then :gemini 10 | else :openai 11 | end 12 | end 13 | end 14 | end 15 | end 16 | 17 | -------------------------------------------------------------------------------- /lib/vsm/executors/thread_executor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | module Executors 4 | module ThreadExecutor 5 | def self.call(tool, args) 6 | q = Queue.new 7 | Thread.new do 8 | begin 9 | q << [:ok, tool.run(args)] 10 | rescue => e 11 | q << [:err, e] 12 | end 13 | end 14 | tag, val = q.pop 15 | tag == :ok ? val : raise(val) 16 | end 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /exe/vsm: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | # Keep CLI independent of any project's Bundler context so we resolve this 5 | # gem's dependencies rather than a host app's Gemfile. 6 | ENV.delete('BUNDLE_GEMFILE') 7 | ENV.delete('BUNDLE_BIN_PATH') 8 | if (rubyopt = ENV['RUBYOPT']) 9 | ENV['RUBYOPT'] = rubyopt.split.reject { |x| x.include?('bundler/setup') }.join(' ') 10 | end 11 | ENV.delete('RUBYGEMS_GEMDEPS') 12 | 13 | require 'vsm' 14 | require 'vsm/cli' 15 | 16 | VSM::CLI.start(ARGV) 17 | 18 | -------------------------------------------------------------------------------- /lib/vsm/tool/descriptor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | module Tool 4 | Descriptor = Struct.new(:name, :description, :schema, keyword_init: true) do 5 | def to_openai_tool 6 | { type: "function", function: { name:, description:, parameters: schema } } 7 | end 8 | def to_anthropic_tool 9 | { name:, description:, input_schema: schema } 10 | end 11 | def to_gemini_tool 12 | { name:, description:, parameters: schema } 13 | end 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /lib/vsm/meta/snapshot_cache.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "thread" 4 | 5 | module VSM 6 | module Meta 7 | class SnapshotCache 8 | def initialize(builder) 9 | @builder = builder 10 | @mutex = Mutex.new 11 | @snapshot = nil 12 | end 13 | 14 | def fetch 15 | @mutex.synchronize do 16 | @snapshot ||= { generated_at: Time.now.utc, data: @builder.call } 17 | end 18 | end 19 | 20 | def invalidate! 21 | @mutex.synchronize { @snapshot = nil } 22 | end 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "bundler/setup" 4 | require "rspec" 5 | require "async/rspec" # provides Async::RSpec helpers 6 | require "securerandom" 7 | 8 | # Load the gem under test: 9 | require "vsm" 10 | 11 | # Load shared fakes/helpers: 12 | Dir[File.expand_path("support/**/*.rb", __dir__)].sort.each { |f| require f } 13 | 14 | RSpec.configure do |config| 15 | config.example_status_persistence_file_path = ".rspec_status" 16 | config.disable_monkey_patching! 
17 | config.expect_with :rspec do |c| 18 | c.syntax = :expect 19 | end 20 | end 21 | 22 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Ruby 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | pull_request: 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | name: Ruby ${{ matrix.ruby }} 14 | strategy: 15 | matrix: 16 | ruby: 17 | - '3.4.5' 18 | 19 | steps: 20 | - uses: actions/checkout@v4 21 | - name: Set up Ruby 22 | uses: ruby/setup-ruby@v1 23 | with: 24 | ruby-version: ${{ matrix.ruby }} 25 | bundler-cache: true 26 | - name: Run the default task 27 | run: bundle exec rake 28 | -------------------------------------------------------------------------------- /spec/vsm/acts_as_tool_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe VSM::ActsAsTool do 5 | it "provides descriptor from class macros" do 6 | klass = Class.new(VSM::ToolCapsule) do 7 | tool_name "alpha" 8 | tool_description "desc" 9 | tool_schema({ type: "object", properties: {}, required: [] }) 10 | def run(_) = "ok" 11 | end 12 | 13 | d = klass.new.tool_descriptor 14 | expect(d.name).to eq("alpha") 15 | expect(d.description).to eq("desc") 16 | expect(d.schema).to include(type: "object") 17 | end 18 | end 19 | 20 | -------------------------------------------------------------------------------- /lib/vsm/observability/ledger.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "json" 3 | require "time" 4 | 5 | module VSM 6 | class Monitoring 7 | LOG = File.expand_path(".vsm.log.jsonl", Dir.pwd) 8 | 9 | def observe(bus) 10 | bus.subscribe do |msg| 11 | event = { 12 | ts: Time.now.utc.iso8601, 13 | kind: msg.kind, 14 | path: msg.path, 15 | corr_id: msg.corr_id, 16 | meta: msg.meta 17 | } 18 | File.open(LOG, "a") { |f| f.puts(event.to_json) } rescue nil 19 | end 20 | end 21 | 22 | def handle(*) = false 23 | end 24 | end 25 | 26 | -------------------------------------------------------------------------------- /lib/vsm/tool/acts_as_tool.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | module ActsAsTool 4 | def self.included(base) = base.extend(ClassMethods) 5 | 6 | module ClassMethods 7 | def tool_name(value = nil); @tool_name = value if value; @tool_name; end 8 | def tool_description(value = nil); @tool_description = value if value; @tool_description; end 9 | def tool_schema(value = nil); @tool_schema = value if value; @tool_schema; end 10 | end 11 | 12 | def tool_descriptor 13 | VSM::Tool::Descriptor.new( 14 | name: self.class.tool_name, 15 | description: self.class.tool_description, 16 | schema: self.class.tool_schema 17 | ) 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /spec/vsm/monitoring_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | require "tmpdir" 4 | 5 | RSpec.describe VSM::Monitoring do 6 | include Async::RSpec::Reactor 7 | 8 | it "writes a JSONL event per message" do 9 | Dir.mktmpdir do |dir| 10 | stub_const("VSM::Monitoring::LOG", File.join(dir, "vsm.log.jsonl")) 11 | mon = described_class.new 12 | bus 
= VSM::AsyncChannel.new 13 | mon.observe(bus) 14 | 15 | Async do |task| 16 | bus.emit VSM::Message.new(kind: :user, payload: "hi", meta: { session_id: "s" }) 17 | task.sleep 0.05 18 | 19 | data = File.read(VSM::Monitoring::LOG) 20 | expect(data).to include("\"kind\":\"user\"") 21 | expect(data).to include("\"session_id\":\"s\"") 22 | end 23 | end 24 | end 25 | end 26 | 27 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/gemspec.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "lib/<%= lib_name %>/version" 4 | 5 | Gem::Specification.new do |spec| 6 | spec.name = "<%= lib_name %>" 7 | spec.version = <%= module_name %>::VERSION 8 | spec.authors = ["Your Name"] 9 | spec.email = ["you@example.com"] 10 | 11 | spec.summary = "VSM app scaffold" 12 | spec.description = "A minimal VSM-based agent app with ChatTTY and sample tools." 13 | spec.license = "MIT" 14 | 15 | spec.required_ruby_version = ">= 3.2" 16 | 17 | spec.files = Dir.chdir(File.expand_path(__dir__)) do 18 | Dir["{bin,exe,lib}/**/*", "README.md", "LICENSE.txt", "Rakefile", "Gemfile"].select { |f| File.file?(f) } 19 | end 20 | spec.bindir = "exe" 21 | spec.executables = ["<%= exe_name %>"] 22 | spec.require_paths = ["lib"] 23 | end 24 | 25 | -------------------------------------------------------------------------------- /spec/vsm/executors_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe "Executors" do 5 | it "FiberExecutor runs run() inline" do 6 | tool = FakeEchoTool.new 7 | expect(VSM::Executors::FiberExecutor.call(tool, { "text" => "hi" })).to eq("echo: hi") 8 | end 9 | 10 | it "ThreadExecutor runs code on a separate thread and returns result" do 11 | tool = SlowTool.new 12 | t0 = Process.clock_gettime(Process::CLOCK_MONOTONIC) 13 | result = VSM::Executors::ThreadExecutor.call(tool, { "id" => 7 }) 14 | t1 = Process.clock_gettime(Process::CLOCK_MONOTONIC) 15 | expect(result).to eq("slow-7") 16 | expect(t1 - t0).to be >= 0.25 17 | end 18 | 19 | it "ThreadExecutor surfaces exceptions" do 20 | tool = ErrorTool.new 21 | expect { VSM::Executors::ThreadExecutor.call(tool, {}) }.to raise_error(RuntimeError, /kapow/) 22 | end 23 | end 24 | 25 | -------------------------------------------------------------------------------- /spec/vsm/async_channel_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe VSM::AsyncChannel do 5 | include Async::RSpec::Reactor 6 | 7 | it "broadcasts to subscribers and supports pop" do 8 | chan = described_class.new(context: { foo: "bar" }) 9 | seen = [] 10 | chan.subscribe { |m| seen << m.kind } 11 | 12 | m1 = VSM::Message.new(kind: :user, payload: "hi") 13 | m2 = VSM::Message.new(kind: :assistant, payload: "yo") 14 | 15 | Async do |task| 16 | chan.emit(m1) 17 | chan.emit(m2) 18 | 19 | # pop returns them in order 20 | expect(chan.pop).to eq(m1) 21 | expect(chan.pop).to eq(m2) 22 | 23 | # fan-out happened (in async tasks) 24 | task.sleep 0.05 # allow fan-out tasks to run 25 | expect(seen).to include(:user, :assistant) 26 | expect(chan.context[:foo]).to eq("bar") 27 | end 28 | end 29 | end 30 | 31 | -------------------------------------------------------------------------------- /spec/vsm/tool_descriptor_spec.rb: 
-------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe VSM::Tool::Descriptor do 5 | let(:schema) { { "type" => "object", "properties" => { "x" => { "type" => "string" } }, "required" => ["x"] } } 6 | subject(:desc) { described_class.new(name: "t", description: "d", schema:) } 7 | 8 | it "converts to OpenAI tool" do 9 | t = desc.to_openai_tool 10 | expect(t[:type]).to eq("function") 11 | expect(t[:function][:name]).to eq("t") 12 | expect(t[:function][:parameters]).to eq(schema) 13 | end 14 | 15 | it "converts to Anthropic tool" do 16 | t = desc.to_anthropic_tool 17 | expect(t[:name]).to eq("t") 18 | expect(t[:input_schema]).to eq(schema) 19 | end 20 | 21 | it "converts to Gemini function declaration" do 22 | t = desc.to_gemini_tool 23 | expect(t[:name]).to eq("t") 24 | expect(t[:parameters]).to eq(schema) 25 | end 26 | end 27 | 28 | -------------------------------------------------------------------------------- /lib/vsm/meta/support.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module VSM 4 | module Meta 5 | module Support 6 | CONFIG_IVAR = :@__vsm_constructor_args 7 | 8 | module_function 9 | 10 | def record_constructor_args(instance, args) 11 | copied = copy_args(args) 12 | instance.instance_variable_set(CONFIG_IVAR, copied) 13 | instance 14 | end 15 | 16 | def fetch_constructor_args(instance) 17 | instance.instance_variable_get(CONFIG_IVAR) 18 | end 19 | 20 | def copy_args(args) 21 | return {} if args.nil? 22 | case args 23 | when Hash 24 | args.transform_values { copy_args(_1) } 25 | when Array 26 | args.map { copy_args(_1) } 27 | when Symbol, Numeric, NilClass, TrueClass, FalseClass 28 | args 29 | else 30 | args.dup rescue args 31 | end 32 | end 33 | end 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/exe_name.erb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | # Keep CLI independent of any project's Bundler context. 
5 | ENV.delete('BUNDLE_GEMFILE') 6 | ENV.delete('BUNDLE_BIN_PATH') 7 | if (rubyopt = ENV['RUBYOPT']) 8 | ENV['RUBYOPT'] = rubyopt.split.reject { |x| x.include?('bundler/setup') }.join(' ') 9 | end 10 | ENV.delete('RUBYGEMS_GEMDEPS') 11 | 12 | $stdout.sync = true 13 | $stderr.sync = true 14 | 15 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 16 | require "vsm" 17 | require "<%= lib_name %>" 18 | 19 | capsule = <%= module_name %>::Organism.build 20 | 21 | hub = nil 22 | if ENV["VSM_LENS"] == "1" 23 | hub = VSM::Lens.attach!( 24 | capsule, 25 | host: "127.0.0.1", 26 | port: (ENV["VSM_LENS_PORT"] || 9292).to_i, 27 | token: ENV["VSM_LENS_TOKEN"] 28 | ) 29 | puts "Lens: http://127.0.0.1:#{ENV['VSM_LENS_PORT'] || 9292}" 30 | end 31 | 32 | port = <%= module_name %>::Ports::ChatTTY.new(capsule: capsule) 33 | VSM::Runtime.start(capsule, ports: [port]) 34 | 35 | -------------------------------------------------------------------------------- /spec/vsm/capsule_dsl_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe "Capsule and DSL" do 5 | include Async::RSpec::Reactor 6 | 7 | it "builds a capsule with operations & injects governance into tools" do 8 | gov = FakeGovernance.new 9 | cap = VSM::DSL::Builder.new(:demo).tap do |b| 10 | b.identity(klass: VSM::Identity, args: { identity: "demo", invariants: [] }) 11 | b.governance(klass: FakeGovernance, args: {}) 12 | b.coordination(klass: VSM::Coordination) 13 | b.intelligence(klass: VSM::Intelligence, args: { driver: FakeDriver.new }) 14 | b.operations do 15 | capsule :echo, klass: FakeEchoTool 16 | end 17 | b.monitoring(klass: VSM::Monitoring) 18 | end.build 19 | 20 | # Every child tool gets governance object injected 21 | child = cap.children["echo"] 22 | expect(child.governance).to be_a(VSM::Governance) 23 | 24 | # Bus context exposes operations_children for intelligence 25 | expect(cap.bus.context[:operations_children].keys).to include("echo") 26 | end 27 | end 28 | 29 | -------------------------------------------------------------------------------- /lib/vsm/async_channel.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module VSM 4 | class AsyncChannel 5 | attr_reader :context 6 | 7 | def initialize(context: {}) 8 | @queue = Async::Queue.new 9 | @subs = [] 10 | @context = context 11 | end 12 | 13 | def emit(message) 14 | begin 15 | @queue.enqueue(message) 16 | rescue StandardError 17 | # If no async scheduler is available in this thread, best-effort enqueue later. 
18 | end 19 | @subs.each do |blk| 20 | begin 21 | Async { blk.call(message) } 22 | rescue StandardError 23 | # Fallback when no Async task is active in this thread 24 | begin 25 | blk.call(message) 26 | rescue StandardError 27 | # ignore subscriber errors 28 | end 29 | end 30 | end 31 | end 32 | 33 | def pop = @queue.dequeue 34 | 35 | def subscribe(&blk) 36 | @subs << blk 37 | blk 38 | end 39 | 40 | def unsubscribe(subscriber) 41 | @subs.delete(subscriber) 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/lib_tools_read_file_rb.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module <%= module_name %> 4 | module Tools 5 | class ReadFile < VSM::ToolCapsule 6 | tool_name "read_file" 7 | tool_description "Read the contents of a UTF-8 text file at a relative path within the current workspace." 8 | tool_schema({ 9 | type: "object", 10 | properties: { 11 | path: { type: "string", description: "Relative path to a text file (UTF-8)." } 12 | }, 13 | required: ["path"] 14 | }) 15 | 16 | def run(args) 17 | rel = args["path"].to_s 18 | raise "path required" if rel.strip.empty? 19 | root = Dir.pwd 20 | full = File.expand_path(File.join(root, rel)) 21 | # Prevent escaping outside workspace root 22 | unless full.start_with?(root + File::SEPARATOR) || full == root 23 | raise "outside workspace" 24 | end 25 | File.read(full, mode: "r:UTF-8") 26 | rescue Errno::ENOENT 27 | raise "file not found: #{rel}" 28 | end 29 | end 30 | end 31 | end 32 | 33 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2025 Scott Werner 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /spec/vsm/identity_alert_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe VSM::Identity do 5 | include Async::RSpec::Reactor 6 | 7 | it "receives alert for algedonic messages through capsule dispatch" do 8 | spy = SpyIdentity.new(identity: "top") 9 | cap = VSM::Capsule.new( 10 | name: :top, 11 | roles: { 12 | identity: spy, 13 | governance: VSM::Governance.new, 14 | coordination: VSM::Coordination.new, 15 | intelligence: VSM::Intelligence.new(driver: FakeDriver.new), 16 | operations: VSM::Operations.new 17 | }, 18 | children: {} 19 | ) 20 | 21 | Async do |task| 22 | # Start capsule in background 23 | capsule_task = task.async { cap.run } 24 | task.sleep(0.01) # Let capsule start 25 | 26 | cap.bus.emit VSM::Message.new(kind: :user, payload: "oops", meta: { severity: :algedonic }) 27 | task.sleep(0.05) # Let message process 28 | 29 | # Stop the capsule 30 | capsule_task.stop 31 | 32 | expect(spy.alerts.size).to be >= 1 33 | expect(spy.alerts.first.kind).to eq(:user) 34 | end 35 | end 36 | end 37 | 38 | -------------------------------------------------------------------------------- /lib/vsm/roles/operations.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "../executors/fiber_executor" 4 | require_relative "../executors/thread_executor" 5 | 6 | module VSM 7 | class Operations 8 | EXECUTORS = { 9 | fiber: Executors::FiberExecutor, 10 | thread: Executors::ThreadExecutor 11 | }.freeze 12 | 13 | def observe(bus); end 14 | 15 | def handle(message, bus:, children:, **) 16 | return false unless message.kind == :tool_call 17 | 18 | name = message.payload[:tool].to_s 19 | tool_capsule = children.fetch(name) { raise "unknown tool capsule: #{name}" } 20 | mode = tool_capsule.respond_to?(:execution_mode) ? 
tool_capsule.execution_mode : :fiber 21 | executor = EXECUTORS.fetch(mode) 22 | 23 | Async do 24 | result = executor.call(tool_capsule, message.payload[:args]) 25 | bus.emit Message.new(kind: :tool_result, payload: result, corr_id: message.corr_id, meta: message.meta) 26 | rescue => e 27 | bus.emit Message.new(kind: :tool_result, payload: "ERROR: #{e.class}: #{e.message}", corr_id: message.corr_id, meta: message.meta) 28 | end 29 | 30 | true 31 | end 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /lib/vsm/mcp/remote_tool_capsule.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | module MCP 4 | class RemoteToolCapsule < VSM::ToolCapsule 5 | attr_writer :bus 6 | 7 | def initialize(client:, remote_name:, descriptor:) 8 | @client = client 9 | @remote_name = remote_name 10 | @descriptor = descriptor # { name:, description:, input_schema: } 11 | end 12 | 13 | def tool_descriptor 14 | VSM::Tool::Descriptor.new( 15 | name: @descriptor[:name], 16 | description: @descriptor[:description], 17 | schema: @descriptor[:input_schema] 18 | ) 19 | end 20 | 21 | def execution_mode 22 | :thread 23 | end 24 | 25 | def run(args) 26 | @bus&.emit VSM::Message.new(kind: :progress, payload: "mcp call #{@client.name}.#{@remote_name}", path: [:mcp, :client, @client.name, @remote_name]) 27 | out = @client.call_tool(name: @remote_name, arguments: args || {}) 28 | @bus&.emit VSM::Message.new(kind: :progress, payload: "mcp result #{@client.name}.#{@remote_name}", path: [:mcp, :client, @client.name, @remote_name]) 29 | out.to_s 30 | rescue => e 31 | "ERROR: #{e.class}: #{e.message}" 32 | end 33 | end 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /spec/vsm_smoke_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "vsm" 3 | 4 | RSpec.describe VSM do 5 | include Async::RSpec::Reactor 6 | 7 | it "builds a capsule and routes a tool call" do 8 | class T < VSM::ToolCapsule 9 | tool_name "t"; tool_description "d"; tool_schema({ type: "object", properties: {}, required: [] }) 10 | def run(_args) = "ok" 11 | end 12 | 13 | cap = VSM::DSL.define(:test) do 14 | identity klass: VSM::Identity, args: { identity: "t", invariants: [] } 15 | governance klass: VSM::Governance 16 | coordination klass: VSM::Coordination 17 | intelligence klass: VSM::Intelligence, args: { driver: FakeDriver.new } 18 | operations do 19 | capsule :t, klass: T 20 | end 21 | end 22 | 23 | # Test operations component directly instead of full capsule 24 | ops = cap.roles[:operations] 25 | bus = cap.bus 26 | children = cap.children 27 | 28 | q = Queue.new 29 | bus.subscribe { |m| q << m if m.kind == :tool_result } 30 | 31 | msg = VSM::Message.new(kind: :tool_call, payload: { tool: "t", args: {} }, corr_id: "1") 32 | expect(ops.handle(msg, bus:, children:)).to be true 33 | 34 | result = q.pop 35 | expect(result.payload).to eq("ok") 36 | end 37 | end 38 | 39 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/README_md.erb: -------------------------------------------------------------------------------- 1 | # <%= module_name %> 2 | 3 | A minimal VSM app scaffold. Starts a capsule with a ChatTTY interface, an LLM-backed intelligence (OpenAI by default), and a `read_file` tool. 
4 | 5 | ## Quickstart 6 | 7 | ```bash 8 | bundle install 9 | OPENAI_API_KEY=... bundle exec exe/<%= exe_name %> 10 | ``` 11 | 12 | Ask the assistant questions, or request reading a file, e.g.: 13 | 14 | ``` 15 | read README.md 16 | ``` 17 | 18 | You can customize the banner and prompt in `lib/<%= lib_name %>/ports/chat_tty.rb` and add tools under `lib/<%= lib_name %>/tools`. 19 | 20 | ## LLM Configuration 21 | 22 | This scaffold includes LLM wiring. Configure provider via env vars (or choose at generation time): 23 | 24 | - `<%= env_prefix %>_PROVIDER` — `openai` (default), `anthropic`, or `gemini` 25 | - `<%= env_prefix %>_MODEL` — defaults to `<%= default_model %>` if not set 26 | - API key env var depends on provider: 27 | - `OPENAI_API_KEY` 28 | - `ANTHROPIC_API_KEY` 29 | - `GEMINI_API_KEY` 30 | 31 | Run: 32 | 33 | ```bash 34 | <%= env_prefix %>_PROVIDER=<%= provider %> <%= env_prefix %>_MODEL=<%= default_model %> \ 35 | OPENAI_API_KEY=... bundle exec exe/<%= exe_name %> 36 | ``` 37 | 38 | ## Lens (optional) 39 | 40 | Set `VSM_LENS=1` to launch the Lens UI and print its URL. You can change `VSM_LENS_PORT` and provide `VSM_LENS_TOKEN`. 41 | -------------------------------------------------------------------------------- /lib/vsm/capsule.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "async" 3 | module VSM 4 | class Capsule 5 | attr_reader :name, :bus, :homeostat, :roles, :children 6 | 7 | def initialize(name:, roles:, children: {}) 8 | @name = name.to_sym 9 | @roles = roles 10 | @children = children 11 | ctx = { operations_children: children.transform_keys(&:to_s) } 12 | @bus = AsyncChannel.new(context: ctx) 13 | @homeostat = Homeostat.new 14 | # Inject bus into children that accept it, to enable richer observability 15 | @children.each_value { |c| c.bus = @bus if c.respond_to?(:bus=) } 16 | wire_observers! 17 | end 18 | 19 | def run 20 | Async do 21 | loop do 22 | message = @bus.pop 23 | roles[:coordination].stage(message) 24 | roles[:coordination].drain(@bus) { |m| dispatch(m) } 25 | end 26 | end 27 | end 28 | 29 | def dispatch(message) 30 | return roles[:identity].alert(message) if homeostat.alarm?(message) 31 | roles[:governance].enforce(message) { route(_1) } 32 | end 33 | 34 | def route(message) 35 | roles[:operations].handle(message, bus: @bus, children: @children) || 36 | roles[:intelligence].handle(message, bus: @bus) || 37 | roles[:identity].handle(message, bus: @bus) 38 | end 39 | 40 | private 41 | 42 | def wire_observers! 43 | roles.values.each { |r| r.respond_to?(:observe) && r.observe(@bus) } 44 | end 45 | end 46 | end 47 | -------------------------------------------------------------------------------- /lib/vsm/roles/coordination.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | module VSM 3 | class Coordination 4 | def initialize 5 | @queue = [] 6 | @floor_by_session = nil 7 | @turn_waiters = {} # session_id => Async::Queue 8 | end 9 | 10 | def observe(bus) 11 | # Note: staging is handled by the capsule loop, not by subscription 12 | # This method exists for consistency but doesn't auto-stage messages 13 | end 14 | 15 | def stage(message) = (@queue << message) 16 | 17 | def drain(bus) 18 | return if @queue.empty? 19 | @queue.sort_by! 
{ order(_1) } 20 | @queue.shift(@queue.size).each do |msg| 21 | yield msg 22 | if msg.kind == :assistant && (sid = msg.meta&.dig(:session_id)) && @turn_waiters[sid] 23 | @turn_waiters[sid].enqueue(:done) 24 | end 25 | end 26 | end 27 | 28 | def grant_floor!(session_id) = (@floor_by_session = session_id) 29 | 30 | def wait_for_turn_end(session_id) 31 | q = (@turn_waiters[session_id] ||= Async::Queue.new) 32 | q.dequeue 33 | end 34 | 35 | def order(m) 36 | base = 37 | case m.kind 38 | when :user then 0 39 | when :tool_result then 1 40 | when :plan then 2 41 | when :assistant_delta then 3 42 | when :assistant then 4 43 | else 9 44 | end 45 | sid = m.meta&.dig(:session_id) 46 | sid == @floor_by_session ? base - 1 : base 47 | end 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /lib/vsm/dsl_mcp.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require_relative "dsl" 3 | require_relative "mcp/client" 4 | require_relative "mcp/remote_tool_capsule" 5 | 6 | module VSM 7 | module DSL 8 | class Builder 9 | class ChildrenBuilder 10 | # Reflect tools from a remote MCP server and add them as tool capsules. 11 | # Options: 12 | # include: Array whitelist of tool names 13 | # exclude: Array blacklist of tool names 14 | # prefix: String prefix for local names to avoid collisions 15 | # env: Hash environment passed to the server process 16 | # cwd: Working directory for spawning the process 17 | # 18 | # Example: 19 | # mcp_server :smith, cmd: "smith-server --stdio", include: %w[search read], prefix: "smith_" 20 | def mcp_server(name, cmd:, env: {}, include: nil, exclude: nil, prefix: nil, cwd: nil) 21 | client = VSM::MCP::Client.new(cmd: cmd, env: env, cwd: cwd, name: name.to_s).start 22 | tools = client.list_tools 23 | tools.each do |t| 24 | tool_name = t[:name] 25 | next if include && !Array(include).include?(tool_name) 26 | next if exclude && Array(exclude).include?(tool_name) 27 | local_name = [prefix, tool_name].compact.join 28 | capsule = VSM::MCP::RemoteToolCapsule.new(client: client, remote_name: tool_name, descriptor: t) 29 | @children[local_name] = capsule 30 | end 31 | end 32 | end 33 | end 34 | end 35 | end 36 | 37 | -------------------------------------------------------------------------------- /lib/vsm/lens/stats.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "time" 3 | 4 | module VSM 5 | module Lens 6 | class Stats 7 | def initialize(hub:, capsule:) 8 | @sessions = Hash.new { |h,k| h[k] = { count: 0, last: nil, kinds: Hash.new(0) } } 9 | @kinds = Hash.new(0) 10 | @capsule = capsule 11 | 12 | queue, snapshot = hub.subscribe 13 | snapshot.each { |ev| ingest(ev) } 14 | 15 | @thread = Thread.new do 16 | loop do 17 | ev = queue.pop 18 | ingest(ev) 19 | end 20 | end 21 | end 22 | 23 | def state 24 | { 25 | ts: Time.now.utc.iso8601(6), 26 | sessions: sort_sessions(@sessions), 27 | kinds: @kinds.dup, 28 | tools: tool_inventory, 29 | budgets: { 30 | limits: @capsule.homeostat.limits, 31 | usage: @capsule.homeostat.usage_snapshot 32 | } 33 | } 34 | end 35 | 36 | private 37 | 38 | def ingest(ev) 39 | @kinds[ev[:kind]] += 1 40 | sid = ev.dig(:meta, :session_id) 41 | return unless sid 42 | @sessions[sid][:count] += 1 43 | @sessions[sid][:last] = ev[:ts] 44 | @sessions[sid][:kinds][ev[:kind]] += 1 45 | end 46 | 47 | def sort_sessions(h) 48 | h.sort_by { |_sid, s| s[:last].to_s }.reverse.to_h 49 | 
end 50 | 51 | def tool_inventory 52 | ops = @capsule.bus.context[:operations_children] || {} 53 | ops.keys.sort 54 | end 55 | end 56 | end 57 | end 58 | 59 | -------------------------------------------------------------------------------- /lib/vsm.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "async" 4 | require "async/queue" 5 | 6 | require_relative "vsm/version" 7 | 8 | require_relative "vsm/message" 9 | require_relative "vsm/async_channel" 10 | require_relative "vsm/homeostat" 11 | require_relative "vsm/observability/ledger" 12 | 13 | require_relative "vsm/roles/operations" 14 | require_relative "vsm/roles/coordination" 15 | require_relative "vsm/roles/intelligence" 16 | require_relative "vsm/roles/governance" 17 | require_relative "vsm/roles/identity" 18 | 19 | require_relative "vsm/tool/descriptor" 20 | require_relative "vsm/tool/acts_as_tool" 21 | require_relative "vsm/tool/capsule" 22 | 23 | require_relative "vsm/meta" 24 | 25 | require_relative "vsm/executors/fiber_executor" 26 | require_relative "vsm/executors/thread_executor" 27 | 28 | require_relative "vsm/capsule" 29 | require_relative "vsm/dsl" 30 | require_relative "vsm/port" 31 | require_relative "vsm/runtime" 32 | 33 | require_relative "vsm/drivers/openai/async_driver" 34 | require_relative "vsm/drivers/anthropic/async_driver" 35 | require_relative "vsm/drivers/gemini/async_driver" 36 | require_relative "vsm/drivers/family" 37 | 38 | require_relative "vsm/lens" 39 | 40 | # Optional/built-in ports and MCP integration 41 | require_relative "vsm/ports/chat_tty" 42 | require_relative "vsm/ports/mcp/server_stdio" 43 | require_relative "vsm/mcp/jsonrpc" 44 | require_relative "vsm/mcp/client" 45 | require_relative "vsm/mcp/remote_tool_capsule" 46 | require_relative "vsm/dsl_mcp" 47 | 48 | module Vsm 49 | class Error < StandardError; end 50 | # Your code goes here... 51 | end 52 | -------------------------------------------------------------------------------- /lib/vsm/generator/templates/lib_organism_rb.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "securerandom" 4 | 5 | require_relative "tools/read_file" 6 | 7 | module <%= module_name %> 8 | module Organism 9 | def self.build 10 | # Provider selection via env (default injected at generation time) 11 | provider = (ENV["<%= env_prefix %>_PROVIDER"] || "<%= provider %>").downcase 12 | driver = 13 | case provider 14 | when "anthropic" 15 | VSM::Drivers::Anthropic::AsyncDriver.new( 16 | api_key: ENV.fetch("ANTHROPIC_API_KEY"), 17 | model: ENV["<%= env_prefix %>_MODEL"] || "<%= default_model %>" 18 | ) 19 | when "gemini" 20 | VSM::Drivers::Gemini::AsyncDriver.new( 21 | api_key: ENV.fetch("GEMINI_API_KEY"), 22 | model: ENV["<%= env_prefix %>_MODEL"] || "<%= default_model %>" 23 | ) 24 | else 25 | VSM::Drivers::OpenAI::AsyncDriver.new( 26 | api_key: ENV.fetch("OPENAI_API_KEY"), 27 | model: ENV["<%= env_prefix %>_MODEL"] || "<%= default_model %>" 28 | ) 29 | end 30 | 31 | VSM::DSL.define(:<%= lib_name %>) do 32 | identity klass: VSM::Identity, args: { identity: "<%= lib_name %>", invariants: [] } 33 | governance klass: VSM::Governance 34 | coordination klass: VSM::Coordination 35 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: "You are a helpful assistant. Use tools when helpful." 
} 36 | monitoring klass: VSM::Monitoring 37 | 38 | operations do 39 | capsule :read_file, klass: <%= module_name %>::Tools::ReadFile 40 | end 41 | end 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /examples/06_mcp_mount_reflection.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 3 | require "vsm" 4 | require "vsm/dsl_mcp" 5 | require "vsm/ports/chat_tty" 6 | require "securerandom" 7 | 8 | # This example mounts a remote MCP server (we use example 05 as the server) 9 | # and exposes its tools locally via dynamic reflection. Type: echo: hello 10 | 11 | class DemoIntelligence < VSM::Intelligence 12 | def handle(message, bus:, **) 13 | case message.kind 14 | when :user 15 | if message.payload =~ /\Aecho:\s*(.+)\z/ 16 | bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta) 17 | else 18 | bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta) 19 | end 20 | true 21 | when :tool_result 22 | bus.emit VSM::Message.new(kind: :assistant, payload: "(done)", meta: message.meta) 23 | true 24 | else 25 | false 26 | end 27 | end 28 | end 29 | 30 | server_cmd = "ruby #{File.expand_path("05_mcp_server_and_chattty.rb", __dir__)}" 31 | 32 | cap = VSM::DSL.define(:mcp_mount_demo) do 33 | identity klass: VSM::Identity, args: { identity: "mcp_mount_demo", invariants: [] } 34 | governance klass: VSM::Governance 35 | coordination klass: VSM::Coordination 36 | intelligence klass: DemoIntelligence 37 | monitoring klass: VSM::Monitoring 38 | operations do 39 | # Reflect the remote server's tools; include only :echo and expose as local name "echo" 40 | mcp_server :demo_server, cmd: server_cmd, include: %w[echo] 41 | end 42 | end 43 | 44 | banner = ->(io) { io.puts "\e[96mMCP mount demo\e[0m — type 'echo: hi' (Ctrl-C to exit)" } 45 | VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner)]) 46 | -------------------------------------------------------------------------------- /spec/meta/snapshot_builder_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "spec_helper" 4 | 5 | RSpec.describe VSM::Meta::SnapshotBuilder do 6 | class SnapshotTestIdentity < VSM::Identity 7 | def initialize(identity:, invariants: []) 8 | super 9 | end 10 | end 11 | 12 | class SnapshotTestTool < VSM::ToolCapsule 13 | tool_name "snapshot_test" 14 | tool_description "Tool for testing snapshot builder" 15 | tool_schema({ type: "object", properties: {}, required: [] }) 16 | 17 | def run(_args) 18 | "ok" 19 | end 20 | end 21 | 22 | let(:capsule) do 23 | VSM::DSL.define(:snapshot_host) do 24 | identity klass: SnapshotTestIdentity, args: { identity: "snapshot_host", invariants: ["stay"] } 25 | governance klass: VSM::Governance, args: {} 26 | coordination klass: VSM::Coordination, args: {} 27 | intelligence klass: VSM::Intelligence, args: {} 28 | monitoring klass: VSM::Monitoring, args: {} 29 | operations do 30 | capsule :snapshot_test, klass: SnapshotTestTool 31 | end 32 | end 33 | end 34 | 35 | let(:snapshot) { described_class.new(root: capsule).call } 36 | 37 | it "captures root capsule metadata" do 38 | expect(snapshot[:name]).to eq("snapshot_host") 39 | expect(snapshot[:roles].keys).to include("identity", 
"governance", "operations") 40 | end 41 | 42 | it "captures constructor args for roles" do 43 | expect(snapshot[:roles]["identity"][:constructor_args]).to eq({ identity: "snapshot_host", invariants: ["stay"] }) 44 | end 45 | 46 | it "captures tool child details" do 47 | tool = snapshot[:operations][:children]["snapshot_test"] 48 | expect(tool[:kind]).to eq("tool") 49 | expect(tool[:tool][:name]).to eq("snapshot_test") 50 | expect(tool[:source_locations].map { _1[:method] }).to include("run") 51 | end 52 | end 53 | 54 | -------------------------------------------------------------------------------- /examples/01_echo_tool.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 3 | require "vsm" 4 | require "vsm/ports/chat_tty" 5 | require "securerandom" 6 | 7 | class EchoTool < VSM::ToolCapsule 8 | tool_name "echo" 9 | tool_description "Echoes a message" 10 | tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] }) 11 | 12 | def run(args) 13 | "you said: #{args["text"]}" 14 | end 15 | end 16 | 17 | # Minimal “intelligence” that triggers a tool when user types "echo: ..." 18 | class DemoIntelligence < VSM::Intelligence 19 | def handle(message, bus:, **) 20 | case message.kind 21 | when :user 22 | if message.payload =~ /\Aecho:\s*(.+)\z/ 23 | bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta) 24 | else 25 | bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta) 26 | end 27 | true 28 | when :tool_result 29 | # Complete the turn after tool execution 30 | bus.emit VSM::Message.new(kind: :assistant, payload: "(done)", meta: message.meta) 31 | true 32 | else 33 | false 34 | end 35 | end 36 | end 37 | 38 | cap = VSM::DSL.define(:demo) do 39 | identity klass: VSM::Identity, args: { identity: "demo", invariants: [] } 40 | governance klass: VSM::Governance 41 | coordination klass: VSM::Coordination 42 | intelligence klass: DemoIntelligence 43 | monitoring klass: VSM::Monitoring 44 | operations do 45 | capsule :echo, klass: EchoTool 46 | end 47 | end 48 | 49 | # Use the built-in, customizable ChatTTY port 50 | banner = ->(io) { io.puts "\e[96mEcho demo\e[0m — type 'echo: hello' (Ctrl-C to exit)" } 51 | VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner)]) 52 | -------------------------------------------------------------------------------- /examples/02b_anthropic_streaming.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Example: Anthropic streaming demo (no tools) 4 | 5 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 6 | require "securerandom" 7 | require "vsm" 8 | 9 | MODEL = ENV["AIRB_MODEL"] || "claude-sonnet-4-0" 10 | 11 | driver = VSM::Drivers::Anthropic::AsyncDriver.new( 12 | api_key: ENV.fetch("ANTHROPIC_API_KEY"), 13 | model: MODEL 14 | ) 15 | 16 | system_prompt = "You are a concise assistant. Answer briefly." 
17 | 18 | cap = VSM::DSL.define(:anthropic_stream_demo) do 19 | identity klass: VSM::Identity, args: { identity: "anthropic_stream_demo", invariants: [] } 20 | governance klass: VSM::Governance 21 | coordination klass: VSM::Coordination 22 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt } 23 | operations klass: VSM::Operations 24 | monitoring klass: VSM::Monitoring 25 | end 26 | 27 | class StreamTTY < VSM::Port 28 | def should_render?(message) 29 | [:assistant_delta, :assistant].include?(message.kind) || message.kind == :tool_calls 30 | end 31 | 32 | def loop 33 | sid = SecureRandom.uuid 34 | puts "anthropic streaming demo — type to chat (Ctrl-C to exit)" 35 | print "You: " 36 | while (line = $stdin.gets&.chomp) 37 | @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid }) 38 | @capsule.roles[:coordination].wait_for_turn_end(sid) 39 | print "You: " 40 | end 41 | end 42 | 43 | def render_out(msg) 44 | case msg.kind 45 | when :assistant_delta 46 | print msg.payload 47 | $stdout.flush 48 | when :assistant 49 | puts "" 50 | puts "(turn #{msg.meta&.dig(:turn_id)})" 51 | when :tool_calls 52 | puts "\n(tool_calls #{msg.payload&.size || 0})" 53 | end 54 | end 55 | end 56 | 57 | VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)]) 58 | 59 | -------------------------------------------------------------------------------- /examples/02c_gemini_streaming.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Example: Gemini streaming demo (no tools) 4 | 5 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 6 | require "securerandom" 7 | require "vsm" 8 | 9 | MODEL = ENV["AIRB_MODEL"] || "gemini-2.5-flash" 10 | 11 | driver = VSM::Drivers::Gemini::AsyncDriver.new( 12 | api_key: ENV.fetch("GEMINI_API_KEY"), 13 | model: MODEL, 14 | streaming: true 15 | ) 16 | 17 | system_prompt = "You are a concise assistant. Answer briefly." 
18 | 19 | cap = VSM::DSL.define(:gemini_stream_demo) do 20 | identity klass: VSM::Identity, args: { identity: "gemini_stream_demo", invariants: [] } 21 | governance klass: VSM::Governance 22 | coordination klass: VSM::Coordination 23 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt } 24 | operations klass: VSM::Operations 25 | monitoring klass: VSM::Monitoring 26 | end 27 | 28 | class StreamTTY < VSM::Port 29 | def should_render?(message) 30 | [:assistant_delta, :assistant].include?(message.kind) || message.kind == :tool_calls 31 | end 32 | 33 | def loop 34 | sid = SecureRandom.uuid 35 | puts "gemini streaming demo — type to chat (Ctrl-C to exit)" 36 | print "You: " 37 | while (line = $stdin.gets&.chomp) 38 | @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid }) 39 | @capsule.roles[:coordination].wait_for_turn_end(sid) 40 | print "You: " 41 | end 42 | end 43 | 44 | def render_out(msg) 45 | case msg.kind 46 | when :assistant_delta 47 | print msg.payload 48 | $stdout.flush 49 | when :assistant 50 | puts "" 51 | puts "(turn #{msg.meta&.dig(:turn_id)})" 52 | when :tool_calls 53 | puts "\n(tool_calls #{msg.payload&.size || 0})" 54 | end 55 | end 56 | end 57 | 58 | VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)]) 59 | 60 | 61 | -------------------------------------------------------------------------------- /examples/09_mcp_with_llm_calls.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 3 | require "vsm" 4 | require "vsm/dsl_mcp" 5 | require "vsm/ports/chat_tty" 6 | 7 | # Example: Use an LLM driver (OpenAI) to automatically call tools exposed by an MCP server. 8 | # 9 | # Prereqs: 10 | # - OPENAI_API_KEY must be set 11 | # - An MCP server available on your PATH, e.g. `claude mcp serve` 12 | # 13 | # Usage: 14 | # OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/09_mcp_with_llm_calls.rb 15 | # Type a question; the model will choose tools from the reflected MCP server. 16 | 17 | MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini" 18 | 19 | driver = VSM::Drivers::OpenAI::AsyncDriver.new( 20 | api_key: ENV.fetch("OPENAI_API_KEY"), 21 | model: MODEL 22 | ) 23 | 24 | system_prompt = <<~PROMPT 25 | You are a helpful assistant. You have access to the listed tools. 26 | When a tool can help, call it with appropriate JSON arguments. 27 | Keep final answers concise. 28 | PROMPT 29 | 30 | cap = VSM::DSL.define(:mcp_with_llm) do 31 | identity klass: VSM::Identity, args: { identity: "mcp_with_llm", invariants: [] } 32 | governance klass: VSM::Governance 33 | coordination klass: VSM::Coordination 34 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt } 35 | monitoring klass: VSM::Monitoring 36 | operations do 37 | # Reflect tools from an external MCP server (e.g., Claude Code). 38 | # If your server requires strict LSP framing, run with VSM_MCP_LSP=1. 39 | # You can also prefix names to avoid collisions: prefix: "claude_" 40 | mcp_server :claude, cmd: ["claude", "mcp", "serve"] 41 | end 42 | end 43 | 44 | banner = ->(io) do 45 | io.puts "\e[96mLLM + MCP tools\e[0m — Ask a question; model may call tools." 
46 | end 47 | 48 | VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner, prompt: "You> ")]) 49 | 50 | -------------------------------------------------------------------------------- /spec/vsm/coordination_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe VSM::Coordination do 5 | include Async::RSpec::Reactor 6 | 7 | let(:coord) { described_class.new } 8 | let(:bus) { VSM::AsyncChannel.new } 9 | 10 | it "orders messages and signals turn end on :assistant" do 11 | coord.observe(bus) 12 | sid = "s1" 13 | m_user = VSM::Message.new(kind: :user, payload: "hi", meta: { session_id: sid }) 14 | m_toolr = VSM::Message.new(kind: :tool_result, payload: "ok", meta: { session_id: sid }) 15 | m_asst = VSM::Message.new(kind: :assistant, payload: "done", meta: { session_id: sid }) 16 | 17 | # All async operations need to be inside async context 18 | seq = [] 19 | Async do |task| 20 | # Manually stage messages since observe no longer auto-stages 21 | coord.stage(m_toolr) 22 | coord.stage(m_asst) 23 | coord.stage(m_user) 24 | 25 | coord.drain(bus) { |m| seq << m.kind } 26 | expect(seq).to eq([:user, :tool_result, :assistant]) 27 | 28 | # Set up waiter before staging assistant message that will signal it 29 | task.async do 30 | coord.wait_for_turn_end(sid) 31 | seq << :unblocked 32 | end 33 | 34 | # Stage another assistant message to trigger the turn-end signal 35 | coord.stage(VSM::Message.new(kind: :assistant, payload: "final", meta: { session_id: sid })) 36 | coord.drain(bus) { |_| } 37 | task.sleep 0.05 38 | expect(seq).to include(:unblocked) 39 | end 40 | end 41 | 42 | it "gives floor priority" do 43 | coord.grant_floor!("floor") 44 | m1 = VSM::Message.new(kind: :assistant, meta: { session_id: "other" }) 45 | m2 = VSM::Message.new(kind: :assistant, meta: { session_id: "floor" }) 46 | coord.stage(m1) 47 | coord.stage(m2) 48 | out = [] 49 | coord.drain(bus) { |m| out << m.meta[:session_id] } 50 | expect(out.first).to eq("floor") 51 | end 52 | end 53 | 54 | -------------------------------------------------------------------------------- /lib/vsm/meta.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "meta/support" 4 | require_relative "meta/snapshot_builder" 5 | require_relative "meta/snapshot_cache" 6 | require_relative "meta/tools" 7 | 8 | module VSM 9 | module Meta 10 | DEFAULT_TOOL_MAP = { 11 | "meta_summarize_self" => Tools::SummarizeSelf, 12 | "meta_list_tools" => Tools::ListTools, 13 | "meta_explain_tool" => Tools::ExplainTool, 14 | "meta_explain_role" => Tools::ExplainRole 15 | }.freeze 16 | 17 | module_function 18 | 19 | def attach!(capsule, prefix: "", only: nil, except: nil) 20 | cache = SnapshotCache.new(SnapshotBuilder.new(root: capsule)) 21 | installed = {} 22 | 23 | tool_map = select_tools(only:, except:).transform_keys { |name| "#{prefix}#{name}" } 24 | tool_map.each do |tool_name, klass| 25 | tool = klass.new(root: capsule, snapshot_cache: cache) 26 | if capsule.roles[:governance] && tool.respond_to?(:governance=) 27 | tool.governance = capsule.roles[:governance] 28 | end 29 | register_tool(capsule, tool_name, tool) 30 | installed[tool_name] = tool 31 | end 32 | 33 | cache.fetch 34 | installed 35 | end 36 | 37 | def select_tools(only:, except:) 38 | map = DEFAULT_TOOL_MAP 39 | if only && !Array(only).empty? 
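        # Narrow to an explicit allow-list first; `only:` entries are compared
        # as strings, so both symbols and strings work.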
40 | keys = Array(only).map(&:to_s) 41 | map = map.select { |name, _| keys.include?(name) } 42 | end 43 | if except && !Array(except).empty? 44 | rejects = Array(except).map(&:to_s) 45 | map = map.reject { |name, _| rejects.include?(name) } 46 | end 47 | map 48 | end 49 | 50 | def register_tool(capsule, name, tool) 51 | key = name.to_s 52 | capsule.children[key] = tool 53 | context_children = capsule.bus.context[:operations_children] 54 | if context_children.is_a?(Hash) 55 | context_children[key] = tool 56 | end 57 | end 58 | end 59 | end 60 | -------------------------------------------------------------------------------- /spec/vsm/operations_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe VSM::Operations do 5 | include Async::RSpec::Reactor 6 | 7 | let(:ops) { described_class.new } 8 | let(:bus) { VSM::AsyncChannel.new } 9 | let(:children) { { "echo" => FakeEchoTool.new, "slow" => SlowTool.new, "boom" => ErrorTool.new } } 10 | 11 | it "routes tool_call to child and emits tool_result" do 12 | results = Queue.new 13 | bus.subscribe { |m| results << m if m.kind == :tool_result } 14 | 15 | msg = VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => "ok" } }, corr_id: "1") 16 | expect(ops.handle(msg, bus:, children:)).to be true 17 | 18 | out = results.pop 19 | expect(out.corr_id).to eq("1") 20 | expect(out.payload).to eq("echo: ok") 21 | end 22 | 23 | it "runs multiple slow tools in parallel via threads" do 24 | results = Queue.new 25 | bus.subscribe { |m| results << m if m.kind == :tool_result } 26 | 27 | t0 = Process.clock_gettime(Process::CLOCK_MONOTONIC) 28 | 2.times do |i| 29 | msg = VSM::Message.new(kind: :tool_call, payload: { tool: "slow", args: { "id" => i } }, corr_id: i.to_s) 30 | ops.handle(msg, bus:, children:) 31 | end 32 | 33 | # Wait for both results to come back 34 | msgs = 2.times.map { results.pop } 35 | 36 | ids = msgs.map(&:payload).sort 37 | expect(ids).to eq(["slow-0", "slow-1"]) 38 | 39 | total = Process.clock_gettime(Process::CLOCK_MONOTONIC) - t0 40 | expect(total).to be < 0.6 # proves parallelism vs serial 0.5+ 41 | end 42 | 43 | it "handles tool errors by emitting error result" do 44 | seen = [] 45 | bus.subscribe { |m| seen << m if m.kind == :tool_result } 46 | msg = VSM::Message.new(kind: :tool_call, payload: { tool: "boom", args: {} }, corr_id: "x") 47 | ops.handle(msg, bus:, children:) 48 | Async { |t| t.sleep 0.05 } 49 | expect(seen.first.payload).to match(/ERROR: RuntimeError: kapow/) 50 | end 51 | end 52 | 53 | -------------------------------------------------------------------------------- /lib/vsm/lens/event_hub.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "json" 3 | require "time" 4 | require "securerandom" 5 | 6 | module VSM 7 | module Lens 8 | class EventHub 9 | DEFAULT_BUFFER = 500 10 | 11 | def initialize(buffer_size: DEFAULT_BUFFER) 12 | @subs = [] # Array of SizedQueue 13 | @mutex = Mutex.new 14 | @buffer = [] 15 | @buffer_size = buffer_size 16 | end 17 | 18 | def publish(message) 19 | event = format_event(message) 20 | @mutex.synchronize do 21 | @buffer << event 22 | @buffer.shift(@buffer.size - @buffer_size) if @buffer.size > @buffer_size 23 | @subs.each { |q| try_push(q, event) } 24 | end 25 | end 26 | 27 | def subscribe 28 | q = SizedQueue.new(100) 29 | snapshot = nil 30 | @mutex.synchronize do 31 | @subs << q 32 
| snapshot = @buffer.dup 33 | end 34 | [q, snapshot] 35 | end 36 | 37 | def unsubscribe(queue) 38 | @mutex.synchronize { @subs.delete(queue) } 39 | end 40 | 41 | private 42 | 43 | def try_push(queue, event) 44 | queue.push(event) 45 | rescue ThreadError 46 | # queue full; drop event to avoid blocking the pipeline 47 | end 48 | 49 | def format_event(msg) 50 | { 51 | id: SecureRandom.uuid, 52 | ts: Time.now.utc.iso8601(6), 53 | kind: msg.kind, 54 | path: msg.path, 55 | corr_id: msg.corr_id, 56 | meta: msg.meta, 57 | # Small preview to avoid huge payloads; the UI can request details later if you add a /event/:id endpoint 58 | payload: preview(msg.payload) 59 | } 60 | end 61 | 62 | def preview(payload) 63 | case payload 64 | when String 65 | payload.bytesize > 2_000 ? payload.byteslice(0, 2_000) + "… (truncated)" : payload 66 | else 67 | payload 68 | end 69 | end 70 | end 71 | end 72 | end 73 | 74 | -------------------------------------------------------------------------------- /examples/08_custom_chattty.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 3 | require "vsm" 4 | require "vsm/ports/chat_tty" 5 | require "securerandom" 6 | 7 | # Demonstrates subclassing ChatTTY to customize the banner and output formatting. 8 | 9 | class EchoTool < VSM::ToolCapsule 10 | tool_name "echo" 11 | tool_description "Echoes back the provided text" 12 | tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] }) 13 | def run(args) 14 | "you said: #{args["text"]}" 15 | end 16 | end 17 | 18 | class DemoIntelligence < VSM::Intelligence 19 | def handle(message, bus:, **) 20 | return false unless message.kind == :user 21 | if message.payload =~ /\Aecho:\s*(.+)\z/ 22 | bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta) 23 | else 24 | bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta) 25 | end 26 | true 27 | end 28 | end 29 | 30 | class FancyTTY < VSM::Ports::ChatTTY 31 | def banner(io) 32 | io.puts "\e[95m\n ███ CUSTOM CHAT ███\n\e[0m" 33 | end 34 | 35 | def render_out(m) 36 | case m.kind 37 | when :assistant_delta 38 | @streaming = true 39 | @out.print m.payload 40 | @out.flush 41 | when :assistant 42 | @out.puts unless @streaming 43 | @streaming = false 44 | when :tool_call 45 | @out.puts "\n\e[90m→ calling #{m.payload[:tool]}\e[0m" 46 | when :tool_result 47 | @out.puts "\e[92m✓ #{m.payload}\e[0m" 48 | end 49 | end 50 | end 51 | 52 | cap = VSM::DSL.define(:fancy_chat) do 53 | identity klass: VSM::Identity, args: { identity: "fancy_chat", invariants: [] } 54 | governance klass: VSM::Governance 55 | coordination klass: VSM::Coordination 56 | intelligence klass: DemoIntelligence 57 | monitoring klass: VSM::Monitoring 58 | operations do 59 | capsule :echo, klass: EchoTool 60 | end 61 | end 62 | 63 | VSM::Runtime.start(cap, ports: [FancyTTY.new(capsule: cap, prompt: "Me: ")]) 64 | -------------------------------------------------------------------------------- /vsm.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "lib/vsm/version" 4 | 5 | Gem::Specification.new do |spec| 6 | spec.name = "vsm" 7 | spec.version = Vsm::VERSION 8 | spec.authors = ["Scott Werner"] 9 | spec.email = ["scott@sublayer.com"] 10 | 11 | spec.summary = 
"Async, recursive agent framework for Ruby (Viable System Model): capsules, tools-as-capsules, streaming tool calls, and observability." 12 | spec.description = <<~DESC 13 | VSM is a small Ruby framework for building agentic systems using a 14 | Viable System Model–style architecture. It gives you Capsules: self‑contained components 15 | composed of five named systems (Operations, Coordination, Intelligence, Governance, 16 | Identity) plus an async runtime so many capsules can run concurrently. 17 | DESC 18 | 19 | spec.homepage = "https://github.com/sublayerapp/vsm" 20 | spec.license = "MIT" 21 | spec.required_ruby_version = ">= 3.4" 22 | 23 | spec.metadata["homepage_uri"] = spec.homepage 24 | spec.metadata["source_code_uri"] = "https://github.com/sublayerapp/vsm" 25 | 26 | # Specify which files should be added to the gem when it is released. 27 | # The `git ls-files -z` loads the files in the RubyGem that have been added into git. 28 | gemspec = File.basename(__FILE__) 29 | spec.files = IO.popen(%w[git ls-files -z], chdir: __dir__, err: IO::NULL) do |ls| 30 | ls.readlines("\x0", chomp: true).reject do |f| 31 | (f == gemspec) || 32 | f.start_with?(*%w[bin/ test/ spec/ features/ .git .github appveyor Gemfile]) 33 | end 34 | end 35 | spec.bindir = "exe" 36 | spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) } 37 | spec.require_paths = ["lib"] 38 | 39 | spec.add_dependency "async", "~> 2.27" 40 | spec.add_dependency "async-http", "~> 0.90" 41 | spec.add_dependency "rack", "~> 3.2" 42 | 43 | spec.add_development_dependency "rspec", "~> 3.13" 44 | spec.add_development_dependency "async-rspec", "~> 1.17" 45 | 46 | # Uncomment to register a new dependency of your gem 47 | # spec.add_dependency "example-gem", "~> 1.0" 48 | 49 | # For more information and examples about making a new gem, check out our 50 | # guide at: https://bundler.io/guides/creating_gem.html 51 | end 52 | -------------------------------------------------------------------------------- /lib/vsm/cli.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'optparse' 4 | require_relative 'generator/new_project' 5 | 6 | module VSM 7 | class CLI 8 | def self.start(argv = ARGV) 9 | new.run(argv) 10 | end 11 | 12 | def run(argv) 13 | cmd = argv.shift 14 | case cmd 15 | when 'new' 16 | run_new(argv) 17 | when nil, '-h', '--help', 'help' 18 | puts help_text 19 | else 20 | warn "Unknown command: #{cmd}\n" 21 | puts help_text 22 | exit 1 23 | end 24 | end 25 | 26 | private 27 | 28 | def run_new(argv) 29 | opts = { 30 | path: nil, 31 | git: false, 32 | bundle: false, 33 | provider: 'openai', 34 | model: nil, 35 | force: false 36 | } 37 | parser = OptionParser.new do |o| 38 | o.banner = "Usage: vsm new [options]" 39 | o.on('--path PATH', 'Target directory (default: ./)') { |v| opts[:path] = v } 40 | o.on('--git', 'Run git init and initial commit') { opts[:git] = true } 41 | o.on('--bundle', 'Run bundle install after generation') { opts[:bundle] = true } 42 | o.on('--with-llm PROVIDER', %w[openai anthropic gemini], 'LLM provider: openai (default), anthropic, or gemini') { |v| opts[:provider] = v } 43 | o.on('--model NAME', 'Default model name') { |v| opts[:model] = v } 44 | o.on('--force', 'Overwrite existing directory') { opts[:force] = true } 45 | o.on('-h', '--help', 'Show help') { puts o; exit 0 } 46 | end 47 | 48 | name = nil 49 | begin 50 | parser.order!(argv) 51 | name = argv.shift 52 | rescue OptionParser::ParseError => e 53 | warn 
e.message 54 | puts parser 55 | exit 1 56 | end 57 | 58 | unless name && !name.strip.empty? 59 | warn 'Please provide a project name, e.g., vsm new my_app' 60 | puts parser 61 | exit 1 62 | end 63 | 64 | VSM::Generator::NewProject.run(name: name, **opts) 65 | end 66 | 67 | def help_text 68 | <<~TXT 69 | VSM CLI 70 | 71 | Commands: 72 | vsm new [options] Create a new VSM app skeleton 73 | 74 | Run `vsm new --help` for options. 75 | TXT 76 | end 77 | end 78 | end 79 | -------------------------------------------------------------------------------- /examples/05_mcp_server_and_chattty.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 3 | require "vsm" 4 | require "securerandom" 5 | require "vsm/ports/chat_tty" 6 | require "vsm/ports/mcp/server_stdio" 7 | 8 | # A simple local tool we can expose to both ChatTTY and MCP stdio. 9 | class EchoTool < VSM::ToolCapsule 10 | tool_name "echo" 11 | tool_description "Echoes back the provided text" 12 | tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] }) 13 | def run(args) 14 | "you said: #{args["text"]}" 15 | end 16 | end 17 | 18 | # Minimal intelligence that triggers the echo tool when user types: echo: ... 19 | class DemoIntelligence < VSM::Intelligence 20 | def handle(message, bus:, **) 21 | case message.kind 22 | when :user 23 | if message.payload =~ /\Aecho:\s*(.+)\z/ 24 | bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta) 25 | else 26 | bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta) 27 | end 28 | true 29 | when :tool_result 30 | bus.emit VSM::Message.new(kind: :assistant, payload: "(done)", meta: message.meta) 31 | true 32 | else 33 | false 34 | end 35 | end 36 | end 37 | 38 | cap = VSM::DSL.define(:demo_mcp_server_and_chat) do 39 | identity klass: VSM::Identity, args: { identity: "demo", invariants: [] } 40 | governance klass: VSM::Governance 41 | coordination klass: VSM::Coordination 42 | intelligence klass: DemoIntelligence 43 | monitoring klass: VSM::Monitoring 44 | operations do 45 | capsule :echo, klass: EchoTool 46 | end 47 | end 48 | 49 | # Run both ports together: MCP stdio (machine) + ChatTTY (human). 50 | banner = ->(io) { io.puts "\e[96mVSM demo\e[0m — type 'echo: hi' (Ctrl-C to exit)" } 51 | ports = [VSM::Ports::MCP::ServerStdio.new(capsule: cap)] 52 | if $stdout.tty? 53 | # Only enable interactive ChatTTY when attached to a TTY to avoid 54 | # interfering when this example is spawned as a background MCP server. 55 | begin 56 | tty = File.open("/dev/tty", "r+") 57 | rescue StandardError 58 | tty = nil 59 | end 60 | ports << VSM::Ports::ChatTTY.new(capsule: cap, banner: banner, input: tty, output: tty) 61 | end 62 | 63 | VSM::Runtime.start(cap, ports: ports) 64 | -------------------------------------------------------------------------------- /lib/vsm/lens.rb: -------------------------------------------------------------------------------- 1 | # lib/vsm/lens.rb 2 | # frozen_string_literal: true 3 | require_relative "lens/event_hub" 4 | require_relative "lens/server" 5 | require_relative "lens/stats" 6 | require_relative "lens/tui" 7 | 8 | module VSM 9 | module Lens 10 | # Starts a tiny Rack server (Puma or WEBrick) and streams events via SSE. 11 | # Returns the EventHub so other lenses (e.g., TUI) can also subscribe. 
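    # A typical usage sketch (VSM_LENS_TOKEN here is just an example env var):
    #
    #   hub = VSM::Lens.attach!(capsule, port: 9292, token: ENV["VSM_LENS_TOKEN"])
    #   VSM::Lens::TUI.start(hub)   # optionally reuse the same hub for the TUI lens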
12 | def self.attach!(capsule, host: "127.0.0.1", port: 9292, token: nil) 13 | hub = EventHub.new 14 | capsule.bus.subscribe { |msg| hub.publish(msg) } 15 | 16 | require_relative "lens/stats" 17 | stats = Stats.new(hub: hub, capsule: capsule) 18 | server = Server.new(hub: hub, token: token, stats: stats) 19 | 20 | 21 | Thread.new do 22 | app = server.rack_app 23 | 24 | # Prefer Puma if present: 25 | if try_run_puma(app, host, port) 26 | # ok 27 | elsif try_run_webrick(app, host, port) 28 | # ok 29 | else 30 | warn <<~MSG 31 | vsm-lens: no Rack handler found. Install one of: 32 | - `bundle add puma` 33 | - or `bundle add webrick` 34 | Then re-run with VSM_LENS=1. 35 | MSG 36 | end 37 | end 38 | 39 | hub 40 | end 41 | 42 | def self.try_run_puma(app, host, port) 43 | begin 44 | require "rack/handler/puma" 45 | rescue LoadError 46 | return false 47 | end 48 | Thread.new do 49 | Rack::Handler::Puma.run(app, Host: host, Port: port, Silent: true) 50 | end 51 | true 52 | rescue => e 53 | warn "vsm-lens: Puma failed to start: #{e.class}: #{e.message}" 54 | false 55 | end 56 | 57 | def self.try_run_webrick(app, host, port) 58 | begin 59 | require "webrick" # provide the server 60 | require "rack/handler/webrick" # rack adapter (Rack 3 loads this if webrick gem is present) 61 | rescue LoadError 62 | return false 63 | end 64 | Thread.new do 65 | Rack::Handler::WEBrick.run( 66 | app, 67 | Host: host, Port: port, 68 | AccessLog: [], 69 | Logger: WEBrick::Log.new($stderr, WEBrick::Log::WARN) 70 | ) 71 | end 72 | true 73 | rescue => e 74 | warn "vsm-lens: WEBrick failed to start: #{e.class}: #{e.message}" 75 | false 76 | end 77 | end 78 | end 79 | 80 | -------------------------------------------------------------------------------- /examples/10_meta_read_only.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Demo: use OpenAI tool-calling to let an LLM inspect the running capsule via 4 | # the read-only meta tools. Set OPENAI_API_KEY (and optionally AIRB_MODEL) then: 5 | # bundle exec ruby examples/10_meta_read_only.rb 6 | # Ask things like "What can you do?" or "Explain meta_demo_tool" and the model 7 | # will call the meta tools to gather context before replying. 8 | 9 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 10 | 11 | require "securerandom" 12 | require "vsm" 13 | 14 | MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini" 15 | API_KEY = ENV["OPENAI_API_KEY"] or abort "OPENAI_API_KEY required for this demo" 16 | 17 | class MetaDemoTool < VSM::ToolCapsule 18 | tool_name "meta_demo_tool" 19 | tool_description "Simple tool included alongside meta tools" 20 | tool_schema({ type: "object", properties: {}, additionalProperties: false }) 21 | 22 | def run(_args) 23 | "hello from demo tool" 24 | end 25 | end 26 | 27 | driver = VSM::Drivers::OpenAI::AsyncDriver.new(api_key: API_KEY, model: MODEL) 28 | 29 | SYSTEM_PROMPT = <<~PROMPT 30 | You are the steward of a VSM capsule. 
You have access to built-in reflection 31 | tools that describe the organism and its operations: 32 | - meta_summarize_self: overview of the current capsule and its roles 33 | - meta_list_tools: list available tools with schemas 34 | - meta_explain_tool: show implementation details for a named tool 35 | - meta_explain_role: show capsule-specific details and code for a VSM role 36 | When the user asks about capabilities, available tools, or how something 37 | works, call the appropriate meta_* tool first, then respond with a clear, 38 | human-friendly summary that cites relevant tool names. Be concise but 39 | complete. 40 | PROMPT 41 | 42 | cap = VSM::DSL.define(:meta_demo_llm) do 43 | identity klass: VSM::Identity, args: { identity: "meta_demo_llm", invariants: [] } 44 | governance klass: VSM::Governance, args: {} 45 | coordination klass: VSM::Coordination, args: {} 46 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: SYSTEM_PROMPT } 47 | monitoring klass: VSM::Monitoring, args: {} 48 | operations do 49 | meta_tools 50 | capsule :meta_demo_tool, klass: MetaDemoTool 51 | end 52 | end 53 | 54 | ports = [VSM::Ports::ChatTTY.new(capsule: cap, banner: ->(io) { io.puts "Meta demo ready. Try asking 'What can you do?'" })] 55 | 56 | VSM::Runtime.start(cap, ports: ports) 57 | -------------------------------------------------------------------------------- /lib/vsm/mcp/client.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "open3" 3 | require "shellwords" 4 | require_relative "jsonrpc" 5 | 6 | module VSM 7 | module MCP 8 | class Client 9 | attr_reader :name 10 | 11 | def initialize(cmd:, env: {}, cwd: nil, name: nil) 12 | @cmd, @env, @cwd, @name = cmd, env, cwd, (name || cmd.split.first) 13 | @stdin = @stdout = @stderr = @wait_thr = nil 14 | @rpc = nil 15 | @stderr_thread = nil 16 | end 17 | 18 | def start 19 | opts = {} 20 | opts[:chdir] = @cwd if @cwd 21 | args = @cmd.is_a?(Array) ? @cmd : Shellwords.split(@cmd.to_s) 22 | @stdin, @stdout, @stderr, @wait_thr = Open3.popen3(@env || {}, *args, **opts) 23 | # Drain stderr to avoid blocking if the server writes logs 24 | @stderr_thread = Thread.new do 25 | begin 26 | @stderr.each_line { |_line| } 27 | rescue StandardError 28 | end 29 | end 30 | @rpc = JSONRPC::Stdio.new(r: @stdout, w: @stdin) 31 | self 32 | end 33 | 34 | def stop 35 | begin 36 | @stdin&.close 37 | rescue StandardError 38 | end 39 | begin 40 | @stdout&.close 41 | rescue StandardError 42 | end 43 | begin 44 | @stderr&.close 45 | rescue StandardError 46 | end 47 | begin 48 | @stderr_thread&.kill 49 | rescue StandardError 50 | end 51 | begin 52 | @wait_thr&.kill 53 | rescue StandardError 54 | end 55 | nil 56 | end 57 | 58 | # Returns Array with symbol keys: :name, :description, :input_schema 59 | def list_tools 60 | raw = @rpc.request("tools/list") 61 | arr = (raw && raw["tools"]) || [] 62 | arr.map do |t| 63 | { 64 | name: t["name"].to_s, 65 | description: t["description"].to_s, 66 | input_schema: (t["input_schema"] || {}) 67 | } 68 | end 69 | end 70 | 71 | # Returns a String (first text content) or a JSON string fallback 72 | def call_tool(name:, arguments: {}) 73 | res = @rpc.request("tools/call", { "name" => name, "arguments" => arguments }) 74 | content = Array(res["content"]) 75 | item = content.find { |c| c["type"] == "text" } || content.first 76 | item ? 
(item["text"] || item.to_s) : res.to_s 77 | end 78 | end 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /examples/02_openai_streaming.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Example: OpenAI streaming demo (no tools) 4 | # 5 | # Usage: 6 | # OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/02_openai_streaming.rb 7 | # VSM_DEBUG_STREAM=1 to see low-level logs 8 | 9 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 10 | require "securerandom" 11 | require "vsm" 12 | 13 | MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini" 14 | 15 | driver = VSM::Drivers::OpenAI::AsyncDriver.new( 16 | api_key: ENV.fetch("OPENAI_API_KEY"), 17 | model: MODEL 18 | ) 19 | 20 | system_prompt = <<~PROMPT 21 | You are a concise assistant. Answer briefly. 22 | PROMPT 23 | 24 | cap = VSM::DSL.define(:openai_stream_demo) do 25 | identity klass: VSM::Identity, args: { identity: "openai_stream_demo", invariants: [] } 26 | governance klass: VSM::Governance 27 | coordination klass: VSM::Coordination 28 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt } 29 | operations klass: VSM::Operations 30 | monitoring klass: VSM::Monitoring 31 | end 32 | 33 | if ENV["VSM_LENS"] == "1" 34 | VSM::Lens.attach!(cap, port: (ENV["VSM_LENS_PORT"] || 9292).to_i, token: ENV["VSM_LENS_TOKEN"]) rescue nil 35 | end 36 | 37 | class StreamTTY < VSM::Port 38 | def should_render?(message) 39 | [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind) 40 | end 41 | 42 | def loop 43 | sid = SecureRandom.uuid 44 | puts "openai streaming demo — type to chat (Ctrl-C to exit)" 45 | print "You: " 46 | while (line = $stdin.gets&.chomp) 47 | @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid }) 48 | @capsule.roles[:coordination].wait_for_turn_end(sid) 49 | print "You: " 50 | end 51 | end 52 | 53 | def render_out(msg) 54 | case msg.kind 55 | when :assistant_delta 56 | # Stream without newline 57 | print msg.payload 58 | $stdout.flush 59 | when :assistant 60 | puts "" # end the line after streaming 61 | # The :assistant event carries the full final text again; avoid re-printing it 62 | # because we've already streamed the deltas above. Just show the turn marker. 63 | puts "(turn #{msg.meta&.dig(:turn_id)})" 64 | when :tool_result 65 | puts "\nTool> #{msg.payload}" 66 | when :tool_call 67 | puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id}) #{msg.payload[:args].inspect}" 68 | end 69 | end 70 | end 71 | 72 | VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)]) 73 | 74 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | PATH 2 | remote: . 
3 | specs: 4 | vsm (0.2.0) 5 | async (~> 2.27) 6 | async-http (~> 0.90) 7 | rack (~> 3.2) 8 | 9 | GEM 10 | remote: https://rubygems.org/ 11 | specs: 12 | async (2.27.2) 13 | console (~> 1.29) 14 | fiber-annotation 15 | io-event (~> 1.11) 16 | metrics (~> 0.12) 17 | traces (~> 0.15) 18 | async-http (0.90.1) 19 | async (>= 2.10.2) 20 | async-pool (~> 0.11) 21 | io-endpoint (~> 0.14) 22 | io-stream (~> 0.6) 23 | metrics (~> 0.12) 24 | protocol-http (~> 0.49) 25 | protocol-http1 (~> 0.30) 26 | protocol-http2 (~> 0.22) 27 | traces (~> 0.10) 28 | async-pool (0.11.0) 29 | async (>= 2.0) 30 | async-rspec (1.17.1) 31 | rspec (~> 3.0) 32 | rspec-files (~> 1.0) 33 | rspec-memory (~> 1.0) 34 | console (1.33.0) 35 | fiber-annotation 36 | fiber-local (~> 1.1) 37 | json 38 | date (3.4.1) 39 | diff-lcs (1.6.2) 40 | erb (5.0.2) 41 | fiber-annotation (0.2.0) 42 | fiber-local (1.1.0) 43 | fiber-storage 44 | fiber-storage (1.0.1) 45 | io-console (0.8.1) 46 | io-endpoint (0.15.2) 47 | io-event (1.12.1) 48 | io-stream (0.10.0) 49 | irb (1.15.2) 50 | pp (>= 0.6.0) 51 | rdoc (>= 4.0.0) 52 | reline (>= 0.4.2) 53 | json (2.13.2) 54 | metrics (0.13.0) 55 | pp (0.6.2) 56 | prettyprint 57 | prettyprint (0.2.0) 58 | protocol-hpack (1.5.1) 59 | protocol-http (0.51.1) 60 | protocol-http1 (0.34.1) 61 | protocol-http (~> 0.22) 62 | protocol-http2 (0.22.1) 63 | protocol-hpack (~> 1.4) 64 | protocol-http (~> 0.47) 65 | psych (5.2.6) 66 | date 67 | stringio 68 | rack (3.2.0) 69 | rake (13.3.0) 70 | rdoc (6.14.2) 71 | erb 72 | psych (>= 4.0.0) 73 | reline (0.6.2) 74 | io-console (~> 0.5) 75 | rspec (3.13.1) 76 | rspec-core (~> 3.13.0) 77 | rspec-expectations (~> 3.13.0) 78 | rspec-mocks (~> 3.13.0) 79 | rspec-core (3.13.5) 80 | rspec-support (~> 3.13.0) 81 | rspec-expectations (3.13.5) 82 | diff-lcs (>= 1.2.0, < 2.0) 83 | rspec-support (~> 3.13.0) 84 | rspec-files (1.1.3) 85 | rspec (~> 3.0) 86 | rspec-memory (1.0.4) 87 | rspec (~> 3.0) 88 | rspec-mocks (3.13.5) 89 | diff-lcs (>= 1.2.0, < 2.0) 90 | rspec-support (~> 3.13.0) 91 | rspec-support (3.13.4) 92 | stringio (3.1.7) 93 | traces (0.16.2) 94 | 95 | PLATFORMS 96 | arm64-darwin-23 97 | ruby 98 | 99 | DEPENDENCIES 100 | async-rspec (~> 1.17) 101 | irb 102 | rake (~> 13.0) 103 | rspec (~> 3.13) 104 | vsm! 
105 | 106 | BUNDLED WITH 107 | 2.7.2 108 | -------------------------------------------------------------------------------- /lib/vsm/dsl.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "meta" 4 | module VSM 5 | module DSL 6 | class Builder 7 | def initialize(name) 8 | @name = name 9 | @roles = {} 10 | @children = {} 11 | @after_build = [] 12 | end 13 | 14 | def identity(klass: VSM::Identity, args: {}) = assign_role(:identity, klass, args) 15 | def governance(klass: VSM::Governance, args: {}) = assign_role(:governance, klass, args) 16 | def coordination(klass: VSM::Coordination, args: {}) = assign_role(:coordination, klass, args) 17 | def intelligence(klass: VSM::Intelligence, args: {}) = assign_role(:intelligence, klass, args) 18 | def operations(klass: VSM::Operations, args: {}, &blk) 19 | @roles[:operations] = instantiate(klass, args) 20 | if blk 21 | builder = ChildrenBuilder.new(self) 22 | builder.instance_eval(&blk) 23 | @children.merge!(builder.result) 24 | end 25 | end 26 | 27 | def monitoring(klass: VSM::Monitoring, args: {}) = assign_role(:monitoring, klass, args) 28 | 29 | def build 30 | # Inject governance into tool capsules if they accept it 31 | @children.each_value do |child| 32 | child.governance = @roles[:governance] if child.respond_to?(:governance=) 33 | end 34 | capsule = VSM::Capsule.new(name: @name, roles: @roles, children: @children) 35 | @after_build.each { _1.call(capsule) } 36 | capsule 37 | end 38 | 39 | class ChildrenBuilder 40 | def initialize(parent) 41 | @parent = parent 42 | @children = {} 43 | end 44 | def capsule(name, klass:, args: {}) 45 | instance = klass.new(**args) 46 | VSM::Meta::Support.record_constructor_args(instance, args) 47 | @children[name.to_s] = instance 48 | end 49 | def meta_tools(prefix: "", only: nil, except: nil) 50 | @parent.__send__(:after_build) do |capsule| 51 | VSM::Meta.attach!(capsule, prefix: prefix, only: only, except: except) 52 | end 53 | result 54 | end 55 | def result = @children 56 | def method_missing(*) = result 57 | def respond_to_missing?(*) = true 58 | end 59 | 60 | private 61 | 62 | def after_build(&block) 63 | @after_build << block if block 64 | end 65 | 66 | def assign_role(key, klass, args) 67 | @roles[key] = instantiate(klass, args) 68 | end 69 | 70 | def instantiate(klass, args) 71 | instance = klass.new(**args) 72 | VSM::Meta::Support.record_constructor_args(instance, args) 73 | end 74 | end 75 | 76 | def self.define(name, &blk) 77 | Builder.new(name).tap { |b| b.instance_eval(&blk) }.build 78 | end 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /lib/vsm/lens/tui.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "io/console" 3 | require "json" 4 | 5 | module VSM 6 | module Lens 7 | module TUI 8 | # Start a simple TUI that renders the last N events and sessions. 
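      # It subscribes to the EventHub (replaying the buffered snapshot first),
      # keeps a ring of recent events, and redraws roughly every 100ms until
      # 'q' is pressed.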
9 | # Usage: 10 | # hub = VSM::Lens.attach!(capsule) 11 | # VSM::Lens::TUI.start(hub) 12 | def self.start(hub, ring_max: 500) 13 | queue, snapshot = hub.subscribe 14 | ring = snapshot.last(ring_max) 15 | 16 | reader = Thread.new do 17 | loop { ring << queue.pop; ring.shift if ring.size > ring_max } 18 | end 19 | 20 | trap("INT") { exit } 21 | trap("TERM") { exit } 22 | 23 | STDIN.raw do 24 | loop do 25 | draw(ring) 26 | # Non-blocking single-char read; press 'q' to quit 27 | ch = if IO.select([STDIN], nil, nil, 0.1) then STDIN.read_nonblock(1) rescue nil end 28 | exit if ch == "q" 29 | end 30 | end 31 | ensure 32 | reader&.kill 33 | end 34 | 35 | def self.draw(ring) 36 | cols, rows = IO.console.winsize.reverse # => [rows, cols] 37 | rows ||= 24; cols ||= 80 38 | system("printf", "\e[2J\e[H") # clear 39 | 40 | # Split: left sessions, right timeline 41 | left_w = [28, cols * 0.3].max.to_i 42 | right_w = cols - left_w - 1 43 | puts header("VSM Lens TUI — press 'q' to quit", cols) 44 | 45 | # Sessions (left) 46 | sessions = Hash.new { |h,k| h[k] = { count: 0, last: "" } } 47 | ring.each do |ev| 48 | sid = ev.dig(:meta, :session_id) or next 49 | sessions[sid][:count] += 1 50 | sessions[sid][:last] = ev[:ts] 51 | end 52 | sess_lines = sessions.sort_by { |_id, s| s[:last].to_s }.reverse.first(rows-3).map do |sid, s| 53 | "#{sid[0,8]} #{s[:count].to_s.rjust(5)} #{s[:last]}" 54 | end 55 | 56 | puts box("Sessions", sess_lines, left_w) 57 | 58 | # Timeline (right) 59 | tl = ring.last(rows-3).map do |ev| 60 | kind = ev[:kind].to_s.ljust(16) 61 | sid = ev.dig(:meta, :session_id)&.slice(0,8) || "–" 62 | txt = case ev[:payload] 63 | when String then ev[:payload].gsub(/\s+/, " ")[0, right_w-40] 64 | else ev[:payload].to_s[0, right_w-40] 65 | end 66 | "#{ev[:ts]} #{kind} #{sid} #{txt}" 67 | end 68 | puts box("Timeline", tl, right_w) 69 | end 70 | 71 | def self.header(text, width) 72 | "\e[7m #{text.ljust(width-2)} \e[0m" 73 | end 74 | 75 | def self.box(title, lines, width) 76 | out = +"+" + "-"*(width-2) + "+\n" 77 | out << "| #{title.ljust(width-4)} |\n" 78 | out << "+" + "-"*(width-2) + "+\n" 79 | lines.each do |l| 80 | out << "| #{l.ljust(width-4)} |\n" 81 | end 82 | out << "+" + "-"*(width-2) + "+\n" 83 | out 84 | end 85 | end 86 | end 87 | end 88 | 89 | -------------------------------------------------------------------------------- /lib/vsm/mcp/jsonrpc.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "json" 3 | require "monitor" 4 | 5 | module VSM 6 | module MCP 7 | module JSONRPC 8 | # Minimal NDJSON (one JSON per line) JSON-RPC transport over IO. 9 | # Note: MCP servers often speak LSP framing; we can add that later. 10 | class Stdio 11 | include MonitorMixin 12 | 13 | def initialize(r:, w:) 14 | @r = r 15 | @w = w 16 | @seq = 0 17 | mon_initialize 18 | end 19 | 20 | def request(method, params = {}) 21 | id = next_id 22 | write({ jsonrpc: "2.0", id: id, method: method, params: params }) 23 | loop do 24 | msg = read 25 | next unless msg 26 | if msg["id"].to_s == id.to_s 27 | err = msg["error"] 28 | raise(err.is_a?(Hash) ? (err["message"] || err.inspect) : err.to_s) if err 29 | return msg["result"] 30 | end 31 | end 32 | end 33 | 34 | def notify(method, params = {}) 35 | write({ jsonrpc: "2.0", method: method, params: params }) 36 | end 37 | 38 | def read 39 | line = @r.gets 40 | return nil unless line 41 | # Handle LSP-style framing: "Content-Length: N" followed by blank line and JSON body. 
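        # Example frame (the blank line separates headers from exactly N bytes of body):
        #   Content-Length: <N>\r\n
        #   \r\n
        #   {"jsonrpc":"2.0","id":"1","result":{...}}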
42 | if line =~ /\AContent-Length:\s*(\d+)\s*\r?\n?\z/i 43 | length = Integer($1) 44 | # Consume optional additional headers until blank line 45 | while (hdr = @r.gets) 46 | break if hdr.strip.empty? 47 | end 48 | body = read_exact(length) 49 | $stderr.puts("[mcp-rpc] < #{body}") if ENV["VSM_MCP_DEBUG"] == "1" 50 | return JSON.parse(body) 51 | end 52 | # Otherwise assume NDJSON (one JSON object per line) 53 | $stderr.puts("[mcp-rpc] < #{line.strip}") if ENV["VSM_MCP_DEBUG"] == "1" 54 | JSON.parse(line) 55 | end 56 | 57 | def write(obj) 58 | body = JSON.dump(obj) 59 | $stderr.puts("[mcp-rpc] > #{body}") if ENV["VSM_MCP_DEBUG"] == "1" 60 | synchronize do 61 | # Prefer NDJSON for broad compatibility; some servers require LSP. 62 | # If VSM_MCP_LSP=1, use Content-Length framing. 63 | if ENV["VSM_MCP_LSP"] == "1" 64 | @w.write("Content-Length: #{body.bytesize}\r\n\r\n") 65 | @w.write(body) 66 | @w.flush 67 | else 68 | @w.puts(body) 69 | @w.flush 70 | end 71 | end 72 | end 73 | 74 | private 75 | 76 | def next_id 77 | synchronize { @seq += 1; @seq.to_s } 78 | end 79 | 80 | def read_exact(n) 81 | data = +"" 82 | while data.bytesize < n 83 | chunk = @r.read(n - data.bytesize) 84 | break unless chunk 85 | data << chunk 86 | end 87 | data 88 | end 89 | end 90 | end 91 | end 92 | end 93 | -------------------------------------------------------------------------------- /examples/03b_anthropic_tools.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Example: Anthropic tool-calling demo (list_files/read_file) 4 | 5 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 6 | require "securerandom" 7 | require "vsm" 8 | 9 | MODEL = ENV["AIRB_MODEL"] || "claude-sonnet-4-0" 10 | 11 | class ListFiles < VSM::ToolCapsule 12 | tool_name "list_files" 13 | tool_description "List files in a directory" 14 | tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] }) 15 | def run(args) 16 | path = args["path"].to_s.strip 17 | path = "." if path.empty? 18 | Dir.children(path).sort.take(200).join("\n") 19 | rescue => e 20 | "ERROR: #{e.class}: #{e.message}" 21 | end 22 | end 23 | 24 | class ReadFile < VSM::ToolCapsule 25 | tool_name "read_file" 26 | tool_description "Read a small text file" 27 | tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] }) 28 | def run(args) 29 | path = args["path"].to_s 30 | raise "path required" if path.empty? 31 | raise "too large" if File.size(path) > 200_000 32 | File.read(path) 33 | rescue => e 34 | "ERROR: #{e.class}: #{e.message}" 35 | end 36 | end 37 | 38 | driver = VSM::Drivers::Anthropic::AsyncDriver.new( 39 | api_key: ENV.fetch("ANTHROPIC_API_KEY"), 40 | model: MODEL 41 | ) 42 | 43 | system_prompt = <<~PROMPT 44 | You are a coding assistant with two tools: list_files and read_file. 45 | Prefer to call tools when appropriate. Keep answers brief. 
46 | PROMPT 47 | 48 | cap = VSM::DSL.define(:anthropic_tools_demo) do 49 | identity klass: VSM::Identity, args: { identity: "anthropic_tools_demo", invariants: [] } 50 | governance klass: VSM::Governance 51 | coordination klass: VSM::Coordination 52 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt } 53 | monitoring klass: VSM::Monitoring 54 | operations do 55 | capsule :list_files, klass: ListFiles 56 | capsule :read_file, klass: ReadFile 57 | end 58 | end 59 | 60 | class ToolTTY < VSM::Port 61 | def should_render?(message) 62 | [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind) 63 | end 64 | 65 | def loop 66 | sid = SecureRandom.uuid 67 | puts "anthropic tools demo — type to chat (Ctrl-C to exit)" 68 | print "You: " 69 | while (line = $stdin.gets&.chomp) 70 | @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid }) 71 | @capsule.roles[:coordination].wait_for_turn_end(sid) 72 | print "You: " 73 | end 74 | end 75 | 76 | def render_out(msg) 77 | case msg.kind 78 | when :assistant_delta 79 | print msg.payload 80 | $stdout.flush 81 | when :assistant 82 | puts "" 83 | puts "(turn #{msg.meta&.dig(:turn_id)})" 84 | when :tool_call 85 | puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})" 86 | when :tool_result 87 | puts "\nTool> (completed)" 88 | end 89 | end 90 | end 91 | 92 | VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)]) 93 | 94 | -------------------------------------------------------------------------------- /examples/03c_gemini_tools.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Example: Gemini tool-calling demo (list_files/read_file) with streaming enabled 4 | 5 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 6 | require "securerandom" 7 | require "vsm" 8 | 9 | MODEL = ENV["AIRB_MODEL"] || "gemini-2.5-flash" 10 | 11 | class ListFiles < VSM::ToolCapsule 12 | tool_name "list_files" 13 | tool_description "List files in a directory" 14 | tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] }) 15 | def run(args) 16 | path = args["path"].to_s.strip 17 | path = "." if path.empty? 18 | Dir.children(path).sort.take(200).join("\n") 19 | rescue => e 20 | "ERROR: #{e.class}: #{e.message}" 21 | end 22 | end 23 | 24 | class ReadFile < VSM::ToolCapsule 25 | tool_name "read_file" 26 | tool_description "Read a small text file" 27 | tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] }) 28 | def run(args) 29 | path = args["path"].to_s 30 | raise "path required" if path.empty? 31 | raise "too large" if File.size(path) > 200_000 32 | File.read(path) 33 | rescue => e 34 | "ERROR: #{e.class}: #{e.message}" 35 | end 36 | end 37 | 38 | driver = VSM::Drivers::Gemini::AsyncDriver.new( 39 | api_key: ENV.fetch("GEMINI_API_KEY"), 40 | model: MODEL, 41 | streaming: true 42 | ) 43 | 44 | system_prompt = <<~PROMPT 45 | You are a coding assistant with two tools: list_files and read_file. 46 | Prefer to call tools when appropriate. Keep answers brief. 
47 | PROMPT 48 | 49 | cap = VSM::DSL.define(:gemini_tools_demo) do 50 | identity klass: VSM::Identity, args: { identity: "gemini_tools_demo", invariants: [] } 51 | governance klass: VSM::Governance 52 | coordination klass: VSM::Coordination 53 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt } 54 | monitoring klass: VSM::Monitoring 55 | operations do 56 | capsule :list_files, klass: ListFiles 57 | capsule :read_file, klass: ReadFile 58 | end 59 | end 60 | 61 | class ToolTTY < VSM::Port 62 | def should_render?(message) 63 | [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind) 64 | end 65 | 66 | def loop 67 | sid = SecureRandom.uuid 68 | puts "gemini tools demo — type to chat (Ctrl-C to exit)" 69 | print "You: " 70 | while (line = $stdin.gets&.chomp) 71 | @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid }) 72 | @capsule.roles[:coordination].wait_for_turn_end(sid) 73 | print "You: " 74 | end 75 | end 76 | 77 | def render_out(msg) 78 | case msg.kind 79 | when :assistant_delta 80 | print msg.payload 81 | $stdout.flush 82 | when :assistant 83 | puts "" 84 | puts "(turn #{msg.meta&.dig(:turn_id)})" 85 | when :tool_call 86 | puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})" 87 | when :tool_result 88 | puts "\nTool> (completed)" 89 | end 90 | end 91 | end 92 | 93 | VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)]) 94 | 95 | 96 | -------------------------------------------------------------------------------- /examples/07_connect_claude_mcp.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 3 | require "json" 4 | require "securerandom" 5 | require "vsm" 6 | require "vsm/dsl_mcp" 7 | require "vsm/ports/chat_tty" 8 | 9 | # Example: Connect to an external MCP server (Claude Code) 10 | # 11 | # Prereqs: 12 | # - Install Claude CLI and log in. 13 | # - Ensure `claude mcp serve` works in your shell. 14 | # 15 | # IMPORTANT: Many MCP servers (including Claude) use LSP-style Content-Length 16 | # framing over stdio. The minimal transport in this repo currently uses NDJSON 17 | # (one JSON per line). If this example hangs or fails, it's due to framing 18 | # mismatch; swap the transport to LSP framing in lib/vsm/mcp/jsonrpc.rb. 19 | # 20 | # Usage: 21 | # ruby examples/07_connect_claude_mcp.rb 22 | # Then type: 23 | # list 24 | # call: some_tool {"arg1":"value"} 25 | # 26 | # This example avoids requiring any LLM API keys by letting you call tools manually 27 | # via a simple chat convention. 28 | 29 | # Intelligence that recognizes two commands: 30 | # - "list" → prints available tools 31 | # - "call: NAME {json}" → invokes the reflected tool with JSON args 32 | class ManualMCPIntelligence < VSM::Intelligence 33 | def handle(message, bus:, **) 34 | return false unless message.kind == :user 35 | line = message.payload.to_s.strip 36 | if line == "list" 37 | # Inspect operations children for tool descriptors 38 | ops = bus.context[:operations_children] || {} 39 | tools = ops.values.select { _1.respond_to?(:tool_descriptor) }.map { _1.tool_descriptor.name } 40 | bus.emit VSM::Message.new(kind: :assistant, payload: tools.any? ? "tools: #{tools.join(", ")}" : "(no tools)", meta: message.meta) 41 | return true 42 | elsif line.start_with?("call:") 43 | if line =~ /\Acall:\s*(\S+)\s*(\{.*\})?\z/ 44 | tool = $1 45 | json = $2 46 | args = json ? 
(JSON.parse(json) rescue {}) : {} 47 | bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: tool, args: args }, corr_id: SecureRandom.uuid, meta: message.meta) 48 | return true 49 | else 50 | bus.emit VSM::Message.new(kind: :assistant, payload: "usage: call: NAME {json}", meta: message.meta) 51 | return true 52 | end 53 | else 54 | bus.emit VSM::Message.new(kind: :assistant, payload: "Commands: list | call: NAME {json}", meta: message.meta) 55 | return true 56 | end 57 | end 58 | end 59 | 60 | cap = VSM::DSL.define(:claude_mcp_client) do 61 | identity klass: VSM::Identity, args: { identity: "claude_mcp_client", invariants: [] } 62 | governance klass: VSM::Governance 63 | coordination klass: VSM::Coordination 64 | intelligence klass: ManualMCPIntelligence 65 | monitoring klass: VSM::Monitoring 66 | operations do 67 | # Reflect all available tools from the external server. 68 | # Tip: if tool names collide with locals, use prefix: "claude_". 69 | mcp_server :claude, cmd: ["claude", "mcp", "serve"] 70 | end 71 | end 72 | 73 | banner = ->(io) do 74 | io.puts "\e[96mMCP client (Claude)\e[0m" 75 | io.puts "Type 'list' or 'call: NAME {json}'" 76 | end 77 | 78 | VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner, prompt: "You> ")]) 79 | -------------------------------------------------------------------------------- /lib/vsm/ports/mcp/server_stdio.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "json" 3 | require "async" 4 | 5 | module VSM 6 | module Ports 7 | module MCP 8 | # Exposes the capsule's tools as an MCP server over stdio (NDJSON JSON-RPC). 9 | # Implemented methods: tools/list, tools/call. 10 | class ServerStdio < VSM::Port 11 | def initialize(capsule:) 12 | super(capsule: capsule) 13 | @waiters = {} 14 | @waiters_mutex = Mutex.new 15 | end 16 | 17 | def egress_subscribe 18 | # Single subscriber that resolves tool_result waiters by corr_id 19 | @capsule.bus.subscribe do |m| 20 | next unless m.kind == :tool_result 21 | q = nil 22 | @waiters_mutex.synchronize { q = @waiters.delete(m.corr_id.to_s) } 23 | q&.enqueue(m) 24 | end 25 | super 26 | end 27 | 28 | def loop 29 | $stdout.sync = true 30 | while (line = $stdin.gets) 31 | begin 32 | req = JSON.parse(line) 33 | rescue => e 34 | write_err(nil, code: -32700, message: "Parse error: #{e.message}") 35 | next 36 | end 37 | 38 | id = req["id"] 39 | method = req["method"] 40 | params = req["params"] || {} 41 | case method 42 | when "tools/list" 43 | write_ok(id, { tools: list_tools }) 44 | when "tools/call" 45 | name = params["name"].to_s 46 | args = params["arguments"] || {} 47 | res = call_local_tool(id, name, args) 48 | write_ok(id, { content: [{ type: "text", text: res.to_s }] }) 49 | else 50 | write_err(id, code: -32601, message: "Method not found: #{method}") 51 | end 52 | end 53 | end 54 | 55 | private 56 | 57 | def list_tools 58 | ops = @capsule.bus.context[:operations_children] || {} 59 | ops.values 60 | .select { _1.respond_to?(:tool_descriptor) } 61 | .map { to_mcp_descriptor(_1.tool_descriptor) } 62 | end 63 | 64 | def to_mcp_descriptor(desc) 65 | { 66 | "name" => desc.name, 67 | "description" => desc.description, 68 | "input_schema" => desc.schema 69 | } 70 | end 71 | 72 | def call_local_tool(req_id, name, args) 73 | corr = req_id.to_s 74 | q = Async::Queue.new 75 | @waiters_mutex.synchronize { @waiters[corr] = q } 76 | @capsule.bus.emit VSM::Message.new( 77 | kind: :tool_call, 78 | payload: { tool: name, 
args: args }, 79 | corr_id: corr, 80 | meta: { session_id: "mcp:stdio" }, 81 | path: [:mcp, :server, name] 82 | ) 83 | msg = q.dequeue 84 | msg.payload 85 | ensure 86 | @waiters_mutex.synchronize { @waiters.delete(corr) } 87 | end 88 | 89 | def write_ok(id, result) 90 | puts JSON.dump({ jsonrpc: "2.0", id: id, result: result }) 91 | $stdout.flush 92 | end 93 | 94 | def write_err(id, code:, message:) 95 | puts JSON.dump({ jsonrpc: "2.0", id: id, error: { code: code, message: message } }) 96 | $stdout.flush 97 | end 98 | end 99 | end 100 | end 101 | end 102 | -------------------------------------------------------------------------------- /examples/03_openai_tools.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Example: OpenAI tool-calling demo (list_files/read_file) 4 | # 5 | # Usage: 6 | # OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/03_openai_tools.rb 7 | # VSM_DEBUG_STREAM=1 to see low-level logs 8 | 9 | $LOAD_PATH.unshift(File.expand_path("../lib", __dir__)) 10 | require "securerandom" 11 | require "json" 12 | require "vsm" 13 | 14 | MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini" 15 | 16 | # Simple file tools scoped to current working directory 17 | class ListFiles < VSM::ToolCapsule 18 | tool_name "list_files" 19 | tool_description "List files in a directory" 20 | tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] }) 21 | def run(args) 22 | path = args["path"].to_s.strip 23 | path = "." if path.empty? 24 | entries = Dir.children(path).sort.take(200) 25 | entries.join("\n") 26 | rescue => e 27 | "ERROR: #{e.class}: #{e.message}" 28 | end 29 | end 30 | 31 | class ReadFile < VSM::ToolCapsule 32 | tool_name "read_file" 33 | tool_description "Read a small text file" 34 | tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] }) 35 | def run(args) 36 | path = args["path"].to_s 37 | raise "path required" if path.empty? 38 | raise "too large" if File.size(path) > 200_000 39 | File.read(path) 40 | rescue => e 41 | "ERROR: #{e.class}: #{e.message}" 42 | end 43 | end 44 | 45 | driver = VSM::Drivers::OpenAI::AsyncDriver.new( 46 | api_key: ENV.fetch("OPENAI_API_KEY"), 47 | model: MODEL 48 | ) 49 | 50 | system_prompt = <<~PROMPT 51 | You are a coding assistant with two tools: list_files and read_file. 52 | Prefer to call tools when appropriate. Keep answers brief. 
53 | PROMPT 54 | 55 | cap = VSM::DSL.define(:openai_tools_demo) do 56 | identity klass: VSM::Identity, args: { identity: "openai_tools_demo", invariants: [] } 57 | governance klass: VSM::Governance 58 | coordination klass: VSM::Coordination 59 | intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt } 60 | monitoring klass: VSM::Monitoring 61 | operations do 62 | capsule :list_files, klass: ListFiles 63 | capsule :read_file, klass: ReadFile 64 | end 65 | end 66 | 67 | if ENV["VSM_LENS"] == "1" 68 | VSM::Lens.attach!(cap, port: (ENV["VSM_LENS_PORT"] || 9292).to_i, token: ENV["VSM_LENS_TOKEN"]) rescue nil 69 | end 70 | 71 | class ToolTTY < VSM::Port 72 | def should_render?(message) 73 | [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind) 74 | end 75 | 76 | def loop 77 | sid = SecureRandom.uuid 78 | puts "openai tools demo — type to chat (Ctrl-C to exit)" 79 | print "You: " 80 | while (line = $stdin.gets&.chomp) 81 | @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid }) 82 | @capsule.roles[:coordination].wait_for_turn_end(sid) 83 | print "You: " 84 | end 85 | end 86 | 87 | def render_out(msg) 88 | case msg.kind 89 | when :assistant_delta 90 | print msg.payload 91 | $stdout.flush 92 | when :assistant 93 | puts "" 94 | puts "(turn #{msg.meta&.dig(:turn_id)})" 95 | when :tool_call 96 | puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})" 97 | when :tool_result 98 | puts "\nTool> (completed)" 99 | end 100 | end 101 | end 102 | 103 | VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)]) 104 | 105 | 106 | 107 | -------------------------------------------------------------------------------- /lib/vsm/ports/chat_tty.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "securerandom" 3 | require "io/console" 4 | require "async" 5 | 6 | 7 | module VSM 8 | module Ports 9 | # Generic, customizable chat TTY port. 10 | # - Safe to run alongside an MCP stdio port: prefers IO.console for I/O. 11 | # - Override banner(io) and render_out(message) to customize without 12 | # reimplementing the core input loop. 13 | class ChatTTY < VSM::Port 14 | DEFAULT_THEME = { 15 | you: "\e[94mYou\e[0m: ", 16 | tool: "\e[90m→ tool\e[0m ", 17 | turn: "\e[2m(turn %s)\e[0m" 18 | }.freeze 19 | 20 | def initialize(capsule:, input: nil, output: nil, banner: nil, prompt: nil, theme: {}, show_tool_results: false) 21 | super(capsule: capsule) 22 | # Prefer STDIN/STDOUT if they are TTY. If not, try /dev/tty. 23 | # Avoid IO.console to minimize kqueue/select issues under async. 24 | tty_io = nil 25 | if !$stdout.tty? 26 | begin 27 | tty_io = File.open("/dev/tty", "r+") 28 | rescue StandardError 29 | tty_io = nil 30 | end 31 | end 32 | 33 | @in = input || (tty_io || ($stdin.tty? ? $stdin : nil)) 34 | @out = output || (tty_io || ($stdout.tty? ? $stdout : $stderr)) 35 | @banner = banner # String or ->(io) {} 36 | @prompt = prompt || DEFAULT_THEME[:you] 37 | @theme = DEFAULT_THEME.merge(theme) 38 | @streaming = false 39 | @show_tool_results = show_tool_results 40 | end 41 | 42 | def should_render?(message) 43 | [:assistant_delta, :assistant, :tool_call, :tool_result].include?(message.kind) 44 | end 45 | 46 | def loop 47 | sid = SecureRandom.uuid 48 | @capsule.roles[:coordination].grant_floor!(sid) if @capsule.roles[:coordination].respond_to?(:grant_floor!) 49 | banner(@out) 50 | 51 | if @in.nil? 
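          # No usable interactive stream was found (stdio is piped and /dev/tty
          # could not be opened), so keep the port alive for output rendering only.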
52 | @out.puts "(no interactive TTY; ChatTTY input disabled)" 53 | Async::Task.current.sleep # keep task alive for egress rendering 54 | return 55 | end 56 | 57 | @out.print @prompt 58 | while (line = @in.gets&.chomp) 59 | @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid }) 60 | if @capsule.roles[:coordination].respond_to?(:wait_for_turn_end) 61 | @capsule.roles[:coordination].wait_for_turn_end(sid) 62 | end 63 | @out.print @prompt 64 | end 65 | end 66 | 67 | def render_out(message) 68 | case message.kind 69 | when :assistant_delta 70 | @streaming = true 71 | @out.print(message.payload) 72 | @out.flush 73 | when :assistant 74 | # If we didn't stream content, print the final content now. 75 | unless @streaming 76 | txt = message.payload.to_s 77 | unless txt.empty? 78 | @out.puts 79 | @out.puts txt 80 | end 81 | end 82 | turn = message.meta&.dig(:turn_id) 83 | @out.puts(@theme[:turn] % turn) if turn 84 | @streaming = false 85 | when :tool_call 86 | @out.puts 87 | @out.puts "#{@theme[:tool]}#{message.payload[:tool]}" 88 | when :tool_result 89 | return unless @show_tool_results 90 | out = message.payload.to_s 91 | unless out.empty? 92 | @out.puts 93 | @out.puts out 94 | end 95 | end 96 | end 97 | 98 | # Overridable header/banner 99 | def banner(io) 100 | if @banner.respond_to?(:call) 101 | @banner.call(io) 102 | elsif @banner.is_a?(String) 103 | io.puts @banner 104 | else 105 | io.puts "vsm chat — Ctrl-C to exit" 106 | end 107 | end 108 | 109 | private 110 | end 111 | end 112 | end 113 | -------------------------------------------------------------------------------- /lib/vsm/meta/snapshot_builder.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "pathname" 4 | 5 | require_relative "support" 6 | 7 | module VSM 8 | module Meta 9 | class SnapshotBuilder 10 | ROLE_METHOD_HINTS = { 11 | identity: %i[handle alert observe initialize], 12 | governance: %i[enforce observe initialize], 13 | coordination: %i[stage drain order grant_floor! wait_for_turn_end initialize], 14 | operations: %i[handle observe initialize], 15 | intelligence: %i[handle system_prompt offer_tools? initialize], 16 | monitoring: %i[observe handle initialize] 17 | }.freeze 18 | 19 | def initialize(root:) 20 | @root = root 21 | end 22 | 23 | def call 24 | snapshot_capsule(@root, path: [@root.name.to_s]) 25 | end 26 | 27 | private 28 | 29 | def snapshot_capsule(capsule, path:) 30 | { 31 | kind: "capsule", 32 | name: capsule.name.to_s, 33 | class: capsule.class.name, 34 | path: path.dup, 35 | roles: snapshot_roles(capsule.roles), 36 | operations: snapshot_operations(capsule.children, path: path), 37 | meta: {} 38 | } 39 | end 40 | 41 | def snapshot_roles(roles) 42 | roles.each_with_object({}) do |(role_name, role_instance), acc| 43 | acc[role_name.to_s] = snapshot_role(role_name, role_instance) 44 | end 45 | end 46 | 47 | def snapshot_role(role_name, role_instance) 48 | { 49 | class: role_instance.class.name, 50 | constructor_args: Support.fetch_constructor_args(role_instance), 51 | source_locations: method_locations(role_instance.class, ROLE_METHOD_HINTS[role_name] || %i[initialize]), 52 | summary: nil 53 | } 54 | end 55 | 56 | def snapshot_operations(children, path:) 57 | return { children: {} } if children.nil? || children.empty? 
58 | 59 | ops = {} 60 | children.each do |name, child| 61 | ops[name.to_s] = snapshot_child(child, path: path + [name.to_s]) 62 | end 63 | { children: ops } 64 | end 65 | 66 | def snapshot_child(child, path:) 67 | base = { 68 | name: path.last, 69 | class: child.class.name, 70 | path: path, 71 | constructor_args: Support.fetch_constructor_args(child), 72 | source_locations: [], 73 | roles: nil, 74 | operations: nil 75 | } 76 | 77 | if child.respond_to?(:roles) && child.respond_to?(:children) 78 | base[:kind] = "capsule" 79 | base[:roles] = snapshot_roles(child.roles) 80 | base[:operations] = snapshot_operations(child.children || {}, path: path) 81 | base[:source_locations] = method_locations(child.class, %i[initialize]) 82 | elsif child.respond_to?(:tool_descriptor) 83 | base[:kind] = "tool" 84 | descriptor = child.tool_descriptor 85 | base[:tool] = { 86 | name: descriptor.name, 87 | description: descriptor.description, 88 | schema: descriptor.schema 89 | } 90 | base[:source_locations] = method_locations(child.class, %i[run execution_mode initialize]) 91 | else 92 | base[:kind] = "object" 93 | base[:source_locations] = method_locations(child.class, %i[initialize]) 94 | end 95 | 96 | base 97 | end 98 | 99 | def method_locations(klass, candidates) 100 | candidates.filter_map do |meth| 101 | next unless klass.instance_methods.include?(meth) 102 | location = klass.instance_method(meth).source_location 103 | next if location.nil? 104 | { method: meth.to_s, path: relative_path(location[0]), line: location[1] } 105 | rescue NameError 106 | nil 107 | end 108 | end 109 | 110 | def relative_path(path) 111 | return path if path.nil? 112 | root = Pathname.new(Dir.pwd) 113 | begin 114 | Pathname.new(path).relative_path_from(root).to_s 115 | rescue ArgumentError 116 | path 117 | end 118 | end 119 | end 120 | end 121 | end 122 | -------------------------------------------------------------------------------- /spec/support/fakes.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # A simple tool that echoes text (fiber-safe) 4 | class FakeEchoTool < VSM::ToolCapsule 5 | tool_name "echo" 6 | tool_description "Echo back the text" 7 | tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] }) 8 | 9 | def run(args) = "echo: #{args["text"]}" 10 | end 11 | 12 | # A slow tool that sleeps; marks thread mode to allow parallelism 13 | class SlowTool < VSM::ToolCapsule 14 | tool_name "slow" 15 | tool_description "Sleep briefly and return id" 16 | tool_schema({ type: "object", properties: { id: { type: "integer" } }, required: ["id"] }) 17 | 18 | def execution_mode = :thread 19 | def run(args) 20 | sleep 0.25 21 | "slow-#{args["id"]}" 22 | end 23 | end 24 | 25 | # A tool that raises 26 | class ErrorTool < VSM::ToolCapsule 27 | tool_name "boom" 28 | tool_description "Always raises" 29 | tool_schema({ type: "object", properties: {}, required: [] }) 30 | 31 | def run(_args) 32 | raise "kapow" 33 | end 34 | end 35 | 36 | # A minimal intelligence that: 37 | # - on :user payload "echo " emits a tool_call to echo 38 | # - on :user payload "slow2" emits two parallel slow calls 39 | # - on :tool_result emits :assistant to finish the turn 40 | class FakeIntelligence < VSM::Intelligence 41 | def initialize 42 | @by_session = Hash.new { |h,k| h[k] = { pending: 0 } } 43 | end 44 | 45 | def handle(message, bus:, **) 46 | case message.kind 47 | when :user 48 | sid = message.meta&.dig(:session_id) 49 | case message.payload 
50 | when /\Aecho\s+(.+)\z/ 51 | @by_session[sid][:pending] = 1 52 | bus.emit VSM::Message.new( 53 | kind: :tool_call, 54 | payload: { tool: "echo", args: { "text" => Regexp.last_match(1) } }, 55 | corr_id: SecureRandom.uuid, 56 | meta: { session_id: sid } 57 | ) 58 | true 59 | when "slow2" 60 | @by_session[sid][:pending] = 2 61 | 2.times do |i| 62 | bus.emit VSM::Message.new( 63 | kind: :tool_call, 64 | payload: { tool: "slow", args: { "id" => i } }, 65 | corr_id: "slow-#{i}", 66 | meta: { session_id: sid } 67 | ) 68 | end 69 | true 70 | when "boom" 71 | @by_session[sid][:pending] = 1 72 | bus.emit VSM::Message.new( 73 | kind: :tool_call, 74 | payload: { tool: "boom", args: {} }, 75 | corr_id: "boom", 76 | meta: { session_id: sid } 77 | ) 78 | true 79 | else 80 | bus.emit VSM::Message.new(kind: :assistant, payload: "unknown", meta: message.meta) 81 | true 82 | end 83 | when :tool_result 84 | sid = message.meta&.dig(:session_id) 85 | @by_session[sid][:pending] -= 1 86 | if @by_session[sid][:pending] <= 0 87 | bus.emit VSM::Message.new(kind: :assistant, payload: "done", meta: { session_id: sid }) 88 | end 89 | true 90 | else 91 | false 92 | end 93 | end 94 | end 95 | 96 | # A governance that denies writes outside a fake root and requests confirmation for "danger" 97 | class FakeGovernance < VSM::Governance 98 | attr_reader :confirm_requests 99 | 100 | def initialize(root: Dir.pwd) 101 | @root = File.expand_path(root) 102 | @confirm_requests = [] 103 | end 104 | 105 | def enforce(message) 106 | if message.kind == :tool_call && message.payload[:tool] == "echo" 107 | if (txt = message.payload.dig(:args, "text")) && txt.include?("danger") 108 | @confirm_requests << txt 109 | message.meta ||= {} 110 | message.meta[:needs_confirm] = true 111 | # In a real system Governance would emit a :confirm_request and await :confirm_response. 112 | # For tests we simply tag it and pass it through. 113 | end 114 | end 115 | yield message 116 | end 117 | end 118 | 119 | # Identity spy that records alerts 120 | class SpyIdentity < VSM::Identity 121 | attr_reader :alerts 122 | def initialize(identity: "spy", invariants: []) 123 | super(identity:, invariants:) 124 | @alerts = [] 125 | end 126 | def alert(message) 127 | @alerts << message 128 | end 129 | end 130 | 131 | # A fake driver for testing that doesn't make real API calls 132 | class FakeDriver 133 | def run!(conversation:, tools:, policy: {}, &emit) 134 | # Simple test driver that just emits a basic response 135 | yield(:assistant_final, "test response") if block_given? 
136 | end 137 | end 138 | 139 | -------------------------------------------------------------------------------- /spec/meta/meta_tools_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "spec_helper" 4 | 5 | RSpec.describe "VSM meta read-only tools" do 6 | class MetaSpecTool < VSM::ToolCapsule 7 | tool_name "meta_spec_tool" 8 | tool_description "Spec helper tool" 9 | tool_schema({ type: "object", properties: {}, additionalProperties: false }) 10 | 11 | def run(_args) 12 | "ok" 13 | end 14 | end 15 | 16 | let(:host) do 17 | VSM::DSL.define(:meta_spec_host) do 18 | identity klass: VSM::Identity, args: { identity: "meta_spec_host", invariants: [] } 19 | governance klass: VSM::Governance, args: {} 20 | coordination klass: VSM::Coordination, args: {} 21 | intelligence klass: VSM::Intelligence, args: {} 22 | monitoring klass: VSM::Monitoring, args: {} 23 | operations do 24 | meta_tools 25 | capsule :meta_spec_tool, klass: MetaSpecTool 26 | end 27 | end 28 | end 29 | 30 | it "registers meta tools on the host capsule" do 31 | expect(host.children.keys).to include("meta_summarize_self", "meta_list_tools", "meta_explain_tool") 32 | end 33 | 34 | it "list tools returns descriptors" do 35 | list_tool = host.children.fetch("meta_list_tools") 36 | result = list_tool.run({}) 37 | names = result[:tools].map { _1[:tool_name] } 38 | expect(names).to include("meta_spec_tool") 39 | end 40 | 41 | it "explain tool returns source snippet" do 42 | explain = host.children.fetch("meta_explain_tool") 43 | result = explain.run({ "tool" => "meta_spec_tool" }) 44 | expect(result[:tool][:name]).to eq("meta_spec_tool") 45 | expect(result[:code][:snippet]).to include("def run") 46 | end 47 | 48 | it "explains a role and returns code snippets" do 49 | explain_role = host.children.fetch("meta_explain_role") 50 | result = explain_role.run({ "role" => "coordination" }) 51 | expect(result[:role][:name]).to eq("coordination") 52 | expect(result[:role][:class]).to include("VSM::Coordination") 53 | expect(result[:code]).to be_an(Array) 54 | expect(result[:code]).not_to be_empty 55 | expect(result[:code].first[:snippet]).to include("def ") 56 | expect(result[:vsm_summary].to_s.length).to be > 0 57 | end 58 | 59 | it "explains operations with child tools context" do 60 | explain_role = host.children.fetch("meta_explain_role") 61 | result = explain_role.run({ "role" => "operations" }) 62 | expect(result[:role][:name]).to eq("operations") 63 | children = result.dig(:role_specific, :children) 64 | expect(children).to be_an(Array) 65 | names = children.map { _1[:tool_name] } 66 | expect(names).to include("meta_spec_tool") 67 | end 68 | 69 | it "summarize self includes stats" do 70 | summarize = host.children.fetch("meta_summarize_self") 71 | result = summarize.run({}) 72 | expect(result[:stats][:total_tools]).to be >= 1 73 | expect(result[:capsule][:name]).to eq("meta_spec_host") 74 | end 75 | end 76 | 77 | RSpec.describe "meta tools prefix option" do 78 | it "registers tools with a prefix" do 79 | cap = VSM::DSL.define(:meta_prefixed) do 80 | identity klass: VSM::Identity, args: { identity: "meta_prefixed", invariants: [] } 81 | governance klass: VSM::Governance, args: {} 82 | coordination klass: VSM::Coordination, args: {} 83 | intelligence klass: VSM::Intelligence, args: {} 84 | operations do 85 | meta_tools prefix: "inspector_" 86 | end 87 | end 88 | 89 | expect(cap.children.keys).to include("inspector_meta_summarize_self") 90 | end 91 | end 92 
| 93 | RSpec.describe "meta tools selection" do 94 | it "allows selecting subset via only" do 95 | cap = VSM::DSL.define(:meta_only) do 96 | identity klass: VSM::Identity, args: { identity: "meta_only", invariants: [] } 97 | governance klass: VSM::Governance, args: {} 98 | coordination klass: VSM::Coordination, args: {} 99 | intelligence klass: VSM::Intelligence, args: {} 100 | operations do 101 | meta_tools only: [:meta_list_tools] 102 | end 103 | end 104 | 105 | expect(cap.children.keys).to include("meta_list_tools") 106 | expect(cap.children.keys).not_to include("meta_summarize_self") 107 | end 108 | 109 | it "allows excluding tools" do 110 | cap = VSM::DSL.define(:meta_except) do 111 | identity klass: VSM::Identity, args: { identity: "meta_except", invariants: [] } 112 | governance klass: VSM::Governance, args: {} 113 | coordination klass: VSM::Coordination, args: {} 114 | intelligence klass: VSM::Intelligence, args: {} 115 | operations do 116 | meta_tools except: [:meta_explain_tool] 117 | end 118 | end 119 | 120 | expect(cap.children.keys).to include("meta_list_tools") 121 | expect(cap.children.keys).not_to include("meta_explain_tool") 122 | end 123 | end 124 | -------------------------------------------------------------------------------- /lib/vsm/generator/new_project.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'erb' 4 | require 'fileutils' 5 | require 'pathname' 6 | require_relative '../version' 7 | 8 | module VSM 9 | module Generator 10 | class NewProject 11 | TemplateRoot = File.expand_path('templates', __dir__) 12 | 13 | def self.run(name:, path: nil, git: false, bundle: false, provider: 'openai', model: nil, force: false) 14 | new(name: name, path: path, git: git, bundle: bundle, provider: provider, model: model, force: force).run 15 | end 16 | 17 | def initialize(name:, path:, git:, bundle:, provider:, model:, force:) 18 | @input_name = name 19 | @target_dir = File.expand_path(path || name) 20 | @git = git 21 | @bundle = bundle 22 | @provider = provider 23 | @model = model 24 | @force = force 25 | end 26 | 27 | def run 28 | prepare_target_dir! 29 | 30 | # Create directory tree 31 | mkdirs( 32 | 'exe', 33 | 'bin', 34 | "lib/#{lib_name}", 35 | "lib/#{lib_name}/ports", 36 | "lib/#{lib_name}/tools" 37 | ) 38 | 39 | # Render files 40 | write('README.md', render('README_md.erb')) 41 | write('.gitignore', render('gitignore.erb')) 42 | write('Gemfile', render('Gemfile.erb')) 43 | write('Rakefile', render('Rakefile.erb')) 44 | write("#{lib_name}.gemspec", render('gemspec.erb')) 45 | 46 | write("exe/#{exe_name}", render('exe_name.erb'), mode: 0o755) 47 | write('bin/console', render('bin_console.erb'), mode: 0o755) 48 | write('bin/setup', render('bin_setup.erb'), mode: 0o755) 49 | 50 | write("lib/#{lib_name}.rb", render('lib_name_rb.erb')) 51 | write("lib/#{lib_name}/version.rb", render('lib_version_rb.erb')) 52 | write("lib/#{lib_name}/organism.rb", render('lib_organism_rb.erb')) 53 | write("lib/#{lib_name}/ports/chat_tty.rb", render('lib_ports_chat_tty_rb.erb')) 54 | write("lib/#{lib_name}/tools/read_file.rb", render('lib_tools_read_file_rb.erb')) 55 | 56 | post_steps 57 | 58 | puts <<~DONE 59 | 60 | Created #{module_name} in #{@target_dir} 61 | 62 | Next steps: 63 | cd #{relative_target} 64 | bundle install 65 | bundle exec exe/#{exe_name} 66 | 67 | Add tools in lib/#{lib_name}/tools and customize banner in lib/#{lib_name}/ports/chat_tty.rb. 
68 | DONE 69 | end 70 | 71 | private 72 | 73 | def mkdirs(*dirs) 74 | dirs.each { |d| FileUtils.mkdir_p(File.join(@target_dir, d)) } 75 | end 76 | 77 | def write(rel, content, mode: nil) 78 | full = File.join(@target_dir, rel) 79 | FileUtils.mkdir_p(File.dirname(full)) 80 | File.write(full, content) 81 | File.chmod(mode, full) if mode 82 | end 83 | 84 | def render(template_name) 85 | template_path = File.join(TemplateRoot, template_name) 86 | erb = ERB.new(File.read(template_path), trim_mode: '-') 87 | erb.result(binding) 88 | end 89 | 90 | def post_steps 91 | Dir.chdir(@target_dir) do 92 | if @git 93 | system('git', 'init') 94 | system('git', 'add', '-A') 95 | system('git', 'commit', '-m', 'init') 96 | end 97 | if @bundle 98 | system('bundle', 'install') 99 | end 100 | end 101 | end 102 | 103 | def prepare_target_dir! 104 | if Dir.exist?(@target_dir) 105 | if !@force && !(Dir.children(@target_dir) - %w[. ..]).empty? 106 | raise "Target directory already exists and is not empty: #{@target_dir} (use --force to overwrite)" 107 | end 108 | else 109 | FileUtils.mkdir_p(@target_dir) 110 | end 111 | end 112 | 113 | # --- Template helpers (available via binding) --- 114 | 115 | def module_name 116 | @module_name ||= @input_name.split(/[-_]/).map { |p| p.gsub(/[^a-zA-Z0-9]/, '').capitalize }.join 117 | end 118 | 119 | def lib_name 120 | @lib_name ||= @input_name.downcase.gsub('-', '_') 121 | end 122 | 123 | def exe_name 124 | @exe_name ||= @input_name.downcase.gsub('_', '-') 125 | end 126 | 127 | def env_prefix 128 | @env_prefix ||= @input_name.gsub('-', '_').upcase 129 | end 130 | 131 | def vsm_version_constraint 132 | parts = Vsm::VERSION.split('.') 133 | "~> #{parts[0]}.#{parts[1]}" 134 | end 135 | 136 | def provider 137 | (@provider || 'openai').downcase 138 | end 139 | 140 | def default_model 141 | return @model if @model && !@model.empty? 142 | case provider 143 | when 'anthropic' then 'claude-3-5-sonnet-latest' 144 | when 'gemini' then 'gemini-2.0-flash' 145 | else 'gpt-4o-mini' 146 | end 147 | end 148 | 149 | def relative_target 150 | Pathname.new(@target_dir).relative_path_from(Pathname.new(Dir.pwd)).to_s rescue @target_dir 151 | end 152 | end 153 | end 154 | end 155 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | VSM is a Ruby gem that provides an async, recursive agent framework based on the Viable System Model. It implements five named systems (Operations, Coordination, Intelligence, Governance, Identity) in a capsule-based architecture for building AI agents. 
8 | 9 | ## Development Commands 10 | 11 | ### Testing 12 | ```bash 13 | bundle exec rspec # Run all tests 14 | bundle exec rspec spec/vsm_spec.rb # Run specific test file 15 | rake spec # Alternative test runner 16 | ``` 17 | 18 | ### Code Quality 19 | ```bash 20 | bundle exec rubocop # Run linter 21 | rake rubocop # Alternative linter runner 22 | rake # Run both tests and linter (default task) 23 | ``` 24 | 25 | ### Gem Development 26 | ```bash 27 | bundle install # Install dependencies 28 | bundle exec rake build # Build the gem 29 | bundle exec rake install # Install gem locally 30 | bundle exec rake release # Release gem (maintainers only) 31 | ``` 32 | 33 | ### Interactive Development 34 | ```bash 35 | bundle exec irb -r vsm # Start IRB with VSM loaded 36 | ``` 37 | 38 | ## Architecture 39 | 40 | ### Core Components 41 | 42 | **Capsule** (`lib/vsm/capsule.rb:4`) - Main building block containing five systems plus AsyncChannel bus. The core dispatch loop runs in `lib/vsm/capsule.rb:18` processing messages through coordination → governance → routing. 43 | 44 | **Five Systems** (Viable System Model roles): 45 | - **Operations** (`lib/vsm/roles/operations.rb:7`) - Handles `:tool_call` messages, routes to child capsules/tools 46 | - **Coordination** (`lib/vsm/roles/coordination.rb:3`) - Message scheduling, turn management, session floor control 47 | - **Intelligence** (`lib/vsm/roles/intelligence.rb`) - Planning/decision making (typically LLM integration) 48 | - **Governance** (`lib/vsm/roles/governance.rb`) - Policy enforcement, safety, budgets 49 | - **Identity** (`lib/vsm/roles/identity.rb`) - Purpose definition, invariants, escalation 50 | 51 | **AsyncChannel** (`lib/vsm/async_channel.rb:4`) - Non-blocking message bus built on `async` gem with pub/sub support. 52 | 53 | **DSL Builder** (`lib/vsm/dsl.rb:4`) - Declarative capsule construction with role injection and child management. 54 | 55 | ### Tool System 56 | 57 | **ToolCapsule** (`lib/vsm/tool/capsule.rb:3`) - Base class for tools that are capsules with ActsAsTool mixin. 58 | 59 | **ActsAsTool** (`lib/vsm/tool/acts_as_tool.rb:3`) - Mixin providing tool metadata (name, description, JSON schema) and descriptor generation. 60 | 61 | **Tool Descriptors** (`lib/vsm/tool/descriptor.rb`) - Provider-agnostic tool definitions with `to_openai_tool`, `to_anthropic_tool`, `to_gemini_tool` methods. 62 | 63 | ### Execution Models 64 | 65 | **Executors** (`lib/vsm/executors/`) - Pluggable execution strategies: 66 | - `FiberExecutor` - Default async/IO-bound execution 67 | - `ThreadExecutor` - CPU-bound or blocking library execution 68 | 69 | Tools specify execution mode via `execution_mode` method returning `:fiber` or `:thread`. 70 | 71 | ### Message Flow 72 | 73 | Messages follow this structure: 74 | ```ruby 75 | VSM::Message.new( 76 | kind: :user | :assistant | :assistant_delta | :tool_call | :tool_result | :plan | :policy | :audit, 77 | payload: any_data, 78 | corr_id: "correlation_id_for_tool_calls", 79 | meta: { session_id: "uuid", ... } 80 | ) 81 | ``` 82 | 83 | Core flow: User input → Intelligence (planning) → Operations (tool execution) → Coordination (scheduling) → Assistant output. 84 | 85 | ### Provider Integration 86 | 87 | LLM drivers are in `lib/vsm/drivers/` with async implementations for: 88 | - OpenAI (`drivers/openai/async_driver.rb`) 89 | - Anthropic (`drivers/anthropic/async_driver.rb`) 90 | - Gemini (`drivers/gemini/async_driver.rb`) 91 | 92 | Drivers are provider-agnostic and integrate through Intelligence implementations. 
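As a quick reference (mirroring `examples/03_openai_tools.rb`, not additional API surface), a driver is constructed directly and handed to Intelligence through the DSL; the Anthropic and Gemini drivers are swapped in the same way:

```ruby
driver = VSM::Drivers::OpenAI::AsyncDriver.new(
  api_key: ENV.fetch("OPENAI_API_KEY"),
  model:   ENV["AIRB_MODEL"] || "gpt-4o-mini"
)

capsule = VSM::DSL.define(:my_agent) do
  identity     klass: VSM::Identity,     args: { identity: "my_agent", invariants: [] }
  governance   klass: VSM::Governance
  coordination klass: VSM::Coordination
  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: "Keep answers brief." }
  operations do
    # register ToolCapsule children here
  end
end
```

Start it with `VSM::Runtime.start(capsule, ports: [...])` as shown in the examples.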
93 | 94 | ## Key Development Patterns 95 | 96 | ### Creating Tools 97 | Inherit from `VSM::ToolCapsule`, define tool metadata with class methods, implement `run(args)`: 98 | 99 | ```ruby 100 | class MyTool < VSM::ToolCapsule 101 | tool_name "my_tool" 102 | tool_description "Description" 103 | tool_schema({ type: "object", properties: { ... }, required: [...] }) 104 | 105 | def run(args) 106 | # Tool implementation 107 | end 108 | 109 | def execution_mode = :fiber # or :thread 110 | end 111 | ``` 112 | 113 | ### Building Capsules 114 | Use the DSL in `lib/vsm/dsl.rb:45` to define capsules with five systems: 115 | 116 | ```ruby 117 | capsule = VSM::DSL.define(:name) do 118 | identity klass: VSM::Identity, args: { identity: "name", invariants: [] } 119 | governance klass: VSM::Governance 120 | coordination klass: VSM::Coordination 121 | intelligence klass: MyIntelligence 122 | operations do 123 | capsule :tool_name, klass: ToolClass 124 | end 125 | end 126 | ``` 127 | 128 | ### Testing Approach 129 | - Unit test individual roles and tools directly 130 | - Test capsule message flow with queues to capture emissions 131 | - Use RSpec with async-rspec for fiber-based testing 132 | - Mock external dependencies (LLM APIs) in Intelligence tests 133 | 134 | The codebase follows Ruby conventions with frozen string literals, clear namespacing, and small focused classes following SOLID principles. -------------------------------------------------------------------------------- /spec/vsm/integration/tool_flow_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "spec_helper" 3 | 4 | RSpec.describe "End-to-end flow" do 5 | include Async::RSpec::Reactor 6 | 7 | let(:capsule) do 8 | VSM::DSL.define(:demo) do 9 | identity klass: VSM::Identity, args: { identity: "demo", invariants: [] } 10 | governance klass: FakeGovernance, args: {} 11 | coordination klass: VSM::Coordination 12 | intelligence klass: FakeIntelligence 13 | monitoring klass: VSM::Monitoring 14 | operations do 15 | capsule :echo, klass: FakeEchoTool 16 | capsule :slow, klass: SlowTool 17 | capsule :boom, klass: ErrorTool 18 | end 19 | end 20 | end 21 | 22 | it "completes an echo turn and signals turn-end to coordination" do 23 | sid = "s1" 24 | 25 | out = [] 26 | capsule.bus.subscribe { |m| out << m.kind } 27 | 28 | # Use a simplified approach - just test the components work together 29 | Async do |task| 30 | # Start the capsule loop in background 31 | capsule_task = task.async { capsule.run } 32 | 33 | # Give it time to start 34 | task.sleep(0.01) 35 | 36 | # Send user message 37 | capsule.bus.emit VSM::Message.new(kind: :user, payload: "echo hello", meta: { session_id: sid }) 38 | 39 | # Wait for the flow to complete 40 | capsule.roles[:coordination].wait_for_turn_end(sid) 41 | 42 | # Stop the capsule 43 | capsule_task.stop 44 | 45 | expect(out).to include(:tool_call, :tool_result, :assistant) 46 | end 47 | end 48 | 49 | it "runs two slow tools in parallel and finishes one turn" do 50 | # Create completely isolated classes for this test 51 | slow_tool_class = Class.new(VSM::ToolCapsule) do 52 | tool_name "slow" 53 | tool_description "Sleep briefly and return id" 54 | tool_schema({ type: "object", properties: { id: { type: "integer" } }, required: ["id"] }) 55 | 56 | def execution_mode = :thread 57 | def run(args) 58 | sleep 0.1 59 | "slow-#{args["id"]}" 60 | end 61 | end 62 | 63 | isolated_intelligence = Class.new(VSM::Intelligence) do 64 | def initialize 65 | 
@by_session = Hash.new { |h,k| h[k] = { pending: 0 } } 66 | end 67 | 68 | def handle(message, bus:, **) 69 | case message.kind 70 | when :user 71 | sid = message.meta&.dig(:session_id) 72 | if message.payload == "slow2" 73 | @by_session[sid][:pending] = 2 74 | 2.times do |i| 75 | bus.emit VSM::Message.new( 76 | kind: :tool_call, 77 | payload: { tool: "slow", args: { "id" => i } }, 78 | corr_id: "slow-#{i}", 79 | meta: { session_id: sid } 80 | ) 81 | end 82 | true 83 | else 84 | false 85 | end 86 | when :tool_result 87 | sid = message.meta&.dig(:session_id) 88 | @by_session[sid][:pending] -= 1 89 | if @by_session[sid][:pending] <= 0 90 | bus.emit VSM::Message.new(kind: :assistant, payload: "done", meta: { session_id: sid }) 91 | end 92 | true 93 | else 94 | false 95 | end 96 | end 97 | end 98 | 99 | # Create fresh capsule for this test 100 | test_capsule = VSM::DSL.define(:demo2) do 101 | identity klass: VSM::Identity, args: { identity: "demo2", invariants: [] } 102 | governance klass: VSM::Governance 103 | coordination klass: VSM::Coordination 104 | intelligence klass: isolated_intelligence 105 | operations do 106 | capsule :slow, klass: slow_tool_class 107 | end 108 | end 109 | 110 | sid = "s2" 111 | results = [] 112 | assistant_received = false 113 | 114 | test_capsule.bus.subscribe do |m| 115 | if [:tool_result, :assistant].include?(m.kind) && m.meta&.dig(:session_id) == sid 116 | results << [m.kind, m.corr_id] unless assistant_received 117 | assistant_received = true if m.kind == :assistant 118 | end 119 | end 120 | 121 | Async do |task| 122 | # Start the capsule loop in background 123 | capsule_task = task.async { test_capsule.run } 124 | 125 | # Give it time to start 126 | task.sleep(0.01) 127 | 128 | t0 = Process.clock_gettime(Process::CLOCK_MONOTONIC) 129 | test_capsule.bus.emit VSM::Message.new(kind: :user, payload: "slow2", meta: { session_id: sid }) 130 | test_capsule.roles[:coordination].wait_for_turn_end(sid) 131 | total = Process.clock_gettime(Process::CLOCK_MONOTONIC) - t0 132 | 133 | # Stop the capsule 134 | capsule_task.stop 135 | 136 | # Just verify we got some tool results and an assistant message 137 | tool_results = results.select { |kind, _| kind == :tool_result } 138 | assistant_msgs = results.select { |kind, _| kind == :assistant } 139 | 140 | expect(tool_results.count).to be >= 2 141 | expect(assistant_msgs.count).to be >= 1 142 | end 143 | end 144 | 145 | it "propagates tool errors but still completes the turn" do 146 | sid = "s3" 147 | seen = [] 148 | capsule.bus.subscribe { |m| seen << m if [:tool_result, :assistant].include?(m.kind) } 149 | 150 | Async do |task| 151 | # Start the capsule loop in background 152 | capsule_task = task.async { capsule.run } 153 | 154 | # Give it time to start 155 | task.sleep(0.01) 156 | 157 | capsule.bus.emit VSM::Message.new(kind: :user, payload: "boom", meta: { session_id: sid }) 158 | capsule.roles[:coordination].wait_for_turn_end(sid) 159 | 160 | # Stop the capsule 161 | capsule_task.stop 162 | 163 | tr = seen.find { |m| m.kind == :tool_result } 164 | expect(tr.payload).to match(/ERROR: RuntimeError: kapow/) 165 | expect(seen.last.kind).to eq(:assistant) 166 | end 167 | end 168 | end 169 | 170 | -------------------------------------------------------------------------------- /lib/vsm/drivers/gemini/async_driver.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "async" 3 | require "net/http" 4 | require "uri" 5 | require "json" 6 | require 
"securerandom" 7 | 8 | module VSM 9 | module Drivers 10 | module Gemini 11 | class AsyncDriver 12 | def initialize(api_key:, model:, base_url: "https://generativelanguage.googleapis.com/v1beta", streaming: true) 13 | @api_key, @model, @base, @streaming = api_key, model, base_url, streaming 14 | end 15 | 16 | def run!(conversation:, tools:, policy: {}, &emit) 17 | contents = to_gemini_contents(conversation) 18 | fndecls = normalize_gemini_tools(tools) 19 | if @streaming 20 | uri = URI.parse("#{@base}/models/#{@model}:streamGenerateContent?alt=sse&key=#{@api_key}") 21 | headers = { "content-type" => "application/json", "accept" => "text/event-stream" } 22 | body = JSON.dump({ contents: contents, system_instruction: (policy[:system_prompt] && { parts: [{ text: policy[:system_prompt] }], role: "user" }), tools: [{ functionDeclarations: fndecls }] }) 23 | http = Net::HTTP.new(uri.host, uri.port) 24 | http.use_ssl = (uri.scheme == "https") 25 | req = Net::HTTP::Post.new(uri.request_uri) 26 | headers.each { |k,v| req[k] = v } 27 | req.body = body 28 | http.request(req) do |res| 29 | if res.code.to_i != 200 30 | err = +""; res.read_body { |c| err << c } 31 | emit.call(:assistant_final, "Gemini HTTP #{res.code}: #{err.to_s.byteslice(0, 400)}") 32 | next 33 | end 34 | buffer = +""; text = +""; calls = [] 35 | res.read_body do |chunk| 36 | buffer << chunk 37 | while (i = buffer.index("\n")) 38 | line = buffer.slice!(0..i) 39 | line.chomp! 40 | next unless line.start_with?("data:") 41 | data = line.sub("data:","").strip 42 | next if data.empty? || data == "[DONE]" 43 | obj = JSON.parse(data) rescue nil 44 | next unless obj 45 | parts = (obj.dig("candidates",0,"content","parts") || []) 46 | parts.each do |p| 47 | if (t = p["text"]) && !t.empty? 48 | text << t 49 | emit.call(:assistant_delta, t) 50 | end 51 | if (fc = p["functionCall"]) && fc["name"] 52 | calls << { id: SecureRandom.uuid, name: fc["name"], arguments: (fc["args"] || {}) } 53 | end 54 | end 55 | end 56 | end 57 | if calls.any? 58 | emit.call(:tool_calls, calls) 59 | else 60 | emit.call(:assistant_final, text) 61 | end 62 | end 63 | else 64 | uri = URI.parse("#{@base}/models/#{@model}:generateContent?key=#{@api_key}") 65 | headers = { "content-type" => "application/json" } 66 | body = JSON.dump({ contents: contents, system_instruction: (policy[:system_prompt] && { parts: [{ text: policy[:system_prompt] }], role: "user" }), tools: [{ functionDeclarations: fndecls }] }) 67 | http = Net::HTTP.new(uri.host, uri.port) 68 | http.use_ssl = (uri.scheme == "https") 69 | req = Net::HTTP::Post.new(uri.request_uri) 70 | headers.each { |k,v| req[k] = v } 71 | req.body = body 72 | res = http.request(req) 73 | if res.code.to_i != 200 74 | emit.call(:assistant_final, "Gemini HTTP #{res.code}") 75 | else 76 | data = JSON.parse(res.body) rescue {} 77 | parts = (data.dig("candidates",0,"content","parts") || []) 78 | calls = parts.filter_map { |p| fc = p["functionCall"]; fc && { id: SecureRandom.uuid, name: fc["name"], arguments: fc["args"] || {} } } 79 | if calls.any? 
80 | emit.call(:tool_calls, calls) 81 | else 82 | text = parts.filter_map { |p| p["text"] }.join 83 | emit.call(:assistant_final, text.to_s) 84 | end 85 | end 86 | end 87 | :done 88 | end 89 | 90 | private 91 | # (no IPv6/IPv4 forcing; rely on default Internet) 92 | def normalize_gemini_tools(tools) 93 | Array(tools).map { |t| normalize_gemini_tool(t) } 94 | end 95 | 96 | def normalize_gemini_tool(t) 97 | return t.to_gemini_tool if t.respond_to?(:to_gemini_tool) 98 | 99 | # Provider-shaped: { name:, description:, parameters: {…} } 100 | if t.is_a?(Hash) && (t[:parameters] || t["parameters"]) 101 | return t 102 | end 103 | 104 | # Neutral hash {name:, description:, schema:} 105 | if t.is_a?(Hash) && (t[:name] || t["name"]) 106 | return { 107 | name: t[:name] || t["name"], 108 | description: t[:description] || t["description"] || "", 109 | parameters: t[:schema] || t["schema"] || {} 110 | } 111 | end 112 | 113 | raise TypeError, "unsupported tool descriptor: #{t.inspect}" 114 | end 115 | 116 | 117 | def to_gemini_contents(neutral) 118 | items = [] 119 | neutral.each do |m| 120 | case m[:role] 121 | when "user" 122 | items << { role: "user", parts: [{ text: m[:content].to_s }] } 123 | when "assistant" 124 | items << { role: "model", parts: [{ text: m[:content].to_s }] } 125 | when "assistant_tool_calls" 126 | # Gemini doesn't need us to echo previous functionCall(s) 127 | # Skip: model will remember its own functionCall 128 | when "tool_result" 129 | # Provide functionResponse so model can continue 130 | name = m[:name] || "tool" 131 | items << { role: "user", parts: [{ functionResponse: { name: name, response: { content: m[:content].to_s } } }] } 132 | end 133 | end 134 | items 135 | end 136 | 137 | def extract_sse_line!(buffer) 138 | if (i = buffer.index("\n")) 139 | line = buffer.slice!(0..i) 140 | line.chomp! 141 | return line 142 | end 143 | nil 144 | end 145 | end 146 | end 147 | end 148 | end 149 | 150 | -------------------------------------------------------------------------------- /lib/vsm/roles/intelligence.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "set" 3 | require_relative "../drivers/family" 4 | 5 | module VSM 6 | # Orchestrates multi-turn LLM chat with native tool-calls: 7 | # - Maintains neutral conversation history per session_id 8 | # - Talks to a provider driver that yields :assistant_delta, :assistant_final, :tool_calls 9 | # - Emits :tool_call to Operations, waits for ALL results, then continues 10 | # 11 | # App authors can subclass and only customize: 12 | # - system_prompt(session_id) -> String 13 | # - offer_tools?(session_id, descriptor) -> true/false (filter tools) 14 | class Intelligence 15 | attr_reader :driver 16 | 17 | def initialize(driver: nil, system_prompt: nil) 18 | @driver = driver 19 | @system_prompt = system_prompt 20 | @sessions = Hash.new { |h,k| h[k] = new_session_state } 21 | end 22 | 23 | def observe(bus); end 24 | 25 | def handle(message, bus:, **) 26 | # If no driver is configured, the base implementation is inert. 27 | # Subclasses can override #handle to implement non-LLM behavior. 28 | return false if @driver.nil? 
29 | case message.kind 30 | when :user 31 | sid = message.meta&.dig(:session_id) 32 | st = state(sid) 33 | st[:history] << { role: "user", content: message.payload.to_s } 34 | invoke_model(sid, bus) 35 | true 36 | 37 | when :tool_result 38 | sid = message.meta&.dig(:session_id) 39 | st = state(sid) 40 | # map id -> tool name if we learned it earlier (useful for Gemini) 41 | name = st[:tool_id_to_name][message.corr_id] 42 | 43 | # Debug logging 44 | if ENV["VSM_DEBUG_STREAM"] == "1" 45 | $stderr.puts "Intelligence: Received tool_result for #{name}(#{message.corr_id}): #{message.payload.to_s.slice(0, 100)}" 46 | end 47 | 48 | st[:history] << { role: "tool_result", tool_call_id: message.corr_id, name: name, content: message.payload.to_s } 49 | st[:pending_tool_ids].delete(message.corr_id) 50 | # Only continue once all tool results for this turn arrived: 51 | if st[:pending_tool_ids].empty? 52 | # Re-enter model for the same turn with tool results in history: 53 | invoke_model(sid, bus) 54 | end 55 | true 56 | 57 | else 58 | false 59 | end 60 | end 61 | 62 | # --- Extension points for apps --- 63 | 64 | # Override to compute a dynamic prompt per session 65 | def system_prompt(session_id) 66 | @system_prompt 67 | end 68 | 69 | # Override to filter tools the model may use (by descriptor) 70 | def offer_tools?(session_id, descriptor) 71 | true 72 | end 73 | 74 | private 75 | 76 | def new_session_state 77 | { 78 | history: [], 79 | pending_tool_ids: Set.new, 80 | tool_id_to_name: {}, 81 | inflight: false, 82 | turn_seq: 0 83 | } 84 | end 85 | 86 | def state(sid) = @sessions[sid] 87 | 88 | def invoke_model(session_id, bus) 89 | st = state(session_id) 90 | if st[:inflight] || !st[:pending_tool_ids].empty? 91 | if ENV["VSM_DEBUG_STREAM"] == "1" 92 | $stderr.puts "Intelligence: skip invoke sid=#{session_id} inflight=#{st[:inflight]} pending=#{st[:pending_tool_ids].size}" 93 | end 94 | return 95 | end 96 | st[:inflight] = true 97 | st[:turn_seq] += 1 98 | current_turn_id = st[:turn_seq] 99 | 100 | # Discover tools available from Operations children: 101 | descriptors, name_index = tool_inventory(bus, session_id) 102 | 103 | # Debug logging 104 | if ENV["VSM_DEBUG_STREAM"] == "1" 105 | $stderr.puts "Intelligence: invoke_model sid=#{session_id} inflight=#{st[:inflight]} pending=#{st[:pending_tool_ids].size} turn_seq=#{st[:turn_seq]}" 106 | $stderr.puts "Intelligence: Calling driver with #{st[:history].size} history entries" 107 | st[:history].each_with_index do |h, i| 108 | $stderr.puts " [#{i}] #{h[:role]}: #{h[:role] == 'assistant_tool_calls' ? h[:tool_calls].map{|tc| "#{tc[:name]}(#{tc[:id]})"}.join(', ') : h[:content]&.slice(0, 100)}" 109 | end 110 | end 111 | 112 | task = Async do 113 | begin 114 | @driver.run!( 115 | conversation: st[:history], 116 | tools: descriptors, 117 | policy: { system_prompt: system_prompt(session_id) } 118 | ) do |event, payload| 119 | case event 120 | when :assistant_delta 121 | # optionally buffer based on stream_policy 122 | bus.emit VSM::Message.new(kind: :assistant_delta, payload: payload, meta: { session_id: session_id, turn_id: current_turn_id }) 123 | when :assistant_final 124 | unless payload.to_s.empty? 
125 | st[:history] << { role: "assistant", content: payload.to_s } 126 | end 127 | bus.emit VSM::Message.new(kind: :assistant, payload: payload, meta: { session_id: session_id, turn_id: current_turn_id }) 128 | when :tool_calls 129 | st[:history] << { role: "assistant_tool_calls", tool_calls: payload } 130 | st[:pending_tool_ids] = Set.new(payload.map { _1[:id] }) 131 | payload.each { |c| st[:tool_id_to_name][c[:id]] = c[:name] } 132 | if ENV["VSM_DEBUG_STREAM"] == "1" 133 | $stderr.puts "Intelligence: tool_calls count=#{payload.size}; pending now=#{st[:pending_tool_ids].size}" 134 | end 135 | # Allow next invocation (after tools complete) without waiting for driver ensure 136 | st[:inflight] = false 137 | payload.each do |call| 138 | bus.emit VSM::Message.new( 139 | kind: :tool_call, 140 | payload: { tool: call[:name], args: call[:arguments] }, 141 | corr_id: call[:id], 142 | meta: { session_id: session_id, tool: call[:name], turn_id: current_turn_id } 143 | ) 144 | end 145 | end 146 | end 147 | ensure 148 | if ENV["VSM_DEBUG_STREAM"] == "1" 149 | $stderr.puts "Intelligence: driver completed sid=#{session_id}; pending=#{st[:pending_tool_ids].size}; inflight->false" 150 | end 151 | st[:inflight] = false 152 | end 153 | end 154 | st[:task] = task 155 | end 156 | 157 | # Return [descriptors:Array, index Hash{name=>capsule}] 158 | def tool_inventory(bus, session_id) 159 | ops = bus.context[:operations_children] || {} 160 | descriptors = [] 161 | index = {} 162 | ops.each do |name, capsule| 163 | next unless capsule.respond_to?(:tool_descriptor) 164 | desc = capsule.tool_descriptor 165 | next unless offer_tools?(session_id, desc) 166 | descriptors << desc 167 | index[desc.name] = capsule 168 | end 169 | [descriptors, index] 170 | end 171 | end 172 | end 173 | -------------------------------------------------------------------------------- /lib/vsm/drivers/openai/async_driver.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "async" 3 | require "async/http/internet" 4 | require "json" 5 | 6 | module VSM 7 | module Drivers 8 | module OpenAI 9 | class AsyncDriver 10 | def initialize(api_key:, model:, base_url: "https://api.openai.com/v1") 11 | @api_key, @model, @base = api_key, model, base_url 12 | end 13 | 14 | MAX_TOOL_TURNS = 8 15 | 16 | def run!(conversation:, tools:, policy: {}, &emit) 17 | internet = Async::HTTP::Internet.new 18 | begin 19 | headers = { 20 | "Authorization" => "Bearer #{@api_key}", 21 | "Content-Type" => "application/json", 22 | "Accept" => "text/event-stream" 23 | } 24 | 25 | messages = to_openai_messages(conversation, policy[:system_prompt]) 26 | tool_list = normalize_openai_tools(tools) 27 | 28 | req_body = JSON.dump({ 29 | model: @model, 30 | messages: messages, 31 | tools: tool_list, 32 | tool_choice: "auto", 33 | stream: true 34 | }) 35 | 36 | # Debug logging 37 | if ENV["VSM_DEBUG_STREAM"] == "1" 38 | $stderr.puts "openai => messages: #{JSON.pretty_generate(messages)}" 39 | $stderr.puts "openai => tools count: #{tool_list.size}" 40 | end 41 | 42 | response = internet.post("#{@base}/chat/completions", headers, req_body) 43 | 44 | if response.status != 200 45 | body = response.read 46 | warn "openai HTTP #{response.status}: #{body}" 47 | emit.call(:assistant_final, "") 48 | return :done 49 | end 50 | 51 | buffer = +"" 52 | text_buffer = +"" 53 | tc_partial = Hash.new { |h,k| h[k] = { id: nil, name: nil, args_str: +"" } } 54 | 55 | response.body.each do |chunk| 56 | buffer << chunk 57 | 
while (line = extract_sse_line!(buffer)) 58 | next if line.empty? || line.start_with?(":") 59 | next unless line.start_with?("data:") 60 | data = line.sub("data:","").strip 61 | $stderr.puts("openai <= #{data}") if ENV["VSM_DEBUG_STREAM"] == "1" 62 | next if data == "[DONE]" 63 | 64 | obj = JSON.parse(data) rescue nil 65 | next unless obj 66 | choice = obj.dig("choices",0) || {} 67 | delta = choice["delta"] || {} 68 | 69 | if (content = delta["content"]) 70 | text_buffer << content 71 | emit.call(:assistant_delta, content) 72 | end 73 | 74 | if (tcs = delta["tool_calls"]) 75 | tcs.each do |tc| 76 | idx = tc["index"] || 0 77 | cell = tc_partial[idx] 78 | cell[:id] ||= tc["id"] 79 | fn = tc["function"] || {} 80 | cell[:name] ||= fn["name"] if fn["name"] 81 | cell[:args_str] << (fn["arguments"] || "") 82 | end 83 | end 84 | 85 | if (fr = choice["finish_reason"]) 86 | case fr 87 | when "tool_calls" 88 | calls = tc_partial.keys.sort.map do |i| 89 | cell = tc_partial[i] 90 | { 91 | id: cell[:id] || "call_#{i}", 92 | name: cell[:name] || "unknown_tool", 93 | arguments: safe_json(cell[:args_str]) 94 | } 95 | end 96 | tc_partial.clear 97 | emit.call(:tool_calls, calls) 98 | when "stop", "length", "content_filter" 99 | emit.call(:assistant_final, text_buffer.dup) 100 | text_buffer.clear 101 | end 102 | end 103 | end 104 | end 105 | ensure 106 | internet.close 107 | end 108 | :done 109 | end 110 | 111 | private 112 | def normalize_openai_tools(tools) 113 | Array(tools).map { |t| normalize_openai_tool(t) } 114 | end 115 | 116 | def normalize_openai_tool(t) 117 | # Case 1: our Descriptor object 118 | return t.to_openai_tool if t.respond_to?(:to_openai_tool) 119 | 120 | # Case 2: provider-shaped already (OpenAI tools API) 121 | if (t.is_a?(Hash) && (t[:type] || t["type"])) 122 | return t 123 | end 124 | 125 | # Case 3: neutral hash {name:, description:, schema:} 126 | if t.is_a?(Hash) && (t[:name] || t["name"]) 127 | return { 128 | type: "function", 129 | function: { 130 | name: t[:name] || t["name"], 131 | description: t[:description] || t["description"] || "", 132 | parameters: t[:schema] || t["schema"] || {} 133 | } 134 | } 135 | end 136 | 137 | raise TypeError, "unsupported tool descriptor: #{t.inspect}" 138 | end 139 | 140 | 141 | def to_openai_messages(neutral, system_prompt) 142 | msgs = [] 143 | msgs << { role: "system", content: system_prompt } if system_prompt 144 | neutral.each do |m| 145 | case m[:role] 146 | when "user" 147 | msgs << { role: "user", content: m[:content].to_s } 148 | when "assistant" 149 | msgs << { role: "assistant", content: m[:content].to_s } 150 | when "assistant_tool_calls" 151 | msg = { 152 | role: "assistant", 153 | tool_calls: Array(m[:tool_calls]).map { |c| 154 | { 155 | id: c[:id], 156 | type: "function", 157 | function: { 158 | name: c[:name], 159 | arguments: JSON.dump(c[:arguments] || {}) 160 | } 161 | } 162 | } 163 | } 164 | msgs << msg 165 | if ENV["VSM_DEBUG_STREAM"] == "1" 166 | $stderr.puts "OpenAI: Converting assistant_tool_calls: #{msg[:tool_calls].map{|tc| "#{tc[:function][:name]}(#{tc[:id]})"}.join(', ')}" 167 | end 168 | when "tool_result" 169 | msg = { 170 | role: "tool", 171 | tool_call_id: m[:tool_call_id], 172 | content: m[:content].to_s 173 | } 174 | msgs << msg 175 | if ENV["VSM_DEBUG_STREAM"] == "1" 176 | $stderr.puts "OpenAI: Converting tool_result(#{m[:tool_call_id]}): #{m[:content].to_s.slice(0, 100)}" 177 | end 178 | end 179 | end 180 | msgs 181 | end 182 | 183 | def extract_sse_line!(buffer) 184 | if (i = buffer.index("\n")) 185 | line = 
buffer.slice!(0..i) 186 | line.chomp! 187 | return line 188 | end 189 | nil 190 | end 191 | 192 | def safe_json(s) 193 | return {} if s.nil? || s.empty? 194 | JSON.parse(s) 195 | rescue JSON::ParserError 196 | { "_raw" => s } 197 | end 198 | end 199 | end 200 | end 201 | end 202 | 203 | -------------------------------------------------------------------------------- /mcp_update.md: -------------------------------------------------------------------------------- 1 | # MCP Integration Plan and Built‑In Ports 2 | 3 | This document proposes minimal, practical support to: 4 | - Expose any VSM capsule as an MCP server over stdio (JSON‑RPC) implementing `tools/list` and `tools/call`. 5 | - Add two reusable ports to VSM: a generic, customizable Chat TTY port and an MCP stdio server port. 6 | - Dynamically reflect tools from external MCP servers and wrap them as VSM tool capsules. 7 | 8 | The design uses Ruby’s dynamic capabilities and integrates cleanly with existing VSM roles (Operations, Intelligence, Governance, etc.). 9 | 10 | ## Scope (Phase 1) 11 | - MCP methods: `tools/list`, `tools/call` only. 12 | - Transport: JSON‑RPC over stdio (NDJSON framing to start; can evolve to LSP framing without API changes). 13 | - No additional MCP features (Prompts/Resources/Logs) in this phase. 14 | 15 | ## Components 16 | - `VSM::Ports::ChatTTY` — Generic, customizable chat terminal port. 17 | - `VSM::Ports::MCP::ServerStdio` — MCP server over stdio exposing capsule tools. 18 | - `VSM::MCP::Client` — Thin stdio JSON‑RPC client for MCP reflection and calls. 19 | - `VSM::MCP::RemoteToolCapsule` — Wraps a remote MCP tool as a local VSM `ToolCapsule`. 20 | - `VSM::DSL::ChildrenBuilder#mcp_server` — Reflect and register remote tools with include/exclude/prefix controls. 21 | - (Tiny core tweak) Inject `bus` into children that accept `bus=` to allow rich observability from wrappers. 22 | 23 | ## Design Overview 24 | - Client reflection: spawn an MCP server process, call `tools/list`, build `RemoteToolCapsule`s per tool, and add them as children. Include/exclude/prefix options shape the local tool namespace. 25 | - Server exposure: reflect local tools via `tools/list`; on `tools/call`, emit a normal VSM `:tool_call` and await matching `:tool_result` (corr_id = JSON‑RPC id) to reply. 26 | - Operations routing: unchanged for Phase 1. Reflected tools register as regular children; prefixing avoids collisions. (Optional namespacing router can be added later.) 27 | - Observability: all bridges emit events into the bus so Lens shows clear lanes (client: `[:mcp, :client, server, tool]`; server: `[:mcp, :server, tool]`). 28 | - Coexistence: ChatTTY targets the user’s real TTY (e.g., `IO.console`) or stderr; MCP server uses stdio exclusively. They can run together without interfering. 
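To make the stdio transport described in the design overview above concrete, the sketch below shows the NDJSON framing (one JSON-RPC message per line). The request parameter keys follow the MCP spec and are illustrative only; the reply line matches the `write_ok` helper in `lib/vsm/ports/mcp/server_stdio.rb`, with the JSON-RPC `id` mirrored into the VSM `corr_id`:

```ruby
require "json"

# Client -> server (stdin), one line per message. Param keys per the MCP spec:
request = JSON.dump({ jsonrpc: "2.0", id: 7, method: "tools/call",
                      params: { name: "read_file", arguments: { "path" => "README.md" } } })

# Server -> client (stdout) once the matching :tool_result (corr_id mirrored from id 7) arrives.
# The exact result shape is whatever the server port builds from the tool's payload:
response = JSON.dump({ jsonrpc: "2.0", id: 7, result: { content: "...file contents..." } })
```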
29 | 30 | ## File Layout (proposed) 31 | - `lib/vsm/ports/chat_tty.rb` 32 | - `lib/vsm/ports/mcp/server_stdio.rb` 33 | - `lib/vsm/mcp/jsonrpc.rb` (shared stdio JSON‑RPC util) 34 | - `lib/vsm/mcp/client.rb` 35 | - `lib/vsm/mcp/remote_tool_capsule.rb` 36 | - `lib/vsm/dsl_mcp.rb` (adds `mcp_server` to the DSL ChildrenBuilder) 37 | - (Core) optional `bus` injection in `lib/vsm/capsule.rb` 38 | 39 | ## APIs and Usage 40 | 41 | ### Expose a Capsule via CLI and MCP (simultaneously) 42 | ```ruby 43 | require "vsm" 44 | require "vsm/ports/chat_tty" 45 | require "vsm/ports/mcp/server_stdio" 46 | 47 | cap = VSM::DSL.define(:demo) do 48 | identity klass: VSM::Identity, args: { identity: "demo", invariants: [] } 49 | governance klass: VSM::Governance 50 | coordination klass: VSM::Coordination 51 | intelligence klass: VSM::Intelligence, args: { driver: VSM::Drivers::OpenAI::AsyncDriver.new(api_key: ENV["OPENAI_API_KEY"], model: "gpt-4o-mini") } 52 | monitoring klass: VSM::Monitoring 53 | operations do 54 | # local tools … 55 | end 56 | end 57 | 58 | ports = [ 59 | VSM::Ports::MCP::ServerStdio.new(capsule: cap), # machine IO (stdio) 60 | VSM::Ports::ChatTTY.new(capsule: cap) # human IO (TTY/console) 61 | ] 62 | VSM::Runtime.start(cap, ports: ports) 63 | ``` 64 | 65 | ### Mount a Remote MCP Server (dynamic reflection) 66 | ```ruby 67 | require "vsm" 68 | require "vsm/dsl_mcp" 69 | 70 | cap = VSM::DSL.define(:with_remote) do 71 | identity klass: VSM::Identity, args: { identity: "with_remote", invariants: [] } 72 | governance klass: VSM::Governance 73 | coordination klass: VSM::Coordination 74 | intelligence klass: VSM::Intelligence, args: { driver: VSM::Drivers::Anthropic::AsyncDriver.new(api_key: ENV["ANTHROPIC_API_KEY"], model: "claude-sonnet-4.0") } 75 | monitoring klass: VSM::Monitoring 76 | operations do 77 | mcp_server :smith, cmd: "smith-server --stdio", include: %w[search read], prefix: "smith_", env: { "SMITH_TOKEN" => ENV["SMITH_TOKEN"] } 78 | end 79 | end 80 | 81 | VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap)]) 82 | ``` 83 | 84 | ### Filter Tools Offered to the Model (optional) 85 | ```ruby 86 | class GuardedIntel < VSM::Intelligence 87 | def offer_tools?(sid, descriptor) 88 | descriptor.name.start_with?("smith_") # only offer smith_* tools 89 | end 90 | end 91 | ``` 92 | 93 | ## Customization (ChatTTY) 94 | - Constructor options: `input:`, `output:`, `banner:`, `prompt:`, `theme:`. 95 | - Defaults: reads/writes to `IO.console` if available; otherwise reads are disabled and output goes to a safe stream (stderr/console). Never interferes with MCP stdio. 96 | - Override points: subclass and override `banner(io)` and/or `render_out(message)` while reusing the main input loop. 97 | 98 | Example (options only): 99 | ```ruby 100 | tty = VSM::Ports::ChatTTY.new( 101 | capsule: cap, 102 | banner: ->(io) { io.puts "\e[96mMy App\e[0m — welcome!" }, 103 | prompt: "Me> " 104 | ) 105 | ``` 106 | 107 | Example (subclass): 108 | ```ruby 109 | class FancyTTY < VSM::Ports::ChatTTY 110 | def banner(io) 111 | io.puts "\e[95m\n ███ MY APP ███\n\e[0m" 112 | end 113 | def render_out(m) 114 | super 115 | @out.puts("\e[92m✓ #{m.payload.to_s.slice(0,200)}\e[0m") if m.kind == :tool_result 116 | end 117 | end 118 | ``` 119 | 120 | ## Coexistence and IO Routing 121 | - MCP stdio server: reads `$stdin`, writes `$stdout` with strict JSON (one message per line). No TTY assumptions. 
122 | - ChatTTY: prefers `IO.console` for both input and output; falls back to `$stderr` for output and disables input if no TTY is present. 123 | - Result: Both ports can run in the same process without corrupting MCP stdio. 124 | 125 | ## Observability 126 | - Client wrapper emits `:progress`/`:audit` with `path: [:mcp, :client, server, tool]` around calls. 127 | - Server port emits `:audit` and wraps `tools/call` into standard `:tool_call`/`:tool_result` with `corr_id` mirrored to JSON‑RPC id. 128 | - Lens will show clear lanes and full payloads, subject to Governance redaction (if any). 129 | 130 | ## Governance and Operations 131 | - Operations: unchanged; executes capsules for `:tool_call` and emits `:tool_result`. 132 | - Governance: gate by name/prefix/regex; apply timeouts/rate limits/confirmations; redact args/results in Lens if needed. 133 | - Execution mode: remote wrappers default to `:thread` to avoid blocking the reactor on stdio I/O. 134 | 135 | ## Configuration and Authentication 136 | - Default via ENV (e.g., tokens/keys). Per‑mount overrides available through `mcp_server env: { … }`. 137 | - CLI flags can be introduced later in a helper script if needed. 138 | 139 | ## Backward Compatibility 140 | - No changes to `airb`. `VSM::Ports::ChatTTY` is a reusable, minimal alternative for new apps. 141 | 142 | ## Future Extensions (not in Phase 1) 143 | - Namespaced mounts (`smith.search`) with a tiny router enhancement in Operations. 144 | - Code generation flow (`vsm mcp import …`) to create durable wrappers. 145 | - Additional MCP features (prompts/resources/logs) and WebSocket transport. 146 | - Web interaction port: HTTP chat with customizable UI surfaces. 147 | 148 | ## Milestones 149 | 1) Implement ports and client/wrapper (files above), plus optional `bus` injection. 150 | 2) Add small README/usage and example snippet in `examples/`. 151 | 3) Manual tests: 152 | - Start capsule with both ChatTTY and MCP ports; verify no IO collision. 153 | - Reflect a known MCP server; verify tool listing and calls. 154 | - Lens shows client/server lanes with corr_id continuity. 155 | 4) Optional: DSL include/exclude/prefix validation and guardrails. 156 | 157 | ## Acceptance Criteria 158 | - Starting a capsule with `VSM::Ports::MCP::ServerStdio` exposes working `tools/list` and `tools/call` on stdio. 159 | - Starting a capsule with `VSM::Ports::ChatTTY` provides a working chat loop; banner/prompt are overridable without re‑implementing the loop. 160 | - Running both ports concurrently does not corrupt MCP stdio. 161 | - Reflecting a remote MCP server via `mcp_server` registers local tool capsules that work with `Intelligence` tool‑calling. 162 | - Lens displays meaningful events for both client and server paths. 163 | -------------------------------------------------------------------------------- /lib/vsm/lens/server.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "rack" 3 | require "rack/utils" 4 | 5 | module VSM 6 | module Lens 7 | class Server 8 | INDEX_HTML = <<~HTML 9 | 10 | 11 | 12 | 13 | 14 | VSM Lens 15 | 37 | 38 | 39 |
[INDEX_HTML markup elided during extraction: heredoc lines ~40–111 (page structure, inline styles, and script) did not survive; the recoverable visible text is the "VSM Lens live" top bar and a "Timeline" pane heading]
55 | 112 | 113 | 114 | HTML 115 | 116 | def initialize(hub:, token: nil, stats: nil) 117 | @hub, @token, @stats = hub, token, stats 118 | end 119 | 120 | def rack_app 121 | hub = @hub 122 | token = @token 123 | stats = @stats 124 | Rack::Builder.new do 125 | use Rack::ContentLength 126 | 127 | map "/" do 128 | run proc { |_env| [200, {"Content-Type"=>"text/html; charset=utf-8"}, [Server::INDEX_HTML]] } 129 | end 130 | 131 | map "/events" do 132 | run proc { |env| 133 | req = Rack::Request.new(env) 134 | if token && req.params["token"] != token 135 | [401, {"Content-Type"=>"text/plain"}, ["unauthorized"]] 136 | else 137 | queue, snapshot = hub.subscribe 138 | headers = {"Content-Type"=>"text/event-stream", "Cache-Control"=>"no-cache", "Connection"=>"keep-alive"} 139 | body = SSEBody.new(hub, queue, snapshot) 140 | [200, headers, body] 141 | end 142 | } 143 | end 144 | 145 | map "/state" do 146 | run proc { |env| 147 | req = Rack::Request.new(env) 148 | if token && req.params["token"] != token 149 | [401, {"Content-Type"=>"application/json"}, [JSON.dump({error: "unauthorized"})]] 150 | else 151 | payload = stats ? stats.state : { error: "stats_unavailable" } 152 | [200, {"Content-Type"=>"application/json"}, [JSON.dump(payload)]] 153 | end 154 | } 155 | end 156 | end 157 | end 158 | end 159 | 160 | class SSEBody 161 | def initialize(hub, queue, snapshot) 162 | @hub, @queue, @snapshot = hub, queue, snapshot 163 | @heartbeat = true 164 | end 165 | 166 | def each 167 | # Send snapshot first 168 | @snapshot.each { |ev| yield "data: #{JSON.generate(ev)}\n\n" } 169 | # Heartbeat thread to keep connections alive 170 | hb = Thread.new do 171 | while @heartbeat 172 | sleep 15 173 | yield ": ping\n\n" # SSE comment line 174 | end 175 | end 176 | # Stream live events 177 | loop do 178 | ev = @queue.pop 179 | yield "data: #{JSON.generate(ev)}\n\n" 180 | end 181 | ensure 182 | @heartbeat = false 183 | @hub.unsubscribe(@queue) rescue nil 184 | hb.kill if hb&.alive? 185 | end 186 | end 187 | end 188 | end 189 | -------------------------------------------------------------------------------- /lib/vsm/drivers/anthropic/async_driver.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | require "json" 3 | require "net/http" 4 | require "uri" 5 | require "securerandom" 6 | 7 | module VSM 8 | module Drivers 9 | module Anthropic 10 | class AsyncDriver 11 | def initialize(api_key:, model:, base_url: "https://api.anthropic.com/v1", version: "2023-06-01") 12 | @api_key, @model, @base, @version = api_key, model, base_url, version 13 | end 14 | 15 | def run!(conversation:, tools:, policy: {}, &emit) 16 | # Always use Net::HTTP with SSE 17 | emitted_terminal = false 18 | 19 | headers = { 20 | "x-api-key" => @api_key, 21 | "anthropic-version" => @version, 22 | "content-type" => "application/json", 23 | "accept" => "text/event-stream" 24 | } 25 | 26 | messages = to_anthropic_messages(conversation, policy[:system_prompt]) 27 | tool_list = normalize_anthropic_tools(tools) 28 | payload = { 29 | model: @model, 30 | system: policy[:system_prompt], 31 | messages: messages, 32 | max_tokens: 512, 33 | stream: true 34 | } 35 | if tool_list.any? 
36 | payload[:tools] = tool_list 37 | payload[:tool_choice] = { type: "auto" } 38 | end 39 | body = JSON.dump(payload) 40 | 41 | url = URI.parse("#{@base}/messages") 42 | http = Net::HTTP.new(url.host, url.port) 43 | http.use_ssl = (url.scheme == "https") 44 | http.read_timeout = 120 45 | 46 | req = Net::HTTP::Post.new(url.request_uri) 47 | headers.each { |k,v| req[k] = v } 48 | req.body = body 49 | 50 | res = http.request(req) do |response| 51 | ct = response["content-type"] 52 | if response.code.to_i != 200 53 | err_body = +"" 54 | response.read_body { |chunk| err_body << chunk } 55 | preview = err_body.to_s.byteslice(0, 400) 56 | emit.call(:assistant_final, "Anthropic HTTP #{response.code}: #{preview}") 57 | emitted_terminal = true 58 | next 59 | end 60 | 61 | if ct && ct.include?("text/event-stream") 62 | buffer = +"" 63 | textbuf = +"" 64 | toolbuf = {} 65 | tool_calls = [] 66 | 67 | response.read_body do |chunk| 68 | buffer << chunk 69 | while (i = buffer.index("\n")) 70 | line = buffer.slice!(0..i) 71 | line.chomp! 72 | next unless line.start_with?("data:") 73 | data = line.sub("data:","").strip 74 | next if data.empty? || data == "[DONE]" 75 | obj = JSON.parse(data) rescue nil 76 | next unless obj 77 | ev = obj["type"].to_s 78 | if ENV["VSM_DEBUG_STREAM"] == "1" 79 | $stderr.puts "anthropic(nethttp) <= #{ev}: #{data.byteslice(0, 160)}" 80 | end 81 | 82 | case ev 83 | when "content_block_delta" 84 | idx = obj["index"]; delta = obj["delta"] || {} 85 | case delta["type"] 86 | when "text_delta" 87 | part = delta["text"].to_s 88 | textbuf << part 89 | emit.call(:assistant_delta, part) 90 | when "input_json_delta" 91 | toolbuf[idx] ||= { id: nil, name: nil, json: +"" } 92 | toolbuf[idx][:json] << (delta["partial_json"] || "") 93 | end 94 | when "content_block_start" 95 | # For anthropic, the key can be 'content' or 'content_block' 96 | c = obj["content"] || obj["content_block"] || {} 97 | if c["type"] == "tool_use" 98 | name = c["name"] || obj["name"] 99 | toolbuf[obj["index"]] = { id: c["id"], name: name, json: +"" } 100 | end 101 | when "content_block_stop" 102 | idx = obj["index"] 103 | if tb = toolbuf[idx] 104 | args = tb[:json].empty? ? {} : (JSON.parse(tb[:json]) rescue {"_raw"=>tb[:json]}) 105 | # Only enqueue if name is present 106 | if tb[:name].to_s.strip != "" && tb[:id] 107 | tool_calls << { id: tb[:id], name: tb[:name], arguments: args } 108 | end 109 | end 110 | when "message_stop" 111 | if tool_calls.any? 112 | emit.call(:tool_calls, tool_calls) 113 | else 114 | emit.call(:assistant_final, textbuf.dup) 115 | end 116 | emitted_terminal = true 117 | end 118 | end 119 | end 120 | 121 | unless emitted_terminal 122 | # If the stream closed without a terminal, emit final text 123 | emit.call(:assistant_final, textbuf) 124 | emitted_terminal = true 125 | end 126 | else 127 | # Non-streaming JSON 128 | data = "" 129 | response.read_body { |chunk| data << chunk } 130 | obj = JSON.parse(data) rescue {} 131 | parts = Array(obj.dig("content")) 132 | calls = [] 133 | text = +"" 134 | parts.each do |p| 135 | case p["type"] 136 | when "text" then text << p["text"].to_s 137 | when "tool_use" then calls << { id: p["id"] || SecureRandom.uuid, name: p["name"], arguments: p["input"] || {} } 138 | end 139 | end 140 | if calls.any? 
141 | emit.call(:tool_calls, calls) 142 | else 143 | emit.call(:assistant_final, text) 144 | end 145 | emitted_terminal = true 146 | end 147 | end 148 | 149 | :done 150 | end 151 | 152 | private 153 | # (no IPv6/IPv4 forcing; rely on default Internet) 154 | def normalize_anthropic_tools(tools) 155 | Array(tools).map { |t| normalize_anthropic_tool(t) } 156 | end 157 | 158 | def normalize_anthropic_tool(t) 159 | return t.to_anthropic_tool if t.respond_to?(:to_anthropic_tool) 160 | 161 | # Provider-shaped: {name:, description:, input_schema: {…}} 162 | if t.is_a?(Hash) && (t[:input_schema] || t["input_schema"]) 163 | return t 164 | end 165 | 166 | # Neutral hash {name:, description:, schema:} 167 | if t.is_a?(Hash) && (t[:name] || t["name"]) 168 | return { 169 | name: t[:name] || t["name"], 170 | description: t[:description] || t["description"] || "", 171 | input_schema: t[:schema] || t["schema"] || {} 172 | } 173 | end 174 | 175 | raise TypeError, "unsupported tool descriptor: #{t.inspect}" 176 | end 177 | 178 | 179 | def to_anthropic_messages(neutral, _system) 180 | # Build content blocks per message; keep ordering 181 | neutral.map do |m| 182 | case m[:role] 183 | when "user" 184 | { role: "user", content: [{ type: "text", text: m[:content].to_s }] } 185 | when "assistant" 186 | { role: "assistant", content: [{ type: "text", text: m[:content].to_s }] } 187 | when "assistant_tool_calls" 188 | blocks = Array(m[:tool_calls]).map { |c| 189 | { type: "tool_use", id: c[:id], name: c[:name], input: c[:arguments] || {} } 190 | } 191 | { role: "assistant", content: blocks } 192 | when "tool_result" 193 | { role: "user", content: [{ type: "tool_result", tool_use_id: m[:tool_call_id], content: m[:content].to_s }] } 194 | end 195 | end.compact 196 | end 197 | 198 | def extract_sse_line!(buffer) 199 | if (i = buffer.index("\n")) 200 | line = buffer.slice!(0..i) 201 | line.chomp! 202 | return line 203 | end 204 | nil 205 | end 206 | end 207 | end 208 | end 209 | end 210 | 211 | -------------------------------------------------------------------------------- /llms.txt: -------------------------------------------------------------------------------- 1 | 2 | LLMS CONTEXT FILE — VSM (Viable Systems for Agents) GEM 3 | ======================================================= 4 | 5 | Audience: Large Language Models (LLMs) acting as coding assistants and human contributors. 6 | Goal: Provide all key context to safely modify, extend, and use the VSM gem as a framework for agentic CLIs (e.g., `airb`). 7 | 8 | — TL;DR — 9 | - VSM is a small, async, message-driven runtime for building recursive “capsules” that each contain five named systems: 10 | Operations, Coordination, Intelligence, Governance, Identity. Monitoring (observability) is also provided. 11 | - Tools are implemented as *capsules* that *opt-in* to a tool interface (`ActsAsTool`) with JSON Schema descriptors. 12 | - The runtime is fiber-based (`async` gem). Tool execution can run in parallel via executors (`:fiber`, `:thread`, optional `:ractor`/`:subprocess` later). 13 | - Intelligence integrates provider drivers (OpenAI, Anthropic, Gemini) that support *structured tool calls* and (for OpenAI/Anthropic) *streaming*. 14 | - A built-in “Lens” web visualizer (SSE) streams live events from the bus; enable with one call. 15 | - Use the DSL to assemble a top-level capsule (the “organism”) and optional sub-capsules (recursive sub-agents or tools). 
16 | 17 | -------------------------------------------------------------------------------- 18 | 1) REPO LAYOUT (EXPECTED FILES) 19 | -------------------------------------------------------------------------------- 20 | lib/ 21 | vsm.rb # top-level requires 22 | vsm/message.rb # Message struct 23 | vsm/async_channel.rb # async bus (fibers) 24 | vsm/homeostat.rb # budgets/alerts (minimal) 25 | vsm/observability/ledger.rb # Monitoring role: JSONL event ledger 26 | vsm/roles/ 27 | operations.rb # runs tools (capsules) via executors 28 | coordination.rb # scheduling, floor control, turn end 29 | intelligence.rb # abstract; apps subclass this 30 | governance.rb # policy/budgets/confirmation hooks 31 | identity.rb # invariants/escalation 32 | vsm/tool/ 33 | descriptor.rb # name/description/schema → provider shapes 34 | acts_as_tool.rb # mixin to mark capsules as tools 35 | capsule.rb # base tool capsule (implements #run) 36 | vsm/executors/ 37 | fiber_executor.rb # default (IO-bound) 38 | thread_executor.rb # CPU-ish or blocking libs 39 | # (optional) ractor_executor.rb / subprocess_executor.rb 40 | vsm/capsule.rb # Capsule: wires systems, async run loop 41 | vsm/dsl.rb # DSL for composing organisms & children 42 | vsm/port.rb # adapter base (CLI/TUI/HTTP/etc.) 43 | vsm/runtime.rb # boot helper: start capsule + ports 44 | vsm/drivers/ 45 | family.rb # returns :openai | :anthropic | :gemini 46 | openai/async_driver.rb # SSE streaming + tools 47 | anthropic/async_driver.rb # SSE streaming + tool_use blocks 48 | gemini/async_driver.rb # non-streaming MVP + functionCall 49 | vsm/lens.rb # Lens.attach!(capsule, ...) 50 | vsm/lens/event_hub.rb # ring buffer + fan-out 51 | vsm/lens/server.rb # Rack app + SSE + tiny HTML 52 | 53 | spec/ # RSpec tests (smoke + providers + routing) 54 | examples/ # small runnable demos 55 | 56 | 57 | -------------------------------------------------------------------------------- 58 | 2) DESIGN GOALS 59 | -------------------------------------------------------------------------------- 60 | - Small surface, idiomatic Ruby (SOLID/POODR), high cohesion/low coupling. 61 | - Recursion-by-default: a Capsule can contain child Capsules. 62 | - First-class asynchrony: non-blocking I/O; parallel tool calls where safe. 63 | - Provider-agnostic Intelligence with a stable, minimal event API: 64 | :assistant_delta, :assistant_final, :tool_calls. 65 | - Observability out-of-the-box: JSONL ledger + SSE Lens. 66 | - Safety hooks live in Governance (path sandbox, confirmations, budgets). 67 | 68 | 69 | -------------------------------------------------------------------------------- 70 | 3) MESSAGE MODEL 71 | -------------------------------------------------------------------------------- 72 | Struct: VSM::Message.new( 73 | kind:, # Symbol — :user, :assistant_delta, :assistant, :tool_call, :tool_result, :plan, :policy, :audit, :confirm_request, :confirm_response, :progress 74 | payload:, # Object/String — event-specific data (small where possible) 75 | path:, # Array — addressing (e.g., [:airb, :operations, :read_file]) 76 | corr_id:, # String — correlates tool_call <-> tool_result 77 | meta: # Hash — session_id, tool name, budgets, severity, etc. 78 | ) 79 | 80 | Guidance for LLMs: 81 | - Always set meta.session_id for multi-turn sessions. 82 | - When emitting tool events, fill meta.tool and corr_id for pairing. 83 | - Keep payload compact; include previews for large data (full bodies may be written to disk separately if needed). 
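Example (illustrative sketch; keyword names follow the struct above, SecureRandom assumed for ids):

  require "securerandom"

  session_id = SecureRandom.uuid

  call = VSM::Message.new(
    kind:    :tool_call,
    payload: { "pattern" => "TODO", "path" => "lib" },
    path:    [:airb, :operations, :search_repo],
    corr_id: SecureRandom.uuid,
    meta:    { session_id: session_id, tool: "search_repo" }
  )

  result = VSM::Message.new(
    kind:    :tool_result,
    payload: "3 matches",
    path:    [:airb, :operations, :search_repo],
    corr_id: call.corr_id,   # same corr_id pairs the result with its call
    meta:    { session_id: session_id, tool: "search_repo" }
  )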
84 | 85 | 86 | -------------------------------------------------------------------------------- 87 | 4) CAPSULE + SYSTEMS (THE “ORG” SPINE) 88 | -------------------------------------------------------------------------------- 89 | Capsule contains: 90 | - bus: AsyncChannel (fiber-friendly queue + subscribers) 91 | - homeostat: budgets/alerts (minimal for MVP) 92 | - roles: five named systems 93 | - Operations: runs tools, dispatches to child tool-capsules 94 | - Coordination: schedules messages; grants floor per session; turn end 95 | - Intelligence: orchestrates LLM driver; streams deltas; emits tool_calls 96 | - Governance: policy gates (sandboxing, confirmation, budgets) 97 | - Identity: invariants, escalation to owner/user 98 | - children: Hash of name → child capsule (often tool capsules) 99 | 100 | Dispatch order (typical): Operations → Intelligence → Identity 101 | (Operations consumes :tool_call; Intelligence consumes :user/:tool_result; Identity handles policy updates/broadcasts.) 102 | 103 | 104 | -------------------------------------------------------------------------------- 105 | 5) ASYNCHRONY & PARALLELISM 106 | -------------------------------------------------------------------------------- 107 | - The bus is fiber-based (`async` gem). Capsule.run loops on bus.pop and lets Coordination drain/schedule messages. 108 | - Operations executes each tool_call concurrently via an Executor: 109 | - :fiber (default) for IO-aware code 110 | - :thread for brief CPU spikes or blocking C libs 111 | - (:ractor/:subprocess) later for isolation or heavy CPU 112 | - Use Async::Semaphore to cap per-tool concurrency if needed. 113 | - Coordination.wait_for_turn_end(session_id) enables CLI ports to block until the assistant final is emitted for the turn. 114 | 115 | 116 | -------------------------------------------------------------------------------- 117 | 6) TOOLS AS CAPSULES 118 | -------------------------------------------------------------------------------- 119 | Implement a tool by subclassing VSM::ToolCapsule and including ActsAsTool: 120 | 121 | class MyTool < VSM::ToolCapsule 122 | tool_name "search_repo" 123 | tool_description "Search codebase by regex" 124 | tool_schema({ 125 | type: "object", 126 | properties: { pattern: { type: "string" }, path: { type: "string" } }, 127 | required: ["pattern"] 128 | }) 129 | 130 | def execution_mode = :thread # optional; defaults to :fiber 131 | def run(args) 132 | # return a String or small JSON-compatible object 133 | end 134 | end 135 | 136 | Notes: 137 | - The JSON Schema should be compact and valid for OpenAI/Anthropic/Gemini. 138 | - Governance is injected into tool capsules (if they expose #governance=), allowing helpers like safe_path(). 139 | - Keep results small. For big outputs, summarize or write artifacts to files and return a reference. 
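Sketch of the notes above (assumptions: the injected policy object is reachable via a `governance` reader on the tool capsule, and your Governance provides the safe_path() helper from section 16):

  class WriteNote < VSM::ToolCapsule
    tool_name "write_note"
    tool_description "Write a short text file inside the workspace"
    tool_schema({
      type: "object",
      properties: { path: { type: "string" }, text: { type: "string" } },
      required: ["path", "text"]
    })

    def execution_mode = :thread   # File I/O blocks; keep it off the fiber reactor

    def run(args)
      target = governance.safe_path(args["path"])   # assumed sandbox helper; see section 16
      File.write(target, args["text"].to_s)
      "wrote #{target}"                             # small string result, no large payloads
    end
  end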
140 | 141 | 142 | -------------------------------------------------------------------------------- 143 | 7) INTELLIGENCE & PROVIDER DRIVERS 144 | -------------------------------------------------------------------------------- 145 | - Intelligence subclasses handle per-session conversation history and call a driver’s run!(conversation:, tools:, policy:) which yields three events: 146 | - [:assistant_delta, String] — stream partial text 147 | - [:assistant_final, String] — final text for the turn (may be empty if only tool calls) 148 | - [:tool_calls, Array] — each { id:, name:, arguments: Hash } 149 | - Conversation messages passed into drivers are hashes like: 150 | { role: "system"|"user"|"assistant"|"tool", content: String, tool_call_id?: String } 151 | 152 | Providers: 153 | - OpenAI::DriverAsync — SSE streaming; tools in choices[].delta.tool_calls; pass tools via `[{type:"function", function:{name, description, parameters}}]`. 154 | - Anthropic::DriverAsync — SSE streaming; `tool_use` content blocks with `input_json_delta` fragments; pass tools via `[{name, description, input_schema}]`; tool_result fed back as a user content block `{type:"tool_result", tool_use_id, content}`. 155 | - Gemini::DriverAsync — non-streaming MVP; declare tools via `function_declarations`; receive `functionCall`; reply next turn with `functionResponse`. 156 | 157 | Driver selection: 158 | - VSM::Intelligence::DriverFamily.of(@driver) → :openai | :anthropic | :gemini. 159 | - Build provider-specific tool arrays from VSM::Tool::Descriptor: 160 | - to_openai_tool, to_anthropic_tool, to_gemini_tool. 161 | 162 | Important rules for LLMs modifying Intelligence: 163 | - Do not parse tool calls from free-form text. Always use structured tool-calling outputs from drivers. 164 | - Maintain conversation faithfully; append assistant/tool messages exactly as providers expect. 165 | - Always emit :assistant_delta before :assistant when streaming text. 166 | 167 | 168 | -------------------------------------------------------------------------------- 169 | 8) GOVERNANCE & IDENTITY 170 | -------------------------------------------------------------------------------- 171 | - Governance.enforce(message) wraps routing. Add sandboxing, diff previews, confirmations, budgets, and timeouts here. 172 | - Emit :confirm_request when needed; the Port must collect a :confirm_response. 173 | - Identity holds invariants (e.g., “stay in workspace”), escalates algedonic alerts (homeostat.alarm?). 174 | 175 | LLM policy changes should emit a :policy message that Identity can broadcast to children if necessary. 176 | 177 | 178 | -------------------------------------------------------------------------------- 179 | 9) PORTS (INTERFACES) 180 | -------------------------------------------------------------------------------- 181 | - Base: VSM::Port with #ingress(event) and #render_out(message). 182 | - ChatTTY port: reads stdin lines, emits :user with meta.session_id; renders :assistant_delta (stream) and :assistant (final), handles :confirm_request → :confirm_response. 183 | - Other ports: CommandTTY (one-shot task), HTTP, WebSocket, MCP client/server ports (planned), TUI. 184 | 185 | Guidance for LLMs: 186 | - Keep ports thin. No policy or LLM logic in ports. 187 | - Always pass session_id; grant floor in Coordination for deterministic streaming. 
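Minimal rendering sketch for the guidance above (ingress/bus wiring omitted; see lib/vsm/ports/chat_tty.rb for the shipped port):

  class PlainPort < VSM::Port
    def render_out(message)
      case message.kind
      when :assistant_delta
        print(message.payload)               # stream partial text as it arrives
      when :assistant
        puts                                 # end the line once the turn's final text lands
      when :confirm_request
        puts "confirm? #{message.payload}"   # a real port must answer with a :confirm_response
      end
    end
  end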
188 | 189 | 190 | -------------------------------------------------------------------------------- 191 | 10) OBSERVABILITY (MONITORING + LENS) 192 | -------------------------------------------------------------------------------- 193 | - Monitoring subscribes to the bus and appends JSONL to .vsm.log.jsonl. 194 | - Lens is a tiny Rack app serving an SSE `/events` feed with a simple HTML viewer. 195 | - Enable in apps: 196 | VSM::Lens.attach!(capsule, host: "127.0.0.1", port: 9292, token: ENV["VSM_LENS_TOKEN"]) 197 | 198 | Lens best practices: 199 | - Include meta.session_id, path, corr_id, and meta.tool on events. 200 | - Keep payload small to avoid UI lag; the server already truncates strings. 201 | - For multi-process swarms, add a RemotePublisher that forwards events to one hub (future). 202 | 203 | 204 | -------------------------------------------------------------------------------- 205 | 11) DSL FOR ASSEMBLY 206 | -------------------------------------------------------------------------------- 207 | Example organism: 208 | 209 | org = VSM::DSL.define(:airb) do 210 | identity klass: MyIdentity, args: { identity: "airb", invariants: ["stay in workspace"] } 211 | governance klass: MyGovernance, args: { workspace_root: Dir.pwd } 212 | coordination klass: VSM::Coordination 213 | intelligence klass: MyIntelligence, args: { driver: my_driver } 214 | monitoring klass: VSM::Monitoring 215 | 216 | operations do 217 | capsule :list_files, klass: Tools::ListFiles 218 | capsule :read_file, klass: Tools::ReadFile 219 | capsule :edit_file, klass: Tools::EditFile 220 | # capsule :editor, klass: Capsules::Editor (full sub-agent capsule) 221 | end 222 | end 223 | 224 | Start: 225 | VSM::Runtime.start(org, ports: [ChatTTY.new(capsule: org)]) 226 | 227 | 228 | -------------------------------------------------------------------------------- 229 | 12) PROVIDER CONFIG (ENV VARS) 230 | -------------------------------------------------------------------------------- 231 | - OPENAI_API_KEY, AIRB_MODEL (e.g., "gpt-4o-mini") 232 | - ANTHROPIC_API_KEY, AIRB_MODEL (e.g., "claude-3-5-sonnet-latest") 233 | - GEMINI_API_KEY, AIRB_MODEL (e.g., "gemini-2.0-flash-001") 234 | - AIRB_PROVIDER = openai | anthropic | gemini 235 | - VSM_LENS=1, VSM_LENS_PORT=9292, VSM_LENS_TOKEN=... 236 | 237 | 238 | -------------------------------------------------------------------------------- 239 | 13) CODING STANDARDS (FOR LLM CHANGES) 240 | -------------------------------------------------------------------------------- 241 | - Idiomatic Ruby, small objects, SRP. Keep classes under ~150 LOC when possible. 242 | - Favor explicit dependencies via initializer args. 243 | - Avoid global mutable state. If you add caches, use per-capsule fields. 244 | - Don’t block fibers: for I/O use async-http; for CPU spikes switch to thread executor. 245 | - Tests for every new adapter/driver parser with fixtures; route tests for message sequencing. 246 | - Prefer incremental diffs (unified patches) with file paths and clear commit titles: 247 | - Title: : (e.g., "intelligence/openai: handle empty delta lines") 248 | - Body: “Why”, “What changed”, “Tests”. 
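Putting sections 10-12 together: an illustrative boot snippet (driver keyword args match lib/vsm/drivers/anthropic/async_driver.rb; MyIdentity, MyGovernance, MyIntelligence, Tools::ReadFile, and ChatTTY are the app-defined classes from the section 11 example):

  driver = VSM::Drivers::Anthropic::AsyncDriver.new(
    api_key: ENV.fetch("ANTHROPIC_API_KEY"),
    model:   ENV.fetch("AIRB_MODEL", "claude-3-5-sonnet-latest")
  )

  org = VSM::DSL.define(:airb) do
    identity     klass: MyIdentity,     args: { identity: "airb", invariants: ["stay in workspace"] }
    governance   klass: MyGovernance,   args: { workspace_root: Dir.pwd }
    coordination klass: VSM::Coordination
    intelligence klass: MyIntelligence, args: { driver: driver }
    monitoring   klass: VSM::Monitoring
    operations do
      capsule :read_file, klass: Tools::ReadFile
    end
  end

  if ENV["VSM_LENS"] == "1"
    VSM::Lens.attach!(org,
      host:  "127.0.0.1",
      port:  Integer(ENV.fetch("VSM_LENS_PORT", "9292")),
      token: ENV["VSM_LENS_TOKEN"])
  end

  VSM::Runtime.start(org, ports: [ChatTTY.new(capsule: org)])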
249 | 250 | 251 | -------------------------------------------------------------------------------- 252 | 14) TESTING (MINIMUM BASELINE) 253 | -------------------------------------------------------------------------------- 254 | - Routing smoke test: :tool_call → :tool_result → :assistant 255 | - Provider parsing tests: 256 | - OpenAI SSE fixture → emits deltas + final + tool_calls 257 | - Anthropic SSE fixture with tool_use/input_json_delta → emits tool_calls + final 258 | - Gemini functionCall fixture → emits tool_calls or final text 259 | - Governance tests: sandbox rejects path traversal; confirm flow produces :confirm_request 260 | - Concurrency tests: parallel tool calls produce paired results (different corr_id), no interleaved confusion in Coordination 261 | 262 | 263 | -------------------------------------------------------------------------------- 264 | 15) EXTENDING THE FRAMEWORK 265 | -------------------------------------------------------------------------------- 266 | A) Add a new tool capsule 267 | - Create class < VSM::ToolCapsule 268 | - Declare name/description/schema; implement #run; optional #execution_mode 269 | - Register in operations DSL 270 | 271 | B) Add a sub-agent capsule 272 | - Provide its own Operations/Coordination/Intelligence/Governance/Identity (recursive) 273 | - Optionally include ActsAsTool and expose itself as a parent tool (its #run orchestrates internal steps and returns a string) 274 | 275 | C) Add a provider 276 | - Place a new driver_* under lib/vsm/intelligence// 277 | - Yield the same three events (:assistant_delta, :assistant_final, :tool_calls) 278 | - Add a descriptor conversion if provider needs a special tool shape 279 | - Update DriverFamily.of to map the class → symbol 280 | 281 | D) Add MCP support (future plan) 282 | - Implement Ports::MCP::Server to expose tools via MCP spec 283 | - Implement Ports::MCP::Client to consume external MCP tools and wrap as tool capsules 284 | 285 | 286 | -------------------------------------------------------------------------------- 287 | 16) SAFETY & SECURITY 288 | -------------------------------------------------------------------------------- 289 | - Never write outside the workspace. Use Governance.safe_path() in tools. 290 | - Confirm risky writes with :confirm_request → :confirm_response. 291 | - Add timeouts on tool calls and LLM calls (budget via Homeostat or Governance). 292 | - Use semaphores to cap concurrency per tool to avoid resource exhaustion. 293 | - Do not log secrets. Mask API keys and sensitive args before emitting events. 294 | 295 | 296 | -------------------------------------------------------------------------------- 297 | 17) KNOWN LIMITATIONS / TODOs 298 | -------------------------------------------------------------------------------- 299 | - Ractor/Subprocess executors are stubs in some scaffolds; implement when needed. 300 | - Gemini streaming is not wired yet (MVP uses non-streaming). Add Live/stream endpoints later. 301 | - Homeostat budgets are placeholders; implement counters and algedonic signals as needed. 302 | - Lens has minimal UI; extract richer vsm-lens gem when features grow. 
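Skeleton for item C in section 15 (the run! signature and the three yielded events mirror the shipped drivers; "Acme" is a hypothetical provider, not part of the gem):

  module VSM
    module Drivers
      module Acme
        class AsyncDriver
          def initialize(api_key:, model:)
            @api_key, @model = api_key, model
          end

          # Yield only :assistant_delta, :assistant_final, :tool_calls.
          def run!(conversation:, tools:, policy: {}, &emit)
            # ... call the provider API with `conversation` and provider-shaped `tools` here ...
            emit.call(:assistant_delta, "Hello")
            emit.call(:assistant_final, "Hello")
            # or, for a structured tool call:
            # emit.call(:tool_calls, [{ id: "1", name: "read_file", arguments: { "path" => "README.md" } }])
            :done
          end
        end
      end
    end
  end

Remember to map the new class in DriverFamily.of so Intelligence can build the right provider tool shapes.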
303 | 304 | 305 | -------------------------------------------------------------------------------- 306 | 18) HOW TO ASK THIS LLM FOR CHANGES 307 | -------------------------------------------------------------------------------- 308 | - Provide concrete goals and constraints (e.g., “Add a `search_repo` tool that scans *.rb files for a pattern; thread executor; unit tests; and show it in Lens with meta.tool”). 309 | - Ask for *unified diffs* with exact file paths under lib/ and spec/. Keep patches minimal and focused. 310 | - Require updates to README snippets if public API changes. 311 | - Have it add/extend tests and run them locally (`bundle exec rspec`). 312 | - If the change affects the message kinds or meta fields, ensure Lens/TUI still render sensibly. 313 | 314 | 315 | -------------------------------------------------------------------------------- 316 | 19) LICENSE & ATTRIBUTION 317 | -------------------------------------------------------------------------------- 318 | - MIT by default (edit gemspec if different). Respect third-party licenses for gems you add. 319 | - Keep provider SDKs optional; current drivers use `async-http` + stdlib only. 320 | 321 | 322 | End of llms.txt. 323 | --------------------------------------------------------------------------------