├── README.md
└── src
    └── main.zig

/README.md:
--------------------------------------------------------------------------------
Create an expression object at comptime.
```zig
const E = CompileExpression("x*(x+1)*y-3*y");
```

Now execute that expression at runtime by binding variables with an anonymous struct.
```zig
const result: f64 = E.eval(.{ .x = 1.234, .y = 3.456 });
```
--------------------------------------------------------------------------------
/src/main.zig:
--------------------------------------------------------------------------------
const std = @import("std");
const testing = std.testing;

const Token = union(enum) {
    ref: []const u8,
    literal: f64,
    @"+": void,
    @"*": void,
    @"-": void,
    @"(": void,
    @")": void,
};

fn tokenPrecedence(token: Token) u8 {
    return switch (token) {
        .ref => 0,
        .literal => 0,
        .@"(" => 0,
        .@")" => 255,
        .@"*" => 9,
        .@"-" => 8,
        .@"+" => 1,
    };
}

pub fn Stack(comptime T: type) type {
    return struct {
        const Self = @This();

        _buf: [128]T = undefined,
        len: u8 = 0,

        pub fn push(comptime self: *Self, token: T) void {
            self._buf[self.len] = token;
            self.len += 1;
        }

        pub fn peek(comptime self: *const Self) T {
            return self._buf[self.len - 1];
        }

        pub fn pop(comptime self: *Self) void {
            self.len -= 1;
        }

        pub fn peekPop(comptime self: *Self) T {
            const token = self.peek();
            self.pop();
            return token;
        }

        pub fn empty(comptime self: *const Self) bool {
            return self.len == 0;
        }
    };
}

fn pushAstNode(comptime operator_stack: *Stack(Token), comptime ast: *Stack(type)) void {
    // helper function for CompileExpression
    // assemble an AST node from the top of the operator stack
    // and the top few AST nodes of the ast stack

    switch (operator_stack.peekPop()) {
        .@"+" => {
            const rhs = ast.peekPop();
            const lhs = ast.peekPop();
            ast.push(PlusOp(lhs, rhs));
        },
        .@"-" => {
            const rhs = ast.peekPop();
            const lhs = ast.peekPop();
            ast.push(MinusOp(lhs, rhs));
        },
        .@"*" => {
            const rhs = ast.peekPop();
            const lhs = ast.peekPop();
            ast.push(MulOp(lhs, rhs));
        },
        else => unreachable,
    }
}

fn CompileExpression(comptime expression: []const u8) type {
    comptime var work_buf: [100]Token = undefined;
    const tokens = comptime try tokenize(expression, work_buf[0..]);

    var operator_stack: Stack(Token) = .{};
    var ast_stack: Stack(type) = .{};

    for (tokens) |token| {
        switch (token) {
            .literal => {
                ast_stack.push(LiteralOp(token.literal));
            },
            .ref => {
                ast_stack.push(RefOp(token.ref));
            },
            .@"(" => {
                operator_stack.push(token);
            },
            .@")" => {
                while (!operator_stack.empty() and operator_stack.peek() != .@"(") {
                    pushAstNode(&operator_stack, &ast_stack);
                }
                std.debug.assert(!operator_stack.empty() and operator_stack.peek() == .@"(");
                operator_stack.pop();
            },
            else => {
                while (!operator_stack.empty() and (tokenPrecedence(operator_stack.peek()) >= tokenPrecedence(token))) {
                    pushAstNode(&operator_stack, &ast_stack);
                }
                operator_stack.push(token);
            },
        }
    }
    while (!operator_stack.empty()) {
        pushAstNode(&operator_stack, &ast_stack);
    }

    if (ast_stack.len != 1) {
        @compileError("malformed expression");
    }

    return ast_stack.peek();
}

fn LiteralOp(comptime f: f64) type {
    return struct {
        pub fn eval(vars: anytype) f64 {
            _ = vars;
            return f;
        }
    };
}

fn PlusOp(comptime a: type, comptime b: type) type {
    return struct {
        pub fn eval(vars: anytype) f64 {
            return a.eval(vars) + b.eval(vars);
        }
    };
}

fn MulOp(comptime a: type, comptime b: type) type {
    return struct {
        pub fn eval(vars: anytype) f64 {
            return a.eval(vars) * b.eval(vars);
        }
    };
}

fn MinusOp(comptime a: type, comptime b: type) type {
    return struct {
        pub fn eval(vars: anytype) f64 {
            return a.eval(vars) - b.eval(vars);
        }
    };
}

fn RefOp(comptime name: []const u8) type {
    return struct {
        pub fn eval(vars: anytype) f64 {
            // @compileLog(vars);
            return @field(vars, name);
        }
    };
}

pub fn dumpTokens(tokens: []const Token) void {
    for (tokens) |token| {
        switch (token) {
            .ref => {
                std.debug.print("{s}", .{token.ref});
            },
            .literal => {
                std.debug.print("{d}", .{token.literal});
            },
            .@"(" => {
                std.debug.print("(", .{});
            },
            .@")" => {
                std.debug.print(")", .{});
            },
            .@"+" => {
                std.debug.print("+", .{});
            },
            .@"-" => {
                std.debug.print("-", .{});
            },
            .@"*" => {
                std.debug.print("*", .{});
            },
        }
        std.debug.print(" ", .{});
    }
    std.debug.print("\n", .{});
}

pub fn isTokenEnd(comptime char: u8) bool {
    return switch (char) {
        ' ' => true,
        '(' => true,
        ')' => true,
        '*' => true,
        '+' => true,
        '-' => true,
        else => false,
    };
}

const CharSegment = struct { start: comptime_int, end: comptime_int };

fn nextSegment(comptime str: []const u8, search_start: comptime_int) ?CharSegment {
    // skip leading spaces, then take the longest contiguous run of characters that are
    // not spaces, operators, or parens; a lone operator/paren becomes its own segment
    comptime var start_idx = search_start;
    inline while (start_idx < str.len and str[start_idx] == ' ') {
        start_idx += 1;
    }
    comptime var end_idx = start_idx;
    inline while (end_idx < str.len and comptime !isTokenEnd(str[end_idx])) {
        end_idx += 1;
    }
    if (end_idx == start_idx) end_idx += 1;
    if (start_idx < str.len and end_idx <= str.len) {
        return CharSegment{ .start = start_idx, .end = end_idx };
    }
    return null;
}

fn isDigit(d: u8) bool {
    return ('0' <= d and d <= '9');
}

fn isAlpha(d: u8) bool {
    return ('a' <= d and d <= 'z') or ('A' <= d and d <= 'Z');
}

fn tokenize(comptime str: []const u8, comptime output_buf: []Token) ![]Token {
    comptime var output_idx = 0;
    comptime var start_idx = 0;
    inline while (comptime nextSegment(str, start_idx)) |segment| : (start_idx = segment.end) {
        const token = comptime str[segment.start..segment.end];

        if (isDigit(token[0])) {
            // parse as literal
            output_buf[output_idx] = Token{ .literal = try std.fmt.parseFloat(f64, token) };
            output_idx += 1;
        } else if (isAlpha(token[0])) {
            // parse as a reference
            output_buf[output_idx] = Token{ .ref = token };
            output_idx += 1;
        } else {
            // parse as operator or parens
            if (token.len != 1) {
                @compileError("unexpected token");
            }
            const char = token[0];
            output_buf[output_idx] = switch (char) {
                '*' => .@"*",
                '+' => .@"+",
                '-' => .@"-",
                '(' => .@"(",
                ')' => .@")",
                else => unreachable,
            };
            output_idx += 1;
        }
    }
    return output_buf[0..output_idx];
}

test "parse1" {
    const E = CompileExpression("x*(x+1)*y-3*y");
    const x: f64 = 1.234;
    const y: f64 = -3.456;
    try std.testing.expect(E.eval(.{ .x = x, .y = y }) == (x * (x + 1) * y - 3 * y));
}

test "parse2" {
    const E = CompileExpression("2*3");
    try std.testing.expect(E.eval(.{}) == (2 * 3));
}

// Malformed expression should trigger a compile error
// test "parse3" {
//     const E = CompileExpression("2*3 8");
//     try std.testing.expect(E.eval(.{}) == (2 * 3));
// }
--------------------------------------------------------------------------------
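
As a quick illustration of the interface shown above, here is a small hypothetical extra test in the style of `parse1`/`parse2` (not part of the repository; it assumes only the `CompileExpression`/`eval` API from src/main.zig and would be appended to that file). It shows that a single comptime-compiled expression type can be evaluated repeatedly with different runtime bindings:

```zig
// Hypothetical addition to src/main.zig: the expression is compiled once at
// comptime, then evaluated several times with different runtime values of x.
test "reuse a compiled expression" {
    const E = CompileExpression("(x + 2) * (x - 2)");
    var x: f64 = 1.0;
    while (x < 4.0) : (x += 1.0) {
        try std.testing.expect(E.eval(.{ .x = x }) == ((x + 2) * (x - 2)));
    }
}
```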