├── .gitignore ├── .idea └── workspace.xml ├── LICENSE ├── README.md ├── config ├── __init__.py └── config.py ├── handler ├── __init__.py └── trace.py ├── render.py ├── res ├── analysis_object.png ├── detail_stack.png └── render_result.png ├── sheet_analysis.py └── util ├── __init__.py ├── analysis_tracefile.py ├── convertutil.jar └── log.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Idea 10 | .idea/ 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | local_settings.py 59 | logs/ 60 | *.csv 61 | 62 | # Flask stuff: 63 | instance/ 64 | .webassets-cache 65 | 66 | # Scrapy stuff: 67 | .scrapy 68 | 69 | # Sphinx documentation 70 | docs/_build/ 71 | 72 | # PyBuilder 73 | target/ 74 | 75 | # Jupyter Notebook 76 | .ipynb_checkpoints 77 | 78 | # pyenv 79 | .python-version 80 | 81 | # celery beat schedule file 82 | celerybeat-schedule 83 | 84 | # SageMath parsed files 85 | *.sage.py 86 | 87 | # dotenv 88 | .env 89 | 90 | # virtualenv 91 | .venv 92 | venv/ 93 | ENV/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | -------------------------------------------------------------------------------- /.idea/workspace.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 
143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 163 | 164 | 165 | 167 | 168 | 187 | 188 | 189 | 190 | 191 | true 192 | DEFINITION_ORDER 193 | 194 | 195 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 234 | 235 | 236 | 237 | 240 | 241 | 244 | 245 | 246 | 247 | 250 | 251 | 254 | 255 | 258 | 259 | 260 | 261 | 264 | 265 | 268 | 269 | 272 | 273 | 274 | 275 | 278 | 279 | 282 | 283 | 286 | 287 | 288 | 289 | 292 | 293 | 296 | 297 | 300 | 301 | 302 | 303 | 304 | 305 | 306 | 307 | 308 | 309 | 310 | 311 | 312 | 313 | 314 | 315 | 316 | 317 | 318 | 319 | 320 | 321 | 338 | 339 | 356 | 357 | 374 | 375 | 385 | 386 | 405 | 406 | 407 | 408 | 409 | 422 | 423 | 436 | 437 | 454 | 455 | 467 | 468 | 469 | 470 | 471 | 472 | 473 | 474 | 475 | 476 | 477 | 478 | 497 | 498 | 517 | 518 | 539 | 540 | 562 | 563 | 587 | 588 | 589 | 590 | 591 | 592 | 593 | 594 | 595 | 596 | 597 | 598 | 599 | 600 | 601 | 603 | 604 | 605 | 606 | 607 | 608 | 609 | 1498630109071 610 | 614 | 615 | 616 | 617 | 618 | 619 | 620 | 621 | 622 | 623 | 624 | 625 | 626 | 627 | 628 | 629 | 630 | 631 | 632 | 633 | 634 | 635 | 638 | 641 | 642 | 643 | 645 | 646 | 647 | 648 | 649 | 650 | 651 | 652 | 653 | 655 | 656 | 657 | 659 | 660 | 661 | 662 | 663 | 664 | 665 | 666 | 667 | 668 | 669 | 670 | 671 | 672 | 673 | 674 | 675 | 676 | 677 | 678 | 679 | 680 | 681 | 682 | 683 | 684 | 685 | 686 | 687 | 688 | 689 | 690 | 691 | 692 | 693 | 694 | 695 | 696 | 697 | 698 | 699 | 700 | 701 | 702 | 703 | 704 | 705 | 706 | 707 | 708 | 709 | 710 | 711 | 712 | 713 | 714 | 715 | 716 | 717 | 718 | 719 | 720 | 721 | 722 | 723 | 724 | 725 | 726 | 727 | 728 | 729 | 730 | 731 | 732 | 733 | 734 | 735 | 736 | 737 | 738 | 739 | 740 | 741 | 742 | 743 | 744 | 745 | 746 | 747 | 748 | 749 | 750 | 751 | 752 | 753 | 754 | 755 | 756 | 757 | 758 | 759 | 760 | 761 | 762 | 763 | 
764 | 765 | 766 | 767 | 768 | 769 | 770 | 771 | 772 | 773 | 774 | 775 | 776 | 777 | 778 | 779 | 780 | 781 | 782 | 783 | 784 | 785 | 786 | 787 | 788 | 789 | 790 | 791 | 792 | 793 | 794 | 795 | 796 | 797 | 798 | 799 | 800 | 801 | 802 | 803 | 804 | 805 | 806 | 807 | 808 | 809 | 810 | 811 | 812 | 813 | 814 | 815 | 816 | 817 | 818 | 819 | 820 | 821 | 822 | 823 | 824 | 825 | 826 | 827 | 828 | 829 | 830 | 831 | 832 | 833 | 834 | 835 | 836 | 837 | 838 | 839 | 840 | 841 | 842 | 843 | 844 | 845 | 846 | 847 | 848 | 849 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 alex 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TraceAnalysis 2 | `traceview`分析工具(android) 3 | - 该工具可以把对应`trace`的方法耗时/调用堆栈以及线程关系给解析出来 4 | - 主要用于在两次版本迭代时,为了迅速找到新版本新增的方法以及异常的方法耗时,可以使用默认的`sheet_analysis.py`来生成报表 5 | 6 | ## 使用方式 7 | ### 前置操作 8 | 在`config.py`中配置基准`traceview`路径以及对比`traceview`路径,如果需要解混淆,配置对应`mapping`文件路径,目前已自带解`mapping`工具 9 | 10 | ### 结果输出 11 | 有两种方式 12 | 1. 通过`TraceHandler(...).anti_mapping.analysis.get("dict")`可以获取解析结果,结果是一个`dict/json`,格式为 13 | ```json 14 | { 15 | "inclusive": "xx", 16 | "exclusive": "xx", 17 | "method_thread": "xx", 18 | "theads_pid": "xx", 19 | "call_times": "xx", 20 | "costs": "xx", 21 | "sorted_dic": "xx" 22 | } 23 | 24 | ``` 25 | 26 | 27 | 2. 使用默认的报表对比方法,该方法直接对比两个trace文件的方法,过滤出新增的方法,以及同一个方法的耗时差对比,入口为`sheet_analysis.py`,通过`render.py`来将结果渲染成csv文件 28 | 29 | ### 注意事项 30 | 1. `config.py`的`WATCH_MODULES`可以用来设置需要关注的模块,采用此配置可以在解析中过滤出我们不需要关心的包或者模块中的方法 31 | 2. 
`config.py`的`MAPPING_FILE`用于配置反混淆mapping(如果不配置,则不会反混淆),默认采用`convertutil.jar`来生成混淆前的`trace`文件;如需修改反混淆方式,可调整`TraceHandler`中的`anti_mapping`函数 32 | 33 | 34 | -------------------------------------------------------------------------------- /config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexknight/TraceAnalysis/8f549b6f0d969c0b50993929a1b136b1d58f93ce/config/__init__.py -------------------------------------------------------------------------------- /config/config.py: -------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | import os 3 | 4 | # 基准trace文件路径 5 | BASE_TRACES = "/tmp/data/475175/trace/StepBeforeFirstDraw.trace" 6 | 7 | # 当前trace文件路径 8 | COMPARE_TRACES = "/tmp/data/475176/trace/StepBeforeFirstDraw.trace" 9 | 10 | # 反混淆工具,自带 11 | ANTI_CONFUSE_TOOL = os.path.join(os.path.dirname(os.getcwd()), "util/convertutil.jar") 12 | 13 | # 反混淆的mapping文件地址 14 | MAPPING_FILE = None 15 | 16 | # 解析的最大记录值 17 | MAX_SAVE_RECORDS = 5000 18 | 19 | # 关注的模块 20 | # WATCH_MODULES = "com.(alipay.mobile.command|taobao|uc.(addon|annotation|application|base|browser|business|common|config|external|framework|jni|lowphone|model|search|security|shenma|shopping|stat|svg|syslinsener|wa)|UCMobile.(desktopwidget|jnibridge|main|model|receivers|service|shellassetsres|wxapi)|ucweb.activity)" 21 | WATCH_MODULES = None 22 | 23 | # 输出日志方式(debug/online) 24 | LOG_LEVEL = "debug" 25 | -------------------------------------------------------------------------------- /handler/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexknight/TraceAnalysis/8f549b6f0d969c0b50993929a1b136b1d58f93ce/handler/__init__.py -------------------------------------------------------------------------------- /handler/trace.py: -------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | 
import json 3 | import os 4 | from util import log 5 | from util.analysis_tracefile import TraceDump, sortInclThreadCost 6 | 7 | logger = log.logger() 8 | 9 | 10 | class TraceHandler(object): 11 | def __init__(self, trace_path, mapping_path=None, convert_jar=None, mode="exclusive"): 12 | """traceview解析的工具类 13 | :params convert_jar: convert_jar文件 14 | :params trace_path: traceview文件 15 | :params trace_root: traceview文件所在路径 16 | :params trace_name: traceview文件的名字 17 | :params mapping_path: mapping文件路径 18 | :params out_path: traceview反混淆文件输出的路径 19 | """ 20 | self.trace_dump = TraceDump() 21 | self.convert_jar = convert_jar 22 | self.trace_path = trace_path 23 | self.trace_root, self.trace_name = os.path.split(trace_path) 24 | self.mapping_path = mapping_path 25 | self.mode = mode 26 | self.analysis_results = None 27 | 28 | @property 29 | def anti_mapping(self): 30 | """反混淆操作""" 31 | 32 | if self.convert_jar is None: 33 | return self 34 | 35 | if self.mapping_path is None: 36 | return self 37 | 38 | convert_cmd = "java -jar {0:s} -i {1:s} -m {2:s} -o {3:s}".format(self.convert_jar, self.trace_path, 39 | self.mapping_path, self.trace_path) 40 | logger.info("convert_cmd: " + convert_cmd) 41 | 42 | try: 43 | os.system(convert_cmd) 44 | except Exception as e: 45 | raise Exception("trace文件解混淆失败: " + str(e)) 46 | return self 47 | 48 | @property 49 | def analysis(self, sort=True): 50 | """结果格式为 51 | { 52 | "inclusive": final_inclusive_time_result, 53 | "exclusive": final_exclusive_time_result, 54 | "method_thread": method_thread, 55 | "theads_pid": theads_pid, 56 | "call_times": call_times_result, 57 | "costs": costs, 58 | "sorted_dic": sorted_dic 59 | } 60 | 61 | """ 62 | self.analysis_results = self.trace_dump.analysisTraceFile(self.trace_path, self.mode) 63 | sorted_dic = sortInclThreadCost(self.analysis_results, self.mode, sort) 64 | self.analysis_results['sorted_dic'] = sorted_dic 65 | return self 66 | 67 | def get(self, resp="json"): 68 | if resp == "json": 69 | return 
json.dumps(self.analysis_results) 70 | elif resp == "dict": 71 | return self.analysis_results 72 | -------------------------------------------------------------------------------- /render.py: -------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | 3 | import collections 4 | import csv 5 | import os 6 | from util.log import logger 7 | 8 | logger = logger() 9 | 10 | 11 | class Template(object): 12 | def __init__(self, 13 | base_dic, cmp_dic, 14 | base_cost, cmp_cost, 15 | base_call_times, cmp_call_times, 16 | base_method_thread, cmp_method_thread, 17 | base_theads_pid, cmp_theads_pid): 18 | self.base = base_dic # base_sorted_dic, cmp_sorted_dic, base_cost, cmp_cost,base_call_times,cmp_call_times 19 | self.cmp = cmp_dic 20 | self.base_cost = base_cost 21 | self.cmp_cost = cmp_cost 22 | self.base_call_times = base_call_times 23 | self.cmp_call_times = cmp_call_times 24 | self.order_base_dic = collections.OrderedDict() 25 | self.order_cmp_dic = collections.OrderedDict() 26 | self.order_base_keys, self.order_base_values = self.initObjDatas(self.base, self.order_base_dic) 27 | self.order_cmp_keys, self.order_cmp_values = self.initObjDatas(self.cmp, self.order_cmp_dic) 28 | self.base_method_thread = base_method_thread 29 | self.cmp_method_thread = cmp_method_thread 30 | self.base_theads_pid = base_theads_pid 31 | self.cmp_theads_pid = cmp_theads_pid 32 | 33 | def initObjDatas(self, obj, init_obj): 34 | _keys = [] 35 | _values = [] 36 | for each in obj: 37 | init_obj[each[0]] = each[1] 38 | for _k, _v in init_obj.items(): 39 | _keys.append(_k) 40 | _values.append(_v) 41 | return _keys, _values 42 | 43 | def generateTable(self, path, rows, data): 44 | if os.path.isfile(path): 45 | os.remove(path) 46 | csvfile = file(path, "wb") 47 | writer = csv.writer(csvfile) 48 | # writer.writerow(rows) 49 | writer.writerows(data) 50 | csvfile.close() 51 | 52 | def searchDictList(self, orderDict): 53 | keys = [] 54 | values = [] 55 | for k, 
v in orderDict.items(): 56 | keys.append(k) 57 | values.append(v) 58 | return keys, values 59 | 60 | def generateTableData(self, path, rows): 61 | ''' ['调用方法','隶属线程', '线程PID', '基准分支排名', '对比分支排名', '基准分支方法耗时', '对比分支方法耗时', 62 | '耗时差(对比分支-基准分支)', '耗时上涨比例(%)', '基准分支方法调用次数','对比分支方法调用次数','方法耗时排名变化'] ''' 63 | logger.debug("self.cmp_cost:\n" + str(self.cmp_cost)) 64 | logger.debug("self.base_cost:\n" + str(self.base_cost)) 65 | if self.base_cost != 0: 66 | ratio = format(float(self.cmp_cost - self.base_cost) / float(self.base_cost), '.2%') 67 | else: 68 | ratio = self.cmp_cost 69 | data = [] 70 | add_rows = rows 71 | add_rows[0] = add_rows[0] + "- 系数: " + str(ratio) 72 | add_flag = 0 73 | for cmp_obj in self.order_cmp_keys: 74 | ''' 当cmp_obj有新增方法时 ''' 75 | if cmp_obj not in self.order_base_keys: 76 | add_flag = 1 77 | method = cmp_obj 78 | base_index = "-" 79 | cmp_index = self.order_cmp_keys.index(cmp_obj) 80 | base_time = 0 81 | cmp_time = self.order_cmp_values[cmp_index] 82 | cmp_call_times = self.cmp_call_times[cmp_obj] if self.cmp_call_times.has_key(cmp_obj) else "-" 83 | if self.cmp_method_thread.has_key(cmp_obj): 84 | cmp_thread = self.cmp_method_thread[cmp_obj] 85 | self.cmp_method_thread.pop(cmp_obj) 86 | else: 87 | cmp_thread = "-" 88 | base_call_times = 0 89 | diff = cmp_time 90 | rate = format(float(1), '.2%') 91 | rank_change = cmp_index 92 | content = ( 93 | method, str(cmp_thread), str(base_index), str(cmp_index), str(base_time), str(cmp_time), str(diff), 94 | str(rate), str(base_call_times), str(cmp_call_times), str(rank_change)) 95 | data.append(content) 96 | if add_flag == 1: 97 | data.insert(0, add_rows) 98 | rows[0] = rows[0] + "- 系数: " + str(ratio) 99 | data.append(rows) 100 | for base_obj in self.order_base_keys: 101 | method = base_obj 102 | base_index = self.order_base_keys.index(base_obj) # 获取base_key的排名 103 | if base_obj in self.order_cmp_keys: 104 | cmp_index = self.order_cmp_keys.index(base_obj) # 当base_obj方法还在cmp_obj方法中 105 | base_call_times = 
self.base_call_times[base_obj] if self.base_call_times.has_key(base_obj) else "-" 106 | cmp_call_times = self.cmp_call_times[base_obj] if self.cmp_call_times.has_key(base_obj) else "-" 107 | else: 108 | cmp_index = "-" # 当base_obj方法在cmp_obj已经删减 109 | base_call_times = self.base_call_times[base_obj] if self.base_call_times.has_key(base_obj) else "-" 110 | cmp_call_times = 0 111 | if self.base_method_thread.has_key(base_obj): 112 | base_thread = self.base_method_thread[base_obj] 113 | self.base_method_thread.pop(base_obj) 114 | else: 115 | base_thread = "-" 116 | 117 | base_time = self.order_base_values[base_index] 118 | if cmp_index == "-": 119 | cmp_time = 0 120 | rank_change = base_index 121 | else: 122 | cmp_time = self.order_cmp_values[cmp_index] 123 | rank_change = base_index - cmp_index 124 | diff = cmp_time - base_time 125 | try: 126 | rate = format(float(diff) / float(base_time), '.2%') # -100%:代表base_obj方法在cmp_obj已经删减的比率 127 | except Exception as e: 128 | rate = "error" 129 | content = ( 130 | method, str(base_thread), str(base_index), str(cmp_index), str(base_time), str(cmp_time), str(diff), 131 | str(rate), str(base_call_times), str(cmp_call_times), str(rank_change)) 132 | data.append(content) 133 | self.generateTable(path, rows, data) 134 | logger.debug("self.base_cost-self.cmp_cost:\n" + str(self.base_cost - self.cmp_cost)) 135 | logger.debug("self.base_method_thread:\n" + str(self.base_method_thread)) 136 | logger.debug("self.cmp_method_thread:\n" + str(self.cmp_method_thread)) 137 | -------------------------------------------------------------------------------- /res/analysis_object.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexknight/TraceAnalysis/8f549b6f0d969c0b50993929a1b136b1d58f93ce/res/analysis_object.png -------------------------------------------------------------------------------- /res/detail_stack.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexknight/TraceAnalysis/8f549b6f0d969c0b50993929a1b136b1d58f93ce/res/detail_stack.png -------------------------------------------------------------------------------- /res/render_result.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexknight/TraceAnalysis/8f549b6f0d969c0b50993929a1b136b1d58f93ce/res/render_result.png -------------------------------------------------------------------------------- /sheet_analysis.py: -------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | import os 3 | 4 | from config import config 5 | from handler.trace import TraceHandler 6 | from render import Template 7 | from util import log 8 | 9 | logger = log.logger() 10 | 11 | csv_name = "result.csv" 12 | csv_path = os.path.join(os.getcwd(), csv_name) 13 | 14 | table_rows = ['调用方法', '隶属线程', '基准分支排名', '对比分支排名', '基准分支方法耗时', '对比分支方法耗时', 15 | '耗时差(对比分支-基准分支)', '耗时上涨比例(%)', '基准分支方法调用次数', '对比分支方法调用次数', '方法耗时排名变化'] 16 | 17 | 18 | def run(): 19 | # 获取基准traceview的解析结果 20 | logger.info(u"csv日志结果清理") 21 | if os.path.exists(csv_path): 22 | os.remove(csv_path) 23 | logger.info(u"开始解析: " + config.BASE_TRACES) 24 | base_results = TraceHandler( 25 | config.BASE_TRACES, 26 | convert_jar=config.ANTI_CONFUSE_TOOL, 27 | mapping_path=config.MAPPING_FILE).anti_mapping.analysis.get("dict") 28 | 29 | # 获取当前traceview的解析结果 30 | logger.info(u"开始解析: " + config.BASE_TRACES) 31 | # r = json.loads(base_results) 32 | compare_results = TraceHandler( 33 | config.COMPARE_TRACES, 34 | convert_jar=config.ANTI_CONFUSE_TOOL, 35 | mapping_path=config.MAPPING_FILE).anti_mapping.analysis.get("dict") 36 | 37 | # 生成csv结果 38 | template = Template( 39 | base_results["sorted_dic"], compare_results["sorted_dic"], 40 | base_results["costs"], compare_results["costs"], 41 | base_results["call_times"], 
compare_results["call_times"], 42 | base_results["method_thread"], compare_results["method_thread"], 43 | base_results["theads_pid"], compare_results["theads_pid"] 44 | ) 45 | 46 | table_rows[0] += csv_name 47 | 48 | template.generateTableData(csv_path, table_rows) 49 | logger.info(u"结果已生成: " + csv_name) 50 | 51 | 52 | if __name__ == '__main__': 53 | run() 54 | -------------------------------------------------------------------------------- /util/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexknight/TraceAnalysis/8f549b6f0d969c0b50993929a1b136b1d58f93ce/util/__init__.py -------------------------------------------------------------------------------- /util/analysis_tracefile.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | import re 5 | import sys 6 | 7 | from config import config 8 | from log import logger 9 | 10 | sys.path.append("../") 11 | 12 | logger = logger() 13 | 14 | MAX_RECORD = int(config.MAX_SAVE_RECORDS) 15 | 16 | WATCHER = config.WATCH_MODULES or "" 17 | 18 | 19 | class TraceDump(object): 20 | def __init__(self): 21 | self.traceFileName = "" 22 | self.traceThreadName = "" 23 | 24 | def getTraceFileName(self): 25 | return self.traceFileName 26 | 27 | def setTraceFileName(self, fileName): 28 | self.traceFileName = fileName 29 | 30 | def analysisExclusiveTime(self, fileName): 31 | try: 32 | flag = False 33 | excl_record = [] 34 | f = open(fileName, 'r') 35 | for line_content in f: 36 | if flag: 37 | if "Inclusive elapsed times for each method" in line_content: 38 | break 39 | excl_record.append(line_content) 40 | else: 41 | if "Exclusive elapsed times for each method" in line_content: 42 | flag = True 43 | except IOError: 44 | logger.error("The file don't exist, Please double check!") 45 | 46 | regex = '(\d+?)\s+?([\d\.]+?)\s+?([\d\.]+?)\s+?\[(\d+?)\]\s+?(.+?)$' 47 | p = 
re.compile(regex) 48 | method_dict = {} 49 | excl_data = {} 50 | for each_excl in excl_record: 51 | result = p.findall(each_excl) 52 | if len(result) != 0: 53 | result = result[0] 54 | method_dict[result[3]] = result[4] 55 | tmp_method = self.trimPreStr(result[4].replace(" ", "").replace("/", "."), '.') 56 | if re.search(WATCHER, tmp_method) is not None: 57 | excl_data[tmp_method] = int(result[0]) 58 | return method_dict, excl_data 59 | 60 | def analysisInclusiveTime(self, fileName): 61 | try: 62 | flag = False 63 | incl_record = [] 64 | each_method = [] 65 | f = open(fileName, 'r') 66 | for line_content in f: 67 | if flag: 68 | if "Exclusive elapsed time for each class" in line_content: 69 | break 70 | if "---------------------------------------------------" in line_content: 71 | incl_record.append(each_method) 72 | each_method = [] 73 | else: 74 | each_method.append(line_content) 75 | else: 76 | if "Inclusive elapsed times for each method" in line_content: 77 | flag = True 78 | except IOError: 79 | logger.error("The file don't exist, Please double check!") 80 | 81 | regex_self = '\[(\d+?)\]\s+?([\d\.]+?)%\s+?([\d\+]+?)\s+?(\d+?)\s+?(.+?)$' 82 | regex_other = '([\d\.]+?)%\s+?\[(\d+?)\]\s+?([\d\/]+?)\s+?(\d+?)\s+?(.+?)$' 83 | p_self = re.compile(regex_self) 84 | p_other = re.compile(regex_other) 85 | 86 | parent_children = {} 87 | incl_data = {} 88 | for each_incl_record in incl_record: 89 | flag = 0 90 | index = 0 91 | method_dict = {0: [], 1: []} 92 | for each_incl in each_incl_record: 93 | result = p_other.findall(each_incl) 94 | if len(result) != 0: 95 | result = result[0] 96 | tmp_method = self.trimPreStr(result[4].replace(" ", "").replace("/", "."), '.') 97 | method_dict[flag].append(tmp_method) 98 | if re.search(WATCHER, tmp_method) is not None: 99 | if tmp_method not in incl_data.keys(): 100 | incl_data[tmp_method] = int(result[3]) 101 | else: 102 | if incl_data[tmp_method] < int(result[3]): 103 | incl_data[tmp_method] = int(result[3]) 104 | else: 105 | 
result = p_self.findall(each_incl) 106 | if len(result) != 0: 107 | result = result[0] 108 | tmp_method = self.trimPreStr(result[4].replace(" ", "").replace("/", "."), '.') 109 | # print tmp_method 110 | # if re.search(WATCHER, tmp_method) is not None: 111 | # incl_data[tmp_method] = int(result[3]) 112 | flag = 1 113 | index = tmp_method 114 | parent_children[index] = method_dict 115 | return parent_children, incl_data 116 | 117 | def analysisCallTimes(self, fileName): 118 | try: 119 | flag = False 120 | call_record = [] 121 | f = open(fileName, 'r') 122 | for line_content in f: 123 | if flag: 124 | call_record.append(line_content) 125 | else: 126 | if "Exclusive elapsed time for each method" in line_content: 127 | flag = True 128 | except IOError: 129 | logger.error("The file don't exist, Please double check!") 130 | 131 | regex = '(\d+?)\s+?([\d\.]+?)\s+?([\d\.]+?)\s+?([\d\.]+?)\s+?(\d+?)\+(\d+?)\s+?\[(\d+?)\]\s+?(.+?)$' 132 | p = re.compile(regex) 133 | call_data = {} 134 | for each_excl in call_record: 135 | result = p.findall(each_excl) 136 | if len(result) != 0: 137 | result = result[0] 138 | tmp_method = self.trimPreStr(result[7].replace(" ", "").replace("/", "."), '.') 139 | if re.search(WATCHER, tmp_method) is not None: 140 | call_data[tmp_method] = int(result[4]) + int(result[5]) 141 | return call_data 142 | 143 | def analysisTheadTime(self, traceThreadName): 144 | try: 145 | flag = True 146 | regex_thread = '(\d+?)\s+?(.+?)$' 147 | regex_action = '(\d+?)\s+?([a-z]+?)\s+?(\d+?)[\s-]+?(.+?)$' 148 | p_thread = re.compile(regex_thread) 149 | p_action = re.compile(regex_action) 150 | 151 | threads = {} 152 | thread_time = {} 153 | method_thread = {} 154 | tmp = {} 155 | f = open(traceThreadName, 'r') 156 | for line_content in f: 157 | if flag: 158 | if "Trace (threadID action usecs class.method signature):" in line_content: 159 | flag = False 160 | else: 161 | result = p_thread.findall(line_content) 162 | if len(result) != 0: 163 | result = result[0] 164 | 
threads[result[0]] = result[1] 165 | else: 166 | result = p_action.findall(line_content) 167 | if len(result) != 0: 168 | result = result[0] 169 | # 将方法中多余多空格去掉,将 / 转成 . , 170 | # 并且把前面用于表现层级关系的 . 去掉 171 | tmp_method = self.trimPreStr(result[3].replace(" ", "").replace("/", "."), '.') 172 | 173 | if result[0] not in threads.keys(): 174 | continue 175 | if not (threads[result[0]] in thread_time.keys()): 176 | thread_time[threads[result[0]]] = 0 177 | thread_time[threads[result[0]]] = int(result[2]) 178 | if re.search(WATCHER, tmp_method) is not None: 179 | if not (tmp_method in method_thread.keys()): 180 | method_thread[tmp_method] = threads[result[0]] 181 | if result[0] in tmp.keys(): 182 | if len(tmp[result[0]]) != 0: 183 | last_action = tmp[result[0]][-1] 184 | if result[1] == "xit" and result[3] == last_action[2]: 185 | thread_time[threads[result[0]]] += int(result[2]) - int(last_action[1]) 186 | tmp[result[0]].pop() 187 | else: 188 | if result[1] == "ent": 189 | tmp[result[0]].append((result[1], result[2], result[3])) 190 | else: 191 | if result[1] == "ent": 192 | tmp[result[0]] = [(result[1], result[2], result[3])] 193 | 194 | return {value: key for key, value in threads.items()}, thread_time, method_thread 195 | except IOError: 196 | logger.error("The file don't exist, Please double check!") 197 | 198 | # 去掉前置的 . 
199 | def trimPreStr(self, s, opt): 200 | length = len(s) 201 | i = 0 202 | while i < length: 203 | if s[i] == opt: 204 | i += 1 205 | else: 206 | break 207 | return s[i:length] 208 | 209 | def analysisTraceFile(self, fileName, mode): 210 | txt_file_name_g = fileName[:fileName.rfind('.')] + "_g.txt" 211 | if not os.path.exists(txt_file_name_g): 212 | os.system('dmtracedump -g a.png ' + fileName.replace("(", "\(").replace(")", "\)") + ' > ' 213 | + txt_file_name_g.replace("(", "\(").replace(")", "\)")) 214 | # 求exclusive time 215 | method_dict, exclusive_time_result = self.analysisExclusiveTime(txt_file_name_g) 216 | 217 | exclusive_time_items = sorted(exclusive_time_result.items(), lambda x, y: cmp(x[1], y[1]), reverse=True) 218 | exclusive_time_result = {} 219 | max_record = min(len(exclusive_time_items), MAX_RECORD) 220 | for i in range(0, max_record): 221 | exclusive_time_result[exclusive_time_items[i][0]] = exclusive_time_items[i][1] 222 | 223 | # 求inclusive time 224 | parent_children, inclusive_time_result = self.analysisInclusiveTime(txt_file_name_g) 225 | inclusive_time_items = sorted(inclusive_time_result.items(), lambda x, y: cmp(x[1], y[1]), reverse=True) 226 | 227 | stack_hash_record = {} 228 | max_record = min(len(inclusive_time_items), 3) 229 | for i in range(0, max_record): 230 | stack_hash_record[i] = inclusive_time_items[i][0] 231 | 232 | max_record = min(len(inclusive_time_items), MAX_RECORD) 233 | inclusive_time_result = {} 234 | for i in range(0, max_record): 235 | inclusive_time_result[inclusive_time_items[i][0]] = inclusive_time_items[i][1] 236 | 237 | # 求调用次数和递归调用次数总和 238 | call_times_result = self.analysisCallTimes(txt_file_name_g) 239 | call_times_items = sorted(call_times_result.items(), lambda x, y: cmp(x[1], y[1]), reverse=True) 240 | call_times_result = {} 241 | max_record = min(len(call_times_items), MAX_RECORD) 242 | for i in range(0, max_record): 243 | call_times_result[call_times_items[i][0]] = call_times_items[i][1] 244 | 245 | # 
分析线程 246 | txt_file_thread_o = fileName[:fileName.rfind('.')] + "_o.txt" 247 | if not os.path.exists(txt_file_thread_o): 248 | os.system('dmtracedump -o ' + fileName.replace("(", "\(").replace(")", "\)") + ' > ' 249 | + txt_file_thread_o.replace("(", "\(").replace(")", "\)")) 250 | # method_stack,threads = self.analysiscCycleFunction(txt_file_thread_o) 251 | theads_pid, thead_time_result, method_thread = self.analysisTheadTime(txt_file_thread_o) 252 | thead_time_items = sorted(thead_time_result.items(), lambda x, y: cmp(x[1], y[1]), reverse=True) 253 | len_theads = len(thead_time_items) 254 | len_theads = min(len_theads, MAX_RECORD) 255 | thead_time_result = {} 256 | for i in range(0, len_theads): 257 | thead_time_result[thead_time_items[i][0]] = thead_time_items[i][1] 258 | 259 | final_inclusive_time_result, inclusive_costs = self.render_data(inclusive_time_result, parent_children, 260 | method_dict) 261 | final_exclusive_time_result, exclusive_costs = self.render_data(exclusive_time_result, parent_children, 262 | method_dict) 263 | 264 | final_thead_time_result = [] 265 | 266 | for key, value in thead_time_result.items(): 267 | if key in theads_pid.keys(): 268 | final_thead_time_result.append({"name": theads_pid[key], "time": value}) 269 | 270 | if mode == "inclusive": 271 | costs = inclusive_costs 272 | else: 273 | costs = exclusive_costs 274 | 275 | final_result = { 276 | "inclusive": final_inclusive_time_result, 277 | "exclusive": final_exclusive_time_result, 278 | "method_thread": method_thread, 279 | "theads_pid": theads_pid, 280 | "call_times": call_times_result, 281 | "costs": costs 282 | } 283 | 284 | return final_result 285 | 286 | def render_data(self, data, parent_children, methods): 287 | result = [] 288 | cost_time = 0 289 | for key, value in data.items(): 290 | 291 | if "aerie" in key: 292 | continue 293 | one_record = {} 294 | one_record['name'] = key 295 | one_record['time'] = value 296 | cost_time = cost_time + value 297 | parents = [] 298 | 
children = [] 299 | if key in parent_children.keys(): 300 | for p_c in parent_children[key][0]: 301 | parents.append(p_c) 302 | for p_c in parent_children[key][1]: 303 | children.append(p_c) 304 | one_record['parents'] = parents 305 | one_record['children'] = children 306 | result.append(one_record) 307 | return result, cost_time 308 | 309 | 310 | def sortInclThreadCost(result_dic, mode, sort): 311 | before_sort = {} 312 | sorted_tmp_dic = {} 313 | sorted_dic = {} 314 | for each_result in result_dic[mode]: 315 | before_sort[each_result["name"]] = each_result["time"] 316 | sorted_tmp_dic = sorted(before_sort.iteritems(), key=lambda d: d[1], reverse=True) 317 | # for key,value in sorted_tmp_dic: 318 | # sorted_dic[key] = value 319 | if sort == False: 320 | return before_sort 321 | return sorted_tmp_dic 322 | -------------------------------------------------------------------------------- /util/convertutil.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexknight/TraceAnalysis/8f549b6f0d969c0b50993929a1b136b1d58f93ce/util/convertutil.jar -------------------------------------------------------------------------------- /util/log.py: -------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | import os 3 | import logging 4 | import logging.config as log_conf 5 | 6 | from config import config 7 | 8 | log_dir = os.path.dirname(os.path.dirname(__file__))+'/logs' 9 | if not os.path.exists(log_dir): 10 | os.mkdir(log_dir) 11 | 12 | log_path = os.path.join(log_dir, 'run.log') 13 | 14 | log_config = { 15 | 'version': 1.0, 16 | 'formatters': { 17 | 'detail': { 18 | 'format': '%(asctime)s - %(name)s - %(levelname)s - [%(filename)s:%(lineno)s]: %(message)s', 19 | 'datefmt': "%Y-%m-%d %H:%M:%S" 20 | }, 21 | 'simple': { 22 | 'format': '%(name)s - %(levelname)s - %(message)s', 23 | }, 24 | }, 25 | 'handlers': { 26 | 'console': { 27 | 'class': 'logging.StreamHandler', 
28 | 'level': 'DEBUG', 29 | 'formatter': 'detail' 30 | }, 31 | 'file': { 32 | 'class': 'logging.handlers.RotatingFileHandler', 33 | 'maxBytes': 1024 * 1024 * 5, 34 | 'backupCount': 10, 35 | 'filename': log_path, 36 | 'level': 'DEBUG', 37 | 'formatter': 'detail', 38 | 'encoding': 'utf-8', 39 | }, 40 | }, 41 | 'loggers': { 42 | 'online': { 43 | 'handlers': ['console', 'file'], 44 | 'level': 'INFO', 45 | }, 46 | 'debug': { 47 | 'handlers': ['console', 'file'], 48 | 'level': 'DEBUG', 49 | }, 50 | } 51 | } 52 | 53 | 54 | def logger(): 55 | log_conf.dictConfig(log_config) 56 | 57 | return logging.getLogger(config.LOG_LEVEL) 58 | --------------------------------------------------------------------------------