├── test ├── __init__.py ├── graph │ ├── __init__.py │ ├── parse │ │ ├── __init__.py │ │ ├── test_ddg_java.py │ │ ├── test_cfg_java.py │ │ ├── test_pdg_java.py │ │ └── test_parse.py │ ├── convert │ │ ├── __init__.py │ │ ├── test_cfg.py │ │ ├── test_ddg.py │ │ └── test_pdg.py │ ├── test_cdg.py │ ├── test_cfg.py │ ├── test_ddg.py │ ├── test_pdg.py │ ├── test_point.py │ ├── test_statement.py │ └── test_basic_block.py ├── decomposition │ ├── __init__.py │ ├── block │ │ ├── __init__.py │ │ └── extension │ │ │ └── __init__.py │ ├── variable │ │ └── __init__.py │ ├── test_slicing.py │ └── test_program_slice.py ├── file_manager │ ├── __init__.py │ ├── test_writer.py │ └── test_reader.py └── test_cli.py ├── integration_tests ├── __init__.py ├── files │ ├── expected_variable_slices.json │ ├── method_4.java │ ├── method_1.java │ ├── method_2.java │ ├── method_5.java │ ├── method_24.java │ ├── method_9.java │ ├── method_3.java │ ├── method_20.java │ ├── method_6.java │ ├── method_7.java │ ├── method_14.java │ ├── method_0.java │ ├── method_22.java │ ├── method_23.java │ ├── method_15.java │ ├── method_21.java │ ├── method_13.java │ ├── method_8.java │ ├── method_28.java │ ├── method_29.java │ ├── method_17.java │ ├── method_16.java │ ├── method_19.java │ ├── method_26.java │ ├── method_10.java │ ├── method_27.java │ ├── method_18.java │ ├── method_25.java │ ├── method_12.java │ ├── class_3.java │ ├── test.java │ ├── class_2.java │ ├── method_11.java │ ├── class_1.java │ ├── class_5.java │ └── class_4.java ├── integration_slicing.py ├── integration_block_slicing.py └── integration_variable_slicing.py ├── scripts └── performance_benchmarks │ ├── __init__.py │ ├── requirements.txt │ ├── README.md │ └── block_slices │ ├── dataset_BC.csv │ ├── check_perfomance.py │ └── dataset │ ├── Controller_createDirectories_100.0_71.0_136.0_168.java │ ├── MetaDataBuilder_emit_100.0_41.0_66.0_127.java │ └── SubCommonRdbmsWriter_fillPreparedStatementColumnType_100.0_43.0_34.0_138.java ├── 
requirements.txt ├── .gitmodules ├── setup.cfg ├── program_slicing ├── decomposition │ ├── block │ │ ├── __init__.py │ │ └── extension │ │ │ └── __init__.py │ ├── variable │ │ └── __init__.py │ ├── __init__.py │ └── slicing.py ├── file_manager │ ├── __init__.py │ ├── writer.py │ └── reader.py ├── __init__.py ├── graph │ ├── convert │ │ ├── __init__.py │ │ ├── ddg.py │ │ ├── pdg.py │ │ ├── cdg.py │ │ └── cfg.py │ ├── __init__.py │ ├── parse │ │ ├── tree_sitter_ast_java.py │ │ ├── cfg_java.py │ │ ├── ddg_java.py │ │ ├── pdg_java.py │ │ ├── __init__.py │ │ ├── tree_sitter_parsers.py │ │ └── parse.py │ ├── ddg.py │ ├── pdg.py │ ├── cfg.py │ ├── basic_block.py │ ├── point.py │ ├── statement.py │ └── cdg.py └── cli.py ├── .github └── workflows │ ├── anomaly-index.yml │ └── ci.yml ├── LICENSE ├── setup.py └── .gitignore /test/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/graph/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/graph/parse/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /integration_tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/decomposition/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/file_manager/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/test/graph/convert/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/decomposition/block/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /scripts/performance_benchmarks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/decomposition/variable/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/decomposition/block/extension/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | tree_sitter 2 | gitmodules 3 | networkx 4 | wheel 5 | -------------------------------------------------------------------------------- /scripts/performance_benchmarks/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.18.1 2 | tqdm==4.32.1 3 | -------------------------------------------------------------------------------- /integration_tests/files/expected_variable_slices.json: -------------------------------------------------------------------------------- 1 | { 2 | "class_7.java": [ 3 | ] 4 | } -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "vendor/tree-sitter-java"] 2 | path = vendor/tree-sitter-java 3 | url = 
https://github.com/tree-sitter/tree-sitter-java.git 4 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | ignore = W503, W504 6 | max-line-length = 120 7 | max-complexity = 10 8 | 9 | extensions = ['recommonmark'] 10 | 11 | [mypy] 12 | mypy_path = stubs 13 | ignore_missing_imports = True -------------------------------------------------------------------------------- /program_slicing/decomposition/block/__init__.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/27/09' 6 | 7 | import program_slicing.decomposition.block.slicing # noqa: F401 8 | -------------------------------------------------------------------------------- /program_slicing/decomposition/variable/__init__.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/27/09' 6 | 7 | import program_slicing.decomposition.variable.slicing # noqa: F401 8 | -------------------------------------------------------------------------------- /program_slicing/decomposition/block/extension/__init__.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/07/10' 6 | 7 | import program_slicing.decomposition.block.extension.slicing # noqa: F401 8 | -------------------------------------------------------------------------------- /program_slicing/file_manager/__init__.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | 
class CDGTestCase(TestCase):
    """Test suite for the Control Dependence Graph."""

    def test_entry_points(self) -> None:
        """Placeholder: entry point handling is not covered yet."""
"""
Program Slicing provides a set of code decomposition methods available via command line interface.
"""

__licence__ = 'MIT'
__author__ = 'kuyaki'
__credits__ = ['kuyaki']
__maintainer__ = 'kuyaki'
__date__ = '2021/03/22'
__version__ = '0.0.1'
class CFGTestCase(TestCase):
    """Test suite for Control Flow Graph conversions."""

    def test_convert_cfg_to_cdg(self) -> None:
        """Placeholder: CFG to CDG conversion is not covered yet."""

    def test_convert_cfg_to_ddg(self) -> None:
        """Placeholder: CFG to DDG conversion is not covered yet."""

    def test_convert_cfg_to_pdg(self) -> None:
        """Placeholder: CFG to PDG conversion is not covered yet."""
class ReaderTestCase(TestCase):
    """Test suite for the file_manager reader module."""

    def test_read_json(self) -> None:
        """Placeholder: JSON reading is not covered yet."""

    def test_read_file(self) -> None:
        """Placeholder: single-file reading is not covered yet."""

    def test_read_files(self) -> None:
        """Placeholder: multi-file reading is not covered yet."""

    def test_browse_file_sub_paths(self) -> None:
        """Placeholder: sub-path browsing is not covered yet."""
def parse(source_code: str) -> Tree:
    """
    Build a Tree Sitter AST from a source code string.
    :param source_code: string with the source code in it.
    :return: Tree Sitter AST.
    """
    # Tree Sitter consumes bytes, so encode the text before parsing.
    encoded_source = source_code.encode("utf8")
    return tree_sitter_parsers.java().parse(encoded_source)
17 | """ 18 | return convert.cdg.to_cfg(cdg_java.parse(source_code)) 19 | -------------------------------------------------------------------------------- /program_slicing/graph/parse/ddg_java.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/04/20' 6 | 7 | from program_slicing.graph.ddg import DataDependenceGraph 8 | from program_slicing.graph.parse import cdg_java 9 | from program_slicing.graph import convert 10 | 11 | 12 | def parse(source_code: str) -> DataDependenceGraph: 13 | """ 14 | Parse the source code string into a Data Dependence Graph. 15 | :param source_code: the string that should to be parsed. 16 | :return: Data Dependence Graph. 17 | """ 18 | return convert.cdg.to_ddg(cdg_java.parse(source_code)) 19 | -------------------------------------------------------------------------------- /program_slicing/graph/parse/pdg_java.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/04/20' 6 | 7 | from program_slicing.graph.pdg import ProgramDependenceGraph 8 | from program_slicing.graph.parse import cdg_java 9 | from program_slicing.graph import convert 10 | 11 | 12 | def parse(source_code: str) -> ProgramDependenceGraph: 13 | """ 14 | Parse the source code string into a Data Dependence Graph. 15 | :param source_code: the string that should to be parsed. 16 | :return: Data Dependence Graph. 
17 | """ 18 | return convert.cdg.to_pdg(cdg_java.parse(source_code)) 19 | -------------------------------------------------------------------------------- /.github/workflows/anomaly-index.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: A-Index 4 | 5 | on: 6 | # Triggers the workflow on push or pull request events but only for the main branch 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | workflow_dispatch: 13 | 14 | jobs: 15 | build: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | - uses: docker-practice/actions-setup-docker@master 21 | 22 | # Runs a set of commands using the runners shell 23 | - name: Calculate A-Index 24 | run: | 25 | docker run -v ${{ github.workspace }}:/${{ github.workspace }} alexbers/anomaly python3 prototype.py /${{ github.workspace }} 26 | -------------------------------------------------------------------------------- /program_slicing/file_manager/writer.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2020/05/19' 6 | 7 | import os 8 | import json 9 | from typing import Any, AnyStr 10 | 11 | 12 | def save_file(data: AnyStr, path: str) -> None: 13 | """ 14 | Save data to a file. 15 | :param data: data to save. 16 | :param path: string with the path to an output file. 17 | """ 18 | if not os.path.exists(os.path.dirname(path)): 19 | os.makedirs(os.path.dirname(path)) 20 | with open(path, 'w', encoding="utf8") as f: 21 | f.write(data) 22 | 23 | 24 | def save_json(data: Any, path: str) -> None: 25 | """ 26 | Save data to a JSON file. 27 | :param data: data to save. 28 | :param path: string with the path to an output file. 
29 | """ 30 | save_file(json.dumps(data, indent=4), path) 31 | -------------------------------------------------------------------------------- /integration_tests/integration_slicing.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'lyriccoder' 3 | __credits__ = ['lyriccoder, kuyaki'] 4 | __maintainer__ = 'lyriccoder' 5 | __date__ = '2021/11/08' 6 | 7 | import json 8 | 9 | 10 | def run_check(expected_emos, observable_emos): 11 | for filename, ex_emos in expected_emos.items(): 12 | print(filename, end="\t") 13 | observable_emos_set = set(observable_emos.get(filename)) 14 | expect_emos_set = set([tuple(x) for x in ex_emos]) 15 | message = "" 16 | found_obj = observable_emos_set.difference(expect_emos_set) 17 | if found_obj: 18 | message += f'Objects which were wrongly found: {json.dumps(tuple(sorted(found_obj)), indent=4)}\n' 19 | found_obj = expect_emos_set.difference(observable_emos_set) 20 | if found_obj: 21 | message += f'Objects which were not found: {json.dumps(tuple(sorted(found_obj)), indent=4)}\n' 22 | if message: 23 | raise Exception(message) 24 | print("OK") 25 | -------------------------------------------------------------------------------- /test/decomposition/test_slicing.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/03/22' 6 | 7 | from unittest import TestCase 8 | 9 | from program_slicing.decomposition import slicing 10 | from program_slicing.graph.parse import Lang 11 | 12 | 13 | class SlicingTestCase(TestCase): 14 | 15 | @staticmethod 16 | def __get_source_code_0(): 17 | return """ 18 | class A { 19 | void main() { 20 | int a = 0; 21 | int b = 10; 22 | a = b; 23 | b += a; 24 | } 25 | } 26 | """ 27 | 28 | def test_decompose_dir(self): 29 | pass 30 | 31 | def test_decompose_file(self): 32 | pass 33 | 34 | def 
class DataDependenceGraph(networkx.DiGraph):
    """
    Data Dependence Graph: a directed graph over Statements that also tracks
    a set of entry point Statements and a Statement-to-scope mapping.
    """

    def __init__(self) -> None:
        super().__init__()
        # Statements from which a traversal of the graph may begin.
        self.__roots: Set[Statement] = set()
        # Maps each Statement to the Statement that owns its scope.
        self.__scopes: Dict[Statement, Statement] = {}

    @property
    def entry_points(self) -> Set[Statement]:
        """Set of Statements registered as entry points."""
        return self.__roots

    @property
    def scope_dependency(self) -> Dict[Statement, Statement]:
        """Mapping from a Statement to its scope-owning Statement."""
        return self.__scopes

    def add_entry_point(self, root: Statement) -> None:
        """Register one more entry point Statement."""
        self.__roots.add(root)

    def set_scope_dependency(self, scope_dependency: Dict[Statement, Statement]) -> None:
        """Replace the scope dependency mapping with the given one."""
        self.__scopes = scope_dependency
class ProgramDependenceGraph(networkx.DiGraph):
    """
    Program Dependence Graph: a directed graph over Statements that also tracks
    a set of entry point Statements and a Statement-to-scope mapping.
    """

    def __init__(self) -> None:
        super().__init__()
        # Statements from which a traversal of the graph may begin.
        self.__roots: Set[Statement] = set()
        # Maps each Statement to the Statement that owns its scope.
        self.__scopes: Dict[Statement, Statement] = {}

    @property
    def entry_points(self) -> Set[Statement]:
        """Set of Statements registered as entry points."""
        return self.__roots

    @property
    def scope_dependency(self) -> Dict[Statement, Statement]:
        """Mapping from a Statement to its scope-owning Statement."""
        return self.__scopes

    def add_entry_point(self, root: Statement) -> None:
        """Register one more entry point Statement."""
        self.__roots.add(root)

    def set_scope_dependency(self, scope_dependency: Dict[Statement, Statement]) -> None:
        """Replace the scope dependency mapping with the given one."""
        self.__scopes = scope_dependency
initializeImage() { 2 | // http://bugs.eclipse.org/bugs/show_bug.cgi?id=18936 3 | if (!fImageInitialized) { 4 | initializeImages(); 5 | if (!isQuickFixableStateSet()) 6 | setQuickFixable(isProblem() && indicateQuixFixableProblems() && JavaCorrectionProcessor.hasCorrections(this)); // no light bulb for tasks 7 | if (isQuickFixable()) { 8 | if (JavaMarkerAnnotation.ERROR_ANNOTATION_TYPE.equals(getType())) 9 | fImage= fgQuickFixErrorImage; 10 | else 11 | fImage= fgQuickFixImage; 12 | } else { 13 | String type= getType(); 14 | if (JavaMarkerAnnotation.TASK_ANNOTATION_TYPE.equals(type)) 15 | fImage= fgTaskImage; 16 | else if (JavaMarkerAnnotation.INFO_ANNOTATION_TYPE.equals(type)) 17 | fImage= fgInfoImage; 18 | else if (JavaMarkerAnnotation.WARNING_ANNOTATION_TYPE.equals(type)) 19 | fImage= fgWarningImage; 20 | else if (JavaMarkerAnnotation.ERROR_ANNOTATION_TYPE.equals(type)) 21 | fImage= fgErrorImage; 22 | } 23 | fImageInitialized= true; 24 | } 25 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Anton 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
class DDGJavaTestCase(TestCase):
    """Tests for parsing Java source code into a Data Dependence Graph."""

    def test_parse(self):
        # A method with a loop, two branches, continue and break gives the
        # DDG builder non-trivial control and data flow to process.
        source_code = """
        class A {
            public static int main() {
                int n = 10;
                for(int i = 0; i < n; i += 1) {
                    if (i < 4) {
                        System.out.println("lol");
                        continue;
                    }
                    if (i > 6) {
                        System.out.println("che bu rek");
                        break;
                    }
                    else
                        System.out.println("kek");
                }
                return n;
            }
        }
        """
        ddg = parse(source_code)
        roots = ddg.entry_points
        # A single method is expected to yield exactly one entry point.
        self.assertIsNotNone(roots)
        self.assertEqual(1, len(roots))
13 | 14 | def test_parse(self) -> None: 15 | source_code = """ 16 | class A { 17 | public static int main() { 18 | int n = 10; 19 | for(int i = 0; i < n; i += 1) { 20 | if (i < 4) { 21 | System.out.println("lol"); 22 | continue; 23 | } 24 | if (i > 6) { 25 | System.out.println("che bu rek"); 26 | break; 27 | } 28 | else 29 | System.out.println("kek"); 30 | } 31 | return n; 32 | } 33 | } 34 | """ 35 | cfg = parse(source_code) 36 | roots = cfg.entry_points 37 | self.assertIsNotNone(roots) 38 | self.assertEqual(1, len(roots)) 39 | -------------------------------------------------------------------------------- /test/graph/parse/test_pdg_java.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/04/22' 6 | 7 | from unittest import TestCase 8 | 9 | from program_slicing.graph.parse.pdg_java import parse 10 | 11 | 12 | class PDGJavaTestCase(TestCase): 13 | 14 | def test_parse(self) -> None: 15 | source_code = """ 16 | class A { 17 | public static int main() { 18 | int n = 10; 19 | for(int i = 0; i < n; i += 1) { 20 | if (i < 4) { 21 | System.out.println("lol"); 22 | continue; 23 | } 24 | if (i > 6) { 25 | System.out.println("che bu rek"); 26 | break; 27 | } 28 | else 29 | System.out.println("kek"); 30 | } 31 | return n; 32 | } 33 | } 34 | """ 35 | pdg = parse(source_code) 36 | roots = pdg.entry_points 37 | self.assertIsNotNone(roots) 38 | self.assertEqual(1, len(roots)) 39 | -------------------------------------------------------------------------------- /integration_tests/files/method_1.java: -------------------------------------------------------------------------------- 1 | private Object safeCreateGui(MUIElement element) { 2 | // Obtain the necessary parent widget 3 | Object parent = null; 4 | MUIElement parentME = element.getParent(); 5 | if (parentME == null) 6 | parentME = (MUIElement) ((EObject) 
element).eContainer(); 7 | if (parentME != null) { 8 | AbstractPartRenderer renderer = getRendererFor(parentME); 9 | if (renderer != null) { 10 | if (!element.isVisible()) { 11 | parent = getLimboShell(); 12 | } else { 13 | parent = renderer.getUIContainer(element); 14 | } 15 | } 16 | } 17 | 18 | // Obtain the necessary parent context 19 | IEclipseContext parentContext = null; 20 | if (element.getCurSharedRef() != null) { 21 | MPlaceholder ph = element.getCurSharedRef(); 22 | parentContext = getContext(ph.getParent()); 23 | } else if (parentContext == null && element.getParent() != null) { 24 | parentContext = getContext(element.getParent()); 25 | } else if (parentContext == null && element.getParent() == null) { 26 | parentContext = getContext((MUIElement) ((EObject) element) 27 | .eContainer()); 28 | } 29 | 30 | return safeCreateGui(element, parent, parentContext); 31 | } -------------------------------------------------------------------------------- /integration_tests/files/method_2.java: -------------------------------------------------------------------------------- 1 | public static String getFunctionName(AST ast) { 2 | assert isCast(ast); 3 | AST operator = AstUtil.findChildOfType(ast, CPPTokenTypes.LITERAL_OPERATOR); 4 | if( operator == null ) { 5 | // error in AST 6 | return "operator ???"; // NOI18N 7 | } 8 | StringBuilder sb = new StringBuilder(operator.getText()); 9 | sb.append(' '); 10 | begin: 11 | for( AST next = operator.getNextSibling(); next != null; next = next.getNextSibling() ) { 12 | switch( next.getType() ) { 13 | case CPPTokenTypes.CSM_TYPE_BUILTIN: 14 | case CPPTokenTypes.CSM_TYPE_COMPOUND: 15 | sb.append(' '); 16 | addTypeText(next, sb); 17 | break; 18 | case CPPTokenTypes.CSM_PTR_OPERATOR: 19 | addTypeText(next, sb); 20 | break; 21 | case CPPTokenTypes.LPAREN: 22 | break begin; 23 | case CPPTokenTypes.AMPERSAND: 24 | case CPPTokenTypes.STAR: 25 | case CPPTokenTypes.LITERAL_const: 26 | case CPPTokenTypes.LITERAL___const: 27 | case 
CPPTokenTypes.LITERAL___const__: 28 | sb.append(next.getText()); 29 | break; 30 | default: 31 | sb.append(' '); 32 | sb.append(next.getText()); 33 | } 34 | } 35 | return sb.toString(); 36 | } -------------------------------------------------------------------------------- /test/graph/parse/test_parse.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/03/30' 6 | 7 | from unittest import TestCase 8 | 9 | from program_slicing.graph.parse import \ 10 | control_flow_graph, \ 11 | control_dependence_graph, \ 12 | data_dependence_graph, \ 13 | program_dependence_graph, \ 14 | Lang 15 | 16 | 17 | class ParseTestCase(TestCase): 18 | 19 | def __check_graph(self, graph) -> None: 20 | self.assertIsNotNone(graph) 21 | self.assertTrue(len(graph) > 0) 22 | 23 | def test_control_flow_graph(self) -> None: 24 | code = "class A { void foo() {} }" 25 | self.__check_graph(control_flow_graph(code, Lang.JAVA)) 26 | 27 | def test_control_dependence_graph(self) -> None: 28 | code = "class A { void foo() {} }" 29 | self.__check_graph(control_dependence_graph(code, Lang.JAVA)) 30 | 31 | def test_data_dependence_graph(self) -> None: 32 | code = "class A { void foo() {} }" 33 | self.__check_graph(data_dependence_graph(code, Lang.JAVA)) 34 | 35 | def test_program_dependence_graph(self) -> None: 36 | code = "class A { void foo() {} }" 37 | self.__check_graph(program_dependence_graph(code, Lang.JAVA)) 38 | -------------------------------------------------------------------------------- /scripts/performance_benchmarks/README.md: -------------------------------------------------------------------------------- 1 | ## Performance test for Block Slicing 2 | 3 | ### Usage 4 | 5 | 1. ```$ pip3 install ./program_slicing``` 6 | 2. ```$ cd scripts/performance_benchmarks``` 7 | 3. ```$ pip3 install -r requirements.txt``` 8 | 4. 
```$ cd block_slices``` 9 | 5. Script parameters: 10 | 11 | `--dir` folder with java files 12 | 13 | `-i` number of iterations to run all files, default is 1 14 | 15 | Run ```$ python3 check_performance.py --dir dataset -i 2``` 16 | where dataset is teh folder with java files and number of iterations is 2. 17 | 18 | 19 | You should have access to global network to use pip. 20 | Python 3.8 with corresponding C compiler is required. 21 | Run Python Console to check the version of C compiler. 22 | 23 | ___ 24 | #### Last reported result 25 | 26 | PC info: 27 | 28 | `RAM 64GiB` 29 | 30 | `CPU Intel(R) Xeon(R) Gold 6266C CPU @ 3.00GHz`: 31 | 32 | The algorithm was run for 15 java files with 100 ncss. The procedure was run 1 time(s) for more accurate calculations. 33 | 34 | Total time of running 15 java methods is 32.0 secs. 35 | 36 | Average time for 1 method: 1.667 secs. 37 | 38 | Min time of 1 method: 0.000 secs, 39 | 40 | max time of 1 method: 11.000 secs, 41 | 42 | median: 0.000 secs, 43 | 44 | quantile 75%: 1.500 secs, 45 | 46 | quantile 95%: 7.500 secs -------------------------------------------------------------------------------- /program_slicing/graph/cfg.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/03/30' 6 | 7 | from typing import Set, Dict, List 8 | 9 | import networkx 10 | 11 | from program_slicing.graph.basic_block import BasicBlock 12 | from program_slicing.graph.statement import Statement 13 | 14 | 15 | class ControlFlowGraph(networkx.DiGraph): 16 | 17 | def __init__(self) -> None: 18 | super().__init__() 19 | self.__entry_points: Set[BasicBlock] = set() 20 | self.__forward_dominance: Dict[BasicBlock, List[BasicBlock]] = {} 21 | self.__scope_dependency: Dict[Statement, Statement] = {} 22 | 23 | @property 24 | def entry_points(self) -> Set[BasicBlock]: 25 | return self.__entry_points 26 | 
27 | @property 28 | def forward_dominance(self) -> Dict[BasicBlock, List[BasicBlock]]: 29 | return self.__forward_dominance 30 | 31 | @property 32 | def scope_dependency(self) -> Dict[Statement, Statement]: 33 | return self.__scope_dependency 34 | 35 | def add_entry_point(self, root: BasicBlock) -> None: 36 | self.__entry_points.add(root) 37 | 38 | def set_scope_dependency(self, scope_dependency: Dict[Statement, Statement]) -> None: 39 | self.__scope_dependency = scope_dependency 40 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI workflow 2 | 3 | on: [push] 4 | 5 | jobs: 6 | do_static_analysis: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: [3.7] 11 | 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python ${{ matrix.python-version }} 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: ${{ matrix.python-version }} 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install flake8 pytest 22 | pip install tqdm 23 | git submodule update --recursive --init 24 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 25 | pip3 install ./ 26 | - name: Lint with flake8 27 | run: | 28 | # stop the build if there are Python syntax errors or undefined names 29 | flake8 . --count --exit-zero --select=E9,F63,F7,F82 --show-source --statistics 30 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 31 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 32 | - name: Test with pytest 33 | run: | 34 | python3 -m unittest discover -s test 35 | - name: Integration tests 36 | run: | 37 | python3 integration_tests/integration_block_slicing.py 38 | -------------------------------------------------------------------------------- /integration_tests/files/method_5.java: -------------------------------------------------------------------------------- 1 | private void loadManifest() throws IOException { 2 | Util.err.fine("loading manifest of " + jar); 3 | File jarBeingOpened = null; // for annotation purposes 4 | try { 5 | if (reloadable) { 6 | // Never try to cache reloadable JARs. 7 | jarBeingOpened = physicalJar; // might be null 8 | ensurePhysicalJar(); 9 | jarBeingOpened = physicalJar; // might have changed 10 | JarFile jarFile = new JarFile(physicalJar, false); 11 | try { 12 | Manifest m = jarFile.getManifest(); 13 | if (m == null) throw new IOException("No manifest found in " + physicalJar); // NOI18N 14 | manifest = m; 15 | } finally { 16 | jarFile.close(); 17 | } 18 | } else { 19 | jarBeingOpened = jar; 20 | manifest = getManager().loadManifest(jar); 21 | } 22 | } catch (IOException e) { 23 | if (jarBeingOpened != null) { 24 | Exceptions.attachMessage(e, 25 | "While loading manifest from: " + 26 | jarBeingOpened); // NOI18N 27 | } 28 | throw e; 29 | } 30 | } -------------------------------------------------------------------------------- /program_slicing/graph/basic_block.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/03/23' 6 | 7 | from typing import List, Iterator, Optional 8 | 9 | from program_slicing.graph.statement import Statement 10 | 11 | 12 | class BasicBlock: 13 | 14 | def __init__(self, statements: List[Statement] = None) -> None: 15 | self.__statements: 
List[Statement] = [] if statements is None else statements 16 | 17 | def __iter__(self) -> Iterator[Statement]: 18 | return self.__statements.__iter__() 19 | 20 | def __repr__(self) -> str: 21 | return "BasicBlock{}".format(self) 22 | 23 | def __str__(self) -> str: 24 | return str(self.__statements) 25 | 26 | @property 27 | def statements(self) -> List[Statement]: 28 | return self.__statements 29 | 30 | @property 31 | def root(self) -> Optional[Statement]: 32 | return self.__statements[0] if len(self.__statements) > 0 else None 33 | 34 | def append(self, statement: Statement) -> None: 35 | self.__statements.append(statement) 36 | 37 | def is_empty(self) -> bool: 38 | return self.root is None 39 | 40 | def split(self, index: int) -> 'BasicBlock': 41 | new_block = BasicBlock(statements=self.__statements[index:]) 42 | self.__statements = self.__statements[:index] 43 | return new_block 44 | -------------------------------------------------------------------------------- /integration_tests/files/method_24.java: -------------------------------------------------------------------------------- 1 | public boolean reportRecordedErrors(Scope scope, int mergedStatus) { 2 | FakedTrackingVariable current = this; 3 | while (current.globalClosingState == 0) { 4 | current = current.innerTracker; 5 | if (current == null) { 6 | // no relevant state found -> report: 7 | reportError(scope.problemReporter(), null, mergedStatus); 8 | return true; 9 | } 10 | } 11 | boolean hasReported = false; 12 | if (this.recordedLocations != null) { 13 | Iterator locations = this.recordedLocations.entrySet().iterator(); 14 | int reportFlags = 0; 15 | while (locations.hasNext()) { 16 | Map.Entry entry = (Entry) locations.next(); 17 | reportFlags |= reportError(scope.problemReporter(), (ASTNode)entry.getKey(), ((Integer)entry.getValue()).intValue()); 18 | hasReported = true; 19 | } 20 | if (reportFlags != 0) { 21 | // after all locations have been reported, mark as reported to prevent duplicate report 
via an outer wrapper 22 | current = this; 23 | do { 24 | current.globalClosingState |= reportFlags; 25 | } while ((current = current.innerTracker) != null); 26 | } 27 | } 28 | return hasReported; 29 | } -------------------------------------------------------------------------------- /integration_tests/files/method_9.java: -------------------------------------------------------------------------------- 1 | private void autoDetectRoot(IPath path) throws CoreException { 2 | if (!fRootDetected) { 3 | ZipFile zip = null; 4 | try { 5 | zip = getArchive(); 6 | } catch (IOException e) { 7 | throw new CoreException(new Status(IStatus.ERROR, LaunchingPlugin.getUniqueIdentifier(), IJavaLaunchConfigurationConstants.ERR_INTERNAL_ERROR, 8 | NLS.bind(LaunchingMessages.ArchiveSourceLocation_Exception_occurred_while_detecting_root_source_directory_in_archive__0__1, new String[] {getName()}), e)); 9 | } 10 | synchronized (zip) { 11 | Enumeration entries = zip.entries(); 12 | String fileName = path.toString(); 13 | try { 14 | while (entries.hasMoreElements()) { 15 | ZipEntry entry = entries.nextElement(); 16 | String entryName = entry.getName(); 17 | if (entryName.endsWith(fileName)) { 18 | int rootLength = entryName.length() - fileName.length(); 19 | if (rootLength > 0) { 20 | String root = entryName.substring(0, rootLength); 21 | setRootPath(root); 22 | } 23 | fRootDetected = true; 24 | return; 25 | } 26 | } 27 | } catch (IllegalStateException e) { 28 | throw new CoreException(new Status(IStatus.ERROR, LaunchingPlugin.getUniqueIdentifier(), IJavaLaunchConfigurationConstants.ERR_INTERNAL_ERROR, 29 | NLS.bind(LaunchingMessages.ArchiveSourceLocation_Exception_occurred_while_detecting_root_source_directory_in_archive__0__2, new String[] {getName()}), e)); 30 | } 31 | } 32 | } 33 | } -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, 
find_packages 2 | import program_slicing 3 | import os 4 | from setuptools.command.build_py import build_py 5 | from shutil import copytree 6 | 7 | HERE = os.path.abspath(os.path.dirname(__file__)) 8 | NAME = "vendor" 9 | 10 | 11 | class BuildCommand(build_py): 12 | 13 | def run(self) -> None: 14 | build_py.run(self) 15 | if not self.dry_run: 16 | target_dir = os.path.join(self.build_lib, NAME) 17 | copytree(os.path.join(HERE, NAME), target_dir) 18 | 19 | 20 | setup( 21 | name='program_slicing', 22 | version=program_slicing.__version__, 23 | description=program_slicing.__doc__.strip(), 24 | long_description='Set of methods for source code decomposition.', 25 | url='https://github.com/acheshkov/program_slicing', 26 | download_url='https://github.com/acheshkov/program_slicing', 27 | author=program_slicing.__author__, 28 | author_email=['yakimetsku@gmail.com'], 29 | license=program_slicing.__licence__, 30 | packages=find_packages(), 31 | extras_require={}, 32 | install_requires=open('requirements.txt', 'r').readlines(), 33 | tests_require=open('requirements.txt', 'r').readlines(), 34 | classifiers=[ 35 | 'Programming Language :: Python', 36 | 'Environment :: Console', 37 | 'Intended Audience :: Developers', 38 | 'License :: OSI Approved :: MIT License', 39 | 'Topic :: Software Development', 40 | 'Topic :: Utilities' 41 | ], 42 | cmdclass={"build_py": BuildCommand}, 43 | ) 44 | -------------------------------------------------------------------------------- /integration_tests/files/method_3.java: -------------------------------------------------------------------------------- 1 | protected void initializeApplicationSection(ILaunchConfiguration config) throws CoreException { 2 | 3 | String attribute = getApplicationAttribute(); 4 | 5 | // first see if the application name has been set on the launch config 6 | String application = config.getAttribute(attribute, (String) null); 7 | if (application == null || fApplicationCombo.indexOf(application) == -1) { 8 | application 
= null; 9 | 10 | // check if the user has entered the -application arg in the program arg field 11 | StringTokenizer tokenizer = new StringTokenizer(config.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, "")); //$NON-NLS-1$ 12 | while (tokenizer.hasMoreTokens()) { 13 | String token = tokenizer.nextToken(); 14 | if (token.equals("-application") && tokenizer.hasMoreTokens()) { //$NON-NLS-1$ 15 | application = tokenizer.nextToken(); 16 | break; 17 | } 18 | } 19 | 20 | int index = -1; 21 | if (application != null) 22 | index = fApplicationCombo.indexOf(application); 23 | 24 | // use default application as specified in the install.ini of the target platform 25 | if (index == -1) 26 | index = fApplicationCombo.indexOf(TargetPlatform.getDefaultApplication()); 27 | 28 | if (index != -1) { 29 | fApplicationCombo.setText(fApplicationCombo.getItem(index)); 30 | } else if (fApplicationCombo.getItemCount() > 0) { 31 | fApplicationCombo.setText(fApplicationCombo.getItem(0)); 32 | } 33 | } else { 34 | fApplicationCombo.setText(application); 35 | } 36 | } -------------------------------------------------------------------------------- /integration_tests/files/method_20.java: -------------------------------------------------------------------------------- 1 | protected void internalInitialize() throws StorageException { 2 | if (cipherAlgorithm != null && keyFactoryAlgorithm != null) { 3 | if (roundtrip(cipherAlgorithm, keyFactoryAlgorithm)) 4 | return; 5 | // this is a bad situation - JVM cipher no longer available. 
Both log and throw an exception 6 | String msg = NLS.bind(SecAuthMessages.noAlgorithm, cipherAlgorithm); 7 | StorageException e = new StorageException(StorageException.INTERNAL_ERROR, msg); 8 | AuthPlugin.getDefault().logError(msg, e); 9 | throw e; 10 | } 11 | if (cipherAlgorithm == null || keyFactoryAlgorithm == null) { 12 | IEclipsePreferences eclipseNode = new ConfigurationScope().getNode(AuthPlugin.PI_AUTH); 13 | cipherAlgorithm = eclipseNode.get(IStorageConstants.CIPHER_KEY, IStorageConstants.DEFAULT_CIPHER); 14 | keyFactoryAlgorithm = eclipseNode.get(IStorageConstants.KEY_FACTORY_KEY, IStorageConstants.DEFAULT_KEY_FACTORY); 15 | } 16 | if (roundtrip(cipherAlgorithm, keyFactoryAlgorithm)) 17 | return; 18 | String unavailableCipher = cipherAlgorithm; 19 | 20 | detect(); 21 | if (availableCiphers.size() == 0) 22 | throw new StorageException(StorageException.INTERNAL_ERROR, SecAuthMessages.noAlgorithms); 23 | 24 | // use first available 25 | cipherAlgorithm = (String) availableCiphers.keySet().iterator().next(); 26 | keyFactoryAlgorithm = (String) availableCiphers.get(cipherAlgorithm); 27 | 28 | String msg = NLS.bind(SecAuthMessages.usingAlgorithm, unavailableCipher, cipherAlgorithm); 29 | AuthPlugin.getDefault().logMessage(msg); 30 | } -------------------------------------------------------------------------------- /scripts/performance_benchmarks/block_slices/dataset_BC.csv: -------------------------------------------------------------------------------- 1 | File,Line,cognitive,cyclo,filename,method_name,ncss 2 | STSWizardCreator_generateProviderImplClass_100.0_6.0_16.0_242.java,242,16,6,STSWizardCreator,generateProviderImplClass,100 3 | NbWelcomePanel_initComponents_100.0_25.0_43.0_256.java,256,43,25,NbWelcomePanel,initComponents,100 4 | InstanceController_doSrvIPXT_100.0_26.0_38.0_165.java,165,38,26,InstanceController,doSrvIPXT,100 5 | QMUIQQFaceView_onDrawText_100.0_27.0_118.0_124.java,124,118,27,QMUIQQFaceView,onDrawText,100 6 | 
ASTUtils_getRange_100.0_27.0_56.0_144.java,144,56,27,ASTUtils,getRange,100 7 | AddressTable_createSwitchTable_100.0_42.0_73.0_193.java,193,73,42,AddressTable,createSwitchTable,100 8 | SubCommonRdbmsWriter_fillPreparedStatementColumnType_100.0_43.0_34.0_138.java,138,34,43,SubCommonRdbmsWriter,fillPreparedStatementColumnType,100 9 | FilesystemHandler_svnMoveImplementation_100.0_45.0_90.0_162.java,162,90,45,FilesystemHandler,svnMoveImplementation,100 10 | MakeNBM_createInfoXml_100.0_46.0_48.0_128.java,128,48,46,MakeNBM,createInfoXml,100 11 | Parser_variable_100.0_56.0_97.0_135.java,135,97,56,Parser,variable,100 12 | Controller_createDirectories_100.0_71.0_136.0_168.java,168,136,71,Controller,createDirectories,100 13 | SanitizingParser_sanitizeSource_100.0_32.0_126.0_131.java,131,126,32,SanitizingParser,sanitizeSource,100 14 | BaseTransport_internalProcessMessage_100.0_32.0_86.0_170.java,170,86,32,BaseTransport,internalProcessMessage,100 15 | MetaDataBuilder_emit_100.0_41.0_66.0_127.java,127,66,41,MetaDataBuilder,emit,100 16 | SQLExecutionHelper_initialDataLoad_100.0_7.0_58.0_188.java,188,58,7,SQLExecutionHelper,initialDataLoad,100 -------------------------------------------------------------------------------- /integration_tests/files/method_6.java: -------------------------------------------------------------------------------- 1 | void removeElementFromFilters(int index) { 2 | if (filteredIndexes != null) { 3 | int location = Arrays.binarySearch(filteredIndexes, index); 4 | if (location >= 0) { 5 | // remove a filtered item 6 | if (filteredIndexes.length == 1) { 7 | // only filtered item 8 | filteredIndexes = null; 9 | filteredElements = null; 10 | } else { 11 | int[] next = new int[filteredIndexes.length - 1]; 12 | Object[] filt = new Object[next.length]; 13 | if (location == 0) { 14 | // first 15 | System.arraycopy(filteredIndexes, 1, next, 0, next.length); 16 | System.arraycopy(filteredElements, 1, filt, 0, filt.length); 17 | } else if (location == 
(filteredIndexes.length - 1)) { 18 | // last 19 | System.arraycopy(filteredIndexes, 0, next, 0, next.length); 20 | System.arraycopy(filteredElements, 0, filt, 0, filt.length); 21 | } else { 22 | // middle 23 | System.arraycopy(filteredIndexes, 0, next, 0, location); 24 | System.arraycopy(filteredElements, 0, filt, 0, location); 25 | System.arraycopy(filteredIndexes, location + 1, next, location, next.length - location); 26 | System.arraycopy(filteredElements, location + 1, filt, location, filt.length - location); 27 | } 28 | filteredIndexes = next; 29 | filteredElements = filt; 30 | } 31 | } else { 32 | location = 0 - (location + 1); 33 | } 34 | if (filteredIndexes != null) { 35 | // decrement remaining indexes 36 | for (int i = location; i < filteredIndexes.length; i ++) { 37 | filteredIndexes[i]--; 38 | } 39 | } 40 | } 41 | } -------------------------------------------------------------------------------- /integration_tests/files/method_7.java: -------------------------------------------------------------------------------- 1 | private boolean isClassOf(Element e, Class providerClass) { 2 | switch (e.getKind()) { 3 | case CLASS: { 4 | TypeElement te = (TypeElement) e; 5 | TypeMirror superType = te.getSuperclass(); 6 | if (superType.getKind().equals(TypeKind.NONE)) { 7 | return false; 8 | } else { 9 | e = ((DeclaredType) superType).asElement(); 10 | String clazz = processingEnv.getElementUtils().getBinaryName((TypeElement) e).toString(); 11 | if (clazz.equals(providerClass.getName())) { 12 | return true; 13 | } else { 14 | return isClassOf(e, providerClass); 15 | } 16 | } 17 | } 18 | case METHOD: { 19 | TypeMirror retType = ((ExecutableElement) e).getReturnType(); 20 | if (retType.getKind().equals(TypeKind.NONE)) { 21 | return false; 22 | } else { 23 | e = ((DeclaredType) retType).asElement(); 24 | String clazz = processingEnv.getElementUtils().getBinaryName((TypeElement) e).toString(); 25 | if (clazz.equals(providerClass.getName())) { 26 | return true; 27 | } 
else { 28 | return isClassOf(e, providerClass); 29 | } 30 | } 31 | } 32 | default: 33 | throw new IllegalArgumentException("Annotated element is not loadable as an instance: " + e); 34 | } 35 | } -------------------------------------------------------------------------------- /integration_tests/files/method_14.java: -------------------------------------------------------------------------------- 1 | public void computeConstant() { 2 | //a special constant is use for the potential Integer.MAX_VALUE+1 3 | //which is legal if used with a - as prefix....cool.... 4 | //notice that Integer.MIN_VALUE == -2147483648 5 | 6 | long MAX = Integer.MAX_VALUE; 7 | if (this == One) { constant = IntConstant.fromValue(1); return ;} 8 | 9 | int length = source.length; 10 | long computedValue = 0L; 11 | if (source[0] == '0') 12 | { MAX = 0xFFFFFFFFL ; //a long in order to be positive ! 13 | if (length == 1) { constant = IntConstant.fromValue(0); return ;} 14 | final int shift,radix; 15 | int j ; 16 | if ( (source[1] == 'x') || (source[1] == 'X') ) 17 | { shift = 4 ; j = 2; radix = 16;} 18 | else 19 | { shift = 3 ; j = 1; radix = 8;} 20 | while (source[j]=='0') 21 | { j++; //jump over redondant zero 22 | if (j == length) 23 | { //watch for 000000000000000000 24 | constant = IntConstant.fromValue(value = (int)computedValue); 25 | return ;}} 26 | 27 | while (j MAX) return /*constant stays null*/ ;}} 33 | else 34 | { //-----------regular case : radix = 10----------- 35 | for (int i = 0 ; i < length;i++) 36 | { int digitValue ; 37 | if ((digitValue = ScannerHelper.digit(source[i],10)) < 0 ) 38 | { constant = FORMAT_ERROR; return ;} 39 | computedValue = 10*computedValue + digitValue; 40 | if (computedValue > MAX) return /*constant stays null*/ ; }} 41 | 42 | constant = IntConstant.fromValue(value = (int)computedValue); 43 | 44 | } -------------------------------------------------------------------------------- /test/graph/test_point.py: 
-------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/06/25' 6 | 7 | from unittest import TestCase 8 | 9 | from program_slicing.graph.point import Point 10 | 11 | 12 | class PointCase(TestCase): 13 | 14 | def test_repr(self) -> None: 15 | a = Point(10, 20) 16 | self.assertEqual("(10, 20)", str(a)) 17 | self.assertEqual("Point(10, 20)", repr(a)) 18 | 19 | def test_operators(self) -> None: 20 | a = Point(10, 20) 21 | b = Point(20, 10) 22 | c = Point(10, 30) 23 | d = Point(10, 10) 24 | e = Point(10, 20) 25 | 26 | self.assertTrue(a == e) 27 | self.assertFalse(a == b) 28 | self.assertFalse(a == c) 29 | self.assertFalse(a == d) 30 | self.assertFalse(b == d) 31 | 32 | self.assertFalse(a != e) 33 | self.assertTrue(a != b) 34 | self.assertTrue(a != c) 35 | self.assertTrue(a != d) 36 | self.assertTrue(b != d) 37 | 38 | self.assertFalse(a < e) 39 | self.assertTrue(a < b) 40 | self.assertTrue(a < c) 41 | self.assertFalse(a < d) 42 | self.assertFalse(b < d) 43 | 44 | self.assertTrue(a <= e) 45 | self.assertTrue(a <= b) 46 | self.assertTrue(a <= c) 47 | self.assertFalse(a <= d) 48 | self.assertFalse(b <= d) 49 | 50 | self.assertFalse(a > e) 51 | self.assertFalse(a > b) 52 | self.assertFalse(a > c) 53 | self.assertTrue(a > d) 54 | self.assertTrue(b > d) 55 | 56 | self.assertTrue(a >= e) 57 | self.assertFalse(a >= b) 58 | self.assertFalse(a >= c) 59 | self.assertTrue(a >= d) 60 | self.assertTrue(b >= d) 61 | -------------------------------------------------------------------------------- /integration_tests/files/method_0.java: -------------------------------------------------------------------------------- 1 | public void freeEntry(BasicPoolEntry entry, boolean reusable, long validDuration, TimeUnit timeUnit) { 2 | 3 | HttpRoute route = entry.getPlannedRoute(); 4 | if (log.isDebugEnabled()) { 5 | log.debug("Releasing 
connection" + 6 | " [" + route + "][" + entry.getState() + "]"); 7 | } 8 | 9 | poolLock.lock(); 10 | try { 11 | if (shutdown) { 12 | // the pool is shut down, release the 13 | // connection's resources and get out of here 14 | closeConnection(entry); 15 | return; 16 | } 17 | 18 | // no longer issued, we keep a hard reference now 19 | leasedConnections.remove(entry); 20 | 21 | RouteSpecificPool rospl = getRoutePool(route, true); 22 | 23 | if (reusable) { 24 | if (log.isDebugEnabled()) { 25 | String s; 26 | if (validDuration > 0) { 27 | s = "for " + validDuration + " " + timeUnit; 28 | } else { 29 | s = "indefinitely"; 30 | } 31 | log.debug("Pooling connection" + 32 | " [" + route + "][" + entry.getState() + "]; keep alive " + s); 33 | } 34 | rospl.freeEntry(entry); 35 | entry.updateExpiry(validDuration, timeUnit); 36 | freeConnections.add(entry); 37 | } else { 38 | rospl.dropEntry(); 39 | numConnections--; 40 | } 41 | 42 | notifyWaitingThread(rospl); 43 | 44 | } finally { 45 | poolLock.unlock(); 46 | } 47 | } -------------------------------------------------------------------------------- /integration_tests/files/method_22.java: -------------------------------------------------------------------------------- 1 | protected static void loadPackageDirectory(File directory, boolean verbose) 2 | throws Exception { 3 | File[] contents = directory.listFiles(); 4 | 5 | // make sure that jar files and lib directory get processed first 6 | for (int i = 0; i < contents.length; i++) { 7 | if (contents[i].isFile() && contents[i].getPath().endsWith(".jar")) { 8 | if (verbose) { 9 | System.out.println("[Weka] loading " + contents[i].getPath()); 10 | } 11 | ClassloaderUtil.addFile(contents[i].getPath()); 12 | } else if (contents[i].isDirectory() 13 | && contents[i].getName().equalsIgnoreCase("lib")) { 14 | loadPackageDirectory(contents[i], verbose); 15 | } 16 | } 17 | 18 | // now any auxilliary files 19 | for (int i = 0; i < contents.length; i++) { 20 | if (contents[i].isFile() && 
contents[i].getPath().endsWith("Beans.props")) { 21 | KnowledgeFlowApp.addToPluginBeanProps(contents[i]); 22 | KnowledgeFlowApp.disposeSingleton(); 23 | 24 | } else if (contents[i].isFile() 25 | && contents[i].getPath().endsWith("Explorer.props")) { 26 | processExplorerProps(contents[i]); 27 | } else if (contents[i].isFile() 28 | && contents[i].getPath().endsWith("GUIEditors.props")) { 29 | processGUIEditorsProps(contents[i]); 30 | } else if (contents[i].isFile() 31 | && contents[i].getPath().endsWith("GenericPropertiesCreator.props")) { 32 | processGenericPropertiesCreatorProps(contents[i]); 33 | } else if (contents[i].isFile() 34 | && contents[i].getPath().endsWith("PluginManager.props")) { 35 | processPluginManagerProps(contents[i]); 36 | } 37 | } 38 | } -------------------------------------------------------------------------------- /integration_tests/integration_block_slicing.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'lyriccoder' 3 | __credits__ = ['lyriccoder, kuyaki'] 4 | __maintainer__ = 'lyriccoder' 5 | __date__ = '2021/11/08' 6 | 7 | from pathlib import Path 8 | 9 | import tqdm 10 | 11 | from integration_tests.integration_slicing import run_check 12 | from program_slicing.file_manager.reader import read_file, read_json 13 | from program_slicing.decomposition.block.slicing import get_block_slices 14 | from program_slicing.decomposition.slice_predicate import SlicePredicate 15 | from program_slicing.graph.parse import Lang 16 | 17 | 18 | def main(): 19 | cur_dir = Path(__file__).parent 20 | expected_emos = read_json(Path(cur_dir, 'files', 'expected_EMOs.json')) 21 | observable_emos = {} 22 | for java_file in tqdm.tqdm(Path(cur_dir, 'files').glob('*.java')): 23 | code = read_file(java_file) 24 | found_opportunities = { 25 | (program_slice.ranges[0][0].line_number + 1, program_slice.ranges[-1][1].line_number + 1) 26 | for program_slice in get_block_slices( 27 | code, 28 | 
Lang.JAVA, 29 | slice_predicate=SlicePredicate( 30 | min_amount_of_lines=6, 31 | max_amount_of_effective_lines=51, 32 | min_amount_of_statements=4, 33 | max_percentage_of_lines=0.8, 34 | max_amount_of_exit_statements=1, 35 | cause_code_duplication=False, 36 | lang_to_check_parsing=Lang.JAVA, 37 | lines_are_full=True 38 | ) 39 | ) 40 | } 41 | observable_emos[java_file.name] = tuple(found_opportunities) 42 | run_check(expected_emos, observable_emos) 43 | 44 | 45 | if __name__ == '__main__': 46 | main() 47 | -------------------------------------------------------------------------------- /test/graph/test_statement.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/03/30' 6 | 7 | from unittest import TestCase 8 | 9 | from program_slicing.graph.statement import Statement, StatementType 10 | from program_slicing.graph.point import Point 11 | 12 | 13 | class StatementTestCase(TestCase): 14 | 15 | def test_repr_full(self) -> None: 16 | statement = Statement( 17 | StatementType.ASSIGNMENT, 18 | Point(10, 20), 19 | Point(10, 31), 20 | {"args"}, 21 | "my_variable", 22 | "assignment_expression") 23 | self.assertEqual( 24 | "ASSIGNMENT(assignment_expression) 'my_variable' affected by variables {'args'} " 25 | "position in code: (10, 20) - (10, 31)", str(statement)) 26 | self.assertEqual( 27 | "Statement(" 28 | "statement_type=StatementType.ASSIGNMENT, " 29 | "ast_node_type=assignment_expression, " 30 | "name='my_variable', " 31 | "affected_by={'args'}, " 32 | "start_point=(10, 20), " 33 | "end_point=(10, 31))", repr(statement)) 34 | 35 | def test_repr_short(self) -> None: 36 | statement = Statement( 37 | StatementType.GOTO, 38 | Point(10, 20), 39 | Point(10, 24), 40 | ast_node_type="else") 41 | self.assertEqual( 42 | "GOTO(else) " 43 | "position in code: (10, 20) - (10, 24)", str(statement)) 44 | self.assertEqual( 45 
| "Statement(" 46 | "statement_type=StatementType.GOTO, " 47 | "ast_node_type=else, " 48 | "name=None, " 49 | "affected_by=set(), " 50 | "start_point=(10, 20), " 51 | "end_point=(10, 24))", repr(statement)) 52 | -------------------------------------------------------------------------------- /integration_tests/integration_variable_slicing.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'lyriccoder' 3 | __credits__ = ['lyriccoder, kuyaki'] 4 | __maintainer__ = 'lyriccoder' 5 | __date__ = '2021/11/08' 6 | 7 | from pathlib import Path 8 | 9 | import tqdm 10 | 11 | from integration_tests.integration_slicing import run_check 12 | from program_slicing.file_manager.reader import read_file, read_json 13 | from program_slicing.decomposition.variable.slicing import get_variable_slices 14 | from program_slicing.decomposition.slice_predicate import SlicePredicate 15 | from program_slicing.graph.parse import Lang 16 | 17 | 18 | def main(): 19 | cur_dir = Path(__file__).parent 20 | expected_emos = read_json(Path(cur_dir, 'files', 'expected_variable_slices.json')) 21 | observable_emos = {} 22 | for java_file in tqdm.tqdm(Path(cur_dir, 'files').glob('*.java')): 23 | code = read_file(java_file) 24 | found_opportunities = { 25 | tuple(r[0].line_number + 1 for r in program_slice.ranges) 26 | for program_slice in get_variable_slices( 27 | code, 28 | Lang.JAVA, 29 | slice_predicate=SlicePredicate( 30 | min_amount_of_lines=4, 31 | min_amount_of_statements=2, 32 | max_amount_of_effective_lines=51, 33 | max_percentage_of_lines=0.8, 34 | max_amount_of_exit_statements=1, 35 | cause_code_duplication=False, 36 | lang_to_check_parsing=Lang.JAVA, 37 | lines_are_full=True, 38 | has_returnable_variable=True, 39 | ) 40 | ) 41 | } 42 | observable_emos[java_file.name] = tuple(found_opportunities) 43 | run_check(expected_emos, observable_emos) 44 | 45 | 46 | if __name__ == '__main__': 47 | main() 48 | 
-------------------------------------------------------------------------------- /integration_tests/files/method_23.java: -------------------------------------------------------------------------------- 1 | public Operand receiveArgs(final ArgsNode argsNode, IR_Scope s) { 2 | final int required = argsNode.getRequiredArgsCount(); 3 | final int opt = argsNode.getOptionalArgsCount(); 4 | final int rest = argsNode.getRestArg(); 5 | 6 | s.addInstr(new ReceiveArgumentInstruction(s.getSelf(), 0)); 7 | 8 | // Other args begin at index 1 9 | int argIndex = 1; 10 | 11 | // Both for fixed arity and variable arity methods 12 | ListNode preArgs = argsNode.getPre(); 13 | for (int i = 0; i < required; i++, argIndex++) { 14 | ArgumentNode a = (ArgumentNode)preArgs.get(i); 15 | if (a instanceof TypedArgumentNode) { 16 | TypedArgumentNode t = (TypedArgumentNode)a; 17 | s.addInstr(new DECLARE_LOCAL_TYPE_Instr(argIndex, buildType(t.getTypeNode()))); 18 | } 19 | s.addInstr(new ReceiveArgumentInstruction(new LocalVariable(a.getName()), argIndex)); 20 | } 21 | 22 | if (argsNode.getBlock() != null) 23 | s.addInstr(new RECV_CLOSURE_Instr(new LocalVariable(argsNode.getBlock().getName()))); 24 | 25 | // Now for the rest 26 | if (opt > 0 || rest > -1) { 27 | ListNode optArgs = argsNode.getOptArgs(); 28 | for (int j = 0; j < opt; j++, argIndex++) { 29 | // Jump to 'l' if this arg is not null. If null, fall through and build the default value! 
30 | Label l = s.getNewLabel(); 31 | LocalAsgnNode n = (LocalAsgnNode)optArgs.get(j); 32 | s.addInstr(new RECV_OPT_ARG_Instr(new LocalVariable(n.getName()), argIndex, l)); 33 | build(n, s); 34 | s.addInstr(new LABEL_Instr(l)); 35 | } 36 | 37 | if (rest > -1) { 38 | s.addInstr(new ReceiveArgumentInstruction(new LocalVariable(argsNode.getRestArgNode().getName()), argIndex, true)); 39 | argIndex++; 40 | } 41 | } 42 | 43 | return null; 44 | } -------------------------------------------------------------------------------- /integration_tests/files/method_15.java: -------------------------------------------------------------------------------- 1 | public static PdfPageLabelFormat[] getPageLabelFormats(PdfReader reader) { 2 | PdfDictionary dict = reader.getCatalog(); 3 | PdfDictionary labels = (PdfDictionary)PdfReader.getPdfObjectRelease(dict.get(PdfName.PAGELABELS)); 4 | if (labels == null) 5 | return null; 6 | HashMap numberTree = PdfNumberTree.readTree(labels); 7 | Integer numbers[] = new Integer[numberTree.size()]; 8 | numbers = numberTree.keySet().toArray(numbers); 9 | Arrays.sort(numbers); 10 | PdfPageLabelFormat[] formats = new PdfPageLabelFormat[numberTree.size()]; 11 | String prefix; 12 | int numberStyle; 13 | int pagecount; 14 | for (int k = 0; k < numbers.length; ++k) { 15 | Integer key = numbers[k]; 16 | PdfDictionary d = (PdfDictionary)PdfReader.getPdfObjectRelease(numberTree.get(key)); 17 | if (d.contains(PdfName.ST)) { 18 | pagecount = ((PdfNumber)d.get(PdfName.ST)).intValue(); 19 | } else { 20 | pagecount = 1; 21 | } 22 | if (d.contains(PdfName.P)) { 23 | prefix = ((PdfString)d.get(PdfName.P)).toUnicodeString(); 24 | } else { 25 | prefix = ""; 26 | } 27 | if (d.contains(PdfName.S)) { 28 | char type = ((PdfName)d.get(PdfName.S)).toString().charAt(1); 29 | switch(type) { 30 | case 'R': numberStyle = UPPERCASE_ROMAN_NUMERALS; break; 31 | case 'r': numberStyle = LOWERCASE_ROMAN_NUMERALS; break; 32 | case 'A': numberStyle = UPPERCASE_LETTERS; break; 33 | case 
'a': numberStyle = LOWERCASE_LETTERS; break; 34 | default: numberStyle = DECIMAL_ARABIC_NUMERALS; break; 35 | } 36 | } else { 37 | numberStyle = EMPTY; 38 | } 39 | formats[k] = new PdfPageLabelFormat(key.intValue()+1, numberStyle, prefix, pagecount); 40 | } 41 | return formats; 42 | } -------------------------------------------------------------------------------- /program_slicing/graph/convert/ddg.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/04/20' 6 | 7 | from program_slicing.graph.cdg import ControlDependenceGraph 8 | from program_slicing.graph.cfg import ControlFlowGraph 9 | from program_slicing.graph.ddg import DataDependenceGraph 10 | from program_slicing.graph.pdg import ProgramDependenceGraph 11 | 12 | 13 | def to_cdg(ddg: DataDependenceGraph) -> ControlDependenceGraph: 14 | """ 15 | Convert the Data Dependence Graph into a Control Dependence Graph. 16 | New graph will contain same nodes as in the original one so that 17 | any changes made after converting in the original graph's statements will affect the converted one. 18 | :param ddg: Data Dependence Graph that should be converted. 19 | :return: Control Dependence Graph whose nodes were presented in the Data Dependence Graph on which it was based. 20 | """ 21 | raise NotImplementedError() 22 | 23 | 24 | def to_cfg(ddg: DataDependenceGraph) -> ControlFlowGraph: 25 | """ 26 | Convert the Data Dependence Graph into a Control Flow Graph. 27 | New graph will contain links on nodes of the original one so that 28 | any changes made after converting in the original graph's statements will affect the converted one. 29 | :param ddg: Data Dependence Graph that should be converted. 30 | :return: Control Flow Graph whose nodes contain nodes of the Data Dependence Graph on which it was based.
31 | """ 32 | raise NotImplementedError() 33 | 34 | 35 | def to_pdg(ddg: DataDependenceGraph) -> ProgramDependenceGraph: 36 | """ 37 | Convert the Data Dependence Graph into a Program Dependence Graph. 38 | New graph will contain same nodes as in the original one so that 39 | any changes made after converting in the original graph's statements will affect the converted one. 40 | :param ddg: Data Dependence Graph that should be converted. 41 | :return: Program Dependence Graph whose nodes were presented in the Data Dependence Graph on which it was based. 42 | """ 43 | raise NotImplementedError() 44 | -------------------------------------------------------------------------------- /program_slicing/graph/convert/pdg.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/04/22' 6 | 7 | from program_slicing.graph.cdg import ControlDependenceGraph 8 | from program_slicing.graph.cfg import ControlFlowGraph 9 | from program_slicing.graph.ddg import DataDependenceGraph 10 | from program_slicing.graph.pdg import ProgramDependenceGraph 11 | 12 | 13 | def to_cdg(pdg: ProgramDependenceGraph) -> ControlDependenceGraph: 14 | """ 15 | Convert the Program Dependence Graph into a Control Dependence Graph. 16 | New graph will contain same nodes as in the original one so that 17 | any changes made after converting in the original graph's statements will affect the converted one. 18 | :param pdg: Program Dependence Graph that should be converted. 19 | :return: Control Dependence Graph whose nodes were presented in the original Program Dependence Graph. 20 | """ 21 | raise NotImplementedError() 22 | 23 | 24 | def to_cfg(pdg: ProgramDependenceGraph) -> ControlFlowGraph: 25 | """ 26 | Convert the Program Dependence Graph into a Control Flow Graph.
27 | New graph will contain links on nodes of the original one so that 28 | any changes made after converting in the original graph's statements will affect the converted one. 29 | :param pdg: Program Dependence Graph that should be converted. 30 | :return: Control Flow Graph whose nodes contain nodes of the Program Dependence Graph on which it was based. 31 | """ 32 | raise NotImplementedError() 33 | 34 | 35 | def to_ddg(pdg: ProgramDependenceGraph) -> DataDependenceGraph: 36 | """ 37 | Convert the Program Dependence Graph into a Data Dependence Graph. 38 | New graph will contain same nodes as in the original one so that 39 | any changes made after converting in the original graph's statements will affect the converted one. 40 | :param pdg: Program Dependence Graph that should be converted. 41 | :return: Data Dependence Graph whose nodes were presented in the Program Dependence Graph on which it was based. 42 | """ 43 | raise NotImplementedError() 44 | -------------------------------------------------------------------------------- /integration_tests/files/method_21.java: -------------------------------------------------------------------------------- 1 | public String getClientId(FacesContext context) 2 | { 3 | if (context == null) 4 | throw new NullPointerException("context"); 5 | 6 | if (_clientId != null) 7 | return _clientId; 8 | 9 | String id = getId(); 10 | if (id == null) 11 | { 12 | UniqueIdVendor parentUniqueIdVendor = _ComponentUtils.findParentUniqueIdVendor(this); 13 | if (parentUniqueIdVendor == null) 14 | { 15 | UIViewRoot viewRoot = context.getViewRoot(); 16 | if (viewRoot != null) 17 | { 18 | id = viewRoot.createUniqueId(); 19 | } 20 | else 21 | { 22 | // The RI throws a NPE 23 | String location = getComponentLocation(this); 24 | throw new FacesException("Cannot create clientId. No id is assigned for component" 25 | + " to create an id and UIViewRoot is not defined: " 26 | + getPathToComponent(this) 27 | + (location != null ?
" created from: " + location : "")); 28 | } 29 | } 30 | else 31 | { 32 | id = parentUniqueIdVendor.createUniqueId(context, null); 33 | } 34 | setId(id); 35 | } 36 | 37 | UIComponent namingContainer = _ComponentUtils.findParentNamingContainer(this, false); 38 | if (namingContainer != null) 39 | { 40 | String containerClientId = namingContainer.getContainerClientId(context); 41 | if (containerClientId != null) 42 | { 43 | StringBuilder bld = __getSharedStringBuilder(); 44 | _clientId = bld.append(containerClientId).append(UINamingContainer.getSeparatorChar(context)).append(id).toString(); 45 | } 46 | else 47 | { 48 | _clientId = id; 49 | } 50 | } 51 | else 52 | { 53 | _clientId = id; 54 | } 55 | 56 | Renderer renderer = getRenderer(context); 57 | if (renderer != null) 58 | { 59 | _clientId = renderer.convertClientId(context, _clientId); 60 | } 61 | 62 | return _clientId; 63 | } 64 | -------------------------------------------------------------------------------- /integration_tests/files/method_13.java: -------------------------------------------------------------------------------- 1 | public void setOption(Token key, Token value) { 2 | if (key.getText().equals("combineChars")) { 3 | if (value.getText().equals("true")) { 4 | if (Tool.agressive) { 5 | combineChars = true; 6 | } 7 | } 8 | else if (value.getText().equals("false")) { 9 | combineChars = false; 10 | } 11 | else { 12 | grammar.antlrTool.error("Value for combineChars must be true or false", grammar.getFilename(), key.getLine(), key.getColumn()); 13 | } 14 | } else 15 | if (key.getText().equals("warnWhenFollowAmbig")) { 16 | if (value.getText().equals("true")) { 17 | warnWhenFollowAmbig = true; 18 | } 19 | else if (value.getText().equals("false")) { 20 | warnWhenFollowAmbig = false; 21 | } 22 | else { 23 | grammar.antlrTool.error("Value for warnWhenFollowAmbig must be true or false", grammar.getFilename(), key.getLine(), key.getColumn()); 24 | } 25 | } 26 | else if 
(key.getText().equals("generateAmbigWarnings")) { 27 | if (value.getText().equals("true")) { 28 | generateAmbigWarnings = true; 29 | } 30 | else if (value.getText().equals("false")) { 31 | generateAmbigWarnings = false; 32 | } 33 | else { 34 | grammar.antlrTool.error("Value for generateAmbigWarnings must be true or false", grammar.getFilename(), key.getLine(), key.getColumn()); 35 | } 36 | } 37 | else if (key.getText().equals("greedy")) { 38 | if (value.getText().equals("true")) { 39 | greedy = true; 40 | greedySet = true; 41 | } 42 | else if (value.getText().equals("false")) { 43 | greedy = false; 44 | greedySet = true; 45 | } 46 | else { 47 | grammar.antlrTool.error("Value for greedy must be true or false", grammar.getFilename(), key.getLine(), key.getColumn()); 48 | } 49 | } 50 | else { 51 | grammar.antlrTool.error("Invalid subrule option: " + key.getText(), grammar.getFilename(), key.getLine(), key.getColumn()); 52 | } 53 | } -------------------------------------------------------------------------------- /integration_tests/files/method_8.java: -------------------------------------------------------------------------------- 1 | private void loadEquipment(Protomech t, String sName, int nLoc) throws EntityLoadingException { 2 | String[] saEquip = dataFile.getDataAsString(sName + " Equipment"); 3 | if (saEquip == null) { 4 | return; 5 | } 6 | 7 | // prefix is "Clan " or "IS " 8 | String prefix; 9 | if (t.getTechLevel() == TechConstants.T_CLAN_TW) { 10 | prefix = "Clan "; 11 | } else { 12 | prefix = "IS "; 13 | } 14 | 15 | for (String element : saEquip) { 16 | String equipName = element.trim(); 17 | 18 | // ProtoMech Ammo comes in non-standard amounts. 19 | int ammoIndex = equipName.indexOf("Ammo ("); 20 | int shotsCount = 0; 21 | if (ammoIndex > 0) { 22 | // Try to get the number of shots. 
23 | try { 24 | String shots = equipName.substring(ammoIndex + 6, equipName.length() - 1); 25 | shotsCount = Integer.parseInt(shots); 26 | if (shotsCount < 0) { 27 | throw new EntityLoadingException("Invalid number of shots in: " + equipName + "."); 28 | } 29 | } catch (NumberFormatException badShots) { 30 | throw new EntityLoadingException("Could not determine the number of shots in: " + equipName + "."); 31 | } 32 | 33 | // Strip the shots out of the ammo name. 34 | equipName = equipName.substring(0, ammoIndex + 4); 35 | } 36 | EquipmentType etype = EquipmentType.get(equipName); 37 | 38 | if (etype == null) { 39 | // try w/ prefix 40 | etype = EquipmentType.get(prefix + equipName); 41 | } 42 | 43 | if (etype != null) { 44 | try { 45 | // If this is an Ammo slot, only add 46 | // the indicated number of shots. 47 | if (ammoIndex > 0) { 48 | t.addEquipment(etype, nLoc, false, shotsCount); 49 | } else { 50 | t.addEquipment(etype, nLoc); 51 | } 52 | } catch (LocationFullException ex) { 53 | throw new EntityLoadingException(ex.getMessage()); 54 | } 55 | } 56 | } 57 | } -------------------------------------------------------------------------------- /integration_tests/files/method_28.java: -------------------------------------------------------------------------------- 1 | public boolean loadEntryByID(String id, boolean activate, 2 | boolean onlyLoadOnce, Object datasource) { 3 | if (id == null) { 4 | return false; 5 | } 6 | MdiEntry entry = getEntry(id); 7 | if (entry != null) { 8 | if (datasource != null) { 9 | entry.setDatasource(datasource); 10 | } 11 | if (activate) { 12 | showEntry(entry); 13 | } 14 | return true; 15 | } 16 | 17 | @SuppressWarnings("deprecation") 18 | boolean loadedOnce = COConfigurationManager.getBooleanParameter("sb.once." 
19 | + id, false); 20 | if (loadedOnce && onlyLoadOnce) { 21 | return false; 22 | } 23 | 24 | if (id.equals(SIDEBAR_SECTION_WELCOME)) { 25 | SideBarEntrySWT entryWelcome = (SideBarEntrySWT) createWelcomeSection(); 26 | if (activate) { 27 | showEntry(entryWelcome); 28 | } 29 | return true; 30 | } else if (id.startsWith("ContentNetwork.")) { 31 | long networkID = Long.parseLong(id.substring(15)); 32 | handleContentNetworkSwitch(id, networkID); 33 | return true; 34 | } else if (id.equals("library") || id.equals("minilibrary")) { 35 | id = SIDEBAR_SECTION_LIBRARY; 36 | loadEntryByID(id, activate); 37 | return true; 38 | } else if (id.equals("activities")) { 39 | id = SIDEBAR_SECTION_ACTIVITIES; 40 | loadEntryByID(id, activate); 41 | return true; 42 | } 43 | 44 | MdiEntryCreationListener mdiEntryCreationListener = mapIdToCreationListener.get(id); 45 | if (mdiEntryCreationListener != null) { 46 | MdiEntry mdiEntry = mdiEntryCreationListener.createMDiEntry(id); 47 | if (datasource != null) { 48 | mdiEntry.setDatasource(datasource); 49 | } 50 | if (mdiEntry instanceof SideBarEntrySWT) { 51 | if (onlyLoadOnce) { 52 | COConfigurationManager.setParameter("sb.once." 
+ id, true); 53 | } 54 | if (activate) { 55 | showEntry(mdiEntry); 56 | } 57 | return true; 58 | } 59 | } else { 60 | setEntryAutoOpen(id, datasource, true); 61 | } 62 | 63 | return false; 64 | } -------------------------------------------------------------------------------- /integration_tests/files/method_29.java: -------------------------------------------------------------------------------- 1 | public Point2D[] intersectionPoint() { 2 | 3 | Point2D[] result = null; 4 | if (itsc==null) return null; 5 | else if ( itsc.length == 2 ) { 6 | if ( !isNaN(itsc[0]) ) { 7 | result = new Point2D[1]; 8 | result[0] = AlgoLine2D.evaluate(line0,itsc[0]); 9 | } else if ( !isNaN(itsc[1]) ) { 10 | result = new Point2D[1]; 11 | result[0] = AlgoLine2D.evaluate(line0,itsc[1]); 12 | } else { 13 | result = null; 14 | } 15 | } else if ( itsc.length == 4 ) { 16 | int count = 0; 17 | int index[] = new int[4]; 18 | if (!isNaN(itsc[0])) index[count++] = 0; 19 | if (!isNaN(itsc[1])) index[count++] = 1; 20 | if (!isNaN(itsc[2])) index[count++] = 2; 21 | if (!isNaN(itsc[3])) index[count++] = 3; 22 | Line2D l0 = (index[0]<2) ? line0 : line1; 23 | Line2D l1 = (index[1]<2) ? 
line0 : line1; 24 | result = new Point2D[2]; 25 | if (count==0) { 26 | result = null; 27 | } else if (count==2) { 28 | if ((itsc[index[0]]==0.0 || itsc[index[0]]==1.0) && 29 | (itsc[index[1]]==0.0 || itsc[index[1]]==1.0)) { 30 | /** colinear touching the boundaries */ 31 | result = new Point2D[1]; 32 | result[0] = AlgoLine2D.evaluate(l0,itsc[index[0]]); 33 | } else { 34 | /** colinear overlaping interiors */ 35 | result = new Point2D[2]; 36 | result[0] = AlgoLine2D.evaluate(l0,itsc[index[0]]); 37 | result[1] = AlgoLine2D.evaluate(l1,itsc[index[1]]); 38 | } 39 | } else if (count==3) { 40 | if (index[0]<2 && index[1]<2) { 41 | result[0] = AlgoLine2D.evaluate(l0,itsc[index[0]]); 42 | result[1] = AlgoLine2D.evaluate(l0,itsc[index[1]]); 43 | } else { 44 | result[0] = AlgoLine2D.evaluate(l1,itsc[index[0]]); 45 | result[1] = AlgoLine2D.evaluate(l1,itsc[index[1]]); 46 | } 47 | 48 | } else if (count==4) { 49 | result[0] = AlgoLine2D.evaluate(l0,itsc[index[0]]); 50 | result[1] = AlgoLine2D.evaluate(l0,itsc[index[1]]); 51 | } 52 | 53 | } 54 | return result; 55 | } -------------------------------------------------------------------------------- /integration_tests/files/method_17.java: -------------------------------------------------------------------------------- 1 | public static void onSystemShutdown() { 2 | for(Map.Entry entry:s_sequences.entrySet()) { 3 | //} 4 | //for(String key: s_sequences.keySet()) { 5 | String[] tokens = entry.getKey().split("\\."); 6 | String TableName = tokens[1]; 7 | int AD_Client_ID = Integer.parseInt(tokens[0]); 8 | String selectSQL = "SELECT CurrentNext, CurrentNextSys, IncrementNo, AD_Sequence_ID " 9 | + "FROM AD_Sequence " 10 | + "WHERE Name=?" 
11 | + " AND IsActive='Y' AND IsTableID='Y' AND IsAutoSequence='Y' " 12 | + " FOR UPDATE"; 13 | Sequence seq = entry.getValue(); 14 | //at this point there should not be a need for syncrhonization, just for safety 15 | synchronized(seq) { 16 | Trx trx = Trx.get("MSequence.onSystemShutdown()"); 17 | PreparedStatement pstmt = null; 18 | ResultSet rs = null; 19 | try { 20 | // 21 | pstmt = trx.getConnection().prepareStatement(selectSQL, ResultSet.TYPE_FORWARD_ONLY, 22 | ResultSet.CONCUR_UPDATABLE); 23 | pstmt.setString(1, TableName); 24 | // 25 | rs = pstmt.executeQuery(); 26 | if (rs.next()) { 27 | if (isCompiereSys(AD_Client_ID)) { 28 | int dbNextSeq = rs.getInt(2); 29 | // only when db nextseq equals to the jvm endseq then i'll write back. this is so if there are multiple 30 | //jvms running, i know that other jvms already advanced the sequenes so that i don't mess with it 31 | if(dbNextSeq == seq.endSeq) { 32 | seq.endSeq = seq.nextSeq; 33 | rs.updateInt(2, seq.nextSeq); 34 | } 35 | } else { 36 | int dbNextSeq = rs.getInt(1); 37 | // only when db nextseq equals to the jvm endseq then i'll write back. 
this is so if there are multiple 38 | //jvms running, i know that other jvms already advanced the sequenes so that i don't mess with it 39 | if(dbNextSeq == seq.endSeq) { 40 | seq.endSeq = seq.nextSeq; 41 | rs.updateInt(1, seq.nextSeq); 42 | } 43 | } 44 | rs.updateRow(); 45 | } 46 | }catch (Exception e) { 47 | s_log.log(Level.SEVERE, TableName + " - " + e.getMessage(), e); 48 | } finally { 49 | if( rs != null ) 50 | try { 51 | rs.close(); 52 | } catch (SQLException e) { 53 | s_log.log(Level.SEVERE, "Finish", e); 54 | } 55 | 56 | if (pstmt != null) 57 | try { 58 | pstmt.close(); 59 | } catch (SQLException e) { 60 | s_log.log(Level.SEVERE, "Finish", e); 61 | } 62 | pstmt = null; 63 | 64 | if (trx != null) { 65 | trx.commit(); 66 | trx.close(); 67 | } 68 | } 69 | } 70 | } 71 | } -------------------------------------------------------------------------------- /program_slicing/graph/parse/tree_sitter_parsers.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/04/13' 6 | 7 | import os 8 | from pathlib import Path 9 | from typing import Optional 10 | 11 | from tree_sitter import Language, Parser, Node 12 | 13 | 14 | project_path = Path(*Path(__file__).parts[:-4]) 15 | 16 | Language.build_library( 17 | # Store the library in the `build` directory 18 | os.path.join(project_path, "build", "languages.so"), 19 | 20 | # Include one or more languages 21 | [ 22 | os.path.join(project_path, "vendor", "tree-sitter-java") 23 | ] 24 | ) 25 | 26 | 27 | def java() -> Parser: 28 | parser = Parser() 29 | parser.set_language(Language(os.path.join( 30 | project_path, "build", "languages.so"), "java")) 31 | return parser 32 | 33 | 34 | def node_name(source_code_bytes: bytes, ast: Node) -> Optional[str]: 35 | """ 36 | Parse a Tree Sitter AST Node's name using the original source code bytes. 
37 | :param source_code_bytes: source code bytes. 38 | :param ast: Tree Sitter AST Node. 39 | :return: string with the node's name if it exists, none otherwise. 40 | """ 41 | if ast.type in {"variable_declarator", "formal_parameter"}: 42 | return node_name(source_code_bytes, ast.child_by_field_name("name")) 43 | elif ast.type == "assignment_expression": 44 | return node_name(source_code_bytes, ast.child_by_field_name("left")) 45 | elif ast.type == "update_expression": 46 | expr_ast = ast.children[0] 47 | expr_ast = expr_ast if expr_ast.next_named_sibling is None else expr_ast.next_named_sibling 48 | return node_name(source_code_bytes, expr_ast) 49 | elif ast.type == "catch_formal_parameter": 50 | return node_name(source_code_bytes, ast.child_by_field_name("name")) 51 | elif ast.type == "break_statement": 52 | identifier_ast = ast.children[0].next_named_sibling 53 | return None if identifier_ast is None else node_name(source_code_bytes, identifier_ast) 54 | elif ast.type == "continue_statement": 55 | identifier_ast = ast.children[0].next_named_sibling 56 | return None if identifier_ast is None else node_name(source_code_bytes, identifier_ast) 57 | elif ast.type == "labeled_statement": 58 | return source_code_bytes[ast.children[0].start_byte: ast.children[0].end_byte].decode("utf8") 59 | elif ast.parent is not None and ast.parent.type == "labeled_statement": 60 | return node_name(source_code_bytes, ast.parent) 61 | elif ast.start_point[0] == ast.end_point[0]: 62 | return source_code_bytes[ast.start_byte: ast.end_byte].decode("utf8") 63 | return None 64 | -------------------------------------------------------------------------------- /integration_tests/files/method_16.java: -------------------------------------------------------------------------------- 1 | public void propertyChange (PropertyChangeEvent e) { 2 | //System.err.println("ThreadsTreeModel.propertyChange("+e+")"); 3 | //System.err.println(" "+e.getPropertyName()+", "+e.getOldValue()+" => 
"+e.getNewValue()); 4 | ThreadGroupReference tg; 5 | if (e.getPropertyName() == ThreadsCache.PROP_THREAD_STARTED) { 6 | ThreadReference t = (ThreadReference) e.getNewValue(); 7 | try { 8 | tg = ThreadReferenceWrapper.threadGroup(t); 9 | } catch (InternalExceptionWrapper ex) { 10 | tg = null; 11 | } catch (VMDisconnectedExceptionWrapper ex) { 12 | return ; 13 | } catch (ObjectCollectedExceptionWrapper ex) { 14 | return ; 15 | } catch (IllegalThreadStateExceptionWrapper ex) { 16 | tg = null; 17 | } 18 | } else if (e.getPropertyName() == ThreadsCache.PROP_THREAD_DIED) { 19 | ThreadReference t = (ThreadReference) e.getOldValue(); 20 | try { 21 | tg = ThreadReferenceWrapper.threadGroup(t); 22 | } catch (InternalExceptionWrapper ex) { 23 | tg = null; 24 | } catch (VMDisconnectedExceptionWrapper ex) { 25 | return ; 26 | } catch (ObjectCollectedExceptionWrapper ex) { 27 | tg = null; 28 | } catch (IllegalThreadStateExceptionWrapper ex) { 29 | tg = null; 30 | } 31 | } else if (e.getPropertyName() == ThreadsCache.PROP_GROUP_ADDED) { 32 | tg = (ThreadGroupReference) e.getNewValue(); 33 | try { 34 | tg = ThreadGroupReferenceWrapper.parent(tg); 35 | } catch (InternalExceptionWrapper ex) { 36 | tg = null; 37 | } catch (VMDisconnectedExceptionWrapper ex) { 38 | tg = null; 39 | } catch (ObjectCollectedExceptionWrapper ex) { 40 | tg = null; 41 | } 42 | } else { 43 | return ; 44 | } 45 | Object node; 46 | if (tg == null) { 47 | node = ROOT; 48 | } else { 49 | node = debugger.getThreadGroup(tg); 50 | } 51 | synchronized (this) { 52 | if (task == null) { 53 | task = createTask(); 54 | } 55 | if (nodesToRefresh == null) { 56 | nodesToRefresh = new LinkedHashSet(); 57 | } 58 | nodesToRefresh.add(node); 59 | task.schedule(100); 60 | } 61 | } -------------------------------------------------------------------------------- /integration_tests/files/method_19.java: -------------------------------------------------------------------------------- 1 | static AttributeDescriptor 
createAttribute(String typeSpec) throws SchemaException { 2 | int split = typeSpec.indexOf(":"); 3 | 4 | String name; 5 | String type; 6 | String hint = null; 7 | 8 | if (split == -1) { 9 | name = typeSpec; 10 | type = "String"; 11 | } else { 12 | name = typeSpec.substring(0, split); 13 | 14 | int split2 = typeSpec.indexOf(":", split + 1); 15 | 16 | if (split2 == -1) { 17 | type = typeSpec.substring(split + 1); 18 | } else { 19 | type = typeSpec.substring(split + 1, split2); 20 | hint = typeSpec.substring(split2 + 1); 21 | } 22 | } 23 | 24 | try { 25 | boolean nillable = true; 26 | CoordinateReferenceSystem crs = null; 27 | 28 | if (hint != null) { 29 | StringTokenizer st = new StringTokenizer(hint, ";"); 30 | while (st.hasMoreTokens()) { 31 | String h = st.nextToken(); 32 | h = h.trim(); 33 | 34 | // nillable? 35 | // JD: i am pretty sure this hint is useless since the 36 | // default is to make attributes nillable 37 | if (h.equals("nillable")) { 38 | nillable = true; 39 | } 40 | // spatial reference identieger? 
41 | if (h.startsWith("srid=")) { 42 | String srid = h.split("=")[1]; 43 | Integer.parseInt(srid); 44 | try { 45 | crs = CRS.decode("EPSG:" + srid); 46 | } catch (Exception e) { 47 | String msg = "Error decoding srs: " + srid; 48 | throw new SchemaException(msg, e); 49 | } 50 | } 51 | } 52 | } 53 | 54 | Class clazz = type(type); 55 | if (Geometry.class.isAssignableFrom(clazz)) { 56 | GeometryType at = new GeometryTypeImpl(new NameImpl(name), clazz, crs, false, 57 | false, Collections.EMPTY_LIST, null, null); 58 | return new GeometryDescriptorImpl(at, new NameImpl(name), 0, 1, nillable, null); 59 | } else { 60 | AttributeType at = new AttributeTypeImpl(new NameImpl(name), clazz, false, false, 61 | Collections.EMPTY_LIST, null, null); 62 | return new AttributeDescriptorImpl(at, new NameImpl(name), 0, 1, nillable, null); 63 | } 64 | } catch (ClassNotFoundException e) { 65 | throw new SchemaException("Could not type " + name + " as:" + type, e); 66 | } 67 | } -------------------------------------------------------------------------------- /integration_tests/files/method_26.java: -------------------------------------------------------------------------------- 1 | public static String dumpQueue(Queue q) { 2 | StringBuilder sb=new StringBuilder(); 3 | LinkedList values=q.values(); 4 | if(values.isEmpty()) { 5 | sb.append("empty"); 6 | } 7 | else { 8 | for(Object o: values) { 9 | String s=null; 10 | if(o instanceof Event) { 11 | Event event=(Event)o; 12 | int type=event.getType(); 13 | s=Event.type2String(type); 14 | if(type == Event.VIEW_CHANGE) 15 | s+=" " + event.getArg(); 16 | if(type == Event.MSG) 17 | s+=" " + event.getArg(); 18 | 19 | if(type == Event.MSG) { 20 | s+="["; 21 | Message m=(Message)event.getArg(); 22 | Map headers=new HashMap(m.getHeaders()); 23 | for(Map.Entry entry: headers.entrySet()) { 24 | short id=entry.getKey(); 25 | Header value=entry.getValue(); 26 | String headerToString=null; 27 | if(value instanceof FD.FdHeader) { 28 | 
headerToString=value.toString(); 29 | } 30 | else 31 | if(value instanceof PingHeader) { 32 | headerToString=ClassConfigurator.getProtocol(id) + "-"; 33 | if(((PingHeader)value).type == PingHeader.GET_MBRS_REQ) { 34 | headerToString+="GMREQ"; 35 | } 36 | else 37 | if(((PingHeader)value).type == PingHeader.GET_MBRS_RSP) { 38 | headerToString+="GMRSP"; 39 | } 40 | else { 41 | headerToString+="UNKNOWN"; 42 | } 43 | } 44 | else { 45 | headerToString=ClassConfigurator.getProtocol(id) + "-" + (value == null ? "null" : value.toString()); 46 | } 47 | s+=headerToString; 48 | s+=" "; 49 | } 50 | s+="]"; 51 | } 52 | } 53 | else { 54 | s=o.toString(); 55 | } 56 | sb.append(s).append("\n"); 57 | } 58 | } 59 | return sb.toString(); 60 | } -------------------------------------------------------------------------------- /program_slicing/graph/point.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/06/24' 6 | 7 | from typing import Tuple 8 | 9 | 10 | class Point: 11 | 12 | def __init__(self, line_number: int, column_number: int) -> None: 13 | self.__line_number: int = line_number 14 | self.__column_number: int = column_number 15 | 16 | @classmethod 17 | def from_tuple(cls, point: Tuple[int, int]) -> 'Point': 18 | return cls(point[0], point[1]) 19 | 20 | def __repr__(self) -> str: 21 | return "Point{}".format(self) 22 | 23 | def __str__(self) -> str: 24 | return "({}, {})".format(self.__line_number, self.__column_number) 25 | 26 | def __iter__(self): 27 | yield self.__line_number 28 | yield self.__column_number 29 | 30 | def __getitem__(self, key): 31 | if key == 0: 32 | return self.__line_number 33 | if key == 1: 34 | return self.__column_number 35 | raise IndexError("Point index out of range") 36 | 37 | def __eq__(self, other: 'Point') -> bool: 38 | return \ 39 | other is not None and \ 40 | self.__line_number ==
other.__line_number and \ 41 | self.__column_number == other.__column_number 42 | 43 | def __hash__(self) -> int: 44 | return hash((self.__line_number, self.__column_number)) 45 | 46 | def __ne__(self, other: 'Point') -> bool: 47 | return \ 48 | other is None or \ 49 | self.__line_number != other.__line_number or \ 50 | self.__column_number != other.__column_number 51 | 52 | def __lt__(self, other: 'Point') -> bool: 53 | return \ 54 | self.__line_number < other.__line_number or \ 55 | self.__line_number == other.__line_number and self.__column_number < other.__column_number 56 | 57 | def __le__(self, other: 'Point') -> bool: 58 | return \ 59 | self.__line_number < other.__line_number or \ 60 | self.__line_number == other.__line_number and self.__column_number <= other.__column_number 61 | 62 | def __gt__(self, other: 'Point') -> bool: 63 | return \ 64 | self.__line_number > other.__line_number or \ 65 | self.__line_number == other.__line_number and self.__column_number > other.__column_number 66 | 67 | def __ge__(self, other: 'Point') -> bool: 68 | return \ 69 | self.__line_number > other.__line_number or \ 70 | self.__line_number == other.__line_number and self.__column_number >= other.__column_number 71 | 72 | def __neg__(self) -> 'Point': 73 | return Point(-self.line_number, -self.column_number) 74 | 75 | @property 76 | def line_number(self) -> int: 77 | return self.__line_number 78 | 79 | @property 80 | def column_number(self) -> int: 81 | return self.__column_number 82 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23
| pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | .idea 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | 132 | scripts/performance_benchmarks/block_slices/*.txt 133 | scripts/performance_benchmarks/block_slices/*.csv 134 | scripts/performance_benchmarks/block_slices/*.bs 135 | scripts/performance_benchmarks/block_slices/*.json 136 | 137 | program_slicing/decomposition/*.json 138 | program_slicing/decomposition/*.csv 139 | program_slicing/decomposition/*.txt 140 | program_slicing/decomposition/*.ipynb 141 | 142 | program_slicing/*.json 143 | program_slicing/*.csv 144 | program_slicing/*.ipynb 145 | -------------------------------------------------------------------------------- /integration_tests/files/method_10.java: -------------------------------------------------------------------------------- 1 | void checkMethods() { 2 | boolean mustImplementAbstractMethods = mustImplementAbstractMethods(); 3 | boolean skipInheritedMethods = mustImplementAbstractMethods && canSkipInheritedMethods(); // have a single concrete superclass so only check overridden methods 4 | char[][] methodSelectors = this.inheritedMethods.keyTable; 5 | nextSelector : for (int s = methodSelectors.length; --s >= 0;) { 6 | if (methodSelectors[s] == null) continue nextSelector; 7 | 8 | MethodBinding[] current = (MethodBinding[]) this.currentMethods.get(methodSelectors[s]); 9 | if (current == null && skipInheritedMethods) 10 | continue nextSelector; 11 | 12 | MethodBinding[] inherited = (MethodBinding[]) 
this.inheritedMethods.valueTable[s]; 13 | if (inherited.length == 1 && current == null) { // handle the common case 14 | if (mustImplementAbstractMethods && inherited[0].isAbstract()) 15 | checkAbstractMethod(inherited[0]); 16 | continue nextSelector; 17 | } 18 | 19 | int index = -1; 20 | MethodBinding[] matchingInherited = new MethodBinding[inherited.length]; 21 | if (current != null) { 22 | for (int i = 0, length1 = current.length; i < length1; i++) { 23 | MethodBinding currentMethod = current[i]; 24 | for (int j = 0, length2 = inherited.length; j < length2; j++) { 25 | MethodBinding inheritedMethod = computeSubstituteMethod(inherited[j], currentMethod); 26 | if (inheritedMethod != null) { 27 | if (doesMethodOverride(currentMethod, inheritedMethod)) { 28 | matchingInherited[++index] = inheritedMethod; 29 | inherited[j] = null; // do not want to find it again 30 | } 31 | } 32 | } 33 | if (index >= 0) { 34 | checkAgainstInheritedMethods(currentMethod, matchingInherited, index + 1, inherited); // pass in the length of matching 35 | while (index >= 0) matchingInherited[index--] = null; // clear the contents of the matching methods 36 | } 37 | } 38 | } 39 | 40 | for (int i = 0, length = inherited.length; i < length; i++) { 41 | MethodBinding inheritedMethod = inherited[i]; 42 | if (inheritedMethod == null) continue; 43 | 44 | matchingInherited[++index] = inheritedMethod; 45 | for (int j = i + 1; j < length; j++) { 46 | MethodBinding otherInheritedMethod = inherited[j]; 47 | if (canSkipInheritedMethods(inheritedMethod, otherInheritedMethod)) 48 | continue; 49 | otherInheritedMethod = computeSubstituteMethod(otherInheritedMethod, inheritedMethod); 50 | if (otherInheritedMethod != null) { 51 | if (doesMethodOverride(inheritedMethod, otherInheritedMethod)) { 52 | matchingInherited[++index] = otherInheritedMethod; 53 | inherited[j] = null; // do not want to find it again 54 | } 55 | } 56 | } 57 | if (index == -1) continue; 58 | if (index > 0) 59 | 
checkInheritedMethods(matchingInherited, index + 1); // pass in the length of matching 60 | else if (mustImplementAbstractMethods && matchingInherited[0].isAbstract()) 61 | checkAbstractMethod(matchingInherited[0]); 62 | while (index >= 0) matchingInherited[index--] = null; // clear the contents of the matching methods 63 | } 64 | } 65 | } -------------------------------------------------------------------------------- /integration_tests/files/method_27.java: -------------------------------------------------------------------------------- 1 | public IStatus applyDeltas(Collection deltas, String[] filters) { 2 | if (filters == null) { 3 | filters = new String[0]; 4 | } 5 | 6 | MultiStatus multiStatus = new MultiStatus(Activator.PI_WORKBENCH, 0, "", null); //$NON-NLS-1$ 7 | LinkedList delayedDeltas = new LinkedList(); 8 | 9 | deltaIterationLoop: for (final ModelDelta delta : deltas) { 10 | for (String filter : filters) { 11 | if (delta.getAttributeName().equals(filter)) { 12 | continue deltaIterationLoop; 13 | } 14 | } 15 | 16 | final IStatus[] status = new IStatus[1]; 17 | SafeRunner.run(new ISafeRunnable() { 18 | public void run() throws Exception { 19 | status[0] = delta.apply(); 20 | } 21 | 22 | public void handleException(Throwable exception) { 23 | status[0] = new Status(IStatus.ERROR, Activator.PI_WORKBENCH, 24 | "Failed to apply delta", exception); //$NON-NLS-1$ 25 | } 26 | }); 27 | 28 | if (status[0].getSeverity() == IStatus.CANCEL) { 29 | delayedDeltas.add(delta); 30 | continue; 31 | } 32 | multiStatus.add(status[0]); 33 | 34 | switch (status[0].getCode()) { 35 | case IStatus.INFO: 36 | logger.info(status[0].getMessage()); 37 | break; 38 | case IStatus.WARNING: 39 | logger.warn(status[0].getMessage()); 40 | break; 41 | case IStatus.ERROR: 42 | logger.error(status[0].getMessage()); 43 | break; 44 | } 45 | } 46 | 47 | for (Iterator it = delayedDeltas.iterator(); it.hasNext();) { 48 | final ModelDelta delta = it.next(); 49 | final IStatus[] status = new 
IStatus[1]; 50 | SafeRunner.run(new ISafeRunnable() { 51 | public void run() throws Exception { 52 | status[0] = delta.apply(); 53 | } 54 | 55 | public void handleException(Throwable exception) { 56 | status[0] = new Status(IStatus.ERROR, Activator.PI_WORKBENCH, 57 | "Failed to apply delta", exception); //$NON-NLS-1$ 58 | } 59 | }); 60 | 61 | if (status[0].getSeverity() == IStatus.CANCEL) { 62 | continue; 63 | } 64 | 65 | multiStatus.add(status[0]); 66 | 67 | switch (status[0].getCode()) { 68 | case IStatus.INFO: 69 | logger.info(status[0].getMessage()); 70 | break; 71 | case IStatus.WARNING: 72 | logger.warn(status[0].getMessage()); 73 | break; 74 | case IStatus.ERROR: 75 | logger.error(status[0].getMessage()); 76 | break; 77 | } 78 | } 79 | 80 | return multiStatus; 81 | } 82 | -------------------------------------------------------------------------------- /integration_tests/files/method_18.java: -------------------------------------------------------------------------------- 1 | public void run() { 2 | logger.info("Task Run()"); 3 | Thread.currentThread().setName("Informa Update Channel Task"); 4 | /** 5 | * ChannelBuilder is not re-entrant and it is shared by all the 6 | * UpdateChannelTasks which are created by single ChannelRegistry. 7 | * Note that all the beginTransaction() must have a corresponding endTransaction() 8 | */ 9 | synchronized (builder) { 10 | if (!info.getFormatDetected()) 11 | /** 12 | * If this is the first time we see this Channel, then we will now attempt 13 | * to parse it and if this works we remember the format and proceed. 14 | * Otherwise we trigger error case handling and eventually deactivate it. 
15 | */ 16 | { 17 | try { 18 | builder.beginTransaction(); 19 | ChannelFormat format = 20 | FormatDetector.getFormat(channel.getLocation()); 21 | channel.setFormat(format); 22 | info.setFormatDetected(true); 23 | channel.setLastUpdated(new Date()); 24 | builder.endTransaction(); 25 | } catch (UnsupportedFormatException ex) { 26 | logger.info("Unsupported format for Channel"); 27 | incrementProblems(ex); 28 | return; 29 | } catch (IOException ioe) { 30 | logger.info("Cannot retrieve Channel"); 31 | incrementProblems(ioe); 32 | return; 33 | } catch (ChannelBuilderException e) { 34 | e.printStackTrace(); 35 | } 36 | } 37 | try { 38 | synchronized (channel) { 39 | builder.beginTransaction(); 40 | ChannelIF tempChannel = 41 | FeedParser.parse(tempBuilder, channel.getLocation()); 42 | logger.info( 43 | "Updating channel from " 44 | + channel.getLocation() 45 | + ": " 46 | + tempChannel 47 | + "(new) " 48 | + channel 49 | + "(old)"); 50 | InformaUtils.copyChannelProperties(tempChannel, channel); 51 | builder.update(channel); 52 | channel.setLastUpdated(new Date()); 53 | // compare with existing items, only add new ones 54 | if (tempChannel.getItems().isEmpty()) { 55 | logger.warn("No items found in channel " + channel); 56 | } else { 57 | Iterator it = tempChannel.getItems().iterator(); 58 | while (it.hasNext()) { 59 | ItemIF item = (ItemIF) it.next(); 60 | if (!channel.getItems().contains(item)) { 61 | logger.debug("Found new item: " + item); 62 | channel.addItem(builder.createItem(null, item)); 63 | // } 64 | } 65 | } // while more items 66 | } 67 | builder.endTransaction(); 68 | } 69 | } catch (ParseException pe) { 70 | incrementProblems(pe); 71 | } catch (IOException ioe) { 72 | incrementProblems(ioe); 73 | } catch (ChannelBuilderException e) { 74 | e.printStackTrace(); 75 | } 76 | } 77 | } -------------------------------------------------------------------------------- /program_slicing/graph/parse/parse.py: 
-------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/03/23' 6 | 7 | from enum import Enum 8 | 9 | from tree_sitter import Tree 10 | 11 | from program_slicing.graph.parse import tree_sitter_ast_java 12 | from program_slicing.graph.parse import cdg_java 13 | from program_slicing.graph.parse import cfg_java 14 | from program_slicing.graph.parse import ddg_java 15 | from program_slicing.graph.parse import pdg_java 16 | from program_slicing.graph.cfg import ControlFlowGraph 17 | from program_slicing.graph.cdg import ControlDependenceGraph 18 | from program_slicing.graph.ddg import DataDependenceGraph 19 | from program_slicing.graph.pdg import ProgramDependenceGraph 20 | 21 | 22 | class Lang(Enum): 23 | JAVA = ".java" 24 | XML = ".xml" 25 | 26 | 27 | def tree_sitter_ast(source_code: str, lang: Lang) -> Tree: 28 | """ 29 | Parse the source code in a specified format into a Tree Sitter AST. 30 | :param source_code: string with the source code in it. 31 | :param lang: the source code Lang. 32 | :return: Tree Sitter AST. 33 | """ 34 | if lang == Lang.JAVA: 35 | return tree_sitter_ast_java.parse(source_code) 36 | else: 37 | raise NotImplementedError() 38 | 39 | 40 | def control_flow_graph(source_code: str, lang: Lang) -> ControlFlowGraph: 41 | """ 42 | Parse the source code in a specified format into a Control Flow Graph. 43 | :param source_code: string with the source code in it. 44 | :param lang: the source code Lang. 45 | :return: Control Flow Graph. 46 | """ 47 | if lang == Lang.JAVA: 48 | return cfg_java.parse(source_code) 49 | else: 50 | raise NotImplementedError() 51 | 52 | 53 | def control_dependence_graph(source_code: str, lang: Lang) -> ControlDependenceGraph: 54 | """ 55 | Parse the source code in a specified format into a Control Dependence Graph. 56 | :param source_code: string with the source code in it. 
57 | :param lang: the source code Lang. 58 | :return: Control Dependence Graph. 59 | """ 60 | if lang == Lang.JAVA: 61 | return cdg_java.parse(source_code) 62 | else: 63 | raise NotImplementedError() 64 | 65 | 66 | def data_dependence_graph(source_code: str, lang: Lang) -> DataDependenceGraph: 67 | """ 68 | Parse the source code in a specified format into a Data Dependence Graph. 69 | :param source_code: string with the source code in it. 70 | :param lang: the source code Lang. 71 | :return: Data Dependence Graph. 72 | """ 73 | if lang == Lang.JAVA: 74 | return ddg_java.parse(source_code) 75 | else: 76 | raise NotImplementedError() 77 | 78 | 79 | def program_dependence_graph(source_code: str, lang: Lang) -> ProgramDependenceGraph: 80 | """ 81 | Parse the source code in a specified format into a Program Dependence Graph. 82 | :param source_code: string with the source code in it. 83 | :param lang: the source code Lang. 84 | :return: Program Dependence Graph. 85 | """ 86 | if lang == Lang.JAVA: 87 | return pdg_java.parse(source_code) 88 | else: 89 | raise NotImplementedError() 90 | -------------------------------------------------------------------------------- /integration_tests/files/method_25.java: -------------------------------------------------------------------------------- 1 | public static Collection createProjectFromTemplate( 2 | final FileObject template, File projectLocation, 3 | final String name, final String serverID) throws IOException { 4 | assert template != null && projectLocation != null && name != null; 5 | ArrayList projects = new ArrayList(); 6 | if (template.getExt().endsWith("zip")) { //NOI18N 7 | FileObject prjLoc = createProjectFolder(projectLocation); 8 | InputStream is = template.getInputStream(); 9 | try { 10 | unzip(is, prjLoc); 11 | projects.add(prjLoc); 12 | // update project.xml 13 | File projXml = FileUtil.toFile(prjLoc.getFileObject(AntProjectHelper.PROJECT_XML_PATH)); 14 | Document doc = XMLUtil.parse(new 
InputSource(projXml.toURI().toString()), false, true, null, null); 15 | NodeList nlist = doc.getElementsByTagNameNS(PROJECT_CONFIGURATION_NAMESPACE, "name"); //NOI18N 16 | if (nlist != null) { 17 | for (int i=0; i < nlist.getLength(); i++) { 18 | Node n = nlist.item(i); 19 | if (n.getNodeType() != Node.ELEMENT_NODE) { 20 | continue; 21 | } 22 | Element e = (Element)n; 23 | 24 | replaceText(e, name); 25 | } 26 | saveXml(doc, prjLoc, AntProjectHelper.PROJECT_XML_PATH); 27 | } 28 | } catch (Exception e) { 29 | throw new IOException(e.toString()); 30 | } finally { 31 | if (is != null) is.close(); 32 | } 33 | prjLoc.refresh(false); 34 | } else { 35 | String files = (String) template.getAttribute("files"); 36 | if ((files != null) && (files.length() > 0)) { 37 | StringTokenizer st = new StringTokenizer(files, ","); 38 | while (st.hasMoreElements()) { 39 | String prjName = st.nextToken(); 40 | if ((prjName == null) || (prjName.trim().equals(""))) continue; 41 | InputStream is = WebSampleProjectGenerator.class.getResourceAsStream(prjName); 42 | try { 43 | FileObject prjLoc = createProjectFolder(new File(projectLocation, prjName.substring(prjName.lastIndexOf("/")+1, prjName.indexOf('.')))); 44 | unzip(is, prjLoc); 45 | projects.add(prjLoc); 46 | Boolean needsDefaults = (Boolean)template.getAttribute("needsdefaults"); 47 | if (needsDefaults) { 48 | DevDefaultsProvider.getDefault().fillDefaultsToServer(serverID); 49 | } 50 | } catch (Exception e) { 51 | Exceptions.printStackTrace(e); 52 | } finally { 53 | if (is != null) is.close(); 54 | } 55 | } 56 | } 57 | } 58 | return projects; 59 | } -------------------------------------------------------------------------------- /integration_tests/files/method_12.java: -------------------------------------------------------------------------------- 1 | protected IApiProblem createExternalDependenciesProblem(HashMap problems, IReferenceDescriptor dependency, String referenceTypeName, IMemberDescriptor referencedMember, int elementType, 
int flag) { 2 | String resource = referenceTypeName; 3 | String primaryTypeName = referenceTypeName.replace('$', '.'); 4 | int charStart = -1, charEnd = -1, lineNumber = -1; 5 | if (fJavaProject != null) { 6 | try { 7 | 8 | IType type = fJavaProject.findType(primaryTypeName); 9 | IResource res = Util.getResource(fJavaProject.getProject(), type); 10 | if(res == null) { 11 | return null; 12 | } 13 | if(!Util.isManifest(res.getProjectRelativePath())) { 14 | resource = res.getProjectRelativePath().toString(); 15 | } 16 | else { 17 | resource = "."; //$NON-NLS-1$ 18 | } 19 | if (type != null) { 20 | ISourceRange range = type.getNameRange(); 21 | charStart = range.getOffset(); 22 | charEnd = charStart + range.getLength(); 23 | try { 24 | IDocument document = Util.getDocument(type.getCompilationUnit()); 25 | lineNumber = document.getLineOfOffset(charStart); 26 | } catch (BadLocationException e) { 27 | // ignore 28 | } 29 | catch (CoreException ce) {} 30 | } 31 | } catch (JavaModelException e) {} 32 | } 33 | String[] msgArgs = new String[] {referenceTypeName, referencedMember.getName(), dependency.getComponent().getId()}; 34 | int kind = 0; 35 | switch (elementType) { 36 | case IElementDescriptor.TYPE : { 37 | kind = IApiProblem.API_USE_SCAN_TYPE_PROBLEM; 38 | break; 39 | } 40 | case IElementDescriptor.METHOD : { 41 | kind = IApiProblem.API_USE_SCAN_METHOD_PROBLEM; 42 | msgArgs[1] = BuilderMessages.BaseApiAnalyzer_Method + ' ' + msgArgs[1]; 43 | if ((dependency.getReferenceKind() & IReference.REF_CONSTRUCTORMETHOD) > 0) { 44 | msgArgs[1] = BuilderMessages.BaseApiAnalyzer_Constructor + ' ' + msgArgs[1]; 45 | } 46 | break; 47 | } 48 | case IElementDescriptor.FIELD : { 49 | kind = IApiProblem.API_USE_SCAN_FIELD_PROBLEM; 50 | break; 51 | } 52 | default: break; 53 | } 54 | 55 | int dependencyNameIndex = 2; // the comma separated list of dependent plugins 56 | int problemId = ApiProblemFactory.createProblemId(IApiProblem.CATEGORY_API_USE_SCAN_PROBLEM, elementType, kind, flag); 
57 | String problemKey = referenceTypeName + problemId; 58 | IApiProblem similarProblem = (IApiProblem) problems.get(problemKey); 59 | if (similarProblem != null) { 60 | String[] existingMsgArgs = similarProblem.getMessageArguments()[dependencyNameIndex].split(", "); //$NON-NLS-1$ 61 | if (!Arrays.asList(existingMsgArgs).contains(msgArgs[dependencyNameIndex])) { 62 | msgArgs[dependencyNameIndex] = similarProblem.getMessageArguments()[dependencyNameIndex] + ',' + ' ' + msgArgs[dependencyNameIndex]; 63 | } else { 64 | return similarProblem; 65 | } 66 | } 67 | IApiProblem problem = ApiProblemFactory.newApiUseScanProblem( 68 | resource, 69 | primaryTypeName, 70 | msgArgs, 71 | new String[] {IApiMarkerConstants.API_USESCAN_TYPE}, 72 | new String[] {primaryTypeName }, 73 | lineNumber, 74 | charStart, 75 | charEnd, 76 | elementType, 77 | kind, 78 | flag); 79 | problems.put(problemKey, problem); 80 | return problem; 81 | } -------------------------------------------------------------------------------- /test/graph/test_basic_block.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/03/30' 6 | 7 | from unittest import TestCase 8 | 9 | from program_slicing.graph.basic_block import BasicBlock 10 | from program_slicing.graph.statement import Statement, StatementType 11 | from program_slicing.graph.point import Point 12 | 13 | 14 | class BasicBlockTestCase(TestCase): 15 | 16 | def test_constructor(self) -> None: 17 | a = BasicBlock() 18 | self.assertEqual([], a.statements) 19 | statement_a = Statement(StatementType.UNKNOWN, Point(0, 0), Point(0, 1)) 20 | statement_b = Statement(StatementType.UNKNOWN, Point(1, 1), Point(1, 2)) 21 | a = BasicBlock(statements=[statement_a, statement_b]) 22 | self.assertEqual([statement_a, statement_b], a.statements) 23 | 24 | def test_repr(self) -> None: 25 | statement_a = 
Statement(StatementType.UNKNOWN, Point(0, 0), Point(0, 1)) 26 | statement_b = Statement(StatementType.UNKNOWN, Point(1, 1), Point(1, 2)) 27 | basic_block = BasicBlock(statements=[statement_a, statement_b]) 28 | self.assertEqual( 29 | "[Statement(" 30 | "statement_type=StatementType.UNKNOWN, " 31 | "ast_node_type=None, " 32 | "name=None, " 33 | "affected_by=set(), " 34 | "start_point=(0, 0), " 35 | "end_point=(0, 1)), " 36 | "Statement(" 37 | "statement_type=StatementType.UNKNOWN, " 38 | "ast_node_type=None, " 39 | "name=None, " 40 | "affected_by=set(), " 41 | "start_point=(1, 1), " 42 | "end_point=(1, 2))]", str(basic_block)) 43 | self.assertEqual( 44 | "BasicBlock[Statement(" 45 | "statement_type=StatementType.UNKNOWN, " 46 | "ast_node_type=None, " 47 | "name=None, " 48 | "affected_by=set(), " 49 | "start_point=(0, 0), " 50 | "end_point=(0, 1)), " 51 | "Statement(" 52 | "statement_type=StatementType.UNKNOWN, " 53 | "ast_node_type=None, " 54 | "name=None, " 55 | "affected_by=set(), " 56 | "start_point=(1, 1), " 57 | "end_point=(1, 2))]", repr(basic_block)) 58 | 59 | def test_is_empty(self) -> None: 60 | b = BasicBlock() 61 | self.assertTrue(b.is_empty()) 62 | b = BasicBlock(statements=[Statement(StatementType.UNKNOWN, Point(0, 0), Point(0, 1))]) 63 | self.assertFalse(b.is_empty()) 64 | 65 | def test_append(self) -> None: 66 | a = BasicBlock() 67 | self.assertEqual([], a.statements) 68 | statement_a = Statement(StatementType.UNKNOWN, Point(0, 0), Point(0, 1)) 69 | a.append(statement_a) 70 | self.assertEqual([statement_a], a.statements) 71 | statement_b = Statement(StatementType.UNKNOWN, Point(0, 0), Point(0, 1)) 72 | a.append(statement_b) 73 | self.assertEqual([statement_a, statement_b], a.statements) 74 | 75 | def test_get_root(self) -> None: 76 | a = BasicBlock() 77 | self.assertIsNone(a.root) 78 | statement_a = Statement(StatementType.UNKNOWN, Point(0, 0), Point(0, 1)) 79 | statement_b = Statement(StatementType.UNKNOWN, Point(1, 1), Point(1, 2)) 80 | a = 
BasicBlock(statements=[statement_a, statement_b]) 81 | self.assertEqual(statement_a, a.root) 82 | -------------------------------------------------------------------------------- /scripts/performance_benchmarks/block_slices/check_perfomance.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import datetime 3 | import sys 4 | import traceback 5 | from pathlib import Path 6 | 7 | import tqdm 8 | from numpy import mean, median, quantile 9 | 10 | from program_slicing.file_manager.reader import read_file 11 | from program_slicing.decomposition.block.slicing import get_block_slices 12 | from program_slicing.decomposition.slice_predicate import SlicePredicate 13 | from program_slicing.graph.parse import Lang 14 | 15 | 16 | def main(): 17 | parser = argparse.ArgumentParser(description='Run benchmark for block slicing') 18 | parser.add_argument( 19 | "-d", "--dir", 20 | required=True, 21 | help="Filepath to JAVA files" 22 | ) 23 | parser.add_argument( 24 | "--iterations", 25 | "-i", 26 | type=int, 27 | default=1, 28 | help="Number of iterations to run benchmark script for all files. Default is 1" 29 | ) 30 | args = parser.parse_args() 31 | arr_with_datetime_in_seconds = [] 32 | java_files = list(Path(args.dir).glob('*.java')) 33 | print(f'\rWe are going to run performance tests for Block Slicing algorithm. ' 34 | f'The algorithm will run {len(java_files)} java files with 100 ncss. 
' 35 | f'The procedure will be run {args.iterations} time(s) for more accurate calculations.') 36 | sp = SlicePredicate( 37 | min_amount_of_lines=6, 38 | min_amount_of_statements=5, 39 | max_percentage_of_lines=0.8, 40 | max_amount_of_exit_statements=1, 41 | cause_code_duplication=False, 42 | lang_to_check_parsing=Lang.JAVA, 43 | lines_are_full=True 44 | ) 45 | for i in range(args.iterations): 46 | print(f"\rIteration: {i+1}/{args.iterations}") 47 | for java_file in tqdm.tqdm(java_files): 48 | text = read_file(java_file) 49 | try: 50 | start_datetime = datetime.datetime.now() 51 | list(get_block_slices( 52 | text, 53 | Lang.JAVA, 54 | slice_predicate=sp 55 | )) 56 | end_datetime = datetime.datetime.now() 57 | diff_datetime = end_datetime - start_datetime 58 | arr_with_datetime_in_seconds.append(diff_datetime.seconds + diff_datetime.microseconds / 1e6) 59 | except Exception as e: 60 | print(f'Error while reading {java_file}: {e}') 61 | exc_type, exc_value, exc_traceback = sys.exc_info() 62 | traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stdout) 63 | total_time_for_one_iteration = mean(arr_with_datetime_in_seconds) * len(java_files) 64 | print(f'Total time of running {len(java_files)} java methods is ' 65 | f'{total_time_for_one_iteration} secs for 1 iteration. \n' 66 | f'Script was executed {args.iterations} times.\n' 67 | f'Average time for 1 method: {mean(arr_with_datetime_in_seconds):0.3f} secs. 
\n' 68 | f'Min time of 1 method: {min(arr_with_datetime_in_seconds):0.3f} secs, \n' 69 | f'max time of 1 method: {max(arr_with_datetime_in_seconds):0.3f} secs, \n' 70 | f'median: {median(arr_with_datetime_in_seconds):0.3f} secs, \n' 71 | f'quantile 75%: {quantile(arr_with_datetime_in_seconds, 0.75):0.3f} secs, \n' 72 | f'quantile 95%: {quantile(arr_with_datetime_in_seconds, 0.95):0.3f} secs') 73 | 74 | 75 | if __name__ == '__main__': 76 | main() 77 | -------------------------------------------------------------------------------- /integration_tests/files/class_3.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) Huawei Technologies Co., Ltd. 2017-2020. All rights reserved. 3 | */ 4 | package com.huawei.mergebot.dao.ui; 5 | 6 | import com.huawei.mergebot.dao.RestClient; 7 | import com.huawei.mergebot.model.ui.CommitIds; 8 | import com.huawei.mergebot.model.ui.Repo; 9 | import com.huawei.mergebot.model.ui.ResolveParameters; 10 | import com.huawei.mergebot.utils.PropertiesUtil; 11 | 12 | import org.springframework.http.HttpEntity; 13 | import org.springframework.http.HttpHeaders; 14 | import org.springframework.http.HttpMethod; 15 | import org.springframework.http.MediaType; 16 | import org.springframework.http.ResponseEntity; 17 | import org.springframework.web.client.RestTemplate; 18 | 19 | import java.util.HashMap; 20 | 21 | /** 22 | * The type Reo dao. 23 | * 24 | * @since 4.3.3 25 | */ 26 | public class ReoDAO { 27 | /** 28 | * Update commit i ds. 
29 | * 30 | * @param repoID the repo id 31 | * @param oursCommitID the ours commit id 32 | * @param theirCommitID the their commit id 33 | */ 34 | public static void updateCommitIDs(String repoID, String oursCommitID, String theirCommitID) { 35 | CommitIds commitIDs = new CommitIds().oursCommitId(oursCommitID).theirsCommitId(theirCommitID); 36 | String conflictFileEndPoint = PropertiesUtil.getValue("MergebotServer") + "/repos/" + repoID + "/commit_ids"; 37 | RestTemplate restTemplate = new RestTemplate(); 38 | restTemplate.put(conflictFileEndPoint, commitIDs); 39 | } 40 | 41 | /** 42 | * Register repo. 43 | * 44 | * @param repo the repo 45 | */ 46 | public static void registerRepo(Repo repo) { 47 | RestTemplate restTemplate = new RestTemplate(); 48 | String conflictFileEndPoint = PropertiesUtil.getValue("MergebotServer") + "/repos/register?force=true"; 49 | HttpHeaders headers = new HttpHeaders(); 50 | headers.setContentType(MediaType.APPLICATION_JSON); 51 | HttpEntity entity = new HttpEntity(repo, headers); 52 | ResponseEntity resp = restTemplate.exchange(conflictFileEndPoint, HttpMethod.PUT, entity, Repo.class); 53 | } 54 | 55 | /** 56 | * Get repo. 57 | * 58 | * @param repoID the repo id 59 | * @return the repo 60 | */ 61 | public static Repo get(String repoID) { 62 | String projectURL = PropertiesUtil.getValue("MergebotServer") + "/repos/" + repoID; 63 | RestClient client = new RestClient(); 64 | Repo returnResults = client.getForObjectOperate(projectURL, new HashMap(), Repo.class); 65 | return returnResults; 66 | } 67 | 68 | /** 69 | * Update resolved parameters. 
70 | * 71 | * @param repoID the repo id 72 | * @param resolveParameters the resolve parameters 73 | */ 74 | public static void updateResolvedParameters(String repoID, ResolveParameters resolveParameters) { 75 | String conflictFileEndPoint = 76 | PropertiesUtil.getValue("MergebotServer") + "/repos/" + repoID + "/resolve_parameters"; 77 | RestTemplate restTemplate = new RestTemplate(); 78 | restTemplate.put(conflictFileEndPoint, resolveParameters); 79 | } 80 | 81 | /** 82 | * Resolve archive repo. 83 | * 84 | * @param repoID the repo id 85 | */ 86 | public static void resolveArchiveRepo(String repoID) { 87 | String conflictFileEndPoint = 88 | PropertiesUtil.getValue("MergebotServer") + "/repos/" + repoID + "/resolve_archive_repo"; 89 | RestTemplate restTemplate = new RestTemplate(); 90 | restTemplate.put(conflictFileEndPoint, null); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /integration_tests/files/test.java: -------------------------------------------------------------------------------- 1 | private String generateElementUniqueID(LineNumberElement currentElement) { 2 | // extract parent info 3 | String parentGivenId = ""; 4 | if (currentElement.getParentElement() == null) { 5 | parentGivenId = "root"; 6 | } else { 7 | LineNumberElement elementParent = (LineNumberElement) currentElement.getParentElement(); 8 | String rawId = elementParent.getName() + elementParent.getStartLine(); 9 | parentGivenId = elementCache.get(rawId); 10 | } 11 | 12 | // generate self info 13 | String selfId = currentElement.getName(); 14 | if (currentElement.getAttributes() != null && currentElement.getAttributes().size() > 0) { 15 | Attribute nameAttr = currentElement.getAttributes().get(0); 16 | if (!nameAttr.getName().equalsIgnoreCase("name")) { 17 | for (int i = 1; i < currentElement.getAttributes().size(); i++) { 18 | Attribute attr = currentElement.getAttributes().get(i); 19 | if (attr.getName().equalsIgnoreCase("name")) { 20 | 
nameAttr = attr; 21 | break; 22 | } 23 | } 24 | } 25 | selfId = selfId + nameAttr.toString(); 26 | } else { 27 | if (currentElement.getChildren().size() == 0 && currentElement.getContent().size() == 1) { 28 | selfId = selfId + "[" + currentElement.getText() + "]"; 29 | } else { 30 | if (currentElement.getChildren().size() > 0) { 31 | Element first = currentElement.getChildren().get(0); 32 | if (isLeafNode(first)) { 33 | Element second = null; 34 | for (int i = 1; i < currentElement.getChildren().size(); i++) { 35 | if (isLeafNode(currentElement.getChildren().get(i))) { 36 | if (second == null) { 37 | second = currentElement.getChildren().get(i); 38 | } else { 39 | break; 40 | } 41 | } 42 | } 43 | String text = first.getName() + "=" + first.getContent(0); 44 | if (second != null) { 45 | text = text + "," + second.getName() + "=" + second.getContent(0); 46 | } 47 | selfId = selfId + "[" + text + "]"; 48 | System.out.println(selfId); 49 | } 50 | } 51 | } 52 | } 53 | 54 | if (parentChildCache.get(parentGivenId) == null) { 55 | parentChildCache.put(parentGivenId, new HashMap>()); 56 | } 57 | 58 | if (parentChildCache.get(parentGivenId).get(selfId) == null) { 59 | parentChildCache.get(parentGivenId).put(selfId, new HashSet()); 60 | } 61 | 62 | if (parentChildCache.get(parentGivenId) != null && parentChildCache.get(parentGivenId).get(selfId) != null) { 63 | String selfGivenId = selfId + parentChildCache.get(parentGivenId).get(selfId).size(); 64 | parentChildCache.get(parentGivenId).get(selfId).add(selfGivenId); 65 | selfId = selfGivenId; 66 | } 67 | 68 | String fullId = parentGivenId + "[" + selfId + "]"; 69 | 70 | // save into the cache 71 | elementCache.put(currentElement.getName() + currentElement.getStartLine(), fullId); 72 | return fullId; 73 | } -------------------------------------------------------------------------------- /program_slicing/graph/statement.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 
class StatementType(Enum):
    """
    Kind of a source-code statement as distinguished by the program graphs.

    The string values are the serialized names used when a type is printed
    or stored; GOTO covers all unconditional jumps (break / continue / else /
    return / goto) and EXIT marks the synthetic function exit node.
    """
    FUNCTION = "FUNCTION_DECLARATION"
    VARIABLE = "VARIABLE_DECLARATION"
    ASSIGNMENT = "ASSIGNMENT"
    CALL = "FUNCTION_CALL"
    SCOPE = "SCOPE"
    BRANCH = "BRANCH"
    LOOP = "LOOP"
    GOTO = "BREAK_CONTINUE_ELSE_RETURN_GOTO"
    UNKNOWN = "UNKNOWN"
    EXIT = "FUNCTION_EXIT"


# Alias that documents intent: variables are referred to by their source name.
VariableName = str


class Statement:
    """
    Immutable description of one statement in the analyzed source code.

    A Statement carries its type, its source range (start and end Point),
    the set of variable names it is affected by, an optional name (e.g. the
    declared variable or function name) and the raw AST node type string.
    All fields are exposed through read-only properties.
    """

    def __init__(
            self,
            statement_type: StatementType,
            start_point: Point,
            end_point: Point,
            affected_by: Optional[Set[VariableName]] = None,
            name: Optional[VariableName] = None,
            ast_node_type: Optional[str] = None) -> None:
        """
        :param statement_type: category of the statement.
        :param start_point: position in code where the statement starts.
        :param end_point: position in code where the statement ends.
        :param affected_by: names of variables this statement depends on;
            an empty set is used when None is passed (avoids a shared
            mutable default).
        :param name: optional name of the declared entity.
        :param ast_node_type: raw node type string from the parser's AST.
        """
        self.__statement_type: StatementType = statement_type
        self.__start_point: Point = start_point
        self.__end_point: Point = end_point
        self.__affected_by: Set[VariableName] = set() if affected_by is None else affected_by
        self.__name: Optional[VariableName] = name
        self.__ast_node_type: Optional[str] = ast_node_type

    def __repr__(self) -> str:
        # Unambiguous, constructor-like representation for debugging.
        return \
            "Statement(" \
            "statement_type={statement_type}, " \
            "ast_node_type={ast_node_type}, " \
            "name={name}, " \
            "affected_by={affected_by}, " \
            "start_point={start_point}, " \
            "end_point={end_point})".format(
                statement_type=self.__statement_type,
                ast_node_type=self.__ast_node_type,
                name=None if self.__name is None else "'" + self.__name + "'",
                affected_by=self.__affected_by,
                start_point=self.__start_point,
                end_point=self.__end_point
            )

    def __str__(self) -> str:
        # Human-readable form; long names are truncated to 27 chars + "...".
        if not self.__name:
            short_name = ""
        else:
            short_name = "'" + (self.__name if len(self.__name) < 30 else (self.__name[0:27] + "...")) + "' "
        affected_by = "affected by variables " + str(self.__affected_by) + " " if self.__affected_by else ""
        return \
            "{statement_type}({ast_node_type}) " \
            "{name}" \
            "{affected_by}" \
            "position in code: {start_point} - {end_point}".format(
                statement_type=self.__statement_type.name,
                ast_node_type=self.__ast_node_type,
                name=short_name,
                affected_by=affected_by,
                start_point=self.__start_point,
                end_point=self.__end_point
            )

    @property
    def statement_type(self) -> StatementType:
        """Category of the statement."""
        return self.__statement_type

    @property
    def start_point(self) -> Point:
        """Position in code where the statement starts."""
        return self.__start_point

    @property
    def end_point(self) -> Point:
        """Position in code where the statement ends."""
        return self.__end_point

    @property
    def affected_by(self) -> Set[VariableName]:
        """Names of the variables this statement is affected by."""
        return self.__affected_by

    @property
    def name(self) -> Optional[VariableName]:
        """Optional name of the declared entity, None if not applicable."""
        return self.__name

    @property
    def ast_node_type(self) -> Optional[str]:
        """Raw AST node type string, None if it was not provided."""
        return self.__ast_node_type
3 | */ 4 | 5 | package com.huawei.codebot.analyzer.cxx.kernel; 6 | 7 | import com.huawei.codebot.analyzer.cxx.CxxGenericDefectFixer; 8 | import com.huawei.codebot.analyzer.cxx.DefectFixerType4C; 9 | import com.huawei.codebot.codeparsing.cxx.CxxFileModel; 10 | import com.huawei.codebot.framework.FixerInfo; 11 | import com.huawei.codebot.framework.exception.CodeBotRuntimeException; 12 | import com.huawei.codebot.framework.model.DefectInstance; 13 | 14 | import org.eclipse.cdt.core.dom.ast.ASTVisitor; 15 | import org.eclipse.cdt.core.dom.ast.IASTStatement; 16 | import org.slf4j.Logger; 17 | import org.slf4j.LoggerFactory; 18 | 19 | import java.util.HashSet; 20 | import java.util.List; 21 | import java.util.Set; 22 | 23 | /** 24 | * 功能描述 kernel规范32:打印数字时不要使用小括号,如:\"(%d)\" 25 | * 26 | * @author m00365463 g00413998 27 | * @since 2020-07-02 28 | */ 29 | public class NumPrintUseBracketVisitor extends CxxGenericDefectFixer { 30 | private static final Logger logger = LoggerFactory.getLogger(NumPrintUseBracketVisitor.class); 31 | 32 | private static String PRINT_PATTERN = "\\w*\\s*\\(\\s*\".*\"[\\s\\S]*\\).*"; 33 | 34 | private Set errorLines = new HashSet<>(); 35 | 36 | private void create(CxxFileModel fileModel) { 37 | fileModel.getAstTranlationUnit().accept(new ASTVisitor() { 38 | { 39 | shouldVisitStatements = true; 40 | } 41 | 42 | @Override 43 | public int visit(IASTStatement statement) { 44 | check(statement); 45 | return ASTVisitor.PROCESS_CONTINUE; 46 | } 47 | }); 48 | } 49 | 50 | private void saveViolationItem(Integer line) { 51 | if (!errorLines.contains(line)) { 52 | errorLines.add(line); 53 | String defectMsg = this.getFixerInfo().getDescription(); 54 | saveDefectInstance(line, defectMsg); 55 | 56 | } 57 | } 58 | 59 | private boolean numPrintUseBracket(String sourceCode) { 60 | String sourceTemp = sourceCode; 61 | while (sourceTemp.contains("%d")) { 62 | int endIndex = sourceTemp.indexOf("%d"); 63 | int i = 0; 64 | for (i = endIndex - 1; i >= 0; i--) { 65 | if 
def decompose_dir(dir_path: str, work_dir: str = None) -> None:
    """
    Decompose all the files in the specified directory and save the result to the work_dir or print it via stdout.
    :param dir_path: path to the source folder with files that should be decomposed.
    :param work_dir: path to the directory where the result will be saved;
    decomposed files will be saved into it with their original names.
    The stdout will be used if work_dir is not specified.
    """
    # str(Lang) values are used as the file-suffix filter — presumably each
    # Lang member stringifies to its file extension; confirm against Lang.
    for file_path in reader.browse_file_sub_paths(dir_path, list(map(str, Lang))):
        decompose_file(file_path, work_dir)


def decompose_file(file_path: str, work_dir: str = None, prefix: str = None) -> None:
    """
    Decompose the specified file and save the result to the work_dir or print it via stdout.
    :param file_path: path to the source file that should be decomposed.
    :param work_dir: path to the directory where the result will be saved;
    decomposed file will be saved into it with it's original name
    and additional suffixes if there will be more than one variants.
    The stdout will be used if work_dir is not specified.
    :param prefix: file_name prefix that should be removed while saving.
    Remove nothing if prefix is None.
    """
    # The language is derived from the file extension (Lang(".java") etc.).
    for i, result in enumerate(decompose_code(reader.read_file(file_path), Lang(os.path.splitext(file_path)[1]))):
        if work_dir is None:
            print(result)
            continue
        if prefix is not None and file_path.startswith(prefix):
            # Keep the sub-path relative to the prefix so the work_dir
            # mirrors the original layout.
            result_path = os.path.join(work_dir, file_path[len(prefix):])
        else:
            result_path = os.path.join(work_dir, os.path.basename(file_path))
        # Each decomposition variant i gets its own "<name>.<i><ext>" file.
        result_path, result_ext = os.path.splitext(result_path)
        result_path = result_path + "." + str(i) + result_ext
        writer.save_file(result_path, result)


def decompose_code(source_code: str, lang: Lang) -> Iterator[str]:
    """
    Decompose the specified source code and return all the decomposition variants.
    :param source_code: source code that should be decomposed.
    :param lang: the source code Lang.
    :return: generator of decomposed source code versions in a string format.
    """
    # First pass: variable slices. The thresholds (3..60 statements,
    # 3..40 lines, at most one exit statement) bound slice size.
    slice_predicate = SlicePredicate(
        min_amount_of_statements=3,
        max_amount_of_statements=60,
        max_amount_of_exit_statements=1,
        min_amount_of_lines=3,
        max_amount_of_lines=40,
        lang_to_check_parsing=lang,
        has_returnable_variable=True)
    variable_slices = get_variable_slices(source_code, lang, slice_predicate)
    for program_slice in variable_slices:
        # "\033[33m"/"\033[00m" wrap the header in yellow for terminal output;
        # line numbers are reported 1-based (hence the "+ 1").
        yield "\033[33m\nVariable slice" + \
              ((" of " + program_slice.function.name) if program_slice.function.name else "") + \
              " for variable '" + program_slice.variable.name + \
              "': " + str([a[0].line_number + 1 for a in program_slice.ranges]) + \
              "\033[00m\n" + program_slice.code

    # Second pass: block slices — same limits plus a no-code-duplication
    # constraint.
    slice_predicate = SlicePredicate(
        min_amount_of_statements=3,
        max_amount_of_statements=60,
        max_amount_of_exit_statements=1,
        cause_code_duplication=False,
        min_amount_of_lines=3,
        max_amount_of_lines=40,
        lang_to_check_parsing=lang,
        has_returnable_variable=True)
    block_slices = get_block_slices(source_code, lang, slice_predicate)
    for program_slice in block_slices:
        yield "\033[33m\nBlock slice: " + str([a[0].line_number + 1 for a in program_slice.ranges]) + \
              "\033[00m\n" + program_slice.code
def read_json(path: Union[str, Path]) -> Any:
    """
    Read a JSON file and return the extracted data.
    :param path: string or Path with the JSON file path.
    :return: data from a JSON file. An empty string will be returned if an UnicodeDecodeError occurs while parsing.
    """
    try:
        with open(path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except UnicodeDecodeError:
        # Retry with the platform default encoding before giving up.
        try:
            with open(path, 'r') as f:
                return json.load(f)
        except UnicodeDecodeError:
            # f-string instead of "+": concatenating a pathlib.Path raised
            # TypeError in the original, and the space was missing.
            print(f"\033[93mWARNING: unable to decode file {path}\033[0m")
            return ""


def read_file(path: Union[str, Path]) -> AnyStr:
    """
    Read any file and return the extracted data as a string.
    :param path: string or Path with the file path.
    :return: a string with data from file. It's an empty string if an
    UnicodeDecodeError or MemoryError occurs while reading.
    """
    try:
        with open(path, 'r', encoding='utf-8') as f:
            return f.read()
    except UnicodeDecodeError:
        # Retry with the platform default encoding before giving up.
        try:
            with open(path, 'r') as f:
                return f.read()
        except UnicodeDecodeError:
            print(f"\033[93mWARNING: unable to decode file {path}\033[0m")
            return ""
        except MemoryError:
            print(f"\033[93mWARNING: not enough memory to read file {path}\033[0m")
            return ""
    except MemoryError:
        # The first (utf-8) read may also exhaust memory on huge files.
        print(f"\033[93mWARNING: not enough memory to read file {path}\033[0m")
        return ""
def _is_hidden(entry_path: str, entry_name: str) -> bool:
    """Return True if a file system entry should be treated as hidden."""
    # st_file_attributes only exists on Windows stat results; the original
    # unconditional access raised AttributeError on POSIX.  Fall back to 0
    # (not hidden) there, where dot-prefixed names are the convention.
    attributes = getattr(os.stat(entry_path), "st_file_attributes", 0)
    return bool(attributes & stat.FILE_ATTRIBUTE_HIDDEN) or entry_name.startswith(".")


def browse_file_sub_paths(
        path: str,
        suffix_list: List[str] = None,
        skip_hidden_dirs: bool = True) -> Iterator[str]:
    """
    Browse for files with the given suffixes in the given directory and its sub-directories.
    :param path: string with the path to the directory to search in.
    :param suffix_list: list of valid suffixes. Any suffix is valid if the list is None.
    :param skip_hidden_dirs: if True - skips hidden directories, default = True.
    :return: file path strings generator.
    """
    # str.endswith accepts a tuple, so the per-file suffix loop collapses
    # into one C-level call; an empty suffix_list still yields nothing.
    suffixes = None if suffix_list is None else tuple(suffix_list)
    for root, dirs, files in os.walk(path):
        # NOTE: like the original, only the directory itself is checked for
        # hiddenness — children of a hidden directory under a non-hidden
        # sub-path are still visited on their own walk step.
        if skip_hidden_dirs and _is_hidden(root, os.path.split(root)[-1]):
            continue
        for file in files:
            file_path = os.path.join(root, file)
            if skip_hidden_dirs and _is_hidden(file_path, file):
                continue
            if suffixes is None or file.endswith(suffixes):
                yield file_path
appendingValidatorContext.getFullFieldNameFromParent(fValidator.getFieldName()); 25 | } 26 | 27 | if ((shortcircuitedFields != null) && shortcircuitedFields.contains(fullFieldName)) { 28 | if (LOG.isDebugEnabled()) { 29 | LOG.debug("Short-circuited, skipping"); 30 | } 31 | 32 | continue; 33 | } 34 | } 35 | 36 | if (validator instanceof ShortCircuitableValidator && ((ShortCircuitableValidator) validator).isShortCircuit()) { 37 | // get number of existing errors 38 | List errs = null; 39 | 40 | if (fValidator != null) { 41 | if (validatorContext.hasFieldErrors()) { 42 | Collection fieldErrors = validatorContext.getFieldErrors().get(fullFieldName); 43 | 44 | if (fieldErrors != null) { 45 | errs = new ArrayList(fieldErrors); 46 | } 47 | } 48 | } else if (validatorContext.hasActionErrors()) { 49 | Collection actionErrors = validatorContext.getActionErrors(); 50 | 51 | if (actionErrors != null) { 52 | errs = new ArrayList(actionErrors); 53 | } 54 | } 55 | 56 | validator.validate(object); 57 | 58 | if (fValidator != null) { 59 | if (validatorContext.hasFieldErrors()) { 60 | Collection errCol = validatorContext.getFieldErrors().get(fullFieldName); 61 | 62 | if ((errCol != null) && !errCol.equals(errs)) { 63 | if (LOG.isDebugEnabled()) { 64 | LOG.debug("Short-circuiting on field validation"); 65 | } 66 | 67 | if (shortcircuitedFields == null) { 68 | shortcircuitedFields = new TreeSet(); 69 | } 70 | 71 | shortcircuitedFields.add(fullFieldName); 72 | } 73 | } 74 | } else if (validatorContext.hasActionErrors()) { 75 | Collection errCol = validatorContext.getActionErrors(); 76 | 77 | if ((errCol != null) && !errCol.equals(errs)) { 78 | if (LOG.isDebugEnabled()) { 79 | LOG.debug("Short-circuiting"); 80 | } 81 | 82 | break; 83 | } 84 | } 85 | 86 | continue; 87 | } 88 | 89 | validator.validate(object); 90 | } 91 | finally { 92 | validator.setValidatorContext(null); 93 | } 94 | } 95 | } -------------------------------------------------------------------------------- 
/scripts/performance_benchmarks/block_slices/dataset/Controller_createDirectories_100.0_71.0_136.0_168.java: -------------------------------------------------------------------------------- 1 | private static void createDirectories() throws FileNotFoundException { 2 | 3 | if(debug) log("Now in createDirectories()"); // NOI18N 4 | 5 | FileObject rootdir = 6 | FileUtil.getConfigRoot(); 7 | if(debug) { 8 | log("Root directory is " + rootdir.getName()); // NOI18N 9 | File rootF = FileUtil.toFile(rootdir); 10 | log("Root directory abs path " + // NOI18N 11 | rootF.getAbsolutePath()); 12 | } 13 | 14 | FileLock lock = null; 15 | 16 | if(monDir == null || !monDir.isFolder()) { 17 | try { 18 | monDir = rootdir.getFileObject(monDirStr); 19 | } 20 | catch(Exception ex) { 21 | } 22 | 23 | if(monDir == null || !monDir.isFolder()) { 24 | if(monDir != null) { 25 | try { 26 | lock = monDir.lock(); 27 | monDir.delete(lock); 28 | } 29 | catch(FileAlreadyLockedException falex) { 30 | throw new FileNotFoundException(); 31 | } 32 | catch(IOException ex) { 33 | throw new FileNotFoundException(); 34 | } 35 | finally { 36 | if(lock != null) lock.releaseLock(); 37 | } 38 | } 39 | try { 40 | monDir = rootdir.createFolder(monDirStr); 41 | } 42 | catch(IOException ioex) { 43 | if(debug) ioex.printStackTrace(); 44 | } 45 | } 46 | if(monDir == null || !monDir.isFolder()) 47 | throw new FileNotFoundException(); 48 | } 49 | 50 | if(debug) 51 | log("monitor directory is " + monDir.getName());// NOI18N 52 | 53 | // Current directory 54 | 55 | if(currDir == null || !currDir.isFolder()) { 56 | 57 | try { 58 | currDir = monDir.getFileObject(currDirStr); 59 | } 60 | catch(Exception ex) { } 61 | 62 | if(currDir == null || !currDir.isFolder()) { 63 | lock = null; 64 | if(currDir != null) { 65 | try { 66 | lock = currDir.lock(); 67 | currDir.delete(lock); 68 | } 69 | catch(FileAlreadyLockedException falex) { 70 | throw new FileNotFoundException(); 71 | } 72 | catch(IOException ex) { 73 | throw new 
FileNotFoundException(); 74 | } 75 | finally { 76 | if(lock != null) lock.releaseLock(); 77 | } 78 | } 79 | try { 80 | currDir = monDir.createFolder(currDirStr); 81 | } 82 | catch(IOException ex) { 83 | if(debug) ex.printStackTrace(); 84 | } 85 | } 86 | if(currDir == null || !currDir.isFolder()) 87 | throw new FileNotFoundException(); 88 | } 89 | 90 | if(debug) log("curr directory is " + currDir.getName()); // NOI18N 91 | 92 | // Save Directory 93 | if(saveDir == null || !saveDir.isFolder()) { 94 | try { 95 | saveDir = monDir.getFileObject(saveDirStr); 96 | } 97 | catch(Exception ex) { } 98 | 99 | if(saveDir == null || !saveDir.isFolder()) { 100 | if(saveDir != null) { 101 | lock = null; 102 | try { 103 | lock = saveDir.lock(); 104 | saveDir.delete(lock); 105 | } 106 | catch(FileAlreadyLockedException falex) { 107 | throw new FileNotFoundException(); 108 | } 109 | catch(IOException ex) { 110 | throw new FileNotFoundException(); 111 | } 112 | finally { 113 | if(lock != null) lock.releaseLock(); 114 | } 115 | } 116 | try { 117 | saveDir = monDir.createFolder(saveDirStr); 118 | } 119 | catch(IOException ex) { 120 | if(debug) ex.printStackTrace(); 121 | } 122 | } 123 | if(saveDir == null || !saveDir.isFolder()) 124 | throw new FileNotFoundException(); 125 | 126 | if(debug) 127 | log("save directory is " + saveDir.getName()); // NOI18N 128 | } 129 | 130 | // Replay Directory 131 | 132 | if(replayDir == null || !replayDir.isFolder()) { 133 | 134 | try { 135 | replayDir = monDir.getFileObject(replayDirStr); 136 | } 137 | catch(Exception ex) { } 138 | 139 | if(replayDir == null || !replayDir.isFolder()) { 140 | if(replayDir != null) { 141 | lock = null; 142 | try { 143 | lock = replayDir.lock(); 144 | replayDir.delete(lock); 145 | } 146 | catch(FileAlreadyLockedException falex) { 147 | throw new FileNotFoundException(); 148 | } 149 | catch(IOException ex) { 150 | throw new FileNotFoundException(); 151 | } 152 | finally { 153 | if(lock != null) lock.releaseLock(); 154 | } 
class ProgramSliceTestCase(TestCase):
    """Unit tests for ProgramSlice: ranges, code extraction, repr and hashing."""

    @staticmethod
    def __get_source_code_0() -> str:
        # Four single-line statements; the literal indentation is part of the
        # fixture because slice ranges are column-sensitive.
        return """
        class A {
            void main() {
                int a = 0;
                int b = 10;
                a = b;
                b += a;
            }
        }
        """

    @staticmethod
    def __get_program_slice_0() -> ProgramSlice:
        # Slice covering the scope, both declarations and one extra FULL range
        # for the assignment on line 5.
        program_slice = ProgramSlice(ProgramSliceTestCase.__get_source_code_0().split("\n"))
        function_body = Statement(StatementType.SCOPE, Point(2, 24), Point(7, 13))
        variable_a = Statement(StatementType.VARIABLE, Point(3, 16), Point(3, 26))
        variable_b = Statement(StatementType.VARIABLE, Point(4, 16), Point(4, 27))
        program_slice.add_statement(function_body)
        program_slice.add_statement(variable_a)
        program_slice.add_statement(variable_b)
        program_slice.add_range(Point(5, 16), Point(5, 22), RangeType.FULL)
        return program_slice

    @staticmethod
    def __get_source_code_1() -> str:
        # A single multi-line statement with uneven continuation indents,
        # deliberately including lines that start at column 0.
        return """
        class A {
            void main() {
                String s = "line1" +
"line2" +
                    "line3" +
"very very very long line4";
            }
        }
        """

    @staticmethod
    def __get_program_slice_1() -> ProgramSlice:
        # One UNKNOWN statement spanning lines 3-6; no explicit ranges added.
        program_slice = ProgramSlice(ProgramSliceTestCase.__get_source_code_1().split("\n"))
        function_body = Statement(StatementType.SCOPE, Point(2, 24), Point(7, 13))
        variable_s = Statement(StatementType.UNKNOWN, Point(3, 16), Point(6, 28))
        program_slice.add_statement(function_body)
        program_slice.add_statement(variable_s)
        program_slice.add_range(Point(5, 16), Point(5, 29), RangeType.FULL) if False else None
        return program_slice

    def test_get_ranges(self) -> None:
        # Ranges of slice 0 come from the two declarations plus the explicit
        # FULL range; the SCOPE statement itself contributes none.
        program_slice = ProgramSliceTestCase.__get_program_slice_0()
        self.assertEqual([
            (Point(3, 16), Point(3, 26)),
            (Point(4, 16), Point(4, 27)),
            (Point(5, 16), Point(5, 22))], program_slice.ranges)
        # A multi-line statement is split into one range per physical line.
        program_slice = ProgramSliceTestCase.__get_program_slice_1()
        self.assertEqual([
            (Point(3, 16), Point(3, 36)),
            (Point(4, 0), Point(4, 9)),
            (Point(5, 16), Point(5, 29)),
            (Point(6, 0), Point(6, 28))], program_slice.ranges)

    def test_get_slice(self) -> None:
        program_slice = ProgramSliceTestCase.__get_program_slice_0()
        self.assertEqual(
            "int a = 0;\n"
            "int b = 10;\n"
            "a = b;",
            program_slice.code)
        program_slice = ProgramSliceTestCase.__get_program_slice_1()
        self.assertEqual(
            "String s = \"line1\" +\n"
            "\"line2\" +\n"
            "    \"line3\" +\n"
            "\"very very very long line4\";",
            program_slice.code)

    def test_repr(self) -> None:
        # str() yields the sliced code, repr() the full constructor-like form.
        program_slice = ProgramSliceTestCase.__get_program_slice_0()
        self.assertEqual(
            "int a = 0;\n"
            "int b = 10;\n"
            "a = b;",
            str(program_slice))
        self.assertEqual(
            "ProgramSlice("
            "ranges=["
            "(Point(3, 16), Point(3, 26)), "
            "(Point(4, 16), Point(4, 27)), "
            "(Point(5, 16), Point(5, 22))], "
            "source_lines=['', "
            "'        class A {', "
            "'            void main() {', "
            "'                int a = 0;', "
            "'                int b = 10;', "
            "'                a = b;', "
            "'                b += a;', "
            "'            }', "
            "'        }', "
            "'        '])",
            repr(program_slice))

    def test_hash(self) -> None:
        # Slices built from identical inputs must be equal and hash-equal.
        program_slice0 = ProgramSliceTestCase.__get_program_slice_0()
        program_slice1 = ProgramSliceTestCase.__get_program_slice_0()
        self.assertEqual(program_slice0, program_slice1)
        self.assertEqual(1, len({program_slice0, program_slice1}))
        program_slice1 = ProgramSliceTestCase.__get_program_slice_1()
        self.assertNotEqual(program_slice0, program_slice1)
        self.assertEqual(2, len({program_slice0, program_slice1}))
0 : value.length()); 9 | _size += fieldSize + 32; 10 | if (_size > _maxSize) 11 | throw new HpackException.SessionException("Header size %d > %d", _size, _maxSize); 12 | 13 | if (field instanceof StaticTableHttpField) 14 | { 15 | StaticTableHttpField staticField = (StaticTableHttpField)field; 16 | switch (header) 17 | { 18 | case C_STATUS: 19 | if (checkPseudoHeader(header, _status)) 20 | _status = staticField.getIntValue(); 21 | _response = true; 22 | break; 23 | 24 | case C_METHOD: 25 | if (checkPseudoHeader(header, _method)) 26 | _method = value; 27 | _request = true; 28 | break; 29 | 30 | case C_SCHEME: 31 | if (checkPseudoHeader(header, _scheme)) 32 | _scheme = (HttpScheme)staticField.getStaticValue(); 33 | _request = true; 34 | break; 35 | 36 | default: 37 | throw new IllegalArgumentException(name); 38 | } 39 | } 40 | else if (header != null) 41 | { 42 | switch (header) 43 | { 44 | case C_STATUS: 45 | if (checkPseudoHeader(header, _status)) 46 | _status = field.getIntValue(); 47 | _response = true; 48 | break; 49 | 50 | case C_METHOD: 51 | if (checkPseudoHeader(header, _method)) 52 | _method = value; 53 | _request = true; 54 | break; 55 | 56 | case C_SCHEME: 57 | if (checkPseudoHeader(header, _scheme) && value != null) 58 | _scheme = HttpScheme.CACHE.get(value); 59 | _request = true; 60 | break; 61 | 62 | case C_AUTHORITY: 63 | if (checkPseudoHeader(header, _authority)) 64 | { 65 | if (field instanceof HostPortHttpField) 66 | _authority = (HostPortHttpField)field; 67 | else if (value != null) 68 | _authority = new AuthorityHttpField(value); 69 | } 70 | _request = true; 71 | break; 72 | 73 | case C_PATH: 74 | if (checkPseudoHeader(header, _path)) 75 | { 76 | if (value != null && value.length() > 0) 77 | _path = value; 78 | else 79 | streamException("No Path"); 80 | } 81 | _request = true; 82 | break; 83 | 84 | case C_PROTOCOL: 85 | if (checkPseudoHeader(header, _protocol)) 86 | _protocol = value; 87 | _request = true; 88 | break; 89 | 90 | case HOST: 91 | 
SLICE = "slice"

SOURCE = "source"

OUTPUT_OPT = "--output"
OUTPUT_OPT_SHORT = "-o"

URI_TYPE_STDIO = "s"
URI_TYPE_FILE = "f"
URI_TYPE_DIRECTORY = "d"
URI_TYPE_URL = "u"


def get_uri_type(uri: str) -> str:
    """
    Classify a URI as stdio, file, directory or url.
    :param uri: the URI string to classify; None means stdio.
    :return: one of the URI_TYPE_* marker strings.
    """
    if uri is None:
        return URI_TYPE_STDIO
    # Existing file system entries win over any heuristic below.
    # NOTE(review): a symlink whose target exists was already classified by
    # isfile/isdir, so only dangling links reach the islink check — mapping
    # them to URL looks intentional upstream; preserved as-is.
    for predicate, uri_type in (
            (os.path.isfile, URI_TYPE_FILE),
            (os.path.isdir, URI_TYPE_DIRECTORY),
            (os.path.islink, URI_TYPE_URL)):
        if predicate(uri):
            return uri_type
    if uri.startswith(('http', 'ssh')):
        return URI_TYPE_URL
    # Non-existing paths: no extension reads as a directory, otherwise a file.
    extension = os.path.splitext(uri)[1]
    return URI_TYPE_DIRECTORY if extension == '' else URI_TYPE_FILE
uri is None: 43 | return "stdio" 44 | return "current work directory" if (uri == "" or uri == ".") else ("'" + uri + "'") 45 | 46 | 47 | def cli() -> None: 48 | parser = argparse.ArgumentParser() 49 | subparsers = parser.add_subparsers(dest="command", help="", required=True) 50 | 51 | generate_data = subparsers.add_parser( 52 | SLICE, 53 | help="generate slice decomposition and save it to a specific folder or file") 54 | generate_data.add_argument( 55 | SOURCE, 56 | help="source folder, file or url") 57 | generate_data.add_argument( 58 | OUTPUT_OPT_SHORT, 59 | OUTPUT_OPT, 60 | help="output file or directory: depending on what you set as output, " 61 | "you will get folder full of slice decompositions or a single file with it. " 62 | "It uses stdout if not specified") 63 | 64 | args = parser.parse_args() 65 | source = os.path.normpath(args.source) 66 | source = "" if source is None else source 67 | source_type = get_uri_type(source) 68 | if source_type != URI_TYPE_URL and not source == "" and not os.path.exists(source): 69 | source_type = URI_TYPE_URL 70 | output_option = None if args.output is None else os.path.normpath(args.output) 71 | if args.command == SLICE: 72 | __check_slice(source, source_type, output_option, args) 73 | 74 | 75 | def __check_slice(source: str, source_type: str, output_option: str, args: argparse.Namespace) -> None: 76 | if source_type == URI_TYPE_DIRECTORY: 77 | __check_slice_from_directory(source, output_option, args) 78 | 79 | elif source_type == URI_TYPE_FILE: 80 | __check_slice_from_file(source, output_option, args) 81 | 82 | else: 83 | print("Unsupported source '" + source + "'") 84 | 85 | 86 | def __check_slice_from_file(source: str, output_option: str, args: argparse.Namespace) -> None: 87 | source_message = get_uri_message(source) 88 | output_message = get_uri_message(output_option) 89 | output_option_type = get_uri_type(output_option) 90 | if output_option_type == URI_TYPE_STDIO: 91 | print("Print all possible slice decompositions 
of " + source_message) 92 | slicing.decompose_file(source) 93 | 94 | elif output_option_type == URI_TYPE_DIRECTORY: 95 | print("Save to the " + output_message + " all the slice decompositions of " + source_message) 96 | print("Arguments combination is not yet supported: " + str(args)) 97 | 98 | elif output_option_type == URI_TYPE_FILE: 99 | print("Unsupportable option: save slice decomposition of " + source_message + " to " + output_message) 100 | 101 | elif output_option_type == URI_TYPE_URL: 102 | print("Unsupportable option: send slice decomposition of " + source_message + " to " + output_option) 103 | 104 | else: 105 | print("Unsupported output: " + output_message) 106 | 107 | 108 | def __check_slice_from_directory(source: str, output_option: str, args: argparse.Namespace) -> None: 109 | source_message = get_uri_message(source) 110 | output_message = get_uri_message(output_option) 111 | output_option_type = get_uri_type(output_option) 112 | if output_option_type == URI_TYPE_STDIO: 113 | print("Print all possible slice decompositions of files from " + source_message) 114 | print("Arguments combination is not yet supported: " + str(args)) 115 | 116 | elif output_option_type == URI_TYPE_DIRECTORY: 117 | print("Save to the " + output_message + " all the slice decompositions of files from " + source_message) 118 | print("Arguments combination is not yet supported: " + str(args)) 119 | 120 | elif output_option_type == URI_TYPE_FILE: 121 | print("Unsupportable option: save slice decomposition of files " 122 | "from the " + source_message + " to " + output_message) 123 | 124 | elif output_option_type == URI_TYPE_URL: 125 | print("Unsupportable option: send slice decomposition of files " 126 | "from the " + source_message + " to " + output_option) 127 | 128 | else: 129 | print("Unsupported output: " + output_message) 130 | 131 | 132 | if __name__ == "__main__": 133 | print("\033[91m {}\033[00m" .format("")) 134 | cli() 135 | 
/*
 * Copyright (c) Huawei Technologies Co., Ltd. 2019-2020. All rights reserved.
 */

package com.huawei.codebot.analyzer.cxx.pclint;

import java.util.List;
import java.util.Locale;

import org.eclipse.cdt.core.dom.ast.ASTVisitor;
import org.eclipse.cdt.core.dom.ast.IASTDeclSpecifier;
import org.eclipse.cdt.core.dom.ast.IASTDeclaration;
import org.eclipse.cdt.core.dom.ast.IASTSimpleDeclaration;
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
import org.eclipse.cdt.core.dom.ast.cpp.ICPPASTCompositeTypeSpecifier;
import org.eclipse.cdt.core.dom.ast.cpp.ICPPASTSimpleDeclSpecifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.huawei.codebot.analyzer.cxx.CxxGenericDefectFixer;
import com.huawei.codebot.analyzer.cxx.DefectFixerType4C;
import com.huawei.codebot.codeparsing.cxx.CxxFileModel;
import com.huawei.codebot.framework.FixerInfo;
import com.huawei.codebot.framework.exception.CodeBotRuntimeException;
import com.huawei.codebot.framework.model.DefectInstance;


/**
 * Reports virtual functions that are declared inline (unusual construct),
 * corresponding to PCLint 1558.
 *
 * @changed 2019-10-08 by bianpan 00536876 1. Associate with RuleID 201558 2. Change the warning information
 * @author 00406513
 * @since 2019-06-20
 */
public class InlineVirtualFunctionChecker extends CxxGenericDefectFixer {
    private static final Logger logger = LoggerFactory.getLogger(InlineVirtualFunctionChecker.class);

    private static final String VIRTUAL_FUNCTION = "InlineVirtualFunction";

    // NOTE(review): not referenced anywhere in this class; confirm before removing.
    private static final Integer CHECK_RULE_ID = 0;

    /**
     * Runs the checker over one parsed translation unit.
     *
     * @param translationUnit AST of the file under analysis
     * @param filePath path of the file, used for logging only
     */
    private void check(IASTTranslationUnit translationUnit, String filePath) {
        // SLF4J parameterized logging: the message is only formatted when DEBUG is enabled.
        logger.debug("Run {} checker on '{}'", VIRTUAL_FUNCTION, filePath);

        translationUnit.accept(new LocalVisitor());

        logger.debug("Completed {} checker on '{}'", VIRTUAL_FUNCTION, filePath);
    }

    @Override
    public List<DefectInstance> detectDefectsForFileModel(CxxFileModel fileModel) throws CodeBotRuntimeException {
        String filePath = fileModel.getFilePath();
        check(fileModel.getAstTranlationUnit(), filePath);
        return this.getCurrentFileDefectInstances();
    }

    @Override
    public FixerInfo getFixerInfo() {
        // Lazily built and cached; subsequent calls return the same instance.
        if (this.fixerInfo == null) {
            FixerInfo info = new FixerInfo();
            info.type = DefectFixerType4C.C_RULE_ID_INLINE_VIRTUAL_FUNCTION;
            // NOTE(review): this text describes the base-specifier access rule, not the
            // inline-virtual-function rule — looks copy-pasted; confirm and fix separately.
            info.description = "base specifier with no access specifier is implicitly public/private.";
            this.fixerInfo = info;
        }
        return this.fixerInfo;
    }

    /**
     * Visits class definitions and checks every member declaration for the
     * inline-virtual combination.
     */
    private class LocalVisitor extends ASTVisitor {

        // Name of the class currently being visited, used in the defect message.
        String mClassName = "";

        LocalVisitor() {
            this.shouldVisitDeclarations = true;
        }

        @Override
        public int visit(IASTDeclaration declaration) {
            if (declaration instanceof IASTSimpleDeclaration) {
                IASTSimpleDeclaration simpleDeclaration = (IASTSimpleDeclaration) declaration;
                IASTDeclSpecifier declSpecifier = simpleDeclaration.getDeclSpecifier();

                if (declSpecifier instanceof ICPPASTCompositeTypeSpecifier) {
                    ICPPASTCompositeTypeSpecifier compositeSpecifier = (ICPPASTCompositeTypeSpecifier) declSpecifier;

                    // New class found, record its name.
                    // (A stray empty statement ';' that followed this assignment was removed.)
                    mClassName = compositeSpecifier.getName().getRawSignature();

                    for (IASTDeclaration memberDeclaration : compositeSpecifier.getMembers()) {
                        if (memberDeclaration instanceof IASTSimpleDeclaration) {
                            checkRule((IASTSimpleDeclaration) memberDeclaration);
                        }
                    }
                }
            }

            return PROCESS_CONTINUE;
        }

        /**
         * Reports a defect when the member declaration is both inline and virtual.
         *
         * @param simpleDeclaration member declaration of the class currently visited
         */
        private void checkRule(IASTSimpleDeclaration simpleDeclaration) {
            logger.debug("Checking method '{}'...", simpleDeclaration.getRawSignature());
            IASTDeclSpecifier simpleSpecifier = simpleDeclaration.getDeclSpecifier();
            if (simpleSpecifier instanceof ICPPASTSimpleDeclSpecifier) {
                ICPPASTSimpleDeclSpecifier cppSpecifier = (ICPPASTSimpleDeclSpecifier) simpleSpecifier;
                boolean isVirtual = cppSpecifier.isVirtual();
                boolean isInline = cppSpecifier.isInline();
                if (isInline && isVirtual) {
                    int lineNumber = simpleDeclaration.getFileLocation().getStartingLineNumber();
                    String message = String.format(Locale.ROOT,
                            " Problem:class '%s' virtual function '%s' is declared as inline function", mClassName,
                            simpleDeclaration.getRawSignature());
                    saveDefectInstance(lineNumber, message);
                }
            }
        }
    }
}
program_slicing.graph.pdg import ProgramDependenceGraph
from program_slicing.graph.basic_block import BasicBlock
from program_slicing.graph.statement import Statement
from program_slicing.graph.convert.cfg import to_ddg as cfg_to_ddg


def to_cfg(cdg: ControlDependenceGraph) -> ControlFlowGraph:
    """
    Convert the Control Dependence Graph into a Control Flow Graph.
    New graph will contain links on nodes of the original one so that
    any changes made after converting in the original graph's statements will affect the converted one.
    :param cdg: Control Dependence Graph that should to be converted.
    :return: Control Flow Graph which nodes contain nodes of the Control Dependence Graph on which it was based on.
    """
    cfg = ControlFlowGraph()
    # Maps each Statement already placed into the CFG to its containing BasicBlock;
    # shared across all entry points so revisits can be detected as loops.
    block: Dict[Statement, BasicBlock] = {}
    for root in cdg.entry_points:
        __to_cfg(root, cdg=cdg, cfg=cfg, block=block)
    cfg.set_scope_dependency(cdg.scope_dependency)
    return cfg


def to_ddg(cdg: ControlDependenceGraph) -> DataDependenceGraph:
    """
    Convert the Control Dependence Graph into a Data Dependence Graph.
    New graph will contain same nodes as in the original one so that
    any changes made after converting in the original graph's statements will affect the converted one.
    :param cdg: Control Dependence Graph that should to be converted.
    :return: Data Dependence Graph which nodes where presented in the Control Dependence Graph on which it was based on.
    """
    # Built by way of the CFG: CDG -> CFG -> DDG.
    cfg = to_cfg(cdg)
    return cfg_to_ddg(cfg)


def to_pdg(cdg: ControlDependenceGraph) -> ProgramDependenceGraph:
    """
    Convert the Control Dependence Graph into a Program Dependence Graph.
    New graph will contain same nodes as in the original one so that
    any changes made after converting in the original graph's statements will affect the converted one.
    :param cdg: Control Dependence Graph that should to be converted.
    :return: Program Dependence Graph which nodes where presented in the original Control Dependence Graph.
    """
    ddg = to_ddg(cdg)
    pdg = ProgramDependenceGraph()
    # The PDG is the union of control-dependence and data-dependence edges
    # over the same Statement nodes.
    for node in cdg:
        pdg.add_node(node)
        for cdg_successor in cdg.successors(node):
            pdg.add_edge(node, cdg_successor)
        # Statements unreachable in the CFG may be absent from the DDG.
        if node in ddg:
            for ddg_successor in ddg.successors(node):
                pdg.add_edge(node, ddg_successor)
    for entry_point in cdg.entry_points:
        pdg.add_entry_point(entry_point)
    pdg.set_scope_dependency(cdg.scope_dependency)
    return pdg


def __to_cfg(
        statement: Statement,
        cdg: ControlDependenceGraph,
        cfg: ControlFlowGraph,
        block: Dict[Statement, BasicBlock]) -> None:
    # Recursively place `statement`'s control-flow successors into basic blocks.
    # :param statement: statement whose control-flow children are to be placed.
    # :param cdg: source graph providing the control_flow map.
    # :param cfg: graph under construction.
    # :param block: mapping of already-placed statements to their blocks (shared state).
    f_children: List[Statement] = cdg.control_flow.get(statement, [])
    prev_block: BasicBlock = block.get(statement, None)
    process_list: List[Statement] = []
    for child in f_children:
        if child in block:
            # Child was already placed: this edge closes a loop in the flow.
            __process_loop(child, cfg, block, prev_block)
        elif len(f_children) > 1:
            # Several successors: control flow branches here, so each child
            # starts its own basic block.
            new_block = BasicBlock(statements=[child])
            cfg.add_node(new_block)
            if prev_block is None:
                cfg.add_entry_point(new_block)
            else:
                cfg.add_edge(prev_block, new_block)
            block[child] = new_block
            process_list.append(child)
        else:
            # Single successor: extend the current basic block (creating it
            # lazily for an entry statement).
            if prev_block is None:
                prev_block = BasicBlock()
                cfg.add_node(prev_block)
                cfg.add_entry_point(prev_block)
            prev_block.append(child)
            block[child] = prev_block
            process_list.append(child)
    # Recurse only into children placed in this call, after all are placed,
    # so sibling blocks exist before their successors are wired up.
    for child in process_list:
        __to_cfg(child, cdg, cfg, block)


def __process_loop(
        child: Statement,
        cfg: ControlFlowGraph,
        block: Dict[Statement, BasicBlock],
        prev_block: BasicBlock) -> None:
    # Handle a back edge to `child`, which already belongs to `old_block`.
    # If `child` is not the first statement of its block, the block must be
    # split so the back edge can target the statement exactly.
    old_block: BasicBlock = block[child]
    index = old_block.statements.index(child)
    if index == 0:
        # Back edge lands on a block boundary: just link to the existing block.
        if prev_block is not None:
            cfg.add_edge(prev_block, old_block)
        return
    # Split: statements from `index` onward move into `new_block`.
    new_block = old_block.split(index)
    for new_block_statement in new_block.statements:
        block[new_block_statement] = new_block
    cfg.add_node(new_block)
    # Re-route the old block's outgoing edges through the new block,
    # then fall through from the old block into the new one.
    old_successors: List[BasicBlock] = [successor for successor in cfg.successors(old_block)]
    for old_successor in old_successors:
        cfg.remove_edge(old_block, old_successor)
        cfg.add_edge(new_block, old_successor)
    cfg.add_edge(old_block, new_block)
    if prev_block is not None:
        cfg.add_edge(prev_block, new_block)
scope_dependency

    def build_scope_dependency(self) -> None:
        """
        Compute, for every statement in the graph, the innermost scope-like
        statement (SCOPE, GOTO, BRANCH, LOOP or FUNCTION) that contains it,
        and store the result in scope_dependency.

        Works as a sweep over scope boundary points: each scope registers its
        start and end Point, remembering which scope an interval between two
        consecutive points belongs to.
        """
        scopes_for_start_point: Dict[Point, Statement] = {}
        scopes_for_end_point: Dict[Point, Statement] = {}
        # Scope-like statements ordered by start point; ties are broken so the
        # widest statement (largest end point) comes first, i.e. outer before inner.
        scopes: List[Statement] = sorted([
            statement for statement in self if
            statement.statement_type == StatementType.SCOPE or
            statement.statement_type == StatementType.GOTO or
            statement.statement_type == StatementType.BRANCH or
            statement.statement_type == StatementType.LOOP or
            statement.statement_type == StatementType.FUNCTION
        ], key=lambda statement: (
            statement.start_point,
            (-statement.end_point.line_number, -statement.end_point.column_number)
        ))
        points = []  # sorted boundary points registered so far
        scope_container = {}  # inner scope -> the scope that directly contains it
        for scope in scopes:
            interval_start, interval_end = ControlDependenceGraph.__obtain_interval(points, scope)
            # The scope already occupying the interval we fall into (if any)
            # is our direct container.
            scope_containing_scope = None if (interval_start is None or interval_end is None) else \
                scopes_for_start_point.get(interval_start, None)
            if scope_containing_scope is not None:
                scope_container[scope] = scope_containing_scope
            if interval_start != scope.start_point:
                bisect.insort(points, scope.start_point)
                # insort is slow on array list, use linked list instead
                # Splitting the container's interval: the region before our start
                # still ends in the container.
                if interval_start in scopes_for_start_point:
                    scopes_for_end_point[scope.start_point] = scopes_for_start_point[interval_start]
            if interval_end != scope.end_point:
                bisect.insort(points, scope.end_point)
                # insort is slow on array list, use linked list instead
                # The region after our end resumes the container's scope.
                if interval_end in scopes_for_end_point:
                    scopes_for_start_point[scope.end_point] = scopes_for_end_point[interval_end]
            scopes_for_start_point[scope.start_point] = scope
            scopes_for_end_point[scope.end_point] = scope
        self.__fill_scope_dependency(points, scopes_for_start_point, scopes_for_end_point, scope_container)

    def __fill_scope_dependency(
            self,
            points: List[Point],
            scopes_for_start_point: Dict[Point, Statement],
            scopes_for_end_point: Dict[Point, Statement],
            scope_container: Dict[Statement, Statement]):
        """
        Second pass: assign every statement to the scope owning the interval
        the statement falls into.
        :param points: sorted scope boundary points built by build_scope_dependency.
        :param scopes_for_start_point: scope owning the region starting at a point.
        :param scopes_for_end_point: scope owning the region ending at a point.
        :param scope_container: inner scope -> directly containing scope.
        """
        for statement in self:
            interval_start, interval_end = self.__obtain_interval(points, statement)
            scope = None
            if interval_start is not None and interval_end is not None:
                start_point_scope = scopes_for_start_point.get(interval_start, None)
                end_point_scope = scopes_for_end_point.get(interval_end, None)
                if start_point_scope is None or end_point_scope is None:
                    scope = None
                elif start_point_scope == end_point_scope or \
                        start_point_scope.start_point <= end_point_scope.start_point and \
                        start_point_scope.end_point >= end_point_scope.end_point:
                    # Both boundaries agree, or the start-side scope encloses the
                    # end-side one: the narrower relationship wins below.
                    scope = start_point_scope
                else:
                    scope = end_point_scope
            if scope == statement:
                # A scope statement belongs to its container, not to itself.
                scope = scope_container.get(scope, None)
            if scope is not None:
                self.__scope_dependency[statement] = scope

    @staticmethod
    def __obtain_interval(points: List[Point], statement: Statement) -> Tuple[Optional[Point], Optional[Point]]:
        """
        Find the registered boundary points that bracket the statement:
        the nearest point at or before its start and the nearest point at or
        after its end. Either side is None when the statement lies outside
        all registered points.
        """
        nearest_start_point_id = bisect.bisect_right(points, statement.start_point) - 1
        nearest_start_point = None if nearest_start_point_id < 0 else points[nearest_start_point_id]
        nearest_end_point_id = bisect.bisect_left(points, statement.end_point)
        nearest_end_point = None if nearest_end_point_id >= len(points) else points[nearest_end_point_id]
        return nearest_start_point, nearest_end_point
3 | */ 4 | 5 | package com.huawei.codebot.analyzer.cxx.astviewer.treeview.ast; 6 | 7 | import org.eclipse.cdt.core.dom.ast.IASTCompoundStatement; 8 | import org.eclipse.cdt.core.dom.ast.IASTName; 9 | import org.eclipse.cdt.core.dom.ast.IASTNamedTypeSpecifier; 10 | import org.eclipse.cdt.core.dom.ast.IASTNode; 11 | import org.eclipse.cdt.core.dom.ast.IASTNodeLocation; 12 | import org.eclipse.cdt.core.dom.ast.IASTSimpleDeclaration; 13 | import org.slf4j.Logger; 14 | import org.slf4j.LoggerFactory; 15 | 16 | import com.huawei.codebot.analyzer.cxx.astviewer.treeview.AbstractTreeTableModel; 17 | import com.huawei.codebot.analyzer.cxx.astviewer.treeview.TreeTableModel; 18 | 19 | /** 20 | * 功能描述 21 | * 22 | * @author j00404394 23 | * @since 2018-01-24 24 | */ 25 | public class ASTTreeModel extends AbstractTreeTableModel implements TreeTableModel { 26 | private static final Logger logger = LoggerFactory.getLogger(ASTTreeModel.class); 27 | 28 | private static String[] cNames = {"Node", "Name", "LineNo", "Offset/Position"}; 29 | 30 | private static Class[] cTypes = {TreeTableModel.class, String.class, String.class, String.class}; 31 | 32 | /** 33 | * @param root 根节点 34 | */ 35 | public ASTTreeModel(IASTNode root) { 36 | super(new TASTNode(root)); 37 | } 38 | 39 | /** 40 | * 获取子节点个数 41 | * 42 | * @param node node 43 | * @return int 44 | */ 45 | public int getChildCount(Object node) { 46 | Object[] children = getChildren((IASTNode) node); 47 | return children == null ? 
0 : children.length; 48 | } 49 | 50 | /** 51 | * 获取子节点 52 | * 53 | * @param node node 54 | * @param i index 55 | * @return Object 56 | */ 57 | public Object getChild(Object node, int i) { 58 | return getChildren((IASTNode) node)[i]; 59 | } 60 | 61 | /** 62 | * 判断叶子节点 63 | * 64 | * @param node node 65 | * @return boolean 66 | */ 67 | public boolean isLeaf(Object node) { 68 | IASTNode astnode = (IASTNode) node; 69 | IASTNode[] children = getChildren(astnode); 70 | return (children == null) || (children.length == 0); 71 | } 72 | 73 | /** 74 | * 获取子节点 75 | * 76 | * @param node node 77 | * @return IASTNode[] 78 | */ 79 | protected IASTNode[] getChildren(IASTNode node) { 80 | return node.getChildren(); 81 | } 82 | 83 | /** 84 | * 得到column个数 85 | * 86 | * @return int 87 | */ 88 | public int getColumnCount() { 89 | return cNames.length; 90 | } 91 | 92 | /** 93 | * column name 94 | * 95 | * @param column column 96 | * @return String 97 | */ 98 | public String getColumnName(int column) { 99 | return cNames[column]; 100 | } 101 | 102 | /** 103 | * @param column column 104 | * @return Class 105 | */ 106 | public Class getColumnClass(int column) { 107 | return cTypes[column]; 108 | } 109 | 110 | /** 111 | * @param node node 112 | * @param column column 113 | * @return Object 114 | */ 115 | public Object getValueAt(Object node, int column) { 116 | TASTNode noderef = (TASTNode) node; 117 | 118 | try { 119 | switch (column) { 120 | case 0: 121 | return "nada"; 122 | case 1: 123 | IASTNode original = noderef.getOriginalNode(); 124 | 125 | if ((original instanceof IASTSimpleDeclaration)) { 126 | return original.getRawSignature(); 127 | } 128 | 129 | if (((original instanceof IASTNamedTypeSpecifier)) || ((original instanceof IASTName))) { 130 | return original.toString(); 131 | } 132 | 133 | if ((original instanceof IASTCompoundStatement)) { 134 | return "{"; 135 | } 136 | 137 | return "[[" + original.getRawSignature() + "]]"; 138 | case 2: { 139 | IASTNodeLocation[] nodeLocations = 
noderef.getNodeLocations(); 140 | StringBuilder sb = new StringBuilder(); 141 | for (IASTNodeLocation iastNodeLocation : nodeLocations) { 142 | sb.append(noderef.getFileLocation().getStartingLineNumber()); 143 | sb.append(","); 144 | } 145 | 146 | if (sb.length() > 0) { 147 | sb.deleteCharAt(sb.length() - 1); 148 | } 149 | return sb.toString(); 150 | } 151 | case 3: { 152 | IASTNodeLocation[] nodeLocations = noderef.getNodeLocations(); 153 | StringBuilder sb = new StringBuilder(); 154 | for (IASTNodeLocation iastNodeLocation : nodeLocations) { 155 | sb.append(iastNodeLocation.getNodeOffset() + ":" + iastNodeLocation.getNodeLength()); 156 | sb.append(","); 157 | } 158 | 159 | if (sb.length() > 0) { 160 | sb.deleteCharAt(sb.length() - 1); 161 | } 162 | return sb.toString(); 163 | } 164 | } 165 | } catch (SecurityException localSecurityException) { 166 | logger.error(localSecurityException.getMessage()); 167 | } 168 | return null; 169 | } 170 | } 171 | -------------------------------------------------------------------------------- /program_slicing/graph/convert/cfg.py: -------------------------------------------------------------------------------- 1 | __licence__ = 'MIT' 2 | __author__ = 'kuyaki' 3 | __credits__ = ['kuyaki'] 4 | __maintainer__ = 'kuyaki' 5 | __date__ = '2021/04/01' 6 | 7 | from typing import Dict, Set 8 | 9 | import networkx 10 | 11 | from program_slicing.graph.cdg import ControlDependenceGraph 12 | from program_slicing.graph.cfg import ControlFlowGraph 13 | from program_slicing.graph.ddg import DataDependenceGraph 14 | from program_slicing.graph.pdg import ProgramDependenceGraph 15 | from program_slicing.graph.basic_block import BasicBlock 16 | from program_slicing.graph.statement import Statement, StatementType 17 | 18 | 19 | def to_cdg(cfg: ControlFlowGraph) -> ControlDependenceGraph: 20 | """ 21 | Convert the Control Flow Graph into a Control Dependence Graph. 
    New graph will contain nodes, links on which where listed in the original one so that
    any changes made after converting in the original graph's statements will affect the converted one.
    :param cfg: Control Flow Graph that should be converted.
    :return: Control Dependence Graph which nodes where contained in the Control Flow Graph on which it was based on.
    """
    raise NotImplementedError()


def to_ddg(cfg: ControlFlowGraph) -> DataDependenceGraph:
    """
    Convert the Control Flow Graph into a Data Dependence Graph.
    New graph will contain nodes, links on which where listed in the original one so that
    any changes made after converting in the original graph's statements will affect the converted one.
    :param cfg: Control Flow Graph that should be converted.
    :return: Data Dependence Graph which nodes where contained in the Control Flow Graph on which it was based on.
    """
    ddg = DataDependenceGraph()
    # For each basic block: snapshot of the variable definitions that had
    # reached it when it was last visited (used to detect convergence).
    visited: Dict[BasicBlock, Dict[str, Set[Statement]]] = {}
    # Definitions reaching the entry point: none initially.
    variables: Dict[str, Set[Statement]] = {}
    for root in cfg.entry_points:
        __to_ddg(root, cfg=cfg, ddg=ddg, visited=visited, variables=variables)
        ddg.add_entry_point(root.root)
    ddg.set_scope_dependency(cfg.scope_dependency)
    __correct_scope_relations(ddg)
    return ddg


def to_pdg(cfg: ControlFlowGraph) -> ProgramDependenceGraph:
    """
    Convert the Control Flow Graph into a Program Dependence Graph.
    New graph will contain nodes, links on which where listed in the original one so that
    any changes made after converting in the original graph's statements will affect the converted one.
    :param cfg: Control Flow Graph that should be converted.
    :return: Program Dependence Graph which nodes where contained in the Control Flow Graph on which it was based on.
    """
    raise NotImplementedError()


def __to_ddg(
        root: BasicBlock,
        cfg: ControlFlowGraph,
        ddg: DataDependenceGraph,
        visited: Dict[BasicBlock, Dict[str, Set[Statement]]],
        variables: Dict[str, Set[Statement]]) -> None:
    # Flow-sensitive reaching-definitions walk over the CFG; iterates until
    # the per-block definition sets stop growing (a fixpoint).
    # :param root: basic block to process.
    # :param variables: variable name -> definitions reaching this block's entry.
    if root in visited:
        # Re-entered via another path: continue only if the incoming
        # definitions add something new, otherwise the walk has converged here.
        if not __update_variables(visited[root], variables):
            return
    else:
        visited[root] = {variable: variable_set.copy() for variable, variable_set in variables.items()}
    variables_entered: Dict[str, Set[Statement]] = visited[root]
    # Shallow copy: downstream mutation must not rewrite the entry snapshot's keys.
    variables_passed: Dict[str, Set[Statement]] = {
        variable: variable_set for variable, variable_set in variables_entered.items()
    }
    for statement in root:
        ddg.add_node(statement)
        for affecting_variable_name in statement.affected_by:
            # A declaration does not depend on earlier definitions of its own name.
            if statement.statement_type == StatementType.VARIABLE and affecting_variable_name == statement.name:
                continue
            if affecting_variable_name in variables_passed:
                for variable_statement in variables_passed[affecting_variable_name]:
                    ddg.add_edge(variable_statement, statement)
        # Declarations and assignments kill all previous definitions of the name.
        if statement.statement_type == StatementType.VARIABLE or statement.statement_type == StatementType.ASSIGNMENT:
            variables_passed[statement.name] = {statement}
    for child in cfg.successors(root):
        __to_ddg(child, cfg=cfg, ddg=ddg, visited=visited, variables=variables_passed)


def __update_variables(old_variables: Dict[str, Set[Statement]], new_variables: Dict[str, Set[Statement]]) -> bool:
    # Merge new reaching definitions into the stored snapshot.
    # :return: True if anything was added (the block must be re-processed).
    updated = False
    for variable, variable_set in new_variables.items():
        if variable not in old_variables:
            old_variables[variable] = variable_set.copy()
            updated = True
        else:
            variable_entered_set = old_variables[variable]
            diff = variable_set.difference(variable_entered_set)
            variable_entered_set.update(diff)
            if not updated:
                updated = len(diff) > 0
    return updated


def __correct_scope_relations(ddg: DataDependenceGraph) -> None:
    # Remove data-dependence edges that escape the declaring variable's scope:
    # a use outside the scope of a declaration cannot depend on it (or on any
    # same-named definition inside that scope).
    variable_statements = [statement for statement in ddg if statement.statement_type == StatementType.VARIABLE]
    for variable_statement in variable_statements:
        if variable_statement not in ddg.scope_dependency:
            continue
        variable_scope = ddg.scope_dependency[variable_statement]
        remove_statements = []
        for statement in networkx.descendants(ddg, variable_statement):
            # Outside the declaring scope's source range.
            if statement.start_point < variable_scope.start_point or statement.end_point > variable_scope.end_point:
                remove_statements.append(statement)
        for statement in remove_statements:
            remove_edges = []
            for predecessor in ddg.predecessors(statement):
                if predecessor.name == variable_statement.name and \
                        variable_scope.start_point <= predecessor.start_point and \
                        variable_scope.end_point >= predecessor.end_point:
                    remove_edges.append((predecessor, statement))
            ddg.remove_edges_from(remove_edges)
3 | */ 4 | package com.huawei.mergebot.archive; 5 | 6 | import com.huawei.mergebot.codeanalysis.xml.XMLFileWhiteListUtil; 7 | import com.huawei.mergebot.conflictresolving.AutoResolvedGenerator; 8 | import com.huawei.mergebot.conflictresolving.resolver.FileSetResolver; 9 | import com.huawei.mergebot.dao.ArchivesDAO; 10 | import com.huawei.mergebot.dao.ui.ConflictFileDAO; 11 | import com.huawei.mergebot.dao.ui.ReoDAO; 12 | import com.huawei.mergebot.model.ConflictHandleMode; 13 | import com.huawei.mergebot.model.MergeResultRenderingMode; 14 | import com.huawei.mergebot.model.ThreeWayConflictFileSet; 15 | import com.huawei.mergebot.model.ui.ConflictFile; 16 | import com.huawei.mergebot.model.ui.ConflictFile.StatusEnum; 17 | import com.huawei.mergebot.model.ui.Repo; 18 | import com.huawei.mergebot.model.ui.ResolveParameters; 19 | import com.huawei.mergebot.model.helps.ConflictFileHelper; 20 | import com.huawei.mergebot.utils.FileUtil; 21 | 22 | import java.io.IOException; 23 | import java.util.List; 24 | import java.util.UUID; 25 | 26 | /** 27 | * The type Repo resolver. 28 | * 29 | * @since 4.3.3 30 | */ 31 | public class RepoResolver { 32 | /** 33 | * Resolver string. 
34 | * 35 | * @param originalRepoId the original repo id 36 | * @return the string 37 | * @throws IOException the io exception 38 | */ 39 | public String resolver(String originalRepoId) throws IOException { 40 | Repo repo = ReoDAO.get(originalRepoId); 41 | if (repo == null) { 42 | return null; 43 | } 44 | ResolveParameters resolveParameters = repo.getResolveParameters(); 45 | FileSetResolver resolver = 46 | new FileSetResolver( 47 | resolveParameters.isEnableAutoResolve(), 48 | MergeResultRenderingMode.getEnum(resolveParameters.getMergeRenderingMode().toString()), 49 | resolveParameters.isEnableAIMode(), 50 | resolveParameters.isIsPython3(), 51 | resolveParameters.isEnableAuthorAnalysis(), 52 | resolveParameters.isEnableRename(), 53 | resolveParameters.isEnableComplicateRule()); 54 | 55 | List relativePaths = ArchivesDAO.getFilePathList(originalRepoId, "gitMerged/"); 56 | 57 | String repoId = originalRepoId; 58 | 59 | XMLFileWhiteListUtil xmlFileWhiteListUtil = 60 | new XMLFileWhiteListUtil(resolveParameters.getWhiteListPathContent()); 61 | 62 | String taskId = UUID.randomUUID().toString(); 63 | for (String relativePath : relativePaths) { 64 | relativePath = relativePath.substring("gitMerged/".length()); 65 | ThreeWayFilePath threeWayFilePath = 66 | new ThreeWayFilePath( 67 | System.getProperty("java.io.tmpdir") + "/mergebot/" + taskId + "/" + repoId, relativePath); 68 | FileUtil.writeFileInOverideMode( 69 | threeWayFilePath.getFullFilePath("base"), 70 | ArchivesDAO.getOriginalConflictFile(originalRepoId, threeWayFilePath.getVersionFilePath("base")), 71 | "UTF-8"); 72 | FileUtil.writeFileInOverideMode( 73 | threeWayFilePath.getFullFilePath("ours"), 74 | ArchivesDAO.getOriginalConflictFile(originalRepoId, threeWayFilePath.getVersionFilePath("ours")), 75 | "UTF-8"); 76 | FileUtil.writeFileInOverideMode( 77 | threeWayFilePath.getFullFilePath("theirs"), 78 | ArchivesDAO.getOriginalConflictFile(originalRepoId, threeWayFilePath.getVersionFilePath("theirs")), 79 | 
"UTF-8"); 80 | FileUtil.writeFileInOverideMode( 81 | threeWayFilePath.getFullFilePath("gitMerged"), 82 | ArchivesDAO.getOriginalConflictFile( 83 | originalRepoId, threeWayFilePath.getVersionFilePath("gitMerged")), 84 | "UTF-8"); 85 | String manualContent = 86 | ArchivesDAO.getOriginalConflictFile( 87 | originalRepoId, threeWayFilePath.getVersionFilePath("manualMerged")); 88 | if (manualContent.isEmpty()) { 89 | continue; 90 | } 91 | FileUtil.writeFileInOverideMode(threeWayFilePath.getFullFilePath("manualMerged"), manualContent, "UTF-8"); 92 | ConflictHandleMode conflictHandleModeEnum = ConflictHandleMode.KEEP_CONFLICT; 93 | if (xmlFileWhiteListUtil.whetherInWhiteList(relativePath)) { 94 | conflictHandleModeEnum = 95 | ConflictHandleMode.getEnum(resolveParameters.getConflictHandleMode().toString()); 96 | } 97 | ThreeWayConflictFileSet fileSet = 98 | resolver.autoResolveConflicts( 99 | "", 100 | threeWayFilePath.getFullFilePath("base"), 101 | threeWayFilePath.getFullFilePath("ours"), 102 | threeWayFilePath.getFullFilePath("theirs"), 103 | threeWayFilePath.getFullFilePath("gitMerged"), 104 | threeWayFilePath.getFullFilePath("manualMerged"), 105 | conflictHandleModeEnum); 106 | 107 | if (fileSet == null) { 108 | continue; 109 | } 110 | 111 | AutoResolvedGenerator.generateAutoResolvedFileOptimized( 112 | fileSet, 113 | threeWayFilePath.getFullFilePath("mergebotMerged"), 114 | threeWayFilePath.getFullFilePath("mergebotMerged_backUp")); 115 | ArchivesDAO.postOriginalConflictFIle( 116 | threeWayFilePath.getFullFilePath("mergebotMerged"), 117 | threeWayFilePath.getVersionFilePath("mergebotMerged"), 118 | originalRepoId); 119 | 120 | putArchiveConflictFile(originalRepoId, repoId, relativePath, fileSet); 121 | } 122 | return repoId; 123 | } 124 | 125 | private void putArchiveConflictFile(String originalRepoId, String repoId, String relativePath, 126 | ThreeWayConflictFileSet fileSet) { 127 | ConflictFile conflictFile = ConflictFileHelper.extractForOnlineReview(fileSet, 
"base/" + relativePath); 128 | conflictFile.setRepoId(repoId); 129 | conflictFile.setForkFromRepoId(originalRepoId); 130 | conflictFile.setStatus(StatusEnum.CONFLICTING); 131 | ConflictFileDAO.putArchiveConflictFile(conflictFile); 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /scripts/performance_benchmarks/block_slices/dataset/SubCommonRdbmsWriter_fillPreparedStatementColumnType_100.0_43.0_34.0_138.java: -------------------------------------------------------------------------------- 1 | @Override 2 | protected PreparedStatement fillPreparedStatementColumnType( 3 | PreparedStatement preparedStatement, int columnIndex, 4 | int columnSqltype, Column column) throws SQLException { 5 | java.util.Date utilDate; 6 | try { 7 | switch (columnSqltype) { 8 | case Types.CHAR: 9 | case Types.NCHAR: 10 | case Types.CLOB: 11 | case Types.NCLOB: 12 | case Types.VARCHAR: 13 | case Types.LONGVARCHAR: 14 | case Types.NVARCHAR: 15 | case Types.LONGNVARCHAR: 16 | if (null == column.getRawData()) { 17 | preparedStatement.setObject(columnIndex + 1, null); 18 | } else { 19 | preparedStatement.setString(columnIndex + 1, 20 | column.asString()); 21 | } 22 | break; 23 | 24 | case Types.SMALLINT: 25 | case Types.INTEGER: 26 | case Types.BIGINT: 27 | case Types.TINYINT: 28 | String strLongValue = column.asString(); 29 | if (emptyAsNull && "".equals(strLongValue)) { 30 | preparedStatement.setObject(columnIndex + 1, null); 31 | } else if (null == column.getRawData()) { 32 | preparedStatement.setObject(columnIndex + 1, null); 33 | } else { 34 | preparedStatement.setLong(columnIndex + 1, 35 | column.asLong()); 36 | } 37 | break; 38 | case Types.NUMERIC: 39 | case Types.DECIMAL: 40 | case Types.FLOAT: 41 | case Types.REAL: 42 | case Types.DOUBLE: 43 | String strValue = column.asString(); 44 | if (emptyAsNull && "".equals(strValue)) { 45 | preparedStatement.setObject(columnIndex + 1, null); 46 | } else if (null == column.getRawData()) { 47 | 
preparedStatement.setObject(columnIndex + 1, null); 48 | } else { 49 | preparedStatement.setDouble(columnIndex + 1, 50 | column.asDouble()); 51 | } 52 | break; 53 | 54 | case Types.DATE: 55 | java.sql.Date sqlDate = null; 56 | utilDate = column.asDate(); 57 | if (null != utilDate) { 58 | sqlDate = new java.sql.Date(utilDate.getTime()); 59 | preparedStatement.setDate(columnIndex + 1, sqlDate); 60 | } else { 61 | preparedStatement.setNull(columnIndex + 1, Types.DATE); 62 | } 63 | break; 64 | 65 | case Types.TIME: 66 | java.sql.Time sqlTime = null; 67 | utilDate = column.asDate(); 68 | if (null != utilDate) { 69 | sqlTime = new java.sql.Time(utilDate.getTime()); 70 | preparedStatement.setTime(columnIndex + 1, sqlTime); 71 | } else { 72 | preparedStatement.setNull(columnIndex + 1, Types.TIME); 73 | } 74 | break; 75 | 76 | case Types.TIMESTAMP: 77 | java.sql.Timestamp sqlTimestamp = null; 78 | utilDate = column.asDate(); 79 | if (null != utilDate) { 80 | sqlTimestamp = new java.sql.Timestamp( 81 | utilDate.getTime()); 82 | preparedStatement.setTimestamp(columnIndex + 1, 83 | sqlTimestamp); 84 | } else { 85 | preparedStatement.setNull(columnIndex + 1, 86 | Types.TIMESTAMP); 87 | } 88 | break; 89 | 90 | case Types.BINARY: 91 | case Types.VARBINARY: 92 | case Types.BLOB: 93 | case Types.LONGVARBINARY: 94 | if (null == column.getRawData()) { 95 | preparedStatement.setObject(columnIndex + 1, null); 96 | } else { 97 | preparedStatement.setBytes(columnIndex + 1, 98 | column.asBytes()); 99 | } 100 | break; 101 | 102 | case Types.BOOLEAN: 103 | if (null == column.getRawData()) { 104 | preparedStatement.setNull(columnIndex + 1, 105 | Types.BOOLEAN); 106 | } else { 107 | preparedStatement.setBoolean(columnIndex + 1, 108 | column.asBoolean()); 109 | } 110 | break; 111 | 112 | // warn: bit(1) -> Types.BIT 可使用setBoolean 113 | // warn: bit(>1) -> Types.VARBINARY 可使用setBytes 114 | case Types.BIT: 115 | if (null == column.getRawData()) { 116 | preparedStatement.setObject(columnIndex + 
1, null); 117 | } else if (this.dataBaseType == DataBaseType.MySql) { 118 | preparedStatement.setBoolean(columnIndex + 1, 119 | column.asBoolean()); 120 | } else { 121 | preparedStatement.setString(columnIndex + 1, 122 | column.asString()); 123 | } 124 | break; 125 | default: 126 | preparedStatement.setObject(columnIndex + 1, 127 | column.getRawData()); 128 | break; 129 | } 130 | } catch (DataXException e) { 131 | throw new SQLException(String.format( 132 | "类型转换错误:[%s] 字段名:[%s], 字段类型:[%d], 字段Java类型:[%s].", 133 | column, 134 | this.resultSetMetaData.getLeft().get(columnIndex), 135 | this.resultSetMetaData.getMiddle().get(columnIndex), 136 | this.resultSetMetaData.getRight().get(columnIndex))); 137 | } 138 | return preparedStatement; 139 | } --------------------------------------------------------------------------------