├── .gitignore ├── LICENSE ├── README.md ├── keras2ncnn ├── __init__.py ├── __main__.py ├── graph_optimizer.py ├── graph_tool.py ├── h5df_parser.py ├── keras2ncnn.py ├── keras_converter.py ├── keras_debugger.py ├── ncnn_emitter.py └── ncnn_param.py └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # Keras files 132 | *.h5 133 | *.h5df 134 | 135 | # ncnn files 136 | *.param 137 | *.bin 138 | 139 | # Generate C template 140 | *.c 141 | 142 | # Generate Graph 143 | *.gv 144 | *.pdf 145 | *.jpg 146 | 147 | .keras2ncnn_build -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Martin Han 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # keras2ncnn 2 | 3 | ### Now availabel on pypi! 4 | ### If you failed to convert a model, welcome to open an issue and attach the h5 file. 5 | 6 | --- 7 | ## Usage: 8 | ``` 9 | # Install keras2ncnn (only h5py and numpy is required) 10 | python3 -mpip install --upgrade keras2ncnn 11 | 12 | # If you only want to convert the model 13 | python3 -m keras2ncnn -i SOME_H5DF_FILE.h5 -o ./ 14 | 15 | # You can see the structure of the converted model and the original model(after optimization) 16 | python3 -m keras2ncnn -i SOME_H5DF_FILE.h5 -o DIR_TO_SAVE_NCNN_PARAM --plot_graph/-p 17 | ``` 18 | --- 19 | ## Supported Op 20 | - InputLayer 21 | - Conv2D (Linear, Softmax, ReLU, Sigmoid) 22 | - Conv2DTranspose (Linear, ReLU, Sigmoid) 23 | - DepthwiseConv2D 24 | - SeparableConv2D (Linear, Softmax, ReLU, Sigmoid) 25 | - Add 26 | - Multiply 27 | - Concatenate 28 | - ZeroPadding2D 29 | - ReLU 30 | - LeakyReLU 31 | - Activation (Softmax, ReLU, Sigmoid) 32 | - UpSampling2D 33 | - BilinearUpsampling 34 | - Cropping2D 35 | - GlobalAveragePooling2D 36 | - GlobalMaxPooling2D 37 | - AveragePooling2D 38 | - MaxPooling2D 39 | - BatchNormalization 40 | - Dense (Linear, Softmax, ReLU, Sigmoid) 41 | - Flatten 42 | - Reshape 43 | - Maximum 44 | - TensorFlowOpLayer (Mul with constant) 45 | - Permute (Need more testing) 46 | 47 | ## Ops that will be dropped by converter 48 | - Dropout 49 | - Lambda 50 | - TimeDistributed 51 | - InputLayer with inbound nodes 52 | -------------------------------------------------------------------------------- /keras2ncnn/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MarsTechHAN/keras2ncnn/6a4edd8e52ef79ba068ee0f559b17a26f43d2f95/keras2ncnn/__init__.py -------------------------------------------------------------------------------- /keras2ncnn/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import os 4 | import sys 5 | 6 | if sys.path[0] in ('', os.getcwd()): 7 | sys.path.pop(0) 8 | 9 | from keras2ncnn.keras2ncnn import main as _main # isort:skip # noqa 10 | 11 | if __name__ == '__main__': 12 | sys.exit(_main()) 13 | -------------------------------------------------------------------------------- /keras2ncnn/graph_optimizer.py: -------------------------------------------------------------------------------- 1 | class GraphOptimization: 2 | 3 | @staticmethod 4 | def removing_unused_nodes(graph): 5 | UNUSED_NODES = ['Dropout', 'Lambda', 'TimeDistributed'] 6 | nodes_to_remove = [] 7 | 8 | for target_node_name in graph.get_graph().keys(): 9 | if graph.get_node_attr(target_node_name)[ 10 | 'layer']['class_name'] in UNUSED_NODES or \ 11 | ( graph.get_node_attr(target_node_name)[ 12 | 'layer']['class_name'] == 'InputLayer' and \ 13 | len(graph.get_node_inbounds(target_node_name)) != 0 ): 14 | for layer_name in graph.get_graph().keys(): 15 | if target_node_name in graph.get_graph()[ 16 | layer_name]['inbounds']: 17 | graph.remove_node_inbounds( 18 | layer_name, target_node_name) 19 | graph.add_node_inbounds( 20 | layer_name, graph.get_graph()[target_node_name]['inbounds'][0]) 21 | nodes_to_remove.append(target_node_name) 22 | 23 | for removed_nodes_name in nodes_to_remove: 24 | graph.remove_node(removed_nodes_name) 25 | 26 | @staticmethod 27 | def removing_reshape_after_global_pooling(graph): 28 | GLOBAL_POOLING_NODES = [ 29 | 'GlobalAveragePooling2D', 30 | 'MaxAveragePooling2D'] 31 | nodes_to_remove = [] 32 | 33 | for target_node_name in graph.get_graph().keys(): 34 | if graph.get_node_attr(target_node_name)[ 35 | 'layer']['class_name'] in GLOBAL_POOLING_NODES: 36 | for out_nodes in graph.get_node_outbounds(target_node_name): 37 | if graph.get_node_attr(out_nodes)[ 38 | 'layer']['class_name'] == 'Reshape': 39 | for layer_name in graph.get_graph().keys(): 40 | if out_nodes in graph.get_graph()[ 41 | layer_name]['inbounds']: 42 | graph.remove_node_inbounds( 43 | layer_name, out_nodes) 44 | graph.add_node_inbounds( 45 | layer_name, graph.get_graph()[out_nodes]['inbounds'][0]) 46 | nodes_to_remove.append(out_nodes) 47 | 48 | for removed_nodes_name in nodes_to_remove: 49 | graph.remove_node(removed_nodes_name) 50 | -------------------------------------------------------------------------------- /keras2ncnn/graph_tool.py: -------------------------------------------------------------------------------- 1 | class Grapher: 2 | def __init__(self): 3 | self.graph = {} 4 | 5 | def node(self, name, inbound_nodes=None): 6 | self.graph[name] = {} 7 | if inbound_nodes is not None: 8 | self.graph[name]['inbounds'] = inbound_nodes 9 | for node in inbound_nodes: 10 | if node not in self.graph.keys(): 11 | self.graph[node] = {} 12 | if 'outbounds' not in self.graph[node].keys(): 13 | self.graph[node]['outbounds'] = [] 14 | self.graph[node]['outbounds'].append(name) 15 | 16 | def refresh(self): 17 | for name in self.graph.keys(): 18 | self.graph[name]['outbounds'] = [] 19 | 20 | for name in self.graph.keys(): 21 | for node in self.graph[name]['inbounds']: 22 | if node not in self.graph.keys(): 23 | while node in self.graph[name]['inbounds']: 24 | self.graph[name]['inbounds'].remove(node) 25 | else: 26 | if 'outbounds' not in self.graph[node].keys(): 27 | self.graph[node]['outbounds'] = [] 28 | 29 | self.graph[node]['outbounds'].append(name) 30 | 31 | spare_nodes = [] 32 | 33 | for name in self.graph.keys(): 34 | if len(self.graph[name]['outbounds']) == 0 and \ 35 | len(self.graph[name]['inbounds']) == 0: 36 | spare_nodes.append(name) 37 | 38 | for removing_node_name in spare_nodes: 39 | del self.graph[removing_node_name] 40 | 41 | def get_graph(self): 42 | return self.graph 43 | 44 | def get_node_inbounds(self, name): 45 | if 'inbounds' in self.graph[name]: 46 | return self.graph[name]['inbounds'] 47 | else: 48 | return [] 49 | 50 | def get_node_outbounds(self, name): 51 | if 'outbounds' in self.graph[name]: 52 | return self.graph[name]['outbounds'] 53 | else: 54 | return [] 55 | 56 | def set_node_inbounds(self, name, inbounds): 57 | self.graph[name]['inbounds'] = inbounds 58 | 59 | def set_node_outbounds(self, name, outbounds): 60 | self.graph[name]['outbounds'] = outbounds 61 | 62 | def remove_node(self, name): 63 | if name in self.graph.keys(): 64 | del self.graph[name] 65 | 66 | def remove_node_inbounds(self, name, inbound): 67 | if inbound in self.graph[name]['inbounds']: 68 | self.graph[name]['inbounds'].remove(inbound) 69 | 70 | def remove_node_outbounds(self, name, outbound): 71 | if outbound in self.graph[name]['outbound']: 72 | self.graph[name]['outbounds'].remove(outbound) 73 | 74 | def add_node_inbounds(self, name, inbound): 75 | self.graph[name]['inbounds'].append(inbound) 76 | 77 | def add_node_outbounds(self, name, outbound): 78 | self.graph[name]['outbounds'].append(outbound) 79 | 80 | def get_graph_head(self): 81 | self.heads = [] 82 | for (key, value) in self.graph.items(): 83 | if 'inbounds' not in value.keys()\ 84 | or len(value['inbounds']) == 0: 85 | self.heads.append(key) 86 | return self.heads 87 | 88 | def get_graph_tail(self): 89 | self.tails = [] 90 | for (key, value) in self.graph.items(): 91 | if 'outbounds' not in value.keys()\ 92 | or len(value['outbounds']) == 0: 93 | self.tails.append(key) 94 | return self.tails 95 | 96 | def set_node_attr(self, name, attr): 97 | if name not in self.graph.keys(): 98 | self.graph[name] = {} 99 | self.graph[name]['attr'] = attr 100 | 101 | def get_node_attr(self, name): 102 | if name in self.graph.keys(): 103 | return self.graph[name]['attr'] 104 | else: 105 | return None 106 | 107 | def plot_graphs(self, filename='kears2ncnn'): 108 | from graphviz import Digraph # pylint: disable=import-outside-toplevel 109 | 110 | dot = Digraph(comment='Network Grapher View') 111 | for (key, value) in self.graph.items(): 112 | dot.node(key, key) 113 | if 'inbounds' in value.keys(): 114 | for node in value['inbounds']: 115 | dot.edge(node, key) 116 | dot.render(filename, view=False) 117 | -------------------------------------------------------------------------------- /keras2ncnn/h5df_parser.py: -------------------------------------------------------------------------------- 1 | import json 2 | import h5py 3 | import sys 4 | 5 | 6 | class H5dfParser: 7 | 8 | def __decode(self, payload): 9 | if isinstance(payload, (bytes, bytearray)): 10 | return payload.decode('utf-8') 11 | if isinstance(payload, str): 12 | return payload 13 | return str(payload) 14 | 15 | def __init__(self, h5_file): 16 | try: 17 | f = h5py.File(h5_file, mode='r') 18 | self.f = f 19 | model_config_raw = f.attrs.get('model_config') 20 | 21 | except Exception: 22 | print('[ERROR] Failed to read h5df file.') 23 | print('You are not selecting a valid keras model file.') 24 | print('You can check it by either opening it by Keras or Netron.') 25 | print('If you are very confident of your file, please report a bug at:') 26 | print('https://github.com/MarsTechHAN/keras2ncnn') 27 | sys.exit(-1) 28 | 29 | if not isinstance(model_config_raw, (str, bytes, bytearray)): 30 | print('[ERROR] Failed to load structure descriptor from h5df file.') 31 | print('You may load a weight only file.') 32 | print('Such issue may caused by following ways:') 33 | print('\t1. You are using model.save_weights instead of model.save') 34 | print('\t2. You are trying to load a weight file download from somewhere.') 35 | print('If you are very confident of your file, please report a bug at:') 36 | print('https://github.com/MarsTechHAN/keras2ncnn') 37 | sys.exit(-1) 38 | 39 | self.model_config = json.loads(self.__decode(model_config_raw)) 40 | self.keras_version = self.get_keras_version() 41 | 42 | if self.keras_version != '1': 43 | weight_layers = self.f['model_weights'] 44 | else: 45 | weight_layers = self.f 46 | 47 | self.weight_dict = {} 48 | weight_layers.visititems(self._get_weight_names) 49 | 50 | def get_h5df_file(self): 51 | return self.f 52 | 53 | def get_model_config(self): 54 | return self.model_config 55 | 56 | def get_keras_version(self): 57 | if 'keras_version' in self.f['model_weights'].attrs: 58 | original_keras_version = self.__decode(self.f['model_weights']\ 59 | .attrs['keras_version']) 60 | return original_keras_version 61 | else: 62 | return '1' 63 | 64 | def get_backend_version(self): 65 | if 'backend' in self.f['model_weights'].attrs: 66 | original_backend = self.__decode(self.f['model_weights']\ 67 | .attrs['backend']) 68 | return original_backend 69 | else: 70 | return None 71 | 72 | def _get_weight_names(self, name, obj): 73 | for key, val in obj.attrs.items(): 74 | if key == 'weight_names': 75 | weight_names = list( 76 | map(lambda x: self.__decode(x), val.tolist())) 77 | if len(weight_names) > 0: 78 | wegith_group = '/'.join(weight_names[0].split('/')[0:-1]) 79 | self.weight_dict[name] = obj[wegith_group] 80 | for weight_name in weight_names: 81 | wegith_group = '/'.join(weight_name.split('/')[0:-1]) 82 | self.weight_dict[weight_name.split( 83 | '/')[-2]] = obj[wegith_group] 84 | self.weight_dict[wegith_group] = obj[wegith_group] 85 | 86 | def find_weights_root(self, layer_name): 87 | if layer_name in self.weight_dict.keys(): 88 | return self.weight_dict[layer_name] 89 | else: 90 | return None 91 | 92 | def join_inbound_nodes(self, layer): 93 | inbound_nodes = [] 94 | 95 | def get_inbound_nodes(inbound_list, inbound_nodes): 96 | for entry in inbound_list: 97 | if isinstance(entry, list): 98 | get_inbound_nodes(entry, inbound_nodes) 99 | else: 100 | if isinstance(entry, str): 101 | inbound_nodes.append(entry) 102 | 103 | if 'inbound_nodes' in layer.keys(): 104 | get_inbound_nodes(layer['inbound_nodes'], inbound_nodes) 105 | 106 | return inbound_nodes 107 | 108 | def parse_graph(self, graph_helper): 109 | self.joined_layers = [] 110 | if isinstance(self.model_config['config'], list): 111 | self.parse_model_graph(self.model_config['config'], 112 | graph_helper) 113 | return 114 | elif isinstance(self.model_config['config'], dict): 115 | if 'layers' in self.model_config['config'].keys(): 116 | for layers in self.model_config['config']['layers']: 117 | if layers['class_name'] == 'Model' or \ 118 | layers['class_name'] == 'Functional': 119 | self.parse_model_graph( 120 | layers['config']['layers'], graph_helper) 121 | else: 122 | if layers['class_name'] == 'TensorFlowOpLayer': 123 | layer_name = layers['name'] 124 | else: 125 | layer_name = layers['config']['name'] 126 | layers['name'] = layers['config']['name'] 127 | 128 | inbound_nodes = self.join_inbound_nodes(layers) 129 | if len(inbound_nodes) == 0: 130 | inbound_nodes = graph_helper.get_graph_tail() 131 | 132 | graph_helper.node(layer_name, inbound_nodes) 133 | graph_helper.set_node_attr( 134 | layer_name, { 135 | 'layer': layers, 'weight': self.find_weights_root( 136 | layer_name)}) 137 | return 138 | 139 | print('[ERROR] Failed to load model config from h5df file.') 140 | print('You may use an unsupported version of keras h5df format.') 141 | print('Please report a bug and attach your file at:') 142 | print('https://github.com/MarsTechHAN/keras2ncnn') 143 | sys.exit(-1) 144 | 145 | 146 | def parse_model_graph(self, model_layers, graph_helper): 147 | layer_idx = 0 148 | for layer in model_layers: 149 | inbound_nodes = self.join_inbound_nodes(layer) 150 | if len(inbound_nodes) == 0: 151 | inbound_nodes = graph_helper.get_graph_tail() 152 | if 'name' in layer.keys(): 153 | pass 154 | elif 'name' in layer['config'].keys(): 155 | layer['name'] = layer['config']['name'] 156 | else: 157 | layer['name'] = layer['class_name'] + '_' + str(layer_idx) 158 | graph_helper.node(layer['name'], inbound_nodes) 159 | graph_helper.set_node_attr( 160 | layer['name'], { 161 | 'layer': layer, 'weight': self.find_weights_root( 162 | layer['name'])}) 163 | layer_idx += 1 164 | -------------------------------------------------------------------------------- /keras2ncnn/keras2ncnn.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from pathlib import Path 4 | import argparse 5 | 6 | from keras2ncnn.graph_tool import Grapher 7 | from keras2ncnn.graph_optimizer import GraphOptimization 8 | from keras2ncnn.h5df_parser import H5dfParser 9 | from keras2ncnn.keras_converter import KerasConverter 10 | from keras2ncnn.keras_debugger import KerasDebugger 11 | from keras2ncnn.ncnn_emitter import NcnnEmitter 12 | from keras2ncnn.ncnn_param import NcnnParamDispatcher 13 | 14 | 15 | def main(): 16 | parser = argparse.ArgumentParser() 17 | 18 | if parser.prog == '__main__.py': 19 | parser.prog = 'python3 -m keras2ncnn' 20 | 21 | parser.add_argument( 22 | '-i', 23 | '--input_file', 24 | type=str, 25 | help='Input h5df file', 26 | required=True) 27 | 28 | parser.add_argument( 29 | '-o', 30 | '--output_dir', 31 | type=str, 32 | help='Output file dir', 33 | default='') 34 | 35 | parser.add_argument( 36 | '-p', 37 | '--plot_graph', 38 | action='store_true', 39 | help='Virtualize graph.') 40 | 41 | parser.add_argument( 42 | '-d', 43 | '--debug', 44 | action='store_true', 45 | help='Run accuracy debug.') 46 | 47 | args = parser.parse_args() 48 | 49 | # Create a source graph and a dest graph 50 | keras_graph = Grapher() 51 | ncnn_graph = Grapher() 52 | 53 | # Read and parse keras file to graph 54 | print('Reading and parsing keras h5df file...') 55 | H5dfParser(args.input_file).parse_graph(keras_graph) 56 | 57 | # Graph Optimization 58 | print('Start graph optimizing pass...') 59 | print('\tRemoving unused nodes...') 60 | GraphOptimization.removing_unused_nodes(keras_graph) 61 | print('\tRemoving squeeze reshape after pooling...') 62 | GraphOptimization.removing_reshape_after_global_pooling(keras_graph) 63 | 64 | print('\tRefreshing graph...') 65 | keras_graph.refresh() 66 | 67 | # Convert keras to ncnn representations 68 | print('Converting keras graph to ncnn graph...') 69 | KerasConverter().parse_keras_graph(keras_graph, ncnn_graph, NcnnParamDispatcher()) 70 | 71 | if args.plot_graph: 72 | print('Rendering graph plots...') 73 | keras_graph.plot_graphs(Path(args.input_file).stem + '_keras') 74 | ncnn_graph.plot_graphs(Path(args.input_file).stem + '_ncnn') 75 | 76 | # Emit the graph to params and bin 77 | 78 | if args.output_dir != '': 79 | print('Start emitting to ncnn files.') 80 | emitter = NcnnEmitter(ncnn_graph) 81 | graph_seq = emitter.get_graph_seq() 82 | 83 | print('\tEmitting param...') 84 | emitter.emit_param( 85 | os.path.join( 86 | args.output_dir, 87 | Path( 88 | args.input_file).stem + 89 | '.param'), graph_seq) 90 | 91 | print('\tEmitting binary...') 92 | emitter.emit_binary( 93 | os.path.join( 94 | args.output_dir, 95 | Path( 96 | args.input_file).stem + 97 | '.bin'), graph_seq) 98 | 99 | if args.debug: 100 | print('Running accuracy matcher...') 101 | debugger = KerasDebugger() 102 | 103 | print('\tIniting Env...') 104 | debugger.init_env() 105 | 106 | emitter = NcnnEmitter(ncnn_graph) 107 | graph_seq = emitter.get_graph_seq() 108 | 109 | print('\tPreparing File...') 110 | emitter.emit_param( 111 | os.path.join( 112 | debugger.tmp_dir, 113 | Path( 114 | args.input_file).stem + 115 | '.param'), graph_seq) 116 | 117 | emitter.emit_binary( 118 | os.path.join( 119 | debugger.tmp_dir, 120 | Path( 121 | args.input_file).stem + 122 | '.bin'), graph_seq) 123 | 124 | print('\tGenerting File...') 125 | debugger.emit_file( 126 | Path( 127 | args.input_file).stem, 128 | ncnn_graph, 129 | keras_graph, 130 | graph_seq) 131 | 132 | print('\tCompiling...') 133 | debugger.run_debug() 134 | 135 | print('\tRunning Keras Model...') 136 | debugger.decode(args.input_file, keras_graph, graph_seq) 137 | 138 | print('Done!') 139 | -------------------------------------------------------------------------------- /keras2ncnn/keras_converter.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import numpy as np 3 | import sys 4 | 5 | 6 | class KerasConverter: 7 | MULTI_OUTPUT_OP = [] 8 | 9 | @staticmethod 10 | def replaceDefault(content, key, default=1): 11 | if key in content.keys(): 12 | return content[key] 13 | else: 14 | return default 15 | 16 | def InputLayer_helper(self, layer, keras_graph_helper, 17 | ncnn_graph_helper, ncnn_helper): 18 | 19 | def replaceNone(x): return -1 if x is None else x 20 | 21 | input_w = replaceNone(layer['layer']['config']['batch_input_shape'][1]) 22 | input_h = replaceNone(layer['layer']['config']['batch_input_shape'][2]) 23 | input_c = replaceNone(layer['layer']['config']['batch_input_shape'][3]) 24 | 25 | ncnn_graph_attr = ncnn_helper.dump_args( 26 | 'Input', w=input_w, h=input_h, c=input_c) 27 | 28 | ncnn_graph_helper.node( 29 | layer['layer']['name'], 30 | keras_graph_helper.get_node_inbounds( 31 | layer['layer']['name'])) 32 | ncnn_graph_helper.set_node_attr( 33 | layer['layer']['name'], { 34 | 'type': 'Input', 'param': ncnn_graph_attr, 'binary': []}) 35 | 36 | def Conv2D_helper(self, layer, keras_graph_helper, 37 | ncnn_graph_helper, ncnn_helper): 38 | 39 | CONV2D_SUPPORTED_ACTIVATION = ['softmax'] 40 | CONV2D_FUSED_ACTIVATION_TYPE = { 41 | '': 0, 42 | 'linear': 0, 43 | 'relu': 1, 44 | 'sigmoid': 4 45 | } 46 | 47 | num_output = layer['layer']['config']['filters'] 48 | kernel_w, kernel_h = layer['layer']['config']['kernel_size'] 49 | dilation_w, dilation_h = layer['layer']['config']['dilation_rate'] 50 | stride_w, stride_h = layer['layer']['config']['strides'] 51 | 52 | if layer['layer']['config']['padding'] == 'valid': 53 | pad_left = 0 54 | elif layer['layer']['config']['padding'] == 'same': 55 | pad_left = -233 56 | else: 57 | print('[ERROR] Explicit padding is not supported yet.') 58 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 59 | print('Failed to convert at %s:%d %s()' % 60 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 61 | sys.exit(-1) 62 | 63 | bias_term = layer['layer']['config']['use_bias'] 64 | if bias_term: 65 | weight_data_size = int(layer['weight']['kernel:0'].size) 66 | # Reorder weight, h-w-i-o to o-i-h-w 67 | kernel_weight = np.insert( 68 | np.transpose( 69 | layer['weight']['kernel:0'], [ 70 | 3, 2, 0, 1]).flatten(), 0, 0) 71 | bias_weight = layer['weight']['bias:0'] 72 | else: 73 | # Reorder weight, h-w-i-o to o-i-h-w 74 | weight_data_size = int(layer['weight']['kernel:0'].size) 75 | # Reorder weight, h-w-i-o to o-i-h-w 76 | weight = np.insert(np.transpose(layer['weight']['kernel:0'], 77 | [3, 2, 0, 1]).flatten(), 0, 0) 78 | 79 | if 'activation' in layer['layer']['config']: 80 | if layer['layer']['config']['activation'] in CONV2D_FUSED_ACTIVATION_TYPE.keys(): 81 | activation_type = CONV2D_FUSED_ACTIVATION_TYPE[layer['layer'][ 82 | 'config']['activation']] 83 | else: 84 | activation_type = -1 85 | else: 86 | activation_type = 0 87 | 88 | if activation_type > -1: 89 | ncnn_graph_attr = ncnn_helper.dump_args( 90 | 'Convolution', 91 | num_output=num_output, 92 | kernel_w=kernel_w, 93 | dilation_w=dilation_w, 94 | stride_w=stride_w, 95 | pad_left=pad_left, 96 | bias_term=bias_term, 97 | weight_data_size=weight_data_size, 98 | kernel_h=kernel_h, 99 | dilation_h=dilation_h, 100 | stride_h=stride_h, 101 | activation_type=activation_type) 102 | 103 | ncnn_graph_helper.node( 104 | layer['layer']['name'], 105 | keras_graph_helper.get_node_inbounds( 106 | layer['layer']['name'])) 107 | 108 | if bias_term: 109 | ncnn_graph_helper.set_node_attr( 110 | layer['layer']['name'], { 111 | 'type': 'Convolution', 'param': ncnn_graph_attr, 'binary': [ 112 | kernel_weight, bias_weight]}) 113 | else: 114 | ncnn_graph_helper.set_node_attr( 115 | layer['layer']['name'], { 116 | 'type': 'Convolution', 'param': ncnn_graph_attr, 'binary': [weight]}) 117 | else: 118 | if layer['layer']['config']['activation'] in CONV2D_SUPPORTED_ACTIVATION: 119 | if layer['layer']['config']['activation'] == 'softmax': 120 | ncnn_graph_attr = ncnn_helper.dump_args( 121 | 'Convolution', 122 | num_output=num_output, 123 | kernel_w=kernel_w, 124 | dilation_w=dilation_w, 125 | stride_w=stride_w, 126 | pad_left=pad_left, 127 | bias_term=bias_term, 128 | weight_data_size=weight_data_size, 129 | kernel_h=kernel_h, 130 | dilation_h=dilation_h, 131 | stride_h=stride_h) 132 | 133 | ncnn_graph_helper.node( 134 | layer['layer']['name'], 135 | keras_graph_helper.get_node_inbounds( 136 | layer['layer']['name'])) 137 | 138 | if bias_term: 139 | ncnn_graph_helper.set_node_attr( 140 | layer['layer']['name'], { 141 | 'type': 'Convolution', 'param': ncnn_graph_attr, 'binary': [ 142 | kernel_weight, bias_weight]}) 143 | else: 144 | ncnn_graph_helper.set_node_attr( 145 | layer['layer']['name'], { 146 | 'type': 'Convolution', 'param': ncnn_graph_attr, 'binary': [weight]}) 147 | 148 | outbound_layers = [] 149 | 150 | for name in keras_graph_helper.get_graph().keys(): 151 | for node in keras_graph_helper.get_graph()[ 152 | name]['inbounds']: 153 | if layer['layer']['name'] == node: 154 | outbound_layers.append(name) 155 | 156 | ncnn_graph_attr = ncnn_helper.dump_args('Softmax') 157 | ncnn_graph_helper.node( 158 | layer['layer']['name'] + '_Softmax', [layer['layer']['name'], ]) 159 | ncnn_graph_helper.set_node_attr( 160 | layer['layer']['name'] + '_Softmax', { 161 | 'type': 'Softmax', 'param': ncnn_graph_attr, 'binary': []}) 162 | 163 | keras_graph_helper.node( 164 | layer['layer']['name'] + '_Softmax', [layer['layer']['name'], ]) 165 | 166 | for outbound_layer in outbound_layers: 167 | keras_graph_helper.remove_node_inbounds( 168 | outbound_layer, layer['layer']['name']) 169 | keras_graph_helper.add_node_inbounds( 170 | outbound_layer, layer['layer']['name'] + '_Softmax') 171 | else: 172 | print( 173 | '[ERROR] Activation type %s is is not supported yet.' % 174 | layer['layer']['config']['activation']) 175 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 176 | print('Failed to convert at %s:%d %s()' % 177 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 178 | sys.exit(-1) 179 | 180 | def Conv2DTranspose_helper(self, layer, keras_graph_helper, 181 | ncnn_graph_helper, ncnn_helper): 182 | 183 | CONV2D_T_ACTIVATION_TYPE = { 184 | 'linear': 0, 185 | 'relu': 1, 186 | 'sigmoid': 4 187 | } 188 | 189 | num_output = layer['layer']['config']['filters'] 190 | kernel_w, kernel_h = layer['layer']['config']['kernel_size'] 191 | dilation_w, dilation_h = layer['layer']['config']['dilation_rate'] 192 | stride_w, stride_h = layer['layer']['config']['strides'] 193 | 194 | if layer['layer']['config']['padding'] == 'valid': 195 | print('[WARN] Valid padding is not tested yet.') 196 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 197 | elif layer['layer']['config']['padding'] == 'same': 198 | pad_left = kernel_w - stride_w 199 | pad_top = kernel_h - stride_h 200 | if pad_left < 0 or pad_top < 0: 201 | print('[ERROR] Failed to calculate output shape.') 202 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 203 | print('Failed to convert at %s:%d %s()' % 204 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 205 | sys.exit(-1) 206 | else: 207 | print('[ERROR] Explicit padding is not supported yet.') 208 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 209 | print('Failed to convert at %s:%d %s()' % 210 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 211 | sys.exit(-1) 212 | 213 | bias_term = layer['layer']['config']['use_bias'] 214 | if bias_term: 215 | weight_data_size = int(layer['weight']['kernel:0'].size) 216 | # Reorder weight, h-w-i-o to o-i-h-w 217 | kernel_weight = np.insert( 218 | np.transpose( 219 | layer['weight']['kernel:0'], [ 220 | 2, 3, 0, 1]).flatten(), 0, 0) 221 | bias_weight = layer['weight']['bias:0'] 222 | else: 223 | # Reorder weight, h-w-i-o to o-i-h-w 224 | weight_data_size = int(layer['weight']['kernel:0'].size) 225 | # Reorder weight, h-w-i-o to o-i-h-w 226 | weight = np.insert(np.transpose(layer['weight']['kernel:0'], 227 | [2, 3, 0, 1]).flatten(), 0, 0) 228 | 229 | if 'activation' in layer['layer']['config']: 230 | if layer['layer']['config']['activation'] in CONV2D_T_ACTIVATION_TYPE.keys(): 231 | activation_type = CONV2D_T_ACTIVATION_TYPE[layer['layer'][ 232 | 'config']['activation']] 233 | else: 234 | print( 235 | '[ERROR] Activation type %s is is not supported yet.' % 236 | layer['layer']['config']['activation']) 237 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 238 | print('Failed to convert at %s:%d %s()' % 239 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 240 | sys.exit(-1) 241 | else: 242 | activation_type = 0 243 | 244 | ncnn_graph_attr = ncnn_helper.dump_args( 245 | 'Deconvolution', 246 | num_output=num_output, 247 | kernel_w=kernel_w, 248 | dilation_w=dilation_w, 249 | stride_w=stride_w, 250 | pad_left=pad_left, 251 | pad_top=pad_top, 252 | bias_term=bias_term, 253 | weight_data_size=weight_data_size, 254 | kernel_h=kernel_h, 255 | dilation_h=dilation_h, 256 | stride_h=stride_h, 257 | activation_type=activation_type) 258 | 259 | ncnn_graph_helper.node( 260 | layer['layer']['name'], 261 | keras_graph_helper.get_node_inbounds( 262 | layer['layer']['name'])) 263 | 264 | if bias_term: 265 | ncnn_graph_helper.set_node_attr( 266 | layer['layer']['name'], { 267 | 'type': 'Deconvolution', 'param': ncnn_graph_attr, 'binary': [ 268 | kernel_weight, bias_weight]}) 269 | else: 270 | ncnn_graph_helper.set_node_attr( 271 | layer['layer']['name'], { 272 | 'type': 'Deconvolution', 'param': ncnn_graph_attr, 'binary': [weight]}) 273 | 274 | def DepthwiseConv2D_helper( 275 | self, 276 | layer, 277 | keras_graph_helper, 278 | ncnn_graph_helper, 279 | ncnn_helper): 280 | # Reorder weight, h-w-i-o to o-i-h-w 281 | weight = np.insert( 282 | np.transpose( 283 | layer['weight']['depthwise_kernel:0'], [ 284 | 3, 2, 0, 1]).flatten(), 0, 0) 285 | 286 | num_output = layer['weight']['depthwise_kernel:0'].shape[2] * \ 287 | layer['layer']['config']['depth_multiplier'] 288 | group = layer['weight']['depthwise_kernel:0'].shape[2] 289 | 290 | kernel_w, kernel_h = layer['layer']['config']['kernel_size'] 291 | 292 | dilation_w, dilation_h = layer['layer']['config']['dilation_rate'] 293 | 294 | stride_w, stride_h = layer['layer']['config']['strides'] 295 | 296 | if layer['layer']['config']['padding'] == 'valid': 297 | pad_left = 0 298 | elif layer['layer']['config']['padding'] == 'same': 299 | pad_left = -233 300 | else: 301 | print('[ERROR] Explicit padding is not supported yet.') 302 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 303 | print('Failed to convert at %s:%d %s()' % 304 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 305 | sys.exit(-1) 306 | 307 | bias_term = layer['layer']['config']['use_bias'] 308 | 309 | if bias_term: 310 | bias_weight = layer['weight']['bias:0'] 311 | 312 | weight_data_size = int(layer['weight']['depthwise_kernel:0'].size) 313 | 314 | ncnn_graph_attr = ncnn_helper.dump_args( 315 | 'ConvolutionDepthWise', 316 | num_output=num_output, 317 | kernel_w=kernel_w, 318 | dilation_w=dilation_w, 319 | stride_w=stride_w, 320 | pad_left=pad_left, 321 | bias_term=bias_term, 322 | weight_data_size=weight_data_size, 323 | group=group, 324 | kernel_h=kernel_h, 325 | dilation_h=dilation_h, 326 | stride_h=stride_h) 327 | 328 | ncnn_graph_helper.node( 329 | layer['layer']['name'], 330 | keras_graph_helper.get_node_inbounds( 331 | layer['layer']['name'])) 332 | 333 | if bias_term: 334 | ncnn_graph_helper.set_node_attr( 335 | layer['layer']['name'], { 336 | 'type': 'ConvolutionDepthWise', 'param': ncnn_graph_attr, 'binary': [ 337 | weight, bias_weight]}) 338 | else: 339 | ncnn_graph_helper.set_node_attr( 340 | layer['layer']['name'], { 341 | 'type': 'ConvolutionDepthWise', 'param': ncnn_graph_attr, 'binary': [weight]}) 342 | 343 | def SeparableConv2D_helper( 344 | self, 345 | layer, 346 | keras_graph_helper, 347 | ncnn_graph_helper, 348 | ncnn_helper): 349 | 350 | SEPCONV2D_ACTIVATION_TYPE = { 351 | 'linear': 0, 352 | 'relu': 1, 353 | 'sigmoid': 4 354 | } 355 | 356 | # Fetch weight 357 | dw_weight = np.insert( 358 | np.transpose( 359 | layer['weight']['depthwise_kernel:0'], [ 360 | 3, 2, 0, 1]).flatten(), 0, 0) 361 | 362 | pw_weight = np.insert( 363 | np.transpose( 364 | layer['weight']['pointwise_kernel:0'], [ 365 | 3, 2, 0, 1]).flatten(), 0, 0) 366 | 367 | # Insert dwconv 368 | num_output = layer['weight']['depthwise_kernel:0'].shape[2] * \ 369 | self.replaceDefault(layer['layer']['config'], 'depth_multiplier') 370 | 371 | group = layer['weight']['depthwise_kernel:0'].shape[2] 372 | 373 | kernel_w, kernel_h = layer['layer']['config']['kernel_size'] 374 | 375 | dilation_w, dilation_h = layer['layer']['config']['dilation_rate'] 376 | 377 | stride_w, stride_h = layer['layer']['config']['strides'] 378 | 379 | if layer['layer']['config']['padding'] == 'valid': 380 | pad_left = 0 381 | elif layer['layer']['config']['padding'] == 'same': 382 | pad_left = -233 383 | else: 384 | print('[ERROR] Explicit padding is not supported yet.') 385 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 386 | print('Failed to convert at %s:%d %s()' % 387 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 388 | sys.exit(-1) 389 | 390 | weight_data_size = int(layer['weight']['depthwise_kernel:0'].size) 391 | 392 | ncnn_graph_attr = ncnn_helper.dump_args( 393 | 'ConvolutionDepthWise', 394 | num_output=num_output, 395 | kernel_w=kernel_w, 396 | dilation_w=dilation_w, 397 | stride_w=stride_w, 398 | pad_left=pad_left, 399 | weight_data_size=weight_data_size, 400 | group=group, 401 | kernel_h=kernel_h, 402 | dilation_h=dilation_h, 403 | stride_h=stride_h) 404 | 405 | ncnn_graph_helper.node( 406 | layer['layer']['name'] + '_dw', 407 | keras_graph_helper.get_node_inbounds( 408 | layer['layer']['name'])) 409 | 410 | ncnn_graph_helper.set_node_attr( 411 | layer['layer']['name'] + '_dw', 412 | { 413 | 'type': 'ConvolutionDepthWise', 414 | 'param': ncnn_graph_attr, 415 | 'binary': [dw_weight]}) 416 | 417 | # Fill pwconv params 418 | num_output = layer['layer']['config']['filters'] 419 | bias_term = layer['layer']['config']['use_bias'] 420 | if bias_term: 421 | bias_weight = layer['weight']['bias:0'] 422 | 423 | if 'activation' in layer['layer']['config']: 424 | if layer['layer']['config']['activation'] in SEPCONV2D_ACTIVATION_TYPE.keys(): 425 | activation_type = SEPCONV2D_ACTIVATION_TYPE[layer['layer'][ 426 | 'config']['activation']] 427 | else: 428 | print( 429 | '[ERROR] Activation type %s is is not supported yet.' % 430 | layer['layer']['config']['activation']) 431 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 432 | print('Failed to convert at %s:%d %s()' % 433 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 434 | sys.exit(-1) 435 | else: 436 | activation_type = 0 437 | 438 | weight_data_size = int(layer['weight']['pointwise_kernel:0'].size) 439 | 440 | ncnn_graph_attr = ncnn_helper.dump_args( 441 | 'Convolution', 442 | num_output=num_output, 443 | kernel_w=1, 444 | dilation_w=1, 445 | stride_w=1, 446 | pad_left=pad_left, 447 | bias_term=bias_term, 448 | weight_data_size=weight_data_size, 449 | kernel_h=1, 450 | dilation_h=1, 451 | stride_h=1, 452 | activation_type=activation_type) 453 | 454 | ncnn_graph_helper.node( 455 | layer['layer']['name'], 456 | [layer['layer']['name'] + '_dw']) 457 | 458 | if bias_term: 459 | ncnn_graph_helper.set_node_attr( 460 | layer['layer']['name'], { 461 | 'type': 'Convolution', 'param': ncnn_graph_attr, 'binary': [ 462 | pw_weight, bias_weight]}) 463 | else: 464 | ncnn_graph_helper.set_node_attr( 465 | layer['layer']['name'], { 466 | 'type': 'Convolution', 'param': ncnn_graph_attr, 'binary': [pw_weight]}) 467 | 468 | def BatchNormalization_helper( 469 | self, 470 | layer, 471 | keras_graph_helper, 472 | ncnn_graph_helper, 473 | ncnn_helper): 474 | num_output = layer['weight']['beta:0'].shape[0] 475 | bn_eps = layer['layer']['config']['epsilon'] 476 | 477 | bn_params = {} 478 | bn_params['bn_beta'] = np.full([num_output, ], 0, dtype=np.float) 479 | bn_params['bn_gamma'] = np.full([num_output, ], 1, dtype=np.float) 480 | bn_params['bn_moving_mean'] = np.full( 481 | [num_output, ], 0, dtype=np.float) 482 | bn_params['bn_moving_variance'] = np.full( 483 | [num_output, ], 1, dtype=np.float) 484 | 485 | for weight_name in layer['weight'].keys(): 486 | bn_params['bn_' + 487 | weight_name.replace(':0', '')] = layer['weight'][weight_name] 488 | 489 | ncnn_graph_attr = ncnn_helper.dump_args( 490 | 'BatchNorm', channels=num_output, eps=bn_eps) 491 | 492 | ncnn_graph_helper.node( 493 | layer['layer']['name'], 494 | keras_graph_helper.get_node_inbounds( 495 | layer['layer']['name'])) 496 | ncnn_graph_helper.set_node_attr( 497 | layer['layer']['name'], 498 | { 499 | 'type': 'BatchNorm', 500 | 'param': ncnn_graph_attr, 501 | 'binary': [ 502 | bn_params['bn_gamma'], 503 | bn_params['bn_moving_mean'], 504 | bn_params['bn_moving_variance'], 505 | bn_params['bn_beta']]}) 506 | 507 | def insert_binary_op( 508 | self, 509 | layer, 510 | op_type, 511 | keras_graph_helper, 512 | ncnn_graph_helper, 513 | ncnn_helper): 514 | ncnn_graph_attr = ncnn_helper.dump_args( 515 | 'BinaryOp', op_type=op_type, with_scalar=0) 516 | inbounds = keras_graph_helper.get_node_inbounds(layer['layer']['name']) 517 | last_node_name = inbounds[0] 518 | for node_idx in range(1, len(inbounds)): 519 | if node_idx == len(inbounds) - 1: 520 | node_name = layer['layer']['name'] 521 | else: 522 | node_name = layer['layer']['name']+'_slop_'+str(node_idx) 523 | ncnn_graph_helper.node( 524 | node_name, 525 | [last_node_name, inbounds[node_idx]]) 526 | ncnn_graph_helper.set_node_attr( 527 | node_name, { 528 | 'type': 'BinaryOp', 'param': ncnn_graph_attr, 'binary': []}) 529 | last_node_name = node_name 530 | 531 | def Add_helper( 532 | self, 533 | layer, 534 | keras_graph_helper, 535 | ncnn_graph_helper, 536 | ncnn_helper): 537 | self.insert_binary_op(layer, 538 | 0, 539 | keras_graph_helper, 540 | ncnn_graph_helper, 541 | ncnn_helper) 542 | 543 | def Multiply_helper( 544 | self, 545 | layer, 546 | keras_graph_helper, 547 | ncnn_graph_helper, 548 | ncnn_helper): 549 | self.insert_binary_op(layer, 550 | 2, 551 | keras_graph_helper, 552 | ncnn_graph_helper, 553 | ncnn_helper) 554 | 555 | def Activation_helper( 556 | self, 557 | layer, 558 | keras_graph_helper, 559 | ncnn_graph_helper, 560 | ncnn_helper): 561 | 562 | SUPPORTED_ACTIVATION = [ 563 | 'relu', 564 | 'relu6', 565 | '_relu6', 566 | 'sigmoid', 567 | 'softmax', 568 | '_hard_swish', 569 | 'hard_sigmoid'] 570 | 571 | if layer['layer']['config']['activation'] not in SUPPORTED_ACTIVATION: 572 | print( 573 | '[ERROR] Activation type %s is is not supported yet.' % 574 | layer['layer']['config']['activation']) 575 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 576 | print('Failed to convert at %s:%d %s()' % 577 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 578 | sys.exit(-1) 579 | 580 | if layer['layer']['config']['activation'] in [ 581 | 'relu', 'relu6', '_relu6']: 582 | if 'alpha' in layer['layer']['config'].keys(): 583 | negative_slope = layer['layer']['config']['alpha'] 584 | else: 585 | negative_slope = 0.0 586 | 587 | if '_relu6' in layer['layer']['config']['activation'] or \ 588 | 'relu6' in layer['layer']['config']['activation']: 589 | layer['layer']['config']['max_value'] = 6.0 590 | layer['layer']['config']['activation'] = 'relu' 591 | 592 | if 'max_value' in layer['layer']['config'].keys(): 593 | if layer['layer']['config']['max_value'] is not None: 594 | ncnn_graph_attr = ncnn_helper.dump_args( 595 | 'Clip', max=layer['layer']['config']['max_value']) 596 | ncnn_graph_helper.node( 597 | layer['layer']['name'] + '_Clip', 598 | keras_graph_helper.get_node_inbounds( 599 | layer['layer']['name'])) 600 | ncnn_graph_helper.set_node_attr( 601 | layer['layer']['name'] + '_Clip', 602 | { 603 | 'type': 'Clip', 604 | 'param': ncnn_graph_attr, 605 | 'binary': [], 606 | 'output_blobs': layer['layer']['name'] + '_Clip_blob'}) 607 | 608 | ncnn_graph_attr = ncnn_helper.dump_args( 609 | 'ReLU', slope=negative_slope) 610 | ncnn_graph_helper.node( 611 | layer['layer']['name'], [ 612 | layer['layer']['name'] + '_Clip', ]) 613 | ncnn_graph_helper.set_node_attr( 614 | layer['layer']['name'], { 615 | 'type': 'ReLU', 'param': ncnn_graph_attr, 'binary': []}) 616 | else: 617 | ncnn_graph_attr = ncnn_helper.dump_args( 618 | 'ReLU', slope=negative_slope) 619 | ncnn_graph_helper.node( 620 | layer['layer']['name'], 621 | keras_graph_helper.get_node_inbounds( 622 | layer['layer']['name'])) 623 | ncnn_graph_helper.set_node_attr( 624 | layer['layer']['name'], { 625 | 'type': 'ReLU', 'param': ncnn_graph_attr, 'binary': []}) 626 | else: 627 | ncnn_graph_attr = ncnn_helper.dump_args( 628 | 'ReLU', slope=negative_slope) 629 | ncnn_graph_helper.node( 630 | layer['layer']['name'], 631 | keras_graph_helper.get_node_inbounds( 632 | layer['layer']['name'])) 633 | ncnn_graph_helper.set_node_attr( 634 | layer['layer']['name'], { 635 | 'type': 'ReLU', 'param': ncnn_graph_attr, 'binary': []}) 636 | return 637 | 638 | if layer['layer']['config']['activation'] == 'sigmoid': 639 | ncnn_graph_attr = ncnn_helper.dump_args( 640 | 'Sigmoid') 641 | ncnn_graph_helper.node( 642 | layer['layer']['name'], 643 | keras_graph_helper.get_node_inbounds( 644 | layer['layer']['name'])) 645 | ncnn_graph_helper.set_node_attr( 646 | layer['layer']['name'], { 647 | 'type': 'Sigmoid', 'param': ncnn_graph_attr, 'binary': []}) 648 | 649 | if layer['layer']['config']['activation'] == 'softmax': 650 | ncnn_graph_attr = ncnn_helper.dump_args( 651 | 'Softmax') 652 | ncnn_graph_helper.node( 653 | layer['layer']['name'], 654 | keras_graph_helper.get_node_inbounds( 655 | layer['layer']['name'])) 656 | ncnn_graph_helper.set_node_attr( 657 | layer['layer']['name'], { 658 | 'type': 'Softmax', 'param': ncnn_graph_attr, 'binary': []}) 659 | 660 | if layer['layer']['config']['activation'] == '_hard_swish': 661 | ncnn_graph_attr = ncnn_helper.dump_args('HardSwish') 662 | ncnn_graph_helper.node( 663 | layer['layer']['name'], 664 | keras_graph_helper.get_node_inbounds( 665 | layer['layer']['name'])) 666 | ncnn_graph_helper.set_node_attr( 667 | layer['layer']['name'], { 668 | 'type': 'HardSwish', 'param': ncnn_graph_attr, 'binary': []}) 669 | 670 | if layer['layer']['config']['activation'] == 'hard_sigmoid': 671 | ncnn_graph_attr = ncnn_helper.dump_args('HardSigmoid') 672 | ncnn_graph_helper.node( 673 | layer['layer']['name'], 674 | keras_graph_helper.get_node_inbounds( 675 | layer['layer']['name'])) 676 | ncnn_graph_helper.set_node_attr( 677 | layer['layer']['name'], { 678 | 'type': 'HardSigmoid', 'param': ncnn_graph_attr, 'binary': []}) 679 | 680 | def Flatten_helper( 681 | self, 682 | layer, 683 | keras_graph_helper, 684 | ncnn_graph_helper, 685 | ncnn_helper): 686 | 687 | ncnn_graph_attr = ncnn_helper.dump_args( 688 | 'Reshape', w=-1) 689 | ncnn_graph_helper.node( 690 | layer['layer']['name'], 691 | keras_graph_helper.get_node_inbounds( 692 | layer['layer']['name'])) 693 | ncnn_graph_helper.set_node_attr( 694 | layer['layer']['name'], { 695 | 'type': 'Reshape', 'param': ncnn_graph_attr, 'binary': []}) 696 | 697 | def ZeroPadding2D_helper( 698 | self, 699 | layer, 700 | keras_graph_helper, 701 | ncnn_graph_helper, 702 | ncnn_helper): 703 | 704 | padding_top = layer['layer']['config']['padding'][0][0] 705 | padding_bottom = layer['layer']['config']['padding'][0][1] 706 | padding_left = layer['layer']['config']['padding'][1][0] 707 | padding_right = layer['layer']['config']['padding'][1][1] 708 | 709 | ncnn_graph_attr = ncnn_helper.dump_args( 710 | 'Padding', 711 | top=padding_top, 712 | bottom=padding_bottom, 713 | left=padding_left, 714 | right=padding_right) 715 | 716 | ncnn_graph_helper.node( 717 | layer['layer']['name'], 718 | keras_graph_helper.get_node_inbounds( 719 | layer['layer']['name'])) 720 | ncnn_graph_helper.set_node_attr( 721 | layer['layer']['name'], { 722 | 'type': 'Padding', 'param': ncnn_graph_attr, 'binary': []}) 723 | 724 | def ReLU_helper( 725 | self, 726 | layer, 727 | keras_graph_helper, 728 | ncnn_graph_helper, 729 | ncnn_helper): 730 | 731 | if 'threshold' in layer['layer']['config'].keys(): 732 | if layer['layer']['config']['threshold'] != 0: 733 | print('[ERROR] Leaky Clip ReLU is supported by ncnn.') 734 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 735 | print('Failed to convert at %s:%d %s()' % 736 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 737 | sys.exit(-1) 738 | 739 | if 'negative_slope' in layer['layer']['config'].keys(): 740 | negative_slope = layer['layer']['config']['negative_slope'] 741 | else: 742 | negative_slope = 0.0 743 | 744 | if 'max_value' in layer['layer']['config'].keys(): 745 | if layer['layer']['config']['max_value'] is not None: 746 | ncnn_graph_attr = ncnn_helper.dump_args( 747 | 'Clip', max=layer['layer']['config']['max_value']) 748 | ncnn_graph_helper.node( 749 | layer['layer']['name'] + '_Clip', 750 | keras_graph_helper.get_node_inbounds( 751 | layer['layer']['name'])) 752 | ncnn_graph_helper.set_node_attr( 753 | layer['layer']['name'] + '_Clip', 754 | { 755 | 'type': 'Clip', 756 | 'param': ncnn_graph_attr, 757 | 'binary': [], 758 | 'output_blobs': layer['layer']['name'] + '_Clip_blob'}) 759 | 760 | ncnn_graph_attr = ncnn_helper.dump_args( 761 | 'ReLU', slope=negative_slope) 762 | ncnn_graph_helper.node( 763 | layer['layer']['name'], [ 764 | layer['layer']['name'] + '_Clip', ]) 765 | ncnn_graph_helper.set_node_attr( 766 | layer['layer']['name'], { 767 | 'type': 'ReLU', 'param': ncnn_graph_attr, 'binary': []}) 768 | else: 769 | ncnn_graph_attr = ncnn_helper.dump_args( 770 | 'ReLU', slope=negative_slope) 771 | ncnn_graph_helper.node( 772 | layer['layer']['name'], 773 | keras_graph_helper.get_node_inbounds( 774 | layer['layer']['name'])) 775 | ncnn_graph_helper.set_node_attr( 776 | layer['layer']['name'], { 777 | 'type': 'ReLU', 'param': ncnn_graph_attr, 'binary': []}) 778 | else: 779 | ncnn_graph_attr = ncnn_helper.dump_args( 780 | 'ReLU', slope=negative_slope) 781 | ncnn_graph_helper.node( 782 | layer['layer']['name'], 783 | keras_graph_helper.get_node_inbounds( 784 | layer['layer']['name'])) 785 | ncnn_graph_helper.set_node_attr( 786 | layer['layer']['name'], { 787 | 'type': 'ReLU', 'param': ncnn_graph_attr, 'binary': []}) 788 | 789 | def LeakyReLU_helper( 790 | self, 791 | layer, 792 | keras_graph_helper, 793 | ncnn_graph_helper, 794 | ncnn_helper): 795 | 796 | ncnn_graph_attr = ncnn_helper.dump_args( 797 | 'ReLU', slope=layer['layer']['config']['alpha']) 798 | ncnn_graph_helper.node( 799 | layer['layer']['name'], 800 | keras_graph_helper.get_node_inbounds( 801 | layer['layer']['name'])) 802 | ncnn_graph_helper.set_node_attr( 803 | layer['layer']['name'], { 804 | 'type': 'ReLU', 'param': ncnn_graph_attr, 'binary': []}) 805 | 806 | def Dense_helper( 807 | self, 808 | layer, 809 | keras_graph_helper, 810 | ncnn_graph_helper, 811 | ncnn_helper): 812 | 813 | SUPPORTED_ACTIVATION = ['', 'linear', 'softmax', 'hard_sigmoid', 'tanh'] 814 | SUPPORTED_FUSED_ACTIVATION_TYPE = { 815 | 'relu': 1, 816 | 'sigmoid': 4 817 | } 818 | 819 | if layer['layer']['config']['activation'] not in SUPPORTED_ACTIVATION and \ 820 | layer['layer']['config']['activation'] not in SUPPORTED_FUSED_ACTIVATION_TYPE: 821 | print( 822 | '[ERROR] Activation type %s is is not supported yet.' % 823 | layer['layer']['config']['activation']) 824 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 825 | print('Failed to convert at %s:%d %s()' % 826 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 827 | sys.exit(-1) 828 | 829 | num_output = layer['weight']['kernel:0'].shape[1] 830 | 831 | bn_params = {} 832 | for weight_name in layer['weight'].keys(): 833 | bn_params['bn_' + 834 | weight_name.replace(':0', '')] = layer['weight'][weight_name] 835 | bn_params['bn_kernel'] = np.transpose(bn_params['bn_kernel']) 836 | weight_data_size = int(bn_params['bn_kernel'].size) 837 | 838 | bn_params['bn_bias'] = np.asarray(bn_params['bn_bias']) 839 | bn_params['bn_kernel'] = np.insert( 840 | bn_params['bn_kernel'].flatten(), 0, 0) 841 | 842 | if layer['layer']['config']['activation'] == '' or layer['layer']['config']['activation'] == 'linear': 843 | ncnn_graph_attr = ncnn_helper.dump_args( 844 | 'InnerProduct', 845 | num_output=num_output, 846 | bias_term=1, 847 | weight_data_size=weight_data_size) 848 | ncnn_graph_helper.node( 849 | layer['layer']['name'], 850 | keras_graph_helper.get_node_inbounds( 851 | layer['layer']['name'])) 852 | ncnn_graph_helper.set_node_attr( 853 | layer['layer']['name'], { 854 | 'type': 'InnerProduct', 'param': ncnn_graph_attr, 'binary': [ 855 | bn_params['bn_kernel'], bn_params['bn_bias']]}) 856 | 857 | if layer['layer']['config']['activation'] == 'softmax': 858 | ncnn_graph_attr = ncnn_helper.dump_args( 859 | 'InnerProduct', 860 | num_output=num_output, 861 | bias_term=1, 862 | weight_data_size=weight_data_size) 863 | ncnn_graph_helper.node( 864 | layer['layer']['name'], 865 | keras_graph_helper.get_node_inbounds( 866 | layer['layer']['name'])) 867 | ncnn_graph_helper.set_node_attr( 868 | layer['layer']['name'], { 869 | 'type': 'InnerProduct', 'param': ncnn_graph_attr, 'binary': [ 870 | bn_params['bn_kernel'], bn_params['bn_bias']]}) 871 | 872 | outbound_layers = [] 873 | 874 | for name in keras_graph_helper.get_graph().keys(): 875 | for node in keras_graph_helper.get_graph()[ 876 | name]['inbounds']: 877 | if layer['layer']['name'] == node: 878 | outbound_layers.append(name) 879 | 880 | ncnn_graph_attr = ncnn_helper.dump_args('Softmax') 881 | ncnn_graph_helper.node( 882 | layer['layer']['name'] + '_Softmax', [layer['layer']['name'], ]) 883 | ncnn_graph_helper.set_node_attr( 884 | layer['layer']['name'] + '_Softmax', { 885 | 'type': 'Softmax', 'param': ncnn_graph_attr, 'binary': []}) 886 | 887 | keras_graph_helper.node( 888 | layer['layer']['name'] + '_Softmax', [layer['layer']['name'], ]) 889 | 890 | for outbound_layer in outbound_layers: 891 | keras_graph_helper.remove_node_inbounds( 892 | outbound_layer, layer['layer']['name']) 893 | keras_graph_helper.add_node_inbounds( 894 | outbound_layer, layer['layer']['name'] + '_Softmax') 895 | 896 | if layer['layer']['config']['activation'] == 'tanh': 897 | ncnn_graph_attr = ncnn_helper.dump_args( 898 | 'InnerProduct', 899 | num_output=num_output, 900 | bias_term=1, 901 | weight_data_size=weight_data_size) 902 | ncnn_graph_helper.node( 903 | layer['layer']['name'], 904 | keras_graph_helper.get_node_inbounds( 905 | layer['layer']['name'])) 906 | ncnn_graph_helper.set_node_attr( 907 | layer['layer']['name'], { 908 | 'type': 'InnerProduct', 'param': ncnn_graph_attr, 'binary': [ 909 | bn_params['bn_kernel'], bn_params['bn_bias']]}) 910 | 911 | outbound_layers = [] 912 | 913 | for name in keras_graph_helper.get_graph().keys(): 914 | for node in keras_graph_helper.get_graph()[ 915 | name]['inbounds']: 916 | if layer['layer']['name'] == node: 917 | outbound_layers.append(name) 918 | 919 | ncnn_graph_attr = ncnn_helper.dump_args('TanH') 920 | ncnn_graph_helper.node( 921 | layer['layer']['name'] + '_TanH', [layer['layer']['name'], ]) 922 | ncnn_graph_helper.set_node_attr( 923 | layer['layer']['name'] + '_TanH', { 924 | 'type': 'TanH', 'param': ncnn_graph_attr, 'binary': []}) 925 | 926 | keras_graph_helper.node( 927 | layer['layer']['name'] + '_TanH', [layer['layer']['name'], ]) 928 | 929 | for outbound_layer in outbound_layers: 930 | keras_graph_helper.remove_node_inbounds( 931 | outbound_layer, layer['layer']['name']) 932 | keras_graph_helper.add_node_inbounds( 933 | outbound_layer, layer['layer']['name'] + '_TanH') 934 | 935 | if layer['layer']['config']['activation'] == 'hard_sigmoid': 936 | ncnn_graph_attr = ncnn_helper.dump_args( 937 | 'InnerProduct', 938 | num_output=num_output, 939 | bias_term=1, 940 | weight_data_size=weight_data_size) 941 | ncnn_graph_helper.node( 942 | layer['layer']['name'], 943 | keras_graph_helper.get_node_inbounds( 944 | layer['layer']['name'])) 945 | ncnn_graph_helper.set_node_attr( 946 | layer['layer']['name'], { 947 | 'type': 'InnerProduct', 'param': ncnn_graph_attr, 'binary': [ 948 | bn_params['bn_kernel'], bn_params['bn_bias']]}) 949 | 950 | outbound_layers = [] 951 | 952 | for name in keras_graph_helper.get_graph().keys(): 953 | for node in keras_graph_helper.get_graph()[ 954 | name]['inbounds']: 955 | if layer['layer']['name'] == node: 956 | outbound_layers.append(name) 957 | 958 | ncnn_graph_attr = ncnn_helper.dump_args('HardSigmoid') 959 | ncnn_graph_helper.node( 960 | layer['layer']['name'] + '_HardSigmoid', [layer['layer']['name'], ]) 961 | ncnn_graph_helper.set_node_attr( 962 | layer['layer']['name'] + '_HardSigmoid', { 963 | 'type': 'HardSigmoid', 'param': ncnn_graph_attr, 'binary': []}) 964 | 965 | keras_graph_helper.node( 966 | layer['layer']['name'] + '_HardSigmoid', [layer['layer']['name'], ]) 967 | 968 | for outbound_layer in outbound_layers: 969 | keras_graph_helper.remove_node_inbounds( 970 | outbound_layer, layer['layer']['name']) 971 | keras_graph_helper.add_node_inbounds( 972 | outbound_layer, layer['layer']['name'] + '_HardSigmoid') 973 | 974 | if layer['layer']['config']['activation'] in SUPPORTED_FUSED_ACTIVATION_TYPE: 975 | ncnn_graph_attr = ncnn_helper.dump_args( 976 | 'InnerProduct', 977 | num_output=num_output, 978 | bias_term=1, 979 | activation_type=SUPPORTED_FUSED_ACTIVATION_TYPE[layer['layer']['config']['activation']], 980 | weight_data_size=weight_data_size) 981 | ncnn_graph_helper.node( 982 | layer['layer']['name'], 983 | keras_graph_helper.get_node_inbounds( 984 | layer['layer']['name'])) 985 | ncnn_graph_helper.set_node_attr( 986 | layer['layer']['name'], { 987 | 'type': 'InnerProduct', 'param': ncnn_graph_attr, 'binary': [ 988 | bn_params['bn_kernel'], bn_params['bn_bias']]}) 989 | 990 | def Concatenate_helper( 991 | self, 992 | layer, 993 | keras_graph_helper, 994 | ncnn_graph_helper, 995 | ncnn_helper): 996 | 997 | DIM_SEQ = [3, 2, 0, 1] 998 | 999 | if DIM_SEQ[layer['layer']['config']['axis']] == 0: 1000 | print('[ERROR] Concat asix = 0 is not support. ncnn only have C/H/W Dim.') 1001 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 1002 | print('Failed to convert at %s:%d %s()' % 1003 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1004 | sys.exit(-1) 1005 | 1006 | ncnn_graph_attr = ncnn_helper.dump_args( 1007 | 'Concat', axis=DIM_SEQ[layer['layer']['config']['axis']] - 1) 1008 | ncnn_graph_helper.node( 1009 | layer['layer']['name'], 1010 | keras_graph_helper.get_node_inbounds( 1011 | layer['layer']['name'])) 1012 | ncnn_graph_helper.set_node_attr( 1013 | layer['layer']['name'], { 1014 | 'type': 'Concat', 'param': ncnn_graph_attr, 'binary': []}) 1015 | 1016 | def BilinearUpsampling_helper( 1017 | self, 1018 | layer, 1019 | keras_graph_helper, 1020 | ncnn_graph_helper, 1021 | ncnn_helper): 1022 | ncnn_graph_attr = ncnn_helper.dump_args( 1023 | 'Interp', 1024 | resize_type=2, 1025 | output_height=layer['layer']['config']['output_size'][0], 1026 | output_width=layer['layer']['config']['output_size'][1]) 1027 | ncnn_graph_helper.node( 1028 | layer['layer']['name'], 1029 | keras_graph_helper.get_node_inbounds( 1030 | layer['layer']['name'])) 1031 | ncnn_graph_helper.set_node_attr( 1032 | layer['layer']['name'], { 1033 | 'type': 'Interp', 'param': ncnn_graph_attr, 'binary': []}) 1034 | 1035 | def UpSampling2D_helper( 1036 | self, 1037 | layer, 1038 | keras_graph_helper, 1039 | ncnn_graph_helper, 1040 | ncnn_helper): 1041 | 1042 | RESIZE_TYPE = ['', 'nearest', 'bilinear', 'bicubic'] 1043 | if 'interpolation' in layer['layer']['config'].keys(): 1044 | resize_type = RESIZE_TYPE.index( 1045 | layer['layer']['config']['interpolation']) 1046 | else: 1047 | resize_type = RESIZE_TYPE.index('bilinear') 1048 | 1049 | ncnn_graph_attr = ncnn_helper.dump_args( 1050 | 'Interp', resize_type=resize_type, height_scale=float( 1051 | layer['layer']['config']['size'][0]), width_scale=float( 1052 | layer['layer']['config']['size'][0])) 1053 | ncnn_graph_helper.node( 1054 | layer['layer']['name'], 1055 | keras_graph_helper.get_node_inbounds( 1056 | layer['layer']['name'])) 1057 | ncnn_graph_helper.set_node_attr( 1058 | layer['layer']['name'], { 1059 | 'type': 'Interp', 'param': ncnn_graph_attr, 'binary': []}) 1060 | 1061 | def GlobalAveragePooling2D_helper( 1062 | self, 1063 | layer, 1064 | keras_graph_helper, 1065 | ncnn_graph_helper, 1066 | ncnn_helper): 1067 | ncnn_graph_attr = ncnn_helper.dump_args( 1068 | 'Pooling', pooling_type=1, global_pooling=1) 1069 | ncnn_graph_helper.node( 1070 | layer['layer']['name'], 1071 | keras_graph_helper.get_node_inbounds( 1072 | layer['layer']['name'])) 1073 | ncnn_graph_helper.set_node_attr( 1074 | layer['layer']['name'], { 1075 | 'type': 'Pooling', 'param': ncnn_graph_attr, 'binary': []}) 1076 | 1077 | def GlobalMaxPooling2D_helper( 1078 | self, 1079 | layer, 1080 | keras_graph_helper, 1081 | ncnn_graph_helper, 1082 | ncnn_helper): 1083 | ncnn_graph_attr = ncnn_helper.dump_args( 1084 | 'Pooling', pooling_type=0, global_pooling=1) 1085 | ncnn_graph_helper.node( 1086 | layer['layer']['name'], 1087 | keras_graph_helper.get_node_inbounds( 1088 | layer['layer']['name'])) 1089 | ncnn_graph_helper.set_node_attr( 1090 | layer['layer']['name'], { 1091 | 'type': 'Pooling', 'param': ncnn_graph_attr, 'binary': []}) 1092 | 1093 | def Reshape_helper( 1094 | self, 1095 | layer, 1096 | keras_graph_helper, 1097 | ncnn_graph_helper, 1098 | ncnn_helper): 1099 | target_shape = layer['layer']['config']['target_shape'] 1100 | 1101 | if(len(target_shape) == 4): 1102 | ncnn_graph_attr = ncnn_helper.dump_args( 1103 | 'Reshape', w=target_shape[2], h=target_shape[1], c=target_shape[3]) 1104 | else: 1105 | if(len(target_shape) == 3): 1106 | ncnn_graph_attr = ncnn_helper.dump_args( 1107 | 'Reshape', w=target_shape[1], h=target_shape[0], c=target_shape[2]) 1108 | else: 1109 | if(len(target_shape) == 2): 1110 | ncnn_graph_attr = ncnn_helper.dump_args( 1111 | 'Reshape', w=target_shape[1], h=target_shape[0]) 1112 | else: 1113 | if(len(target_shape) == 1): 1114 | ncnn_graph_attr = ncnn_helper.dump_args( 1115 | 'Reshape', w=target_shape[0]) 1116 | else: 1117 | print( 1118 | '[ERROR] Reshape Layer Dim %d is not supported.' % 1119 | len(target_shape)) 1120 | frameinfo = inspect.getframeinfo( 1121 | inspect.currentframe()) 1122 | print( 1123 | 'Failed to convert at %s:%d %s()' % 1124 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1125 | sys.exit(-1) 1126 | 1127 | ncnn_graph_helper.node( 1128 | layer['layer']['name'], 1129 | keras_graph_helper.get_node_inbounds( 1130 | layer['layer']['name'])) 1131 | ncnn_graph_helper.set_node_attr( 1132 | layer['layer']['name'], { 1133 | 'type': 'Reshape', 'param': ncnn_graph_attr, 'binary': []}) 1134 | 1135 | def Permute_helper( 1136 | self, 1137 | layer, 1138 | keras_graph_helper, 1139 | ncnn_graph_helper, 1140 | ncnn_helper): 1141 | PERMUTE_LUT = { 1142 | # Dim=2 1143 | '1,2': (2, 0), 1144 | '2,1': (2, 1), 1145 | 1146 | # Dim=3 1147 | '1,2,3': (3, 0), 1148 | '1,3,2': (3, 5), 1149 | '2,1,3': (3, 1), 1150 | '2,3,1': (3, 4), 1151 | '3,1,2': (3, 3), 1152 | '3,2,1': (3, 2), 1153 | } 1154 | 1155 | dims = layer['layer']['config']['dims'] 1156 | if(len(dims) in [2, 3]): 1157 | order_type = PERMUTE_LUT[','.join(list(map(str, dims)))] 1158 | else: 1159 | print( 1160 | '[ERROR] Permute Layer Dim [%s] is not supported.' % 1161 | str(dims)) 1162 | frameinfo = inspect.getframeinfo( 1163 | inspect.currentframe()) 1164 | print( 1165 | 'Failed to convert at %s:%d %s()' % 1166 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1167 | sys.exit(-1) 1168 | 1169 | ncnn_graph_attr = ncnn_helper.dump_args( 1170 | 'Permute', order_type=order_type) 1171 | ncnn_graph_helper.node( 1172 | layer['layer']['name'], 1173 | keras_graph_helper.get_node_inbounds( 1174 | layer['layer']['name'])) 1175 | ncnn_graph_helper.set_node_attr( 1176 | layer['layer']['name'], { 1177 | 'type': 'Permute', 'param': ncnn_graph_attr, 'binary': []}) 1178 | 1179 | def Cropping2D_helper( 1180 | self, 1181 | layer, 1182 | keras_graph_helper, 1183 | ncnn_graph_helper, 1184 | ncnn_helper): 1185 | ncnn_graph_attr = ncnn_helper.dump_args( 1186 | 'Crop', 1187 | woffset=layer['layer']['config']['cropping'][1][0], 1188 | hoffset=layer['layer']['config']['cropping'][0][0], 1189 | woffset2=layer['layer']['config']['cropping'][1][1], 1190 | hoffset2=layer['layer']['config']['cropping'][0][1]) 1191 | 1192 | ncnn_graph_helper.node( 1193 | layer['layer']['name'], 1194 | keras_graph_helper.get_node_inbounds( 1195 | layer['layer']['name'])) 1196 | ncnn_graph_helper.set_node_attr( 1197 | layer['layer']['name'], { 1198 | 'type': 'Crop', 'param': ncnn_graph_attr, 'binary': []}) 1199 | 1200 | def AveragePooling2D_helper( 1201 | self, 1202 | layer, 1203 | keras_graph_helper, 1204 | ncnn_graph_helper, 1205 | ncnn_helper): 1206 | 1207 | if 'kernel_size' in layer['layer']['config'].keys(): 1208 | kernel_w, kernel_h = layer['layer']['config']['kernel_size'] 1209 | else: 1210 | if 'pool_size' in layer['layer']['config'].keys(): 1211 | kernel_w, kernel_h = layer['layer']['config']['pool_size'] 1212 | else: 1213 | print('[ERROR] Invalid configuration for pooling.') 1214 | print('=========================================') 1215 | print(layer['layer']['config']) 1216 | print('=========================================') 1217 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 1218 | print('Failed to convert at %s:%d %s()' % 1219 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1220 | sys.exit(-1) 1221 | 1222 | if 'dilation_rate' in layer['layer']['config'].keys(): 1223 | dilation_w, dilation_h = layer['layer']['config']['dilation_rate'] 1224 | else: 1225 | dilation_w = 1 1226 | dilation_h = 1 1227 | 1228 | stride_w, stride_h = layer['layer']['config']['strides'] 1229 | 1230 | if layer['layer']['config']['padding'] == 'valid': 1231 | pad_mode = 1 1232 | elif layer['layer']['config']['padding'] == 'same': 1233 | pad_mode = 2 1234 | else: 1235 | pad_mode = 0 1236 | 1237 | ncnn_graph_attr = ncnn_helper.dump_args( 1238 | 'Pooling', 1239 | pooling_type=1, 1240 | kernel_w=kernel_w, 1241 | dilation_w=dilation_w, 1242 | stride_w=stride_w, 1243 | kernel_h=kernel_h, 1244 | dilation_h=dilation_h, 1245 | stride_h=stride_h, 1246 | pad_mode=pad_mode) 1247 | 1248 | ncnn_graph_helper.node( 1249 | layer['layer']['name'], 1250 | keras_graph_helper.get_node_inbounds( 1251 | layer['layer']['name'])) 1252 | ncnn_graph_helper.set_node_attr( 1253 | layer['layer']['name'], { 1254 | 'type': 'Pooling', 'param': ncnn_graph_attr, 'binary': []}) 1255 | 1256 | def MaxPooling2D_helper( 1257 | self, 1258 | layer, 1259 | keras_graph_helper, 1260 | ncnn_graph_helper, 1261 | ncnn_helper): 1262 | 1263 | if 'kernel_size' in layer['layer']['config'].keys(): 1264 | kernel_w, kernel_h = layer['layer']['config']['kernel_size'] 1265 | else: 1266 | if 'pool_size' in layer['layer']['config'].keys(): 1267 | kernel_w, kernel_h = layer['layer']['config']['pool_size'] 1268 | else: 1269 | print('[ERROR] Invalid configuration for pooling.') 1270 | print('=========================================') 1271 | print(layer['layer']['config']) 1272 | print('=========================================') 1273 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 1274 | print('Failed to convert at %s:%d %s()' % 1275 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1276 | sys.exit(-1) 1277 | 1278 | if 'dilation_rate' in layer['layer']['config'].keys(): 1279 | dilation_w, dilation_h = layer['layer']['config']['dilation_rate'] 1280 | else: 1281 | dilation_w = 1 1282 | dilation_h = 1 1283 | 1284 | stride_w, stride_h = layer['layer']['config']['strides'] 1285 | 1286 | if layer['layer']['config']['padding'] == 'valid': 1287 | pad_mode = 1 1288 | elif layer['layer']['config']['padding'] == 'same': 1289 | pad_mode = 2 1290 | else: 1291 | pad_mode = 0 1292 | 1293 | ncnn_graph_attr = ncnn_helper.dump_args( 1294 | 'Pooling', 1295 | pooling_type=0, 1296 | kernel_w=kernel_w, 1297 | dilation_w=dilation_w, 1298 | stride_w=stride_w, 1299 | kernel_h=kernel_h, 1300 | dilation_h=dilation_h, 1301 | stride_h=stride_h, 1302 | pad_mode=pad_mode) 1303 | 1304 | ncnn_graph_helper.node( 1305 | layer['layer']['name'], 1306 | keras_graph_helper.get_node_inbounds( 1307 | layer['layer']['name'])) 1308 | ncnn_graph_helper.set_node_attr( 1309 | layer['layer']['name'], { 1310 | 'type': 'Pooling', 'param': ncnn_graph_attr, 'binary': []}) 1311 | 1312 | def Maximum_helper( 1313 | self, 1314 | layer, 1315 | keras_graph_helper, 1316 | ncnn_graph_helper, 1317 | ncnn_helper): 1318 | self.insert_binary_op(layer, 1319 | 4, 1320 | keras_graph_helper, 1321 | ncnn_graph_helper, 1322 | ncnn_helper) 1323 | 1324 | def TensorFlowOpLayer_helper( 1325 | self, 1326 | layer, 1327 | keras_graph_helper, 1328 | ncnn_graph_helper, 1329 | ncnn_helper): 1330 | TFOPL_SUPPORTED_OP = ['Mul'] 1331 | 1332 | operator = layer['layer']['config']['node_def']['op'] 1333 | if operator not in TFOPL_SUPPORTED_OP: 1334 | print('[ERROR] Config for TensorFlowOpLayer is not supported yet.') 1335 | print('=========================================') 1336 | print(layer['layer']) 1337 | print('=========================================') 1338 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 1339 | print('Failed to convert at %s:%d %s()' % 1340 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1341 | sys.exit(-1) 1342 | 1343 | if operator == 'Mul': 1344 | # Create a MemoryData for storing the constant 1345 | constant = layer['layer']['config']['constants'] 1346 | if len(constant) != 1: 1347 | print('[ERROR] Config for TensorFlowOpLayer is not supported yet.') 1348 | print('=========================================') 1349 | print(layer['layer']) 1350 | print('=========================================') 1351 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 1352 | print('Failed to convert at %s:%d %s()' % 1353 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1354 | sys.exit(-1) 1355 | 1356 | # ncnn_graph_attr = ncnn_helper.dump_args('MemoryData', w=1) 1357 | # ncnn_graph_helper.node( 1358 | # layer['layer']['name']+'_const', []) 1359 | # ncnn_graph_helper.set_node_attr( 1360 | # layer['layer']['name']+'_const', { 1361 | # 'type': 'MemoryData', 'param': ncnn_graph_attr, 'binary': [[constant['0']]]}) 1362 | 1363 | # Insert the mul layer 1364 | ncnn_graph_attr = ncnn_helper.dump_args( 1365 | 'BinaryOp', op_type=2, with_scalar=1, b=constant['0']) 1366 | 1367 | ncnn_graph_helper.node( 1368 | layer['layer']['name'], 1369 | keras_graph_helper.get_node_inbounds( 1370 | layer['layer']['name'])) 1371 | ncnn_graph_helper.set_node_attr( 1372 | layer['layer']['name'], { 1373 | 'type': 'BinaryOp', 'param': ncnn_graph_attr, 'binary': []}) 1374 | 1375 | def insert_split( 1376 | self, 1377 | layer_name, 1378 | keras_graph_helper, 1379 | ncnn_graph_helper, 1380 | ncnn_helper): 1381 | outbound_layers = [] 1382 | for name in keras_graph_helper.get_graph().keys(): 1383 | for node in keras_graph_helper.get_graph()[name]['inbounds']: 1384 | if layer_name == node: 1385 | outbound_layers.append(name) 1386 | 1387 | if len(outbound_layers) > 1: 1388 | 1389 | ncnn_graph_attr = ncnn_helper.dump_args('Split') 1390 | ncnn_graph_helper.node(layer_name + '_Split', [layer_name, ]) 1391 | ncnn_graph_helper.set_node_attr( 1392 | layer_name + '_Split', {'type': 'Split', 'param': ncnn_graph_attr, 'binary': []}) 1393 | 1394 | keras_graph_helper.node(layer_name + '_Split', [layer_name, ]) 1395 | 1396 | for outbound_layer in outbound_layers: 1397 | keras_graph_helper.remove_node_inbounds( 1398 | outbound_layer, layer_name) 1399 | keras_graph_helper.add_node_inbounds( 1400 | outbound_layer, layer_name + '_Split') 1401 | 1402 | def parse_keras_graph( 1403 | self, 1404 | keras_graph_helper, 1405 | ncnn_graph_helper, 1406 | ncnn_helper): 1407 | keras_graph_nodes = list(keras_graph_helper.get_graph().keys()) 1408 | for node_name in keras_graph_nodes: 1409 | node_helper_name = keras_graph_helper.get_node_attr( 1410 | node_name)['layer']['class_name'] + '_helper' 1411 | if node_helper_name in dir(self): 1412 | eval( 1413 | 'self.' + 1414 | node_helper_name)( 1415 | keras_graph_helper.get_node_attr(node_name), 1416 | keras_graph_helper, 1417 | ncnn_graph_helper, 1418 | ncnn_helper) 1419 | 1420 | if keras_graph_helper.get_node_attr( 1421 | node_name)['layer']['class_name'] not in self.MULTI_OUTPUT_OP: 1422 | self.insert_split( 1423 | keras_graph_helper.get_node_attr(node_name)['layer']['name'], 1424 | keras_graph_helper, 1425 | ncnn_graph_helper, 1426 | ncnn_helper) 1427 | else: 1428 | layer = keras_graph_helper.get_node_attr(node_name)['layer'] 1429 | print('[ERROR] Operator %s not support.' % layer['class_name']) 1430 | print('=========================================') 1431 | print(layer['config']) 1432 | print('=========================================') 1433 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 1434 | print('Failed to convert at %s:%d %s()' % 1435 | (frameinfo.filename, frameinfo.lineno, frameinfo.function)) 1436 | sys.exit(-1) 1437 | 1438 | ncnn_graph_helper.refresh() 1439 | 1440 | for graph_head in ncnn_graph_helper.get_graph_head(): 1441 | node_attr = ncnn_graph_helper.get_node_attr(graph_head) 1442 | if node_attr['type'] not in ['Input', 'MemoryData']: 1443 | ncnn_graph_attr = ncnn_helper.dump_args( 1444 | 'Input', w=-1, h=-1, c=-1) 1445 | ncnn_graph_helper.node( 1446 | graph_head + '_input', []) 1447 | ncnn_graph_helper.set_node_attr( 1448 | graph_head + '_input', { 1449 | 'type': 'Input', 'param': ncnn_graph_attr, 'binary': []}) 1450 | ncnn_graph_helper.add_node_inbounds( 1451 | graph_head, graph_head + '_input') 1452 | 1453 | ncnn_graph_helper.refresh() 1454 | -------------------------------------------------------------------------------- /keras2ncnn/keras_debugger.py: -------------------------------------------------------------------------------- 1 | from distutils import spawn as sp 2 | import glob 3 | import h5py 4 | import os 5 | import subprocess 6 | import shutil 7 | import sys 8 | 9 | 10 | class KerasDebugger: 11 | ncnn_prog_template = \ 12 | ''' 13 | #include "net.h" 14 | 15 | #include 16 | #include 17 | #include 18 | 19 | float rand_float() { return (float)rand() / (float)RAND_MAX; } 20 | 21 | static int rand_mat(ncnn::Mat& m){ 22 | for(int i=0; i<3; i++){ 23 | float *ptr = m.channel(i); 24 | for(int j=0; j<(m.w * m.h); j++){ 25 | ptr[j] = rand_float(); 26 | } 27 | } 28 | 29 | return 0; 30 | } 31 | 32 | static int dump_mat(const ncnn::Mat& m, const char *m_name) 33 | { 34 | char filename[1000] = ""; 35 | sprintf(filename, "%s-%d-%d-%d-layer_dump.dat", m_name, m.c, m.h, m.w); 36 | 37 | FILE* fp = fopen(filename, "w+"); 38 | if(fp == NULL){ 39 | return -1; 40 | } 41 | 42 | for (int q=0; q%s ncnn->%s' % 386 | (output_node_name, str( 387 | keras_layer_dumps[output_node_name].shape), str( 388 | ncnn_det_out[output_node_name].shape))) 389 | print( 390 | 'Max: \tkeras->%.03f ncnn->%.03f \tMin: keras->%.03f ncnn->%.03f' % 391 | (keras_layer_dumps[output_node_name].flatten().max(), 392 | ncnn_det_out[output_node_name].flatten().max(), 393 | keras_layer_dumps[output_node_name].flatten().min(), 394 | ncnn_det_out[output_node_name].flatten().min())) 395 | print( 396 | 'Mean: \tkeras->%.03f ncnn->%.03f \tVar: keras->%.03f ncnn->%.03f' % 397 | (keras_layer_dumps[output_node_name].flatten().mean(), 398 | ncnn_det_out[output_node_name].flatten().mean(), 399 | keras_layer_dumps[output_node_name].flatten().std(), 400 | ncnn_det_out[output_node_name].flatten().std())) 401 | 402 | if keras_layer_dumps[output_node_name][0].ndim == 3: 403 | if keras_layer_dumps[output_node_name].size != ncnn_det_out[output_node_name].size: 404 | print('Size not matched, not able to calculate similarity.') 405 | else: 406 | print( 407 | 'Cosine Similarity: %.05f' % 408 | distance.cosine( 409 | keras_layer_dumps[output_node_name][0].transpose( 410 | (2, 0, 1)).flatten(), ncnn_det_out[output_node_name].flatten())) 411 | 412 | print('Keras Feature Map: \t%s' % np.array2string( 413 | keras_layer_dumps[output_node_name][0][0:10, 0, 0], suppress_small=True, precision=3)) 414 | print('Ncnn Feature Map: \t%s' % np.array2string( 415 | ncnn_det_out[output_node_name][0, 0:10, 0], suppress_small=True, precision=3)) 416 | 417 | elif keras_layer_dumps[output_node_name][0].ndim == 2: 418 | if keras_layer_dumps[output_node_name].size != ncnn_det_out[output_node_name].size: 419 | print('Size not matched, not able to calculate similarity.') 420 | else: 421 | print( 422 | 'Cosine Similarity: %.05f' % 423 | distance.cosine( 424 | keras_layer_dumps[output_node_name][0].transpose( 425 | (1, 0)).flatten(), ncnn_det_out[output_node_name].flatten())) 426 | 427 | print('Keras Feature Map: \t%s' % np.array2string( 428 | keras_layer_dumps[output_node_name][0][0:10, 0], suppress_small=True, precision=3)) 429 | print('Ncnn Feature Map: \t%s' % np.array2string( 430 | ncnn_det_out[output_node_name][0, 0:10], suppress_small=True, precision=3)) 431 | 432 | elif keras_layer_dumps[output_node_name][0].ndim == 1\ 433 | and (ncnn_det_out[output_node_name].shape[:2] == (1, 1) 434 | or ncnn_det_out[output_node_name].ndim == 1): 435 | 436 | print( 437 | 'Cosine Similarity: %.05f' % 438 | distance.cosine( 439 | keras_layer_dumps[output_node_name][0].flatten(), 440 | ncnn_det_out[output_node_name].flatten())) 441 | 442 | print('Keras Feature Map: \t%s' % np.array2string( 443 | keras_layer_dumps[output_node_name][0][0:10], suppress_small=True, precision=3)) 444 | 445 | if ncnn_det_out[output_node_name].ndim == 3: 446 | print('Ncnn Feature Map: \t%s' % np.array2string( 447 | ncnn_det_out[output_node_name][0, 0, 0:10], suppress_small=True, precision=3)) 448 | 449 | keras_index = keras_layer_dumps[output_node_name][0].argsort( 450 | )[-5:][::-1] 451 | keras_top_value = keras_layer_dumps[output_node_name][0][keras_index] 452 | keras_topk = dict(zip(keras_index, keras_top_value)) 453 | 454 | if ncnn_det_out[output_node_name].ndim == 3: 455 | ncnn_index = ncnn_det_out[output_node_name][0, 0].argsort( 456 | )[-5:][::-1] 457 | ncnn_top_value = ncnn_det_out[output_node_name][0, 458 | 0][ncnn_index] 459 | ncnn_topk = dict(zip(ncnn_index, ncnn_top_value)) 460 | 461 | if os.path.exists('./ImageNetLabels.txt'): 462 | labels = open('ImageNetLabels.txt').readlines() 463 | 464 | keras_topk_str = ", ".join( 465 | ("%s:%.03f" % (labels[i[0] + 1].strip(), i[1]) for i in keras_topk.items())) 466 | 467 | ncnn_topk_str = ", ".join( 468 | ("%s:%.03f" % (labels[i[0] + 1].strip(), i[1]) for i in ncnn_topk.items())) 469 | 470 | else: 471 | keras_topk_str = ", ".join( 472 | ("%d:%.03f" % i for i in keras_topk.items())) 473 | 474 | ncnn_topk_str = ", ".join( 475 | ("%d:%.03f" % i for i in ncnn_topk.items())) 476 | 477 | print( 478 | 'Top-k:\nKeras Top-k: \t%s\nncnn Top-k: \t%s' % 479 | (keras_topk_str, ncnn_topk_str)) -------------------------------------------------------------------------------- /keras2ncnn/ncnn_emitter.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | class NcnnEmitter: 5 | 6 | def __init__(self, ncnn_graph): 7 | self.MAGGGGGIC = 7767517 8 | self.ncnn_graph = ncnn_graph 9 | 10 | def get_graph_seq(self): 11 | 12 | graph_head = self.ncnn_graph.get_graph_head() 13 | 14 | # Thanks to Blckknght for the topological sort alg 15 | seen = set() 16 | stack = [] 17 | order = [] 18 | q = [graph_head[0]] 19 | 20 | for head in graph_head: 21 | q = [head] 22 | while q: 23 | v = q.pop() 24 | if v not in seen: 25 | seen.add(v) 26 | q.extend(self.ncnn_graph.get_node_outbounds(v)) 27 | 28 | while stack and v not in self.ncnn_graph.get_node_outbounds( 29 | stack[-1]): 30 | order.append(stack.pop()) 31 | stack.append(v) 32 | 33 | return stack + order[::-1] 34 | 35 | def emit_param(self, file_name, seq): 36 | ncnn_param_file = open(file_name, 'w+') 37 | 38 | ncnn_param_file.write('%d\n' % self.MAGGGGGIC) 39 | 40 | param_contect = '' 41 | blob_count = 0 42 | 43 | for layer_name in seq: 44 | layer_type = self.ncnn_graph.get_node_attr(layer_name)['type'] 45 | input_count = len(self.ncnn_graph.get_node_inbounds(layer_name)) 46 | 47 | output_count = len(self.ncnn_graph.get_node_outbounds(layer_name)) 48 | output_count = 1 if output_count == 0 else output_count 49 | 50 | input_blobs = [] 51 | inbound_nodes = self.ncnn_graph.get_node_inbounds(layer_name) 52 | for in_node in inbound_nodes: 53 | if len(self.ncnn_graph.get_node_outbounds(in_node)) > 1: 54 | input_blobs.append( 55 | '%s_blob_idx_%d' % 56 | (in_node, self.ncnn_graph.get_node_outbounds(in_node).index(layer_name))) 57 | else: 58 | input_blobs.append('%s_blob' % in_node) 59 | 60 | output_blobs = [] 61 | if output_count > 1: 62 | for i in range(output_count): 63 | output_blobs.append('%s_blob_idx_%d' % (layer_name, i)) 64 | else: 65 | output_blobs.append('%s_blob' % layer_name) 66 | 67 | blob_count += len(output_blobs) 68 | 69 | param_contect += ( 70 | ('%s' + ( 71 | max(1, 25 - len(layer_type))) * ' ' + '%s' + ( 72 | max(1, 40 - len(layer_name))) * ' ' + '%d %d %s %s %s\n') % (layer_type, 73 | layer_name, 74 | input_count, 75 | output_count, 76 | ' '.join(input_blobs), 77 | ' '.join(output_blobs), 78 | self.ncnn_graph.get_node_attr(layer_name)['param'])) 79 | 80 | layer_count = len(self.ncnn_graph.get_graph()) 81 | ncnn_param_file.write('%d %d\n' % (layer_count, blob_count)) 82 | ncnn_param_file.write(param_contect) 83 | 84 | ncnn_param_file.close() 85 | 86 | def emit_binary(self, file_name, seq): 87 | f = open(file_name, 'w+b') 88 | for layer_name in seq: 89 | for weight in self.ncnn_graph.get_node_attr(layer_name)['binary']: 90 | f.write(np.asarray(weight, dtype=np.float32).tobytes()) 91 | f.close() 92 | -------------------------------------------------------------------------------- /keras2ncnn/ncnn_param.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import sys 3 | import copy 4 | 5 | 6 | class NcnnParamDispatcher: 7 | operation_param_table = { 8 | 'BatchNorm': { 9 | 0: {'channels': 0}, 10 | 1: {'eps': 0}, 11 | }, 12 | 13 | 'BinaryOp': { 14 | 0: {'op_type': 0}, 15 | 1: {'with_scalar': 0}, 16 | 2: {'b': 0.}, 17 | }, 18 | 19 | 'Clip': { 20 | 0: {'min': -sys.float_info.max}, 21 | 1: {'max': sys.float_info.max}, 22 | }, 23 | 24 | 'Concat': { 25 | 0: {'axis': 0}, 26 | }, 27 | 28 | 'Convolution': { 29 | 0: {'num_output': 0}, 30 | 1: {'kernel_w': 0}, 31 | 2: {'dilation_w': 1}, 32 | 3: {'stride_w': 0}, 33 | 4: {'pad_left': 0}, 34 | 5: {'bias_term': 0}, 35 | 6: {'weight_data_size': 0}, 36 | 37 | 9: {'activation_type': 0}, 38 | # 10: {'activation_params': 0}, 39 | 40 | 11: {'kernel_h': 0}, 41 | 12: {'dilation_h': 1}, 42 | 13: {'stride_h': 1}, 43 | }, 44 | 45 | 'Crop': { 46 | 0: {'woffset': 0}, 47 | 1: {'hoffset': 0}, 48 | 2: {'coffset': 0}, 49 | 3: {'outw': 0}, 50 | 4: {'outh': 0}, 51 | 5: {'outc': 0}, 52 | 6: {'woffset2': 0}, 53 | 7: {'hoffset2': 0}, 54 | 8: {'coffset2': 0}, 55 | }, 56 | 57 | 'Deconvolution': { 58 | 0: {'num_output': 0}, 59 | 1: {'kernel_w': 0}, 60 | 2: {'dilation_w': 1}, 61 | 3: {'stride_w': 0}, 62 | 4: {'pad_left': 0}, 63 | 5: {'bias_term': 0}, 64 | 6: {'weight_data_size': 0}, 65 | 66 | 9: {'activation_type': 0}, 67 | # 10: {'activation_params': 0}, 68 | 69 | 11: {'kernel_h': 0}, 70 | 12: {'dilation_h': 1}, 71 | 13: {'stride_h': 1}, 72 | 14: {'pad_top': 0}, 73 | 15: {'pad_right': 0}, 74 | 16: {'pad_bottom': 0}, 75 | 76 | 18: {'output_pad_right': 0}, 77 | 19: {'output_pad_bottom': 0}, 78 | 79 | # 20: {'output_w': 0}, 80 | # 21: {'output_h': 0}, 81 | }, 82 | 83 | 'ConvolutionDepthWise': { 84 | 0: {'num_output': 0}, 85 | 1: {'kernel_w': 0}, 86 | 2: {'dilation_w': 1}, 87 | 3: {'stride_w': 0}, 88 | 4: {'pad_left': 0}, 89 | 5: {'bias_term': 0}, 90 | 6: {'weight_data_size': 0}, 91 | 7: {'group': 1}, 92 | 93 | 11: {'kernel_h': 0}, 94 | 12: {'dilation_h': 1}, 95 | 13: {'stride_h': 1}, 96 | }, 97 | 98 | 'Eltwise': { 99 | 0: {'op_type': 0}, 100 | # 1: {'coeffs': []}, 101 | }, 102 | 103 | 'InnerProduct': { 104 | 0: {'num_output': 0}, 105 | 1: {'bias_term': 0}, 106 | 2: {'weight_data_size': 0}, 107 | 108 | 9: {'activation_type': 0}, 109 | }, 110 | 111 | 'Input': { 112 | 0: {'w': 0}, 113 | 1: {'h': 0}, 114 | 2: {'c': 0}, 115 | }, 116 | 117 | 'Interp': { 118 | 0: {'resize_type': 0}, 119 | 1: {'height_scale': 1.0}, 120 | 2: {'width_scale': 1.0}, 121 | 3: {'output_height': 0}, 122 | 4: {'output_width': 0}, 123 | }, 124 | 125 | 'TanH': { 126 | }, 127 | 128 | 'Padding': { 129 | 0: {'top': 0}, 130 | 1: {'bottom': 0}, 131 | 2: {'left': 0}, 132 | 3: {'right': 0}, 133 | # 4: {'type': 0}, 134 | # 5: {'value': 0} 135 | }, 136 | 137 | 'Pooling': { 138 | 0: {'pooling_type': 0}, 139 | 1: {'kernel_w': 0}, 140 | 11: {'kernel_h': 0}, 141 | 2: {'stride_w': 1}, 142 | 12: {'stride_h': 1}, 143 | 3: {'pad_left': 0}, 144 | 4: {'global_pooling': 0}, 145 | 5: {'pad_mode': 0}, 146 | }, 147 | 148 | 'ReLU': { 149 | 0: {'slope': 0}, 150 | 1: {'stride': 0}, 151 | }, 152 | 153 | 'HardSwish': { 154 | 0: {'alpha': 0.1666667}, 155 | 1: {'beta': 0.5} 156 | }, 157 | 158 | 'HardSigmoid': { 159 | 0: {'alpha': 0.181818}, # Pytorch take +/- 3, Keras take +/- 2.5 160 | 1: {'beta': 0.454545} 161 | }, 162 | 163 | 'Reshape': { 164 | 0: {'w': -233}, 165 | 1: {'h': -233}, 166 | 2: {'c': -233}, 167 | 3: {'flag': 1} 168 | }, 169 | 170 | 'Permute': { 171 | 0: {'order_type': 0} 172 | }, 173 | 174 | 'Sigmoid': { 175 | }, 176 | 177 | 'Softmax': { 178 | 0: {'axis': 0}, 179 | }, 180 | 181 | 'Split': { 182 | }, 183 | 184 | 'MemoryData': { 185 | 0: {'w': 0}, 186 | 1: {'h': 0}, 187 | 2: {'c': 0} 188 | } 189 | 190 | } 191 | 192 | def dump_args(self, operator, **kwargs): 193 | params = copy.deepcopy(self.operation_param_table[operator]) 194 | ncnn_args_phrase = '' 195 | for arg in params.keys(): 196 | arg_name = list(params[arg].keys())[0] 197 | if arg_name in kwargs: 198 | params[arg][arg_name] = kwargs[arg_name] 199 | 200 | params_arg = params[arg][arg_name] 201 | 202 | if isinstance(params_arg, str): 203 | ncnn_args_phrase = ncnn_args_phrase + \ 204 | '%d=%s ' % (arg, params_arg) 205 | 206 | elif isinstance(params_arg, int): 207 | ncnn_args_phrase = ncnn_args_phrase + \ 208 | '%d=%d ' % (arg, params_arg) 209 | 210 | elif isinstance(params_arg, float): 211 | ncnn_args_phrase = ncnn_args_phrase + \ 212 | '%d=%e ' % (arg, params_arg) 213 | 214 | elif isinstance(params_arg, (list, tuple)): 215 | ncnn_args_phrase = ncnn_args_phrase + \ 216 | '%d=%d,%s ' % (-23300 - arg, len(params_arg), 217 | ','.join(list(map(str, params_arg)))) 218 | else: 219 | print(arg_name, params_arg, type(params_arg)) 220 | print('[ERROR] Failed to dump arg %s with type %s.' % 221 | (arg_name, type(params_arg))) 222 | frameinfo = inspect.getframeinfo(inspect.currentframe()) 223 | calframe = inspect.getouterframes(inspect.currentframe(), 2) 224 | print( 225 | 'Failed to convert at %s:%d %s() called from %s()' % 226 | (frameinfo.filename, frameinfo.lineno, frameinfo.function, calframe[1][3])) 227 | sys.exit(-1) 228 | return ncnn_args_phrase 229 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | setuptools.setup( 4 | name="keras2ncnn", 5 | version="0.2.0", 6 | author="Han Xiao", 7 | author_email="hansh-sz@hotmail.com", 8 | description="A keras h5df to ncnn converter", 9 | url="https://github.com/MarsTechHAN/keras2ncnn", 10 | packages=setuptools.find_packages(), 11 | classifiers=[ 12 | "Programming Language :: Python :: 3", 13 | "License :: OSI Approved :: MIT License", 14 | "Operating System :: OS Independent", 15 | ], 16 | python_requires='>=3.5', 17 | install_requires=[ 18 | 'h5py>=2.10.0' 19 | ] 20 | ) 21 | --------------------------------------------------------------------------------