├── .gitignore ├── .travis.yml ├── AUTHORS.rst ├── LICENSE ├── README.rst ├── docs ├── Makefile ├── make.bat └── source │ ├── autoencoder.rst │ ├── conf.py │ ├── ensemble_learning.rst │ ├── feature_extraction.rst │ ├── index.rst │ ├── statistical_classification.rst │ └── transfer_learning.rst ├── keras_resnet ├── __init__.py ├── backend │ ├── __init__.py │ ├── cntk_backend.py │ ├── common.py │ ├── tensorflow_backend.py │ └── theano_backend.py ├── benchmarks │ └── __init__.py ├── blocks │ ├── _1d.py │ ├── _2d.py │ ├── _3d.py │ ├── __init__.py │ └── _time_distributed_2d.py ├── classifiers │ ├── _2d.py │ └── __init__.py ├── data │ ├── checkpoints │ │ └── CIFAR-10 │ │ │ └── ResNet-18.hdf5 │ └── logs │ │ └── CIFAR-10 │ │ ├── ResNet-152.csv │ │ ├── ResNet-18.csv │ │ └── ResNet-50.csv ├── layers │ ├── __init__.py │ └── _batch_normalization.py ├── metrics.py └── models │ ├── _1d.py │ ├── _2d.py │ ├── _3d.py │ ├── __init__.py │ ├── _feature_pyramid_2d.py │ └── _time_distributed_2d.py ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── conftest.py ├── test_block.py └── test_models.py └── tools ├── export-caffe-weights.py └── import-caffe-weights.py /.gitignore: -------------------------------------------------------------------------------- 1 | *$py.class 2 | *,cover 3 | *.egg 4 | *.egg-info/ 5 | *.log 6 | *.manifest 7 | *.mo 8 | *.pot 9 | *.py[cod] 10 | *.so 11 | *.spec 12 | .cache 13 | .coverage 14 | .coverage.* 15 | .eggs/ 16 | .env 17 | .hypothesis/ 18 | .idea/ 19 | .installed.cfg 20 | .ipynb_checkpoints 21 | .Python 22 | .python-version 23 | .ropeproject 24 | .scrapy 25 | .spyderproject 26 | .tox/ 27 | .webassets-cache 28 | __pycache__/ 29 | build/ 30 | celerybeat-schedule 31 | coverage.xml 32 | develop-eggs/ 33 | dist/ 34 | docs/_build/ 35 | downloads/ 36 | eggs/ 37 | env/ 38 | ENV/ 39 | htmlcov/ 40 | instance/ 41 | lib/ 42 | lib64/ 43 | local_settings.py 44 | nosetests.xml 45 | parts/ 46 | pip-delete-this-directory.txt 47 | pip-log.txt 48 | sdist/ 49 | target/ 50 | var/ 51 | venv/ 52 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | before_install: pip install tensorflow 2 | cache: pip 3 | deploy: 4 | on: 5 | distributions: sdist bdist_wheel 6 | repo: broadinstitute/keras-resnet 7 | tags: true 8 | password: 9 | secure: DobqdpjbXARYUE+CKj3BeQichq3iMIPzl7q9tpjdsTYlSucFfUokTaWn2iH1X6qlUGafYbou/5TPPmR8NV1Qe4ZbiMj+545WrxvoCubg1Nju+zY+f4zgBd/VUD4wWX5P1lLKWwLbYGtfgf/3z69jewFu/LmFmJphnWcAiXdpSLT0rvNvVSvaLhx8FoczbIRR2TFOCttU6k/qyi50It2m3m6sLhDoCpFk7Sx6fO1G9CDyFE34qDPXu3uilgki+y5fes/j0kVK4XKgXuCw9oa7j/qA1Kt+GJ40YJMRz0wIixI1Di4Ti+cNBe0XApXYi3BqwQAakWjWY23U79zEKa0YPHcWo1Q3czSH3Q9Jndyc6xbZkIbW+4leSejzxo/h0zkokO7rprs8kswnlilK16gv5oou4BUQPWb0shz+f+gLIEGuwv9PSpM+yDB36GenexnIrvH0hE2Slm29BIa+3bM2Pm925hzyS2qJ/26I2z+aeyam9TtBYLOre58r5b0jMpFozPwnTu3GDIgaSSJ0Lzp5PmOLJWPuwEjfEoAV4Q/sz9PSlRgnaYmdIo/9oDECOAzxo8kGgDX4/EO8ZxC8lOscWNhp7GrVGUAHrxHD+UKFCS8vCYOTHWBL9OreqsaYemNJKxjeJh31PzaeAjY9Cu6XIKv3AxFwmBktJHr9pVqpvsg= 10 | provider: pypi 11 | user: '0x00b1' 12 | dist: trusty 13 | env: 14 | - KERAS_BACKEND=tensorflow 15 | install: pip install --editable . 16 | language: python 17 | script: pytest 18 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | keras-resnet was written by Allen Goodman. 
It’s maintained by Allen Goodman and various contributors: 2 | 3 | Claire McQuin @mcquin 4 | 5 | Hans Gaiser @hgaiser 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The BSD 3-Clause License 2 | 3 | Copyright © 2017 Broad Institute, Inc. All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, 9 | this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of the Broad Institute, Inc. nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED “AS IS.” BROAD MAKES NO EXPRESS OR IMPLIED 20 | REPRESENTATIONS OR WARRANTIES OF ANY KIND REGARDING THE SOFTWARE AND 21 | COPYRIGHT, INCLUDING, BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY, 22 | FITNESS FOR A PARTICULAR PURPOSE, CONFORMITY WITH ANY DOCUMENTATION, 23 | NON-INFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT 24 | DISCOVERABLE. IN NO EVENT SHALL BROAD, THE COPYRIGHT HOLDERS, OR CONTRIBUTORS 25 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 26 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO PROCUREMENT OF SUBSTITUTE 27 | GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 28 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 29 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT 30 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF, HAVE REASON TO KNOW, OR IN 31 | FACT SHALL KNOW OF THE POSSIBILITY OF SUCH DAMAGE. 32 | 33 | If, by operation of law or otherwise, any of the aforementioned warranty 34 | disclaimers are determined inapplicable, your sole remedy, regardless of the 35 | form of action, including, but not limited to, negligence and strict 36 | liability, shall be replacement of the software with an updated version if one 37 | exists. 38 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Keras-ResNet 2 | ============ 3 | 4 | .. image:: https://travis-ci.org/broadinstitute/keras-resnet.svg?branch=master 5 | :target: https://travis-ci.org/broadinstitute/keras-resnet 6 | 7 | 8 | 9 | Keras-ResNet is **the** Keras package for deep residual networks. It's fast *and* flexible. 10 | 11 | A tantalizing preview of Keras-ResNet simplicity: 12 | 13 | .. 
code-block:: python 14 | 15 | >>> import keras 16 | 17 | >>> import keras_resnet.models 18 | 19 | >>> shape, classes = (32, 32, 3), 10 20 | 21 | >>> x = keras.layers.Input(shape) 22 | 23 | >>> model = keras_resnet.models.ResNet50(x, classes=classes) 24 | 25 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 26 | 27 | >>> (training_x, training_y), (_, _) = keras.datasets.cifar10.load_data() 28 | 29 | >>> training_y = keras.utils.np_utils.to_categorical(training_y) 30 | 31 | >>> model.fit(training_x, training_y) 32 | 33 | Installation 34 | ------------ 35 | 36 | Installation couldn’t be easier: 37 | 38 | .. code-block:: bash 39 | 40 | $ pip install keras-resnet 41 | 42 | Contributing 43 | ------------ 44 | 45 | #. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet. 46 | #. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it). 47 | #. Write a test which shows that the bug was fixed or that the feature works as expected. 48 | #. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_. 49 | 50 | .. _`the repository`: http://github.com/0x00b1/keras-resnet 51 | .. _AUTHORS: https://github.com/0x00b1/keras-resnet/blob/master/AUTHORS.rst 52 | .. _Contributor Friendly: https://github.com/0x00b1/keras-resnet/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open 53 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = keras-resnet 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=keras-resnet 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/source/autoencoder.rst: -------------------------------------------------------------------------------- 1 | Autoencoder 2 | ----------- 3 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # keras-resnet documentation build configuration file, created by 5 | # sphinx-quickstart on Mon Apr 24 14:22:30 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | # 20 | # import os 21 | # import sys 22 | # sys.path.insert(0, os.path.abspath('.')) 23 | 24 | import pkg_resources 25 | import sphinx_rtd_theme 26 | 27 | # -- General configuration ------------------------------------------------ 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | # 31 | # needs_sphinx = '1.0' 32 | 33 | # Add any Sphinx extension module names here, as strings. They can be 34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 35 | # ones. 36 | extensions = ['sphinx.ext.autodoc', 37 | 'sphinx.ext.doctest', 38 | 'sphinx.ext.coverage', 39 | 'sphinx.ext.mathjax', 40 | 'sphinx.ext.viewcode', 41 | 'sphinx.ext.githubpages'] 42 | 43 | # Add any paths that contain templates here, relative to this directory. 44 | templates_path = ['_templates'] 45 | 46 | # The suffix(es) of source filenames. 47 | # You can specify multiple suffix as a list of string: 48 | # 49 | # source_suffix = ['.rst', '.md'] 50 | source_suffix = '.rst' 51 | 52 | # The master toctree document. 53 | master_doc = 'index' 54 | 55 | # General information about the project. 56 | project = 'keras-resnet' 57 | copyright = '2017, Allen Goodman' 58 | author = 'Allen Goodman' 59 | 60 | # The version info for the project you're documenting, acts as replacement for 61 | # |version| and |release|, also used in various other places throughout the 62 | # built documents. 63 | # 64 | # The short X.Y version. 65 | version = pkg_resources.get_distribution("keras-resnet").version 66 | # The full version, including alpha/beta/rc tags. 67 | release = pkg_resources.get_distribution("keras-resnet").version 68 | 69 | # The language for content autogenerated by Sphinx. Refer to documentation 70 | # for a list of supported languages. 71 | # 72 | # This is also used if you do content translation via gettext catalogs. 73 | # Usually you set "language" from the command line for these cases. 
74 | language = None 75 | 76 | # List of patterns, relative to source directory, that match files and 77 | # directories to ignore when looking for source files. 78 | # This patterns also effect to html_static_path and html_extra_path 79 | exclude_patterns = [] 80 | 81 | # The name of the Pygments (syntax highlighting) style to use. 82 | pygments_style = 'sphinx' 83 | 84 | # If true, `todo` and `todoList` produce output, else they produce nothing. 85 | todo_include_todos = False 86 | 87 | # -- Options for HTML output ---------------------------------------------- 88 | 89 | # The theme to use for HTML and HTML Help pages. See the documentation for 90 | # a list of builtin themes. 91 | # 92 | html_theme = 'sphinx_rtd_theme' 93 | 94 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 95 | 96 | # Theme options are theme-specific and customize the look and feel of a theme 97 | # further. For a list of options available for each theme, see the 98 | # documentation. 99 | 100 | html_theme_options = { 101 | 'navigation_depth': 4 102 | } 103 | 104 | # Add any paths that contain custom static files (such as style sheets) here, 105 | # relative to this directory. They are copied after the builtin static files, 106 | # so a file named "default.css" will overwrite the builtin "default.css". 107 | html_static_path = ['_static'] 108 | 109 | # -- Options for HTMLHelp output ------------------------------------------ 110 | 111 | # Output file base name for HTML help builder. 112 | htmlhelp_basename = 'keras-resnetdoc' 113 | 114 | # -- Options for LaTeX output --------------------------------------------- 115 | 116 | latex_elements = { 117 | # The paper size ('letterpaper' or 'a4paper'). 118 | # 119 | # 'papersize': 'letterpaper', 120 | 121 | # The font size ('10pt', '11pt' or '12pt'). 122 | # 123 | # 'pointsize': '10pt', 124 | 125 | # Additional stuff for the LaTeX preamble. 126 | # 127 | # 'preamble': '', 128 | 129 | # Latex figure (float) alignment 130 | # 131 | # 'figure_align': 'htbp', 132 | } 133 | 134 | # Grouping the document tree into LaTeX files. List of tuples 135 | # (source start file, target name, title, 136 | # author, documentclass [howto, manual, or own class]). 137 | latex_documents = [ 138 | (master_doc, 'keras-resnet.tex', 'keras-resnet Documentation', 139 | 'Allen Goodman', 'manual'), 140 | ] 141 | 142 | # -- Options for manual page output --------------------------------------- 143 | 144 | # One entry per manual page. List of tuples 145 | # (source start file, name, description, authors, manual section). 146 | man_pages = [ 147 | (master_doc, 'keras-resnet', 'keras-resnet Documentation', 148 | [author], 1) 149 | ] 150 | 151 | # -- Options for Texinfo output ------------------------------------------- 152 | 153 | # Grouping the document tree into Texinfo files. List of tuples 154 | # (source start file, target name, title, author, 155 | # dir menu entry, description, category) 156 | texinfo_documents = [ 157 | (master_doc, 'keras-resnet', 'keras-resnet Documentation', 158 | author, 'keras-resnet', 'One line description of project.', 159 | 'Miscellaneous'), 160 | ] 161 | 162 | # -- Options for Epub output ---------------------------------------------- 163 | 164 | # Bibliographic Dublin Core info. 165 | epub_title = project 166 | epub_author = author 167 | epub_publisher = author 168 | epub_copyright = copyright 169 | 170 | # The unique identifier of the text. This can be a ISBN number 171 | # or the project homepage. 
172 | # 173 | # epub_identifier = '' 174 | 175 | # A unique identification for the text. 176 | # 177 | # epub_uid = '' 178 | 179 | # A list of files that should not be packed into the epub file. 180 | epub_exclude_files = ['search.html'] 181 | -------------------------------------------------------------------------------- /docs/source/ensemble_learning.rst: -------------------------------------------------------------------------------- 1 | Ensemble learning 2 | ----------------- 3 | -------------------------------------------------------------------------------- /docs/source/feature_extraction.rst: -------------------------------------------------------------------------------- 1 | Feature extraction 2 | ------------------ 3 | 4 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. keras-resnet documentation master file, created by 2 | sphinx-quickstart on Mon Apr 24 14:22:30 2017. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | .. toctree:: 7 | :maxdepth: 2 8 | 9 | statistical_classification 10 | ensemble_learning 11 | feature_extraction 12 | transfer_learning 13 | autoencoder 14 | 15 | .. module:: keras_resnet 16 | 17 | Blocks 18 | ------ 19 | 20 | .. autofunction:: keras_resnet.blocks.basic_2d 21 | 22 | .. autofunction:: keras_resnet.blocks.bottleneck_2d 23 | 24 | Models 25 | ------ 26 | 27 | .. autoclass:: keras_resnet.models.ResNet 28 | 29 | .. autoclass:: keras_resnet.models.ResNet18 30 | 31 | .. autoclass:: keras_resnet.models.ResNet34 32 | 33 | .. autoclass:: keras_resnet.models.ResNet50 34 | 35 | .. autoclass:: keras_resnet.models.ResNet101 36 | 37 | .. autoclass:: keras_resnet.models.ResNet152 38 | 39 | .. autoclass:: keras_resnet.models.ResNet200 40 | -------------------------------------------------------------------------------- /docs/source/statistical_classification.rst: -------------------------------------------------------------------------------- 1 | Statistical classification 2 | -------------------------- 3 | 4 | Audio 5 | ~~~~~ 6 | 7 | Image 8 | ~~~~~ 9 | 10 | Video 11 | ~~~~~ 12 | -------------------------------------------------------------------------------- /docs/source/transfer_learning.rst: -------------------------------------------------------------------------------- 1 | Transfer learning 2 | ----------------- 3 | 4 | -------------------------------------------------------------------------------- /keras_resnet/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import layers 2 | 3 | custom_objects = { 4 | 'BatchNormalization': layers.BatchNormalization, 5 | } 6 | -------------------------------------------------------------------------------- /keras_resnet/backend/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from .common import * 4 | 5 | _BACKEND = "tensorflow" 6 | 7 | if "KERAS_BACKEND" in os.environ: 8 | _backend = os.environ["KERAS_BACKEND"] 9 | 10 | backends = { 11 | "cntk", 12 | "tensorflow", 13 | } 14 | 15 | assert _backend in backends 16 | 17 | _BACKEND = _backend 18 | 19 | if _BACKEND == "cntk": 20 | from .cntk_backend import * 21 | elif _BACKEND == "tensorflow": 22 | from .tensorflow_backend import * 23 | else: 24 | raise ValueError("Unknown backend: " + str(_BACKEND)) 25 | -------------------------------------------------------------------------------- /keras_resnet/backend/cntk_backend.py: -------------------------------------------------------------------------------- 1 | def resize(image, output_shape): 2 | raise NotImplementedError 3 | -------------------------------------------------------------------------------- /keras_resnet/backend/common.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/broadinstitute/keras-resnet/898a1ee417e940ff12bf73ad44c2aae88501771f/keras_resnet/backend/common.py -------------------------------------------------------------------------------- /keras_resnet/backend/tensorflow_backend.py: -------------------------------------------------------------------------------- 1 | import tensorflow 2 | 3 | 4 | def resize(image, output_shape): 5 | """ 6 | Resize an image or images to match a certain size. 7 | 8 | :param image: Input image or images with the shape: 9 | 10 | (rows, columns, channels) 11 | 12 | or: 13 | 14 | (batch, rows, columns, channels). 15 | 16 | :param output_shape: Shape of the output image: 17 | 18 | (rows, columns). 19 | 20 | :return: If an image is provided a resized image with the shape: 21 | 22 | (resized rows, resized columns, channels) 23 | 24 | is returned. 25 | 26 | If more than one image is provided then a batch of resized images with 27 | the shape: 28 | 29 | (batch size, resized rows, resized columns, channels) 30 | 31 | are returned. 
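    For illustration (a minimal sketch; ``image`` here stands for an
    already-defined image tensor or batch):

        >>> import keras_resnet.backend

        >>> resized = keras_resnet.backend.resize(image, (224, 224))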
32 | """ 33 | return tensorflow.image.resize_images(image, output_shape) 34 | -------------------------------------------------------------------------------- /keras_resnet/backend/theano_backend.py: -------------------------------------------------------------------------------- 1 | def resize(image, output_shape): 2 | raise NotImplementedError 3 | -------------------------------------------------------------------------------- /keras_resnet/benchmarks/__init__.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | 3 | import click 4 | import keras 5 | import keras.preprocessing.image 6 | import numpy 7 | import pkg_resources 8 | import sklearn.model_selection 9 | import tensorflow 10 | 11 | import keras_resnet.metrics 12 | import keras_resnet.models 13 | 14 | _benchmarks = { 15 | "CIFAR-10": keras.datasets.cifar10, 16 | "CIFAR-100": keras.datasets.cifar100, 17 | "MNIST": keras.datasets.mnist 18 | } 19 | 20 | 21 | _names = { 22 | "ResNet-18": keras_resnet.models.ResNet2D18, 23 | "ResNet-34": keras_resnet.models.ResNet2D34, 24 | "ResNet-50": keras_resnet.models.ResNet2D50, 25 | "ResNet-101": keras_resnet.models.ResNet2D101, 26 | "ResNet-152": keras_resnet.models.ResNet2D152, 27 | "ResNet-200": keras_resnet.models.ResNet2D200 28 | } 29 | 30 | 31 | @click.command() 32 | @click.option( 33 | "--benchmark", 34 | default="CIFAR-10", 35 | type=click.Choice( 36 | [ 37 | "CIFAR-10", 38 | "CIFAR-100", 39 | "ImageNet", 40 | "MNIST" 41 | ] 42 | ) 43 | ) 44 | @click.option("--device", default=0) 45 | @click.option( 46 | "--name", 47 | default="ResNet-50", 48 | type=click.Choice( 49 | [ 50 | "ResNet-18", 51 | "ResNet-34", 52 | "ResNet-50", 53 | "ResNet-101", 54 | "ResNet-152", 55 | "ResNet-200" 56 | ] 57 | ) 58 | ) 59 | def __main__(benchmark, device, name): 60 | configuration = tensorflow.ConfigProto() 61 | 62 | configuration.gpu_options.allow_growth = True 63 | 64 | configuration.gpu_options.visible_device_list = str(device) 65 | 66 | session = tensorflow.Session(config=configuration) 67 | 68 | keras.backend.set_session(session) 69 | 70 | (training_x, training_y), _ = _benchmarks[benchmark].load_data() 71 | 72 | training_x = training_x.astype(numpy.float16) 73 | 74 | if benchmark is "MNIST": 75 | training_x = numpy.expand_dims(training_x, -1) 76 | 77 | training_y = keras.utils.np_utils.to_categorical(training_y) 78 | 79 | training_x, validation_x, training_y, validation_y = sklearn.model_selection.train_test_split( 80 | training_x, 81 | training_y 82 | ) 83 | 84 | generator = keras.preprocessing.image.ImageDataGenerator( 85 | horizontal_flip=True 86 | ) 87 | 88 | generator.fit(training_x) 89 | 90 | generator = generator.flow( 91 | x=training_x, 92 | y=training_y, 93 | batch_size=256 94 | ) 95 | 96 | validation_data = keras.preprocessing.image.ImageDataGenerator() 97 | 98 | validation_data.fit(validation_x) 99 | 100 | validation_data = validation_data.flow( 101 | x=validation_x, 102 | y=validation_y, 103 | batch_size=256 104 | ) 105 | 106 | shape, classes = training_x.shape[1:], training_y.shape[-1] 107 | 108 | x = keras.layers.Input(shape) 109 | 110 | model = _names[name](inputs=x, classes=classes) 111 | 112 | metrics = [ 113 | keras_resnet.metrics.top_1_categorical_error, 114 | keras_resnet.metrics.top_5_categorical_error 115 | ] 116 | 117 | model.compile("adam", "categorical_crossentropy", metrics) 118 | 119 | pathname = os.path.join("data", "checkpoints", benchmark, "{}.hdf5".format(name)) 120 | 121 | pathname = 
pkg_resources.resource_filename("keras_resnet", pathname) 122 | 123 | model_checkpoint = keras.callbacks.ModelCheckpoint(pathname) 124 | 125 | pathname = os.path.join("data", "logs", benchmark, "{}.csv".format(name)) 126 | 127 | pathname = pkg_resources.resource_filename("keras_resnet", pathname) 128 | 129 | csv_logger = keras.callbacks.CSVLogger(pathname) 130 | 131 | callbacks = [ 132 | csv_logger, 133 | model_checkpoint 134 | ] 135 | 136 | model.fit_generator( 137 | callbacks=callbacks, 138 | epochs=100, 139 | generator=generator, 140 | validation_data=validation_data 141 | ) 142 | 143 | 144 | if __name__ == "__main__": 145 | __main__() 146 | -------------------------------------------------------------------------------- /keras_resnet/blocks/_1d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.blocks._1d 5 | ~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements a number of popular one-dimensional residual blocks. 8 | """ 9 | 10 | import keras.layers 11 | import keras.regularizers 12 | 13 | import keras_resnet.layers 14 | 15 | parameters = { 16 | "kernel_initializer": "he_normal" 17 | } 18 | 19 | 20 | def basic_1d( 21 | filters, 22 | stage=0, 23 | block=0, 24 | kernel_size=3, 25 | numerical_name=False, 26 | stride=None, 27 | freeze_bn=False 28 | ): 29 | """ 30 | A one-dimensional basic block. 31 | 32 | :param filters: the output’s feature space 33 | 34 | :param stage: int representing the stage of this block (starting from 0) 35 | 36 | :param block: int representing this block (starting from 0) 37 | 38 | :param kernel_size: size of the kernel 39 | 40 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 41 | 42 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 43 | 44 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 45 | 46 | Usage: 47 | 48 | >>> import keras_resnet.blocks 49 | 50 | >>> keras_resnet.blocks.basic_1d(64) 51 | """ 52 | if stride is None: 53 | if block != 0 or stage == 0: 54 | stride = 1 55 | else: 56 | stride = 2 57 | 58 | if keras.backend.image_data_format() == "channels_last": 59 | axis = -1 60 | else: 61 | axis = 1 62 | 63 | if block > 0 and numerical_name: 64 | block_char = "b{}".format(block) 65 | else: 66 | block_char = chr(ord('a') + block) 67 | 68 | stage_char = str(stage + 2) 69 | 70 | def f(x): 71 | y = keras.layers.ZeroPadding1D( 72 | padding=1, 73 | name="padding{}{}_branch2a".format(stage_char, block_char) 74 | )(x) 75 | 76 | y = keras.layers.Conv1D( 77 | filters, 78 | kernel_size, 79 | strides=stride, 80 | use_bias=False, 81 | name="res{}{}_branch2a".format(stage_char, block_char), 82 | **parameters 83 | )(y) 84 | 85 | y = keras_resnet.layers.BatchNormalization( 86 | axis=axis, 87 | epsilon=1e-5, 88 | freeze=freeze_bn, 89 | name="bn{}{}_branch2a".format(stage_char, block_char) 90 | )(y) 91 | 92 | y = keras.layers.Activation( 93 | "relu", 94 | name="res{}{}_branch2a_relu".format(stage_char, block_char) 95 | )(y) 96 | 97 | y = keras.layers.ZeroPadding1D( 98 | padding=1, 99 | name="padding{}{}_branch2b".format(stage_char, block_char) 100 | )(y) 101 | 102 | y = keras.layers.Conv1D( 103 | filters, 104 | kernel_size, 105 | use_bias=False, 106 | name="res{}{}_branch2b".format(stage_char, block_char), 107 | **parameters 108 | )(y) 109 | 110 | y = keras_resnet.layers.BatchNormalization( 111 | axis=axis, 112 | epsilon=1e-5, 113 | freeze=freeze_bn, 114 | name="bn{}{}_branch2b".format(stage_char, block_char) 115 | )(y) 116 | 117 | if block == 0: 118 | shortcut = keras.layers.Conv1D( 119 | filters, 120 | 1, 121 | strides=stride, 122 | use_bias=False, 123 | name="res{}{}_branch1".format(stage_char, block_char), 124 | **parameters 125 | )(x) 126 | 127 | shortcut = keras_resnet.layers.BatchNormalization( 128 | axis=axis, 129 | epsilon=1e-5, 130 | freeze=freeze_bn, 131 | name="bn{}{}_branch1".format(stage_char, block_char) 132 | )(shortcut) 133 | else: 134 | shortcut = x 135 | 136 | y = keras.layers.Add( 137 | name="res{}{}".format(stage_char, block_char) 138 | )([y, shortcut]) 139 | 140 | y = keras.layers.Activation( 141 | "relu", 142 | name="res{}{}_relu".format(stage_char, block_char) 143 | )(y) 144 | 145 | return y 146 | 147 | return f 148 | 149 | 150 | def bottleneck_1d( 151 | filters, 152 | stage=0, 153 | block=0, 154 | kernel_size=3, 155 | numerical_name=False, 156 | stride=None, 157 | freeze_bn=False 158 | ): 159 | """ 160 | A one-dimensional bottleneck block. 161 | 162 | :param filters: the output’s feature space 163 | 164 | :param stage: int representing the stage of this block (starting from 0) 165 | 166 | :param block: int representing this block (starting from 0) 167 | 168 | :param kernel_size: size of the kernel 169 | 170 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 171 | 172 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 173 | 174 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 175 | 176 | Usage: 177 | 178 | >>> import keras_resnet.blocks 179 | 180 | >>> keras_resnet.blocks.bottleneck_1d(64) 181 | """ 182 | if stride is None: 183 | stride = 1 if block != 0 or stage == 0 else 2 184 | 185 | if keras.backend.image_data_format() == "channels_last": 186 | axis = -1 187 | else: 188 | axis = 1 189 | 190 | if block > 0 and numerical_name: 191 | block_char = "b{}".format(block) 192 | else: 193 | block_char = chr(ord('a') + block) 194 | 195 | stage_char = str(stage + 2) 196 | 197 | def f(x): 198 | y = keras.layers.Conv1D( 199 | filters, 200 | 1, 201 | strides=stride, 202 | use_bias=False, 203 | name="res{}{}_branch2a".format(stage_char, block_char), 204 | **parameters 205 | )(x) 206 | 207 | y = keras_resnet.layers.BatchNormalization( 208 | axis=axis, 209 | epsilon=1e-5, 210 | freeze=freeze_bn, 211 | name="bn{}{}_branch2a".format(stage_char, block_char) 212 | )(y) 213 | 214 | y = keras.layers.Activation( 215 | "relu", 216 | name="res{}{}_branch2a_relu".format(stage_char, block_char) 217 | )(y) 218 | 219 | y = keras.layers.ZeroPadding1D( 220 | padding=1, 221 | name="padding{}{}_branch2b".format(stage_char, block_char) 222 | )(y) 223 | 224 | y = keras.layers.Conv1D( 225 | filters, 226 | kernel_size, 227 | use_bias=False, 228 | name="res{}{}_branch2b".format(stage_char, block_char), 229 | **parameters 230 | )(y) 231 | 232 | y = keras_resnet.layers.BatchNormalization( 233 | axis=axis, 234 | epsilon=1e-5, 235 | freeze=freeze_bn, 236 | name="bn{}{}_branch2b".format(stage_char, block_char) 237 | )(y) 238 | 239 | y = keras.layers.Activation( 240 | "relu", 241 | name="res{}{}_branch2b_relu".format(stage_char, block_char) 242 | )(y) 243 | 244 | y = keras.layers.Conv1D( 245 | filters * 4, 246 | 1, 247 | use_bias=False, 248 | name="res{}{}_branch2c".format(stage_char, block_char), 249 | **parameters 250 | )(y) 251 | 252 | y = keras_resnet.layers.BatchNormalization( 253 | axis=axis, 254 | epsilon=1e-5, 255 | freeze=freeze_bn, 256 | name="bn{}{}_branch2c".format(stage_char, block_char) 257 | )(y) 258 | 259 | if block == 0: 260 | shortcut = keras.layers.Conv1D( 261 | filters * 4, 262 | 1, 263 | strides=stride, 264 | use_bias=False, 265 | name="res{}{}_branch1".format(stage_char, block_char), 266 | **parameters 267 | )(x) 268 | 269 | shortcut = keras_resnet.layers.BatchNormalization( 270 | axis=axis, 271 | epsilon=1e-5, 272 | freeze=freeze_bn, 273 | name="bn{}{}_branch1".format(stage_char, block_char) 274 | )(shortcut) 275 | else: 276 | shortcut = x 277 | 278 | y = keras.layers.Add( 279 | name="res{}{}".format(stage_char, block_char) 280 | )([y, shortcut]) 281 | 282 | y = keras.layers.Activation( 283 | "relu", 284 | name="res{}{}_relu".format(stage_char, block_char) 285 | )(y) 286 | 287 | return y 288 | 289 | return f 290 | -------------------------------------------------------------------------------- /keras_resnet/blocks/_2d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.blocks._2d 5 | ~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements a number of popular two-dimensional residual blocks. 
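Each block constructor returns a function that is applied to a Keras tensor
to build the block. A minimal sketch (the input shape is illustrative):

    >>> import keras.layers

    >>> import keras_resnet.blocks

    >>> x = keras.layers.Input((224, 224, 3))

    >>> y = keras_resnet.blocks.basic_2d(64)(x)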
8 | """ 9 | 10 | import keras.layers 11 | import keras.regularizers 12 | 13 | import keras_resnet.layers 14 | 15 | parameters = { 16 | "kernel_initializer": "he_normal" 17 | } 18 | 19 | 20 | def basic_2d( 21 | filters, 22 | stage=0, 23 | block=0, 24 | kernel_size=3, 25 | numerical_name=False, 26 | stride=None, 27 | freeze_bn=False 28 | ): 29 | """ 30 | A two-dimensional basic block. 31 | 32 | :param filters: the output’s feature space 33 | 34 | :param stage: int representing the stage of this block (starting from 0) 35 | 36 | :param block: int representing this block (starting from 0) 37 | 38 | :param kernel_size: size of the kernel 39 | 40 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 41 | 42 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 43 | 44 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 45 | 46 | Usage: 47 | 48 | >>> import keras_resnet.blocks 49 | 50 | >>> keras_resnet.blocks.basic_2d(64) 51 | """ 52 | if stride is None: 53 | if block != 0 or stage == 0: 54 | stride = 1 55 | else: 56 | stride = 2 57 | 58 | if keras.backend.image_data_format() == "channels_last": 59 | axis = 3 60 | else: 61 | axis = 1 62 | 63 | if block > 0 and numerical_name: 64 | block_char = "b{}".format(block) 65 | else: 66 | block_char = chr(ord('a') + block) 67 | 68 | stage_char = str(stage + 2) 69 | 70 | def f(x): 71 | y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) 72 | 73 | y = keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) 74 | 75 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) 76 | 77 | y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) 78 | 79 | y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) 80 | 81 | y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) 82 | 83 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) 84 | 85 | if block == 0: 86 | shortcut = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) 87 | 88 | shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) 89 | else: 90 | shortcut = x 91 | 92 | y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) 93 | 94 | y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) 95 | 96 | return y 97 | 98 | return f 99 | 100 | 101 | def bottleneck_2d( 102 | filters, 103 | stage=0, 104 | block=0, 105 | kernel_size=3, 106 | numerical_name=False, 107 | stride=None, 108 | freeze_bn=False 109 | ): 110 | """ 111 | A two-dimensional bottleneck block. 
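    The block stacks a strided 1 × 1 convolution that projects to ``filters``
    channels, a ``kernel_size`` convolution, and a final 1 × 1 convolution that
    expands to ``4 * filters`` channels; when ``block == 0`` the shortcut is a
    matching strided 1 × 1 projection, otherwise the identity is used.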
112 | 113 | :param filters: the output’s feature space 114 | 115 | :param stage: int representing the stage of this block (starting from 0) 116 | 117 | :param block: int representing this block (starting from 0) 118 | 119 | :param kernel_size: size of the kernel 120 | 121 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 122 | 123 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 124 | 125 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 126 | 127 | Usage: 128 | 129 | >>> import keras_resnet.blocks 130 | 131 | >>> keras_resnet.blocks.bottleneck_2d(64) 132 | """ 133 | if stride is None: 134 | if block != 0 or stage == 0: 135 | stride = 1 136 | else: 137 | stride = 2 138 | 139 | if keras.backend.image_data_format() == "channels_last": 140 | axis = 3 141 | else: 142 | axis = 1 143 | 144 | if block > 0 and numerical_name: 145 | block_char = "b{}".format(block) 146 | else: 147 | block_char = chr(ord('a') + block) 148 | 149 | stage_char = str(stage + 2) 150 | 151 | def f(x): 152 | y = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) 153 | 154 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) 155 | 156 | y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) 157 | 158 | y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) 159 | 160 | y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) 161 | 162 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) 163 | 164 | y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) 165 | 166 | y = keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) 167 | 168 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) 169 | 170 | if block == 0: 171 | shortcut = keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) 172 | 173 | shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) 174 | else: 175 | shortcut = x 176 | 177 | y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) 178 | 179 | y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) 180 | 181 | return y 182 | 183 | return f 184 | -------------------------------------------------------------------------------- /keras_resnet/blocks/_3d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.blocks._3d 5 | ~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements a number of popular three-dimensional residual blocks. 
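As with their two-dimensional counterparts, each constructor returns a
function that, when called on a tensor ((batch, dim1, dim2, dim3, channels)
for ``channels_last`` data), wires up the convolution, batch normalization,
and shortcut layers of the block.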
8 | """ 9 | 10 | import keras.layers 11 | import keras.regularizers 12 | 13 | import keras_resnet.layers 14 | 15 | parameters = { 16 | "kernel_initializer": "he_normal" 17 | } 18 | 19 | 20 | def basic_3d( 21 | filters, 22 | stage=0, 23 | block=0, 24 | kernel_size=3, 25 | numerical_name=False, 26 | stride=None, 27 | freeze_bn=False 28 | ): 29 | """ 30 | A three-dimensional basic block. 31 | 32 | :param filters: the output’s feature space 33 | 34 | :param stage: int representing the stage of this block (starting from 0) 35 | 36 | :param block: int representing this block (starting from 0) 37 | 38 | :param kernel_size: size of the kernel 39 | 40 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 41 | 42 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 43 | 44 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 45 | 46 | Usage: 47 | 48 | >>> import keras_resnet.blocks 49 | 50 | >>> keras_resnet.blocks.basic_3d(64) 51 | """ 52 | if stride is None: 53 | if block != 0 or stage == 0: 54 | stride = 1 55 | else: 56 | stride = 2 57 | 58 | if keras.backend.image_data_format() == "channels_last": 59 | axis = 3 60 | else: 61 | axis = 1 62 | 63 | if block > 0 and numerical_name: 64 | block_char = "b{}".format(block) 65 | else: 66 | block_char = chr(ord('a') + block) 67 | 68 | stage_char = str(stage + 2) 69 | 70 | def f(x): 71 | y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) 72 | 73 | y = keras.layers.Conv3D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) 74 | 75 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) 76 | 77 | y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) 78 | 79 | y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) 80 | 81 | y = keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) 82 | 83 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) 84 | 85 | if block == 0: 86 | shortcut = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) 87 | 88 | shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) 89 | else: 90 | shortcut = x 91 | 92 | y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) 93 | 94 | y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) 95 | 96 | return y 97 | 98 | return f 99 | 100 | 101 | def bottleneck_3d( 102 | filters, 103 | stage=0, 104 | block=0, 105 | kernel_size=3, 106 | numerical_name=False, 107 | stride=None, 108 | freeze_bn=False 109 | ): 110 | """ 111 | A three-dimensional bottleneck block. 
112 | 113 | :param filters: the output’s feature space 114 | 115 | :param stage: int representing the stage of this block (starting from 0) 116 | 117 | :param block: int representing this block (starting from 0) 118 | 119 | :param kernel_size: size of the kernel 120 | 121 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 122 | 123 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 124 | 125 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 126 | 127 | Usage: 128 | 129 | >>> import keras_resnet.blocks 130 | 131 | >>> keras_resnet.blocks.bottleneck_3d(64) 132 | """ 133 | if stride is None: 134 | if block != 0 or stage == 0: 135 | stride = 1 136 | else: 137 | stride = 2 138 | 139 | if keras.backend.image_data_format() == "channels_last": 140 | axis = 3 141 | else: 142 | axis = 1 143 | 144 | if block > 0 and numerical_name: 145 | block_char = "b{}".format(block) 146 | else: 147 | block_char = chr(ord('a') + block) 148 | 149 | stage_char = str(stage + 2) 150 | 151 | def f(x): 152 | y = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) 153 | 154 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) 155 | 156 | y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) 157 | 158 | y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) 159 | 160 | y = keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) 161 | 162 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) 163 | 164 | y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) 165 | 166 | y = keras.layers.Conv3D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) 167 | 168 | y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) 169 | 170 | if block == 0: 171 | shortcut = keras.layers.Conv3D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) 172 | 173 | shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) 174 | else: 175 | shortcut = x 176 | 177 | y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) 178 | 179 | y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) 180 | 181 | return y 182 | 183 | return f 184 | -------------------------------------------------------------------------------- /keras_resnet/blocks/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.blocks 5 | ~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements a number of popular residual blocks. 
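The 1-D, 2-D, 3-D, and time-distributed variants share the same signatures
and layer-naming scheme (``res{stage}{block}_*``, ``bn{stage}{block}_*``),
and are intended to be composed by the model builders in
:mod:`keras_resnet.models`.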
8 | """ 9 | 10 | from ._1d import ( 11 | basic_1d, 12 | bottleneck_1d 13 | ) 14 | 15 | from ._2d import ( 16 | basic_2d, 17 | bottleneck_2d 18 | ) 19 | 20 | from ._3d import ( 21 | basic_3d, 22 | bottleneck_3d 23 | ) 24 | 25 | from ._time_distributed_2d import ( 26 | time_distributed_basic_2d, 27 | time_distributed_bottleneck_2d 28 | ) 29 | -------------------------------------------------------------------------------- /keras_resnet/blocks/_time_distributed_2d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.blocks._time_distributed_2d 5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements a number of popular time distributed two-dimensional residual blocks. 8 | """ 9 | 10 | import keras.layers 11 | import keras.regularizers 12 | 13 | import keras_resnet.layers 14 | 15 | parameters = { 16 | "kernel_initializer": "he_normal" 17 | } 18 | 19 | 20 | def time_distributed_basic_2d( 21 | filters, 22 | stage=0, 23 | block=0, 24 | kernel_size=3, 25 | numerical_name=False, 26 | stride=None, 27 | freeze_bn=False 28 | ): 29 | """ 30 | 31 | A time distributed two-dimensional basic block. 32 | 33 | :param filters: the output’s feature space 34 | 35 | :param stage: int representing the stage of this block (starting from 0) 36 | 37 | :param block: int representing this block (starting from 0) 38 | 39 | :param kernel_size: size of the kernel 40 | 41 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 42 | 43 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 44 | 45 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 46 | 47 | Usage: 48 | 49 | >>> import keras_resnet.blocks 50 | 51 | >>> keras_resnet.blocks.time_distributed_basic_2d(64) 52 | 53 | """ 54 | if stride is None: 55 | if block != 0 or stage == 0: 56 | stride = 1 57 | else: 58 | stride = 2 59 | 60 | if keras.backend.image_data_format() == "channels_last": 61 | axis = 3 62 | else: 63 | axis = 1 64 | 65 | if block > 0 and numerical_name: 66 | block_char = "b{}".format(block) 67 | else: 68 | block_char = chr(ord('a') + block) 69 | 70 | stage_char = str(stage + 2) 71 | 72 | def f(x): 73 | y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2a".format(stage_char, block_char))(x) 74 | 75 | y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(y) 76 | 77 | y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) 78 | 79 | y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) 80 | 81 | y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) 82 | 83 | y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) 84 | 85 | y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) 86 | 87 | if block == 0: 88 | shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) 89 | 90 | shortcut = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) 91 | else: 92 | shortcut = x 93 | 94 | y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) 95 | 96 | y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) 97 | 98 | return y 99 | 100 | return f 101 | 102 | 103 | def time_distributed_bottleneck_2d( 104 | filters, 105 | stage=0, 106 | block=0, 107 | kernel_size=3, 108 | numerical_name=False, 109 | stride=None, 110 | freeze_bn=False 111 | ): 112 | """ 113 | 114 | A time distributed two-dimensional bottleneck block. 115 | 116 | :param filters: the output’s feature space 117 | 118 | :param stage: int representing the stage of this block (starting from 0) 119 | 120 | :param block: int representing this block (starting from 0) 121 | 122 | :param kernel_size: size of the kernel 123 | 124 | :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) 125 | 126 | :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id 127 | 128 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 129 | 130 | Usage: 131 | 132 | >>> import keras_resnet.blocks 133 | 134 | >>> keras_resnet.blocks.time_distributed_bottleneck_2d(64) 135 | 136 | """ 137 | if stride is None: 138 | if block != 0 or stage == 0: 139 | stride = 1 140 | else: 141 | stride = 2 142 | 143 | if keras.backend.image_data_format() == "channels_last": 144 | axis = 3 145 | else: 146 | axis = 1 147 | 148 | if block > 0 and numerical_name: 149 | block_char = "b{}".format(block) 150 | else: 151 | block_char = chr(ord('a') + block) 152 | 153 | stage_char = str(stage + 2) 154 | 155 | def f(x): 156 | y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(x) 157 | 158 | y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) 159 | 160 | y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) 161 | 162 | y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) 163 | 164 | y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) 165 | 166 | y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) 167 | 168 | y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) 169 | 170 | y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, **parameters), name="res{}{}_branch2c".format(stage_char, block_char))(y) 171 | 172 | y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2c".format(stage_char, block_char))(y) 173 | 174 | if block == 0: 175 | shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) 176 | 177 | shortcut = keras.layers.TimeDistributed(keras.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) 178 | else: 179 | shortcut = x 180 | 181 | y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) 182 | 183 | y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) 184 | 185 | return y 186 | 187 | return f 188 | -------------------------------------------------------------------------------- /keras_resnet/classifiers/_2d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.classifiers 5 | ~~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular residual two-dimensional classifiers. 8 | """ 9 | 10 | import keras.backend 11 | import keras.layers 12 | import keras.models 13 | import keras.regularizers 14 | 15 | import keras_resnet.models 16 | 17 | 18 | class ResNet18(keras.models.Model): 19 | """ 20 | A :class:`ResNet18 ` object. 21 | 22 | :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) 23 | 24 | Usage: 25 | 26 | >>> import keras_resnet.classifiers 27 | 28 | >>> shape, classes = (224, 224, 3), 1000 29 | 30 | >>> x = keras.layers.Input(shape) 31 | 32 | >>> model = keras_resnet.classifiers.ResNet18(x) 33 | 34 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 35 | """ 36 | def __init__(self, inputs, classes): 37 | outputs = keras_resnet.models.ResNet18(inputs) 38 | 39 | outputs = keras.layers.Flatten()(outputs.output) 40 | 41 | outputs = keras.layers.Dense(classes, activation="softmax")(outputs) 42 | 43 | super(ResNet18, self).__init__(inputs, outputs) 44 | 45 | 46 | class ResNet34(keras.models.Model): 47 | """ 48 | A :class:`ResNet34 ` object. 49 | 50 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 51 | 52 | Usage: 53 | 54 | >>> import keras_resnet.classifiers 55 | 56 | >>> shape, classes = (224, 224, 3), 1000 57 | 58 | >>> x = keras.layers.Input(shape) 59 | 60 | >>> model = keras_resnet.classifiers.ResNet34(x) 61 | 62 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 63 | """ 64 | def __init__(self, inputs, classes): 65 | outputs = keras_resnet.models.ResNet34(inputs) 66 | 67 | outputs = keras.layers.Flatten()(outputs.output) 68 | 69 | outputs = keras.layers.Dense(classes, activation="softmax")(outputs) 70 | 71 | super(ResNet34, self).__init__(inputs, outputs) 72 | 73 | 74 | class ResNet50(keras.models.Model): 75 | """ 76 | A :class:`ResNet50 ` object. 77 | 78 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 79 | 80 | Usage: 81 | 82 | >>> import keras_resnet.classifiers 83 | 84 | >>> shape, classes = (224, 224, 3), 1000 85 | 86 | >>> x = keras.layers.Input(shape) 87 | 88 | >>> model = keras_resnet.classifiers.ResNet50(x) 89 | 90 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 91 | """ 92 | def __init__(self, inputs, classes): 93 | outputs = keras_resnet.models.ResNet50(inputs) 94 | 95 | outputs = keras.layers.Flatten()(outputs.output) 96 | 97 | outputs = keras.layers.Dense(classes, activation="softmax")(outputs) 98 | 99 | super(ResNet50, self).__init__(inputs, outputs) 100 | 101 | 102 | class ResNet101(keras.models.Model): 103 | """ 104 | A :class:`ResNet101 ` object. 105 | 106 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 107 | 108 | Usage: 109 | 110 | >>> import keras_resnet.classifiers 111 | 112 | >>> shape, classes = (224, 224, 3), 1000 113 | 114 | >>> x = keras.layers.Input(shape) 115 | 116 | >>> model = keras_resnet.classifiers.ResNet101(x) 117 | 118 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 119 | """ 120 | def __init__(self, inputs, classes): 121 | outputs = keras_resnet.models.ResNet101(inputs) 122 | 123 | outputs = keras.layers.Flatten()(outputs.output) 124 | 125 | outputs = keras.layers.Dense(classes, activation="softmax")(outputs) 126 | 127 | super(ResNet101, self).__init__(inputs, outputs) 128 | 129 | 130 | class ResNet152(keras.models.Model): 131 | """ 132 | A :class:`ResNet152 ` object. 133 | 134 | :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) 135 | 136 | Usage: 137 | 138 | >>> import keras_resnet.classifiers 139 | 140 | >>> shape, classes = (224, 224, 3), 1000 141 | 142 | >>> x = keras.layers.Input(shape) 143 | 144 | >>> model = keras_resnet.classifiers.ResNet152(x) 145 | 146 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 147 | 148 | """ 149 | def __init__(self, inputs, classes): 150 | outputs = keras_resnet.models.ResNet152(inputs) 151 | 152 | outputs = keras.layers.Flatten()(outputs.output) 153 | 154 | outputs = keras.layers.Dense(classes, activation="softmax")(outputs) 155 | 156 | super(ResNet152, self).__init__(inputs, outputs) 157 | 158 | 159 | class ResNet200(keras.models.Model): 160 | """ 161 | A :class:`ResNet200 ` object. 162 | 163 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 164 | 165 | Usage: 166 | 167 | >>> import keras_resnet.classifiers 168 | 169 | >>> shape, classes = (224, 224, 3), 1000 170 | 171 | >>> x = keras.layers.Input(shape) 172 | 173 | >>> model = keras_resnet.classifiers.ResNet200(x) 174 | 175 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 176 | """ 177 | def __init__(self, inputs, classes): 178 | outputs = keras_resnet.models.ResNet200(inputs) 179 | 180 | outputs = keras.layers.Flatten()(outputs.output) 181 | 182 | outputs = keras.layers.Dense(classes, activation="softmax")(outputs) 183 | 184 | super(ResNet200, self).__init__(inputs, outputs) 185 | -------------------------------------------------------------------------------- /keras_resnet/classifiers/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.classifiers 5 | ~~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular residual classifiers. 
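For example, a minimal end-to-end sketch, assuming 32x32 RGB inputs, ten classes, and one-hot encoded labels (`x_train` and `y_train` below are placeholders for your own arrays; note that the `classes` argument is required by these constructors):

>>> import keras.layers

>>> import keras_resnet.classifiers

>>> shape, classes = (32, 32, 3), 10

>>> x = keras.layers.Input(shape)

>>> model = keras_resnet.classifiers.ResNet50(x, classes)

>>> model.compile("adam", "categorical_crossentropy", ["accuracy"])

>>> model.fit(x_train, y_train, batch_size=256, epochs=10)  # x_train, y_train: placeholder arrays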
8 | """ 9 | 10 | from ._2d import ( 11 | ResNet18, 12 | ResNet34, 13 | ResNet50, 14 | ResNet101, 15 | ResNet152, 16 | ResNet200 17 | ) 18 | -------------------------------------------------------------------------------- /keras_resnet/data/checkpoints/CIFAR-10/ResNet-18.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/broadinstitute/keras-resnet/898a1ee417e940ff12bf73ad44c2aae88501771f/keras_resnet/data/checkpoints/CIFAR-10/ResNet-18.hdf5 -------------------------------------------------------------------------------- /keras_resnet/data/logs/CIFAR-10/ResNet-152.csv: -------------------------------------------------------------------------------- 1 | epoch,acc,loss,val_acc,val_loss 2 | 0,0.45048,5.6569789698,0.52456,2.52404960579 3 | 1,0.5802,1.97952193207,0.57628,1.85609009392 4 | 2,0.6378,1.59078127138,0.58036,1.71531735924 5 | 3,0.68028,1.42835026871,0.63564,1.5793722065 6 | 4,0.70708,1.36528547768,0.57992,1.77542785202 7 | 5,0.73308,1.30552210175,0.68112,1.44930167469 8 | 6,0.76016,1.22355006672,0.64572,1.56860588356 9 | 7,0.78496,1.15169128395,0.64908,1.58125893982 10 | 8,0.81084,1.07741350706,0.63448,1.76302654694 11 | 9,0.82964,1.03127105854,0.68564,1.49897863586 12 | 10,0.84764,0.973772405777,0.67392,1.62877078094 13 | 11,0.86544,0.936406512718,0.70332,1.54720495323 14 | 12,0.88188,0.880764993744,0.70564,1.61152415085 15 | 13,0.8898,0.865879303856,0.69232,1.65447898827 16 | 14,0.90252,0.831456372223,0.68696,1.65889738792 17 | 15,0.9044,0.832293133163,0.69408,1.73048787109 18 | 16,0.91828,0.781580786877,0.72228,1.6345967038 19 | 17,0.92132,0.783939222946,0.64484,2.1204690313 20 | 18,0.92536,0.775381643562,0.71584,1.64438322769 21 | 19,0.92812,0.762364400082,0.67912,1.95392770218 22 | 20,0.92996,0.75270499136,0.70468,1.74274598427 23 | 21,0.93348,0.744652792206,0.69768,1.81645549057 24 | 22,0.93636,0.735111584702,0.68592,1.89696693932 25 | 23,0.93792,0.729523564987,0.68424,1.89776666023 26 | 24,0.94096,0.719789583645,0.68992,1.8297908065 27 | 25,0.93668,0.729977995338,0.69284,1.86887301064 28 | 26,0.94212,0.716373442802,0.71412,1.75364293022 29 | 27,0.94416,0.711411055107,0.71084,1.82916514145 30 | 28,0.94328,0.704894613934,0.71408,1.74045247726 31 | 29,0.94884,0.689063891869,0.7012,1.79963303383 32 | 30,0.94456,0.70617998558,0.71968,1.71550001167 33 | 31,0.9512,0.68318250206,0.7082,1.79918074066 34 | 32,0.94784,0.693417632885,0.70288,1.84066558891 35 | 33,0.94808,0.688813854713,0.72628,1.6777460598 36 | 34,0.9524,0.673064042969,0.693,1.85032689247 37 | 35,0.94964,0.684403116837,0.71488,1.82139017357 38 | 36,0.95204,0.668751282806,0.7092,1.7957940889 39 | 37,0.95288,0.666678543167,0.70232,1.83750470825 40 | 38,0.95084,0.666435951405,0.7164,1.73421942223 41 | 39,0.9538,0.652561580982,0.70824,1.81593661156 42 | 40,0.95128,0.667898567829,0.70788,1.84904203239 43 | 41,0.95448,0.649378330002,0.70744,1.85420666649 44 | 42,0.9534,0.654717244797,0.71252,1.91642944542 45 | 43,0.95624,0.64721691988,0.70716,1.76985499306 46 | 44,0.95772,0.636954878006,0.71264,1.77503314545 47 | 45,0.95232,0.651303475227,0.69732,1.87291989624 48 | 46,0.9564,0.636415992489,0.70344,1.79303389183 49 | 47,0.95544,0.63748412281,0.70004,1.8751271632 50 | 48,0.95684,0.629755827217,0.71136,1.83379663605 51 | 49,0.95852,0.622991967468,0.69052,2.01958825005 52 | 50,0.95544,0.634256403332,0.70608,1.79803046906 53 | 51,0.95952,0.617938518734,0.70436,1.90774699913 54 | 52,0.9576,0.620909184856,0.72232,1.74220076778 55 | 
53,0.95712,0.614600144329,0.69468,1.98340138771 56 | 54,0.9604,0.612931265984,0.71788,1.7960405632 57 | 55,0.95972,0.61181312561,0.68188,2.10309059319 58 | 56,0.96024,0.599948996658,0.71884,1.75586853378 59 | 57,0.95816,0.612823825188,0.71688,1.77811519279 60 | 58,0.95916,0.60589071991,0.70944,1.85249260025 61 | 59,0.96228,0.591477043171,0.7062,1.81866511925 62 | 60,0.9594,0.60072430563,0.71984,1.7280502449 63 | 61,0.95808,0.597107102337,0.71924,1.78307212532 64 | 62,0.96312,0.586346117058,0.7046,1.9250768293 65 | 63,0.95828,0.598417911568,0.70572,1.76429192619 66 | 64,0.96332,0.577492843828,0.72004,1.71519271767 67 | 65,0.96348,0.577564435577,0.70028,1.87984622681 68 | 66,0.961,0.585994176464,0.722,1.76821646027 69 | 67,0.96208,0.581109823875,0.70788,1.77447559059 70 | 68,0.96184,0.573872935505,0.71412,1.78038441383 71 | 69,0.96064,0.583914702339,0.69152,1.85537320671 72 | 70,0.96228,0.576501415596,0.71128,1.76108913204 73 | 71,0.96176,0.573572147865,0.71432,1.74296127136 74 | 72,0.96308,0.571898015156,0.71996,1.75626888855 75 | 73,0.96096,0.57597786869,0.72268,1.69158449982 76 | 74,0.96468,0.562929959631,0.70428,1.87859262039 77 | 75,0.96824,0.542902224483,0.70248,1.79049867249 78 | 76,0.96048,0.570442586117,0.70916,1.805807556 79 | 77,0.96352,0.55739930687,0.71676,1.76644431541 80 | 78,0.96488,0.547661134987,0.71252,1.84655031155 81 | 79,0.96228,0.553942497463,0.70936,1.70010541222 82 | 80,0.96236,0.561035350189,0.71112,1.76348115589 83 | 81,0.96664,0.54425209384,0.72212,1.80034795361 84 | 82,0.96336,0.551216385956,0.7062,1.78828575253 85 | 83,0.96176,0.554362309341,0.67244,2.07504709923 86 | 84,0.9666,0.536079183722,0.71152,1.84909118378 87 | 85,0.96548,0.538974055786,0.68516,2.04362067757 88 | -------------------------------------------------------------------------------- /keras_resnet/data/logs/CIFAR-10/ResNet-18.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/broadinstitute/keras-resnet/898a1ee417e940ff12bf73ad44c2aae88501771f/keras_resnet/data/logs/CIFAR-10/ResNet-18.csv -------------------------------------------------------------------------------- /keras_resnet/data/logs/CIFAR-10/ResNet-50.csv: -------------------------------------------------------------------------------- 1 | epoch,acc,loss,val_acc,val_loss 2 | 0,0.43584000349,5.62913276672,0.100839997828,18.3248052216 3 | 1,0.613839981556,4.6392209816,0.128239999712,11.0363575363 4 | 2,0.727839994431,3.81915694237,0.194319999218,6.3110577774 5 | 3,0.837040014267,3.10814973831,0.447239995003,4.10543880463 6 | 4,0.895920023918,2.60104823112,0.521400008202,3.65062627792 7 | 5,0.936160049438,2.21063588142,0.560199997425,3.46328066826 8 | 6,0.953960042,1.94667746067,0.588559992313,3.14252257347 9 | 7,0.960440046787,1.74168484211,0.610159988403,3.01861333847 10 | 8,0.965000035763,1.58069104671,0.609799985886,2.91891400337 11 | 9,0.969680035114,1.44878509521,0.58915999651,3.06458162308 12 | 10,0.97468003273,1.33450681686,0.604199991226,2.8859497261 13 | 11,0.977840023041,1.238817873,0.609560000896,2.82743697166 14 | 12,0.979160020351,1.15541218281,0.627919986248,2.7040555954 15 | 13,0.981320016384,1.08901126385,0.6247199893,2.64943538666 16 | 14,0.98379999876,1.02661623716,0.623279998302,2.63110416412 17 | 15,0.981520011425,0.985036349297,0.603999991417,2.76023046494 18 | 16,0.978280010223,0.955665054321,0.613960006237,2.72687380791 19 | 17,0.976120016575,0.929158322811,0.623119988441,2.64345932961 20 | 
18,0.975840024948,0.897617707253,0.616840009689,2.87130368233 21 | 19,0.98180000782,0.856136341095,0.611039996147,2.93421761513 22 | 20,0.97956001997,0.838128817081,0.62364000082,2.8393068409 23 | 21,0.978480014801,0.814125754833,0.582520000935,3.18922561646 24 | 22,0.978199994564,0.794351735115,0.608199987411,3.00167135239 25 | 23,0.977680027485,0.782097830772,0.618280000687,2.86438794136 26 | 24,0.983160009384,0.748475122452,0.602960004807,2.87347389221 27 | 25,0.987960000038,0.718061943054,0.62651999712,2.78077593803 28 | 26,0.987199993134,0.698964850903,0.60931999445,2.87224621773 29 | 27,0.989200003147,0.675797929764,0.618000001907,2.75454705238 30 | 28,0.988760023117,0.661319212914,0.620039994717,2.68993344307 31 | 29,0.98732000351,0.653367042542,0.617039999962,2.85696388245 32 | 30,0.984160006046,0.650923583508,0.616720004082,2.83272325516 33 | 31,0.980480020046,0.654793913364,0.618279986382,2.86812015533 34 | 32,0.981120014191,0.639981796741,0.5789599967,3.43344039917 35 | 33,0.985560028553,0.623309266567,0.613959999084,3.02275442123 36 | 34,0.986520013809,0.606539611816,0.613279998302,2.98420905113 37 | 35,0.990080001354,0.587531495094,0.624679996967,2.77140503883 38 | 36,0.98988001585,0.577072823048,0.616040005684,2.80857343674 39 | 37,0.98776001215,0.575551519394,0.603159992695,3.10779605865 40 | 38,0.98384001255,0.581647360325,0.55887999773,3.49894658089 41 | 39,0.985440003872,0.572706053257,0.606159999371,2.91105371475 42 | 40,0.987920019627,0.55689914465,0.625319998264,2.7114607811 43 | 41,0.987120008469,0.550275967121,0.638920013905,2.52355694771 44 | 42,0.98856003046,0.538058042526,0.632600007057,2.61510424614 45 | 43,0.989040024281,0.527270588875,0.627400004864,2.64803095818 46 | 44,0.987680001259,0.530300002098,0.615599997044,2.6820781517 47 | 45,0.988120000362,0.520626094341,0.622719993591,2.77283156395 48 | 46,0.988000018597,0.516941388845,0.604319992065,2.86117936134 49 | 47,0.985440011024,0.519231830835,0.619200000763,2.80338571548 50 | 48,0.983600020409,0.520252664089,0.630480003357,2.74096003532 51 | 49,0.984040021896,0.515836383104,0.61364000082,3.00537041664 52 | 50,0.985680027008,0.50866099,0.61463999033,2.7754552269 53 | 51,0.990360019207,0.492037719488,0.639800009727,2.65804454803 54 | 52,0.990599999428,0.48505227685,0.616879997253,2.92769122124 55 | 53,0.991680021286,0.47422213316,0.648079993725,2.49201997757 56 | 54,0.993400022984,0.461529686451,0.6278799963,2.550382967 57 | 55,0.989520003796,0.466327902079,0.632199993134,2.51265090942 58 | 56,0.989960017204,0.464019637108,0.345360000134,6.8282803154 59 | 57,0.986560006142,0.473317199945,0.6057999897,3.08627145767 60 | 58,0.986039998531,0.472426207066,0.626319994926,2.93131689072 61 | 59,0.982240014076,0.483788002729,0.627839999199,2.83019703865 62 | 60,0.981720006466,0.481595607996,0.638599996567,2.69752861023 63 | 61,0.985359997749,0.466702456474,0.638960003853,2.65502017021 64 | 62,0.988919994831,0.456031087637,0.445279996395,5.7157790947 65 | 63,0.993000011444,0.440598186255,0.630119988918,2.51147171021 66 | 64,0.992440023422,0.435190602541,0.62651999712,2.57957274437 67 | 65,0.993360004425,0.42595925808,0.648399987221,2.2531371212 68 | 66,0.99116001606,0.427255400419,0.608400001526,2.71374420166 69 | 67,0.98944000721,0.430816050768,0.615439996719,2.47887261391 70 | 68,0.987559995651,0.437047631741,0.624880001545,2.58480267525 71 | 69,0.983840019703,0.448080509901,0.622999982834,2.66262166977 72 | 70,0.985160009861,0.44582660079,0.595399987698,3.12249378204 73 | 
71,0.985360016823,0.440602530241,0.624680001736,2.76941171646 74 | 72,0.987560019493,0.433295376301,0.63583999157,2.53925125122 75 | 73,0.990080020428,0.418974424601,0.625079989433,2.66666457176 76 | 74,0.989360010624,0.418960067034,0.616440005302,2.72926710129 77 | 75,0.990640017986,0.415399730206,0.541600000858,3.80070173264 78 | 76,0.990480005741,0.411887803078,0.577159993649,3.05717662811 79 | 77,0.987280006409,0.421492083073,0.582079985142,3.11453285217 80 | 78,0.987800021172,0.417623175383,0.51939999342,3.83483703613 81 | 79,0.988959999084,0.412954752445,0.624959988594,2.71498307228 82 | 80,0.986320009232,0.416066893339,0.566519994736,3.28630393028 83 | 81,0.985760002136,0.419280433655,0.590320005417,2.88762340546 84 | 82,0.987520003319,0.417089204788,0.61167999506,2.77382235527 85 | 83,0.989480004311,0.407765614986,0.631600003242,2.51894221306 86 | 84,0.990560011864,0.399002737999,0.628799996376,2.57540126801 87 | 85,0.992200014591,0.39015312314,0.648999993801,2.4336120224 88 | 86,0.991760008335,0.387459388971,0.634640002251,2.50763084412 89 | 87,0.991840014458,0.384128415585,0.625799992085,2.4767740345 90 | 88,0.992480013371,0.37585185051,0.620199992657,2.41639127731 91 | 89,0.992000021935,0.379179673195,0.590439991951,2.94235163689 92 | 90,0.991800026894,0.374588243961,0.629279997349,2.40989689827 93 | 91,0.988320009708,0.38130390048,0.6007999897,2.86736544609 94 | 92,0.983880000114,0.398092731237,0.609239988327,2.74953005791 95 | 93,0.983000013828,0.40337767601,0.589840004444,2.92746019363 96 | 94,0.984920003414,0.394550970793,0.6328799963,2.59582882881 97 | 95,0.990080006123,0.385717000961,0.634040000439,2.59733297348 98 | 96,0.991280007362,0.376961611509,0.633000006676,2.37583026886 99 | 97,0.992280020714,0.368541344404,0.629559996128,2.51543186188 100 | 98,0.992120013237,0.367575811148,0.605279989243,2.61244617462 101 | 99,0.99280002594,0.360384497643,0.644039993286,2.40321258545 102 | -------------------------------------------------------------------------------- /keras_resnet/layers/__init__.py: -------------------------------------------------------------------------------- 1 | from ._batch_normalization import BatchNormalization 2 | -------------------------------------------------------------------------------- /keras_resnet/layers/_batch_normalization.py: -------------------------------------------------------------------------------- 1 | import keras 2 | 3 | 4 | class BatchNormalization(keras.layers.BatchNormalization): 5 | """ 6 | Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters. 7 | """ 8 | def __init__(self, freeze, *args, **kwargs): 9 | self.freeze = freeze 10 | super(BatchNormalization, self).__init__(*args, **kwargs) 11 | 12 | # set to non-trainable if freeze is true 13 | self.trainable = not self.freeze 14 | 15 | def call(self, *args, **kwargs): 16 | # Force test mode if frozen, otherwise use default behaviour (i.e., training=None). 
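        # (In Keras, training=False puts the layer in inference mode: inputs are
        # normalised with the stored moving mean and variance, and those statistics
        # are not updated. Combined with self.trainable = False set in __init__,
        # a frozen layer keeps its gamma/beta weights and moving statistics fixed.)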
17 | if self.freeze: 18 | kwargs['training'] = False 19 | return super(BatchNormalization, self).call(*args, **kwargs) 20 | 21 | def get_config(self): 22 | config = super(BatchNormalization, self).get_config() 23 | config.update({'freeze': self.freeze}) 24 | return config 25 | -------------------------------------------------------------------------------- /keras_resnet/metrics.py: -------------------------------------------------------------------------------- 1 | import keras.metrics 2 | 3 | 4 | def top_1_categorical_error(y_true, y_pred): 5 | return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) 6 | 7 | 8 | def top_5_categorical_error(y_true, y_pred): 9 | return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) 10 | -------------------------------------------------------------------------------- /keras_resnet/models/_1d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.models._1d 5 | ~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular one-dimensional residual models. 8 | """ 9 | 10 | import keras.backend 11 | import keras.layers 12 | import keras.models 13 | import keras.regularizers 14 | 15 | import keras_resnet.blocks 16 | import keras_resnet.layers 17 | 18 | 19 | class ResNet1D(keras.Model): 20 | """ 21 | Constructs a `keras.models.Model` object using the given block count. 22 | 23 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 24 | 25 | :param blocks: the network’s residual architecture 26 | 27 | :param block: a residual block (e.g. an instance of `keras_resnet.blocks.basic_1d`) 28 | 29 | :param include_top: if true, includes classification layers 30 | 31 | :param classes: number of classes to classify (include_top must be true) 32 | 33 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 34 | 35 | :param numerical_names: list of bool, same size as blocks, used to indicate whether names of layers should include numbers or letters 36 | 37 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 38 | 39 | Usage: 40 | 41 | >>> import keras_resnet.blocks 42 | >>> import keras_resnet.models 43 | 44 | >>> shape, classes = (224, 224, 3), 1000 45 | 46 | >>> x = keras.layers.Input(shape) 47 | 48 | >>> blocks = [2, 2, 2, 2] 49 | 50 | >>> block = keras_resnet.blocks.basic_1d 51 | 52 | >>> model = keras_resnet.models.ResNet(x, classes, blocks, block, classes=classes) 53 | 54 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 55 | """ 56 | def __init__( 57 | self, 58 | inputs, 59 | blocks, 60 | block, 61 | include_top=True, 62 | classes=1000, 63 | freeze_bn=True, 64 | numerical_names=None, 65 | *args, 66 | **kwargs 67 | ): 68 | if keras.backend.image_data_format() == "channels_last": 69 | axis = 3 70 | else: 71 | axis = 1 72 | 73 | if numerical_names is None: 74 | numerical_names = [True] * len(blocks) 75 | 76 | x = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1")(inputs) 77 | x = keras.layers.Conv1D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) 78 | x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) 79 | x = keras.layers.Activation("relu", name="conv1_relu")(x) 80 | x = keras.layers.MaxPooling1D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) 81 | 82 | features = 64 83 | 84 | outputs = [] 85 | 86 | for stage_id, iterations in enumerate(blocks): 87 | for block_id in range(iterations): 88 | x = block( 89 | features, 90 | stage_id, 91 | block_id, 92 | numerical_name=(block_id > 0 and numerical_names[stage_id]), 93 | freeze_bn=freeze_bn 94 | )(x) 95 | 96 | features *= 2 97 | 98 | outputs.append(x) 99 | 100 | if include_top: 101 | assert classes > 0 102 | 103 | x = keras.layers.GlobalAveragePooling1D(name="pool5")(x) 104 | x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) 105 | 106 | super(ResNet1D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) 107 | else: 108 | # Else output each stages features 109 | super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) 110 | 111 | 112 | class ResNet1D18(ResNet1D): 113 | """ 114 | Constructs a `keras.models.Model` according to the ResNet18 specifications. 115 | 116 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 117 | 118 | :param blocks: the network’s residual architecture 119 | 120 | :param include_top: if true, includes classification layers 121 | 122 | :param classes: number of classes to classify (include_top must be true) 123 | 124 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
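For one-dimensional data the call pattern is the same; a minimal sketch, assuming a univariate signal of length 1024 as input:

>>> import keras.layers

>>> import keras_resnet.models

>>> shape, classes = (1024, 1), 1000

>>> x = keras.layers.Input(shape)

>>> model = keras_resnet.models.ResNet1D18(x, classes=classes)

>>> model.compile("adam", "categorical_crossentropy", ["accuracy"])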
no updates are done in these layers) 125 | 126 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 127 | 128 | Usage: 129 | 130 | >>> import keras_resnet.models 131 | 132 | >>> shape, classes = (224, 224, 3), 1000 133 | 134 | >>> x = keras.layers.Input(shape) 135 | 136 | >>> model = keras_resnet.models.ResNet18(x, classes=classes) 137 | 138 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 139 | """ 140 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 141 | if blocks is None: 142 | blocks = [2, 2, 2, 2] 143 | 144 | super(ResNet1D18, self).__init__( 145 | inputs, 146 | blocks, 147 | block=keras_resnet.blocks.basic_1d, 148 | include_top=include_top, 149 | classes=classes, 150 | freeze_bn=freeze_bn, 151 | *args, 152 | **kwargs 153 | ) 154 | 155 | 156 | class ResNet1D34(ResNet1D): 157 | """ 158 | Constructs a `keras.models.Model` according to the ResNet34 specifications. 159 | 160 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 161 | 162 | :param blocks: the network’s residual architecture 163 | 164 | :param include_top: if true, includes classification layers 165 | 166 | :param classes: number of classes to classify (include_top must be true) 167 | 168 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 169 | 170 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 171 | 172 | Usage: 173 | 174 | >>> import keras_resnet.models 175 | 176 | >>> shape, classes = (224, 224, 3), 1000 177 | 178 | >>> x = keras.layers.Input(shape) 179 | 180 | >>> model = keras_resnet.models.ResNet34(x, classes=classes) 181 | 182 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 183 | """ 184 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 185 | if blocks is None: 186 | blocks = [3, 4, 6, 3] 187 | 188 | super(ResNet1D34, self).__init__( 189 | inputs, 190 | blocks, 191 | block=keras_resnet.blocks.basic_1d, 192 | include_top=include_top, 193 | classes=classes, 194 | freeze_bn=freeze_bn, 195 | *args, 196 | **kwargs 197 | ) 198 | 199 | 200 | class ResNet1D50(ResNet1D): 201 | """ 202 | Constructs a `keras.models.Model` according to the ResNet50 specifications. 203 | 204 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 205 | 206 | :param blocks: the network’s residual architecture 207 | 208 | :param include_top: if true, includes classification layers 209 | 210 | :param classes: number of classes to classify (include_top must be true) 211 | 212 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 213 | 214 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 215 | 216 | Usage: 217 | 218 | >>> import keras_resnet.models 219 | 220 | >>> shape, classes = (224, 224, 3), 1000 221 | 222 | >>> x = keras.layers.Input(shape) 223 | 224 | >>> model = keras_resnet.models.ResNet50(x) 225 | 226 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 227 | """ 228 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 229 | if blocks is None: 230 | blocks = [3, 4, 6, 3] 231 | 232 | numerical_names = [False, False, False, False] 233 | 234 | super(ResNet1D50, self).__init__( 235 | inputs, 236 | blocks, 237 | numerical_names=numerical_names, 238 | block=keras_resnet.blocks.bottleneck_1d, 239 | include_top=include_top, 240 | classes=classes, 241 | freeze_bn=freeze_bn, 242 | *args, 243 | **kwargs 244 | ) 245 | 246 | 247 | class ResNet1D101(ResNet1D): 248 | """ 249 | Constructs a `keras.models.Model` according to the ResNet101 specifications. 250 | 251 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 252 | 253 | :param blocks: the network’s residual architecture 254 | 255 | :param include_top: if true, includes classification layers 256 | 257 | :param classes: number of classes to classify (include_top must be true) 258 | 259 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 260 | 261 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 262 | 263 | Usage: 264 | 265 | >>> import keras_resnet.models 266 | 267 | >>> shape, classes = (224, 224, 3), 1000 268 | 269 | >>> x = keras.layers.Input(shape) 270 | 271 | >>> model = keras_resnet.models.ResNet101(x, classes=classes) 272 | 273 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 274 | """ 275 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 276 | if blocks is None: 277 | blocks = [3, 4, 23, 3] 278 | 279 | numerical_names = [False, True, True, False] 280 | 281 | super(ResNet1D101, self).__init__( 282 | inputs, 283 | blocks, 284 | numerical_names=numerical_names, 285 | block=keras_resnet.blocks.bottleneck_1d, 286 | include_top=include_top, 287 | classes=classes, 288 | freeze_bn=freeze_bn, 289 | *args, 290 | **kwargs 291 | ) 292 | 293 | 294 | class ResNet1D152(ResNet1D): 295 | """ 296 | Constructs a `keras.models.Model` according to the ResNet152 specifications. 297 | 298 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 299 | 300 | :param blocks: the network’s residual architecture 301 | 302 | :param include_top: if true, includes classification layers 303 | 304 | :param classes: number of classes to classify (include_top must be true) 305 | 306 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 307 | 308 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 309 | 310 | Usage: 311 | 312 | >>> import keras_resnet.models 313 | 314 | >>> shape, classes = (224, 224, 3), 1000 315 | 316 | >>> x = keras.layers.Input(shape) 317 | 318 | >>> model = keras_resnet.models.ResNet152(x, classes=classes) 319 | 320 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 321 | """ 322 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 323 | if blocks is None: 324 | blocks = [3, 8, 36, 3] 325 | 326 | numerical_names = [False, True, True, False] 327 | 328 | super(ResNet1D152, self).__init__( 329 | inputs, 330 | blocks, 331 | numerical_names=numerical_names, 332 | block=keras_resnet.blocks.bottleneck_1d, 333 | include_top=include_top, 334 | classes=classes, 335 | freeze_bn=freeze_bn, 336 | *args, 337 | **kwargs 338 | ) 339 | 340 | 341 | class ResNet1D200(ResNet1D): 342 | """ 343 | Constructs a `keras.models.Model` according to the ResNet200 specifications. 344 | 345 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 346 | 347 | :param blocks: the network’s residual architecture 348 | 349 | :param include_top: if true, includes classification layers 350 | 351 | :param classes: number of classes to classify (include_top must be true) 352 | 353 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 354 | 355 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 356 | 357 | Usage: 358 | 359 | >>> import keras_resnet.models 360 | 361 | >>> shape, classes = (224, 224, 3), 1000 362 | 363 | >>> x = keras.layers.Input(shape) 364 | 365 | >>> model = keras_resnet.models.ResNet200(x, classes=classes) 366 | 367 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 368 | """ 369 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 370 | if blocks is None: 371 | blocks = [3, 24, 36, 3] 372 | 373 | numerical_names = [False, True, True, False] 374 | 375 | super(ResNet1D200, self).__init__( 376 | inputs, 377 | blocks, 378 | numerical_names=numerical_names, 379 | block=keras_resnet.blocks.bottleneck_1d, 380 | include_top=include_top, 381 | classes=classes, 382 | freeze_bn=freeze_bn, 383 | *args, 384 | **kwargs 385 | ) 386 | -------------------------------------------------------------------------------- /keras_resnet/models/_2d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.models._2d 5 | ~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular two-dimensional residual models. 8 | """ 9 | 10 | import keras.backend 11 | import keras.layers 12 | import keras.models 13 | import keras.regularizers 14 | 15 | import keras_resnet.blocks 16 | import keras_resnet.layers 17 | 18 | 19 | class ResNet2D(keras.Model): 20 | """ 21 | Constructs a `keras.models.Model` object using the given block count. 22 | 23 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 24 | 25 | :param blocks: the network’s residual architecture 26 | 27 | :param block: a residual block (e.g. 
an instance of `keras_resnet.blocks.basic_2d`) 28 | 29 | :param include_top: if true, includes classification layers 30 | 31 | :param classes: number of classes to classify (include_top must be true) 32 | 33 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 34 | 35 | :param numerical_names: list of bool, same size as blocks, used to indicate whether names of layers should include numbers or letters 36 | 37 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 38 | 39 | Usage: 40 | 41 | >>> import keras_resnet.blocks 42 | >>> import keras_resnet.models 43 | 44 | >>> shape, classes = (224, 224, 3), 1000 45 | 46 | >>> x = keras.layers.Input(shape) 47 | 48 | >>> blocks = [2, 2, 2, 2] 49 | 50 | >>> block = keras_resnet.blocks.basic_2d 51 | 52 | >>> model = keras_resnet.models.ResNet(x, classes, blocks, block, classes=classes) 53 | 54 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 55 | """ 56 | def __init__( 57 | self, 58 | inputs, 59 | blocks, 60 | block, 61 | include_top=True, 62 | classes=1000, 63 | freeze_bn=True, 64 | numerical_names=None, 65 | *args, 66 | **kwargs 67 | ): 68 | if keras.backend.image_data_format() == "channels_last": 69 | axis = 3 70 | else: 71 | axis = 1 72 | 73 | if numerical_names is None: 74 | numerical_names = [True] * len(blocks) 75 | 76 | x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) 77 | x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) 78 | x = keras.layers.Activation("relu", name="conv1_relu")(x) 79 | x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) 80 | 81 | features = 64 82 | 83 | outputs = [] 84 | 85 | for stage_id, iterations in enumerate(blocks): 86 | for block_id in range(iterations): 87 | x = block( 88 | features, 89 | stage_id, 90 | block_id, 91 | numerical_name=(block_id > 0 and numerical_names[stage_id]), 92 | freeze_bn=freeze_bn 93 | )(x) 94 | 95 | features *= 2 96 | 97 | outputs.append(x) 98 | 99 | if include_top: 100 | assert classes > 0 101 | 102 | x = keras.layers.GlobalAveragePooling2D(name="pool5")(x) 103 | x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) 104 | 105 | super(ResNet2D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) 106 | else: 107 | # Else output each stages features 108 | super(ResNet2D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) 109 | 110 | 111 | class ResNet2D18(ResNet2D): 112 | """ 113 | Constructs a `keras.models.Model` according to the ResNet18 specifications. 114 | 115 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 116 | 117 | :param blocks: the network’s residual architecture 118 | 119 | :param include_top: if true, includes classification layers 120 | 121 | :param classes: number of classes to classify (include_top must be true) 122 | 123 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 124 | 125 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 126 | 127 | Usage: 128 | 129 | >>> import keras_resnet.models 130 | 131 | >>> shape, classes = (224, 224, 3), 1000 132 | 133 | >>> x = keras.layers.Input(shape) 134 | 135 | >>> model = keras_resnet.models.ResNet18(x, classes=classes) 136 | 137 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 138 | """ 139 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 140 | if blocks is None: 141 | blocks = [2, 2, 2, 2] 142 | 143 | super(ResNet2D18, self).__init__( 144 | inputs, 145 | blocks, 146 | block=keras_resnet.blocks.basic_2d, 147 | include_top=include_top, 148 | classes=classes, 149 | freeze_bn=freeze_bn, 150 | *args, 151 | **kwargs 152 | ) 153 | 154 | 155 | class ResNet2D34(ResNet2D): 156 | """ 157 | Constructs a `keras.models.Model` according to the ResNet34 specifications. 158 | 159 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 160 | 161 | :param blocks: the network’s residual architecture 162 | 163 | :param include_top: if true, includes classification layers 164 | 165 | :param classes: number of classes to classify (include_top must be true) 166 | 167 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 168 | 169 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 170 | 171 | Usage: 172 | 173 | >>> import keras_resnet.models 174 | 175 | >>> shape, classes = (224, 224, 3), 1000 176 | 177 | >>> x = keras.layers.Input(shape) 178 | 179 | >>> model = keras_resnet.models.ResNet34(x, classes=classes) 180 | 181 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 182 | """ 183 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 184 | if blocks is None: 185 | blocks = [3, 4, 6, 3] 186 | 187 | super(ResNet2D34, self).__init__( 188 | inputs, 189 | blocks, 190 | block=keras_resnet.blocks.basic_2d, 191 | include_top=include_top, 192 | classes=classes, 193 | freeze_bn=freeze_bn, 194 | *args, 195 | **kwargs 196 | ) 197 | 198 | 199 | class ResNet2D50(ResNet2D): 200 | """ 201 | Constructs a `keras.models.Model` according to the ResNet50 specifications. 202 | 203 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 204 | 205 | :param blocks: the network’s residual architecture 206 | 207 | :param include_top: if true, includes classification layers 208 | 209 | :param classes: number of classes to classify (include_top must be true) 210 | 211 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 212 | 213 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 214 | 215 | Usage: 216 | 217 | >>> import keras_resnet.models 218 | 219 | >>> shape, classes = (224, 224, 3), 1000 220 | 221 | >>> x = keras.layers.Input(shape) 222 | 223 | >>> model = keras_resnet.models.ResNet50(x) 224 | 225 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 226 | """ 227 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 228 | if blocks is None: 229 | blocks = [3, 4, 6, 3] 230 | 231 | numerical_names = [False, False, False, False] 232 | 233 | super(ResNet2D50, self).__init__( 234 | inputs, 235 | blocks, 236 | numerical_names=numerical_names, 237 | block=keras_resnet.blocks.bottleneck_2d, 238 | include_top=include_top, 239 | classes=classes, 240 | freeze_bn=freeze_bn, 241 | *args, 242 | **kwargs 243 | ) 244 | 245 | 246 | class ResNet2D101(ResNet2D): 247 | """ 248 | Constructs a `keras.models.Model` according to the ResNet101 specifications. 249 | 250 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 251 | 252 | :param blocks: the network’s residual architecture 253 | 254 | :param include_top: if true, includes classification layers 255 | 256 | :param classes: number of classes to classify (include_top must be true) 257 | 258 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 259 | 260 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 261 | 262 | Usage: 263 | 264 | >>> import keras_resnet.models 265 | 266 | >>> shape, classes = (224, 224, 3), 1000 267 | 268 | >>> x = keras.layers.Input(shape) 269 | 270 | >>> model = keras_resnet.models.ResNet101(x, classes=classes) 271 | 272 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 273 | """ 274 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 275 | if blocks is None: 276 | blocks = [3, 4, 23, 3] 277 | 278 | numerical_names = [False, True, True, False] 279 | 280 | super(ResNet2D101, self).__init__( 281 | inputs, 282 | blocks, 283 | numerical_names=numerical_names, 284 | block=keras_resnet.blocks.bottleneck_2d, 285 | include_top=include_top, 286 | classes=classes, 287 | freeze_bn=freeze_bn, 288 | *args, 289 | **kwargs 290 | ) 291 | 292 | 293 | class ResNet2D152(ResNet2D): 294 | """ 295 | Constructs a `keras.models.Model` according to the ResNet152 specifications. 296 | 297 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 298 | 299 | :param blocks: the network’s residual architecture 300 | 301 | :param include_top: if true, includes classification layers 302 | 303 | :param classes: number of classes to classify (include_top must be true) 304 | 305 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 306 | 307 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 308 | 309 | Usage: 310 | 311 | >>> import keras_resnet.models 312 | 313 | >>> shape, classes = (224, 224, 3), 1000 314 | 315 | >>> x = keras.layers.Input(shape) 316 | 317 | >>> model = keras_resnet.models.ResNet152(x, classes=classes) 318 | 319 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 320 | """ 321 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 322 | if blocks is None: 323 | blocks = [3, 8, 36, 3] 324 | 325 | numerical_names = [False, True, True, False] 326 | 327 | super(ResNet2D152, self).__init__( 328 | inputs, 329 | blocks, 330 | numerical_names=numerical_names, 331 | block=keras_resnet.blocks.bottleneck_2d, 332 | include_top=include_top, 333 | classes=classes, 334 | freeze_bn=freeze_bn, 335 | *args, 336 | **kwargs 337 | ) 338 | 339 | 340 | class ResNet2D200(ResNet2D): 341 | """ 342 | Constructs a `keras.models.Model` according to the ResNet200 specifications. 343 | 344 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 345 | 346 | :param blocks: the network’s residual architecture 347 | 348 | :param include_top: if true, includes classification layers 349 | 350 | :param classes: number of classes to classify (include_top must be true) 351 | 352 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 353 | 354 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 355 | 356 | Usage: 357 | 358 | >>> import keras_resnet.models 359 | 360 | >>> shape, classes = (224, 224, 3), 1000 361 | 362 | >>> x = keras.layers.Input(shape) 363 | 364 | >>> model = keras_resnet.models.ResNet200(x, classes=classes) 365 | 366 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 367 | """ 368 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 369 | if blocks is None: 370 | blocks = [3, 24, 36, 3] 371 | 372 | numerical_names = [False, True, True, False] 373 | 374 | super(ResNet2D200, self).__init__( 375 | inputs, 376 | blocks, 377 | numerical_names=numerical_names, 378 | block=keras_resnet.blocks.bottleneck_2d, 379 | include_top=include_top, 380 | classes=classes, 381 | freeze_bn=freeze_bn, 382 | *args, 383 | **kwargs 384 | ) 385 | -------------------------------------------------------------------------------- /keras_resnet/models/_3d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.models._3d 5 | ~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular three-dimensional residual models. 8 | """ 9 | 10 | import keras.backend 11 | import keras.layers 12 | import keras.models 13 | import keras.regularizers 14 | 15 | import keras_resnet.blocks 16 | import keras_resnet.layers 17 | 18 | 19 | class ResNet3D(keras.Model): 20 | """ 21 | Constructs a `keras.models.Model` object using the given block count. 22 | 23 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 24 | 25 | :param blocks: the network’s residual architecture 26 | 27 | :param block: a residual block (e.g. 
an instance of `keras_resnet.blocks.basic_3d`) 28 | 29 | :param include_top: if true, includes classification layers 30 | 31 | :param classes: number of classes to classify (include_top must be true) 32 | 33 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 34 | 35 | :param numerical_names: list of bool, same size as blocks, used to indicate whether names of layers should include numbers or letters 36 | 37 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 38 | 39 | Usage: 40 | 41 | >>> import keras_resnet.blocks 42 | >>> import keras_resnet.models 43 | 44 | >>> shape, classes = (224, 224, 3), 1000 45 | 46 | >>> x = keras.layers.Input(shape) 47 | 48 | >>> blocks = [2, 2, 2, 2] 49 | 50 | >>> block = keras_resnet.blocks.basic_3d 51 | 52 | >>> model = keras_resnet.models.ResNet(x, classes, blocks, block, classes=classes) 53 | 54 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 55 | """ 56 | def __init__( 57 | self, 58 | inputs, 59 | blocks, 60 | block, 61 | include_top=True, 62 | classes=1000, 63 | freeze_bn=True, 64 | numerical_names=None, 65 | *args, 66 | **kwargs 67 | ): 68 | if keras.backend.image_data_format() == "channels_last": 69 | axis = 3 70 | else: 71 | axis = 1 72 | 73 | if numerical_names is None: 74 | numerical_names = [True] * len(blocks) 75 | 76 | x = keras.layers.ZeroPadding3D(padding=3, name="padding_conv1")(inputs) 77 | x = keras.layers.Conv3D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) 78 | x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) 79 | x = keras.layers.Activation("relu", name="conv1_relu")(x) 80 | x = keras.layers.MaxPooling3D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) 81 | 82 | features = 64 83 | 84 | outputs = [] 85 | 86 | for stage_id, iterations in enumerate(blocks): 87 | for block_id in range(iterations): 88 | x = block( 89 | features, 90 | stage_id, 91 | block_id, 92 | numerical_name=(block_id > 0 and numerical_names[stage_id]), 93 | freeze_bn=freeze_bn 94 | )(x) 95 | 96 | features *= 2 97 | 98 | outputs.append(x) 99 | 100 | if include_top: 101 | assert classes > 0 102 | 103 | x = keras.layers.GlobalAveragePooling3D(name="pool5")(x) 104 | x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) 105 | 106 | super(ResNet3D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) 107 | else: 108 | # Else output each stages features 109 | super(ResNet3D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) 110 | 111 | 112 | class ResNet3D18(ResNet3D): 113 | """ 114 | Constructs a `keras.models.Model` according to the ResNet18 specifications. 115 | 116 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 117 | 118 | :param blocks: the network’s residual architecture 119 | 120 | :param include_top: if true, includes classification layers 121 | 122 | :param classes: number of classes to classify (include_top must be true) 123 | 124 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 125 | 126 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 127 | 128 | Usage: 129 | 130 | >>> import keras_resnet.models 131 | 132 | >>> shape, classes = (224, 224, 3), 1000 133 | 134 | >>> x = keras.layers.Input(shape) 135 | 136 | >>> model = keras_resnet.models.ResNet18(x, classes=classes) 137 | 138 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 139 | """ 140 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 141 | if blocks is None: 142 | blocks = [2, 2, 2, 2] 143 | 144 | super(ResNet3D18, self).__init__( 145 | inputs, 146 | blocks, 147 | block=keras_resnet.blocks.basic_3d, 148 | include_top=include_top, 149 | classes=classes, 150 | freeze_bn=freeze_bn, 151 | *args, 152 | **kwargs 153 | ) 154 | 155 | 156 | class ResNet3D34(ResNet3D): 157 | """ 158 | Constructs a `keras.models.Model` according to the ResNet34 specifications. 159 | 160 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 161 | 162 | :param blocks: the network’s residual architecture 163 | 164 | :param include_top: if true, includes classification layers 165 | 166 | :param classes: number of classes to classify (include_top must be true) 167 | 168 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 169 | 170 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 171 | 172 | Usage: 173 | 174 | >>> import keras_resnet.models 175 | 176 | >>> shape, classes = (224, 224, 3), 1000 177 | 178 | >>> x = keras.layers.Input(shape) 179 | 180 | >>> model = keras_resnet.models.ResNet34(x, classes=classes) 181 | 182 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 183 | """ 184 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 185 | if blocks is None: 186 | blocks = [3, 4, 6, 3] 187 | 188 | super(ResNet3D34, self).__init__( 189 | inputs, 190 | blocks, 191 | block=keras_resnet.blocks.basic_3d, 192 | include_top=include_top, 193 | classes=classes, 194 | freeze_bn=freeze_bn, 195 | *args, 196 | **kwargs 197 | ) 198 | 199 | 200 | class ResNet3D50(ResNet3D): 201 | """ 202 | Constructs a `keras.models.Model` according to the ResNet50 specifications. 203 | 204 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 205 | 206 | :param blocks: the network’s residual architecture 207 | 208 | :param include_top: if true, includes classification layers 209 | 210 | :param classes: number of classes to classify (include_top must be true) 211 | 212 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 213 | 214 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 215 | 216 | Usage: 217 | 218 | >>> import keras_resnet.models 219 | 220 | >>> shape, classes = (224, 224, 3), 1000 221 | 222 | >>> x = keras.layers.Input(shape) 223 | 224 | >>> model = keras_resnet.models.ResNet50(x) 225 | 226 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 227 | """ 228 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 229 | if blocks is None: 230 | blocks = [3, 4, 6, 3] 231 | 232 | numerical_names = [False, False, False, False] 233 | 234 | super(ResNet3D50, self).__init__( 235 | inputs, 236 | blocks, 237 | numerical_names=numerical_names, 238 | block=keras_resnet.blocks.bottleneck_3d, 239 | include_top=include_top, 240 | classes=classes, 241 | freeze_bn=freeze_bn, 242 | *args, 243 | **kwargs 244 | ) 245 | 246 | 247 | class ResNet3D101(ResNet3D): 248 | """ 249 | Constructs a `keras.models.Model` according to the ResNet101 specifications. 250 | 251 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 252 | 253 | :param blocks: the network’s residual architecture 254 | 255 | :param include_top: if true, includes classification layers 256 | 257 | :param classes: number of classes to classify (include_top must be true) 258 | 259 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 260 | 261 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 262 | 263 | Usage: 264 | 265 | >>> import keras_resnet.models 266 | 267 | >>> shape, classes = (224, 224, 3), 1000 268 | 269 | >>> x = keras.layers.Input(shape) 270 | 271 | >>> model = keras_resnet.models.ResNet101(x, classes=classes) 272 | 273 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 274 | """ 275 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 276 | if blocks is None: 277 | blocks = [3, 4, 23, 3] 278 | 279 | numerical_names = [False, True, True, False] 280 | 281 | super(ResNet3D101, self).__init__( 282 | inputs, 283 | blocks, 284 | numerical_names=numerical_names, 285 | block=keras_resnet.blocks.bottleneck_3d, 286 | include_top=include_top, 287 | classes=classes, 288 | freeze_bn=freeze_bn, 289 | *args, 290 | **kwargs 291 | ) 292 | 293 | 294 | class ResNet3D152(ResNet3D): 295 | """ 296 | Constructs a `keras.models.Model` according to the ResNet152 specifications. 297 | 298 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 299 | 300 | :param blocks: the network’s residual architecture 301 | 302 | :param include_top: if true, includes classification layers 303 | 304 | :param classes: number of classes to classify (include_top must be true) 305 | 306 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) 307 | 308 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 309 | 310 | Usage: 311 | 312 | >>> import keras_resnet.models 313 | 314 | >>> shape, classes = (224, 224, 3), 1000 315 | 316 | >>> x = keras.layers.Input(shape) 317 | 318 | >>> model = keras_resnet.models.ResNet152(x, classes=classes) 319 | 320 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 321 | """ 322 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 323 | if blocks is None: 324 | blocks = [3, 8, 36, 3] 325 | 326 | numerical_names = [False, True, True, False] 327 | 328 | super(ResNet3D152, self).__init__( 329 | inputs, 330 | blocks, 331 | numerical_names=numerical_names, 332 | block=keras_resnet.blocks.bottleneck_3d, 333 | include_top=include_top, 334 | classes=classes, 335 | freeze_bn=freeze_bn, 336 | *args, 337 | **kwargs 338 | ) 339 | 340 | 341 | class ResNet3D200(ResNet3D): 342 | """ 343 | Constructs a `keras.models.Model` according to the ResNet200 specifications. 344 | 345 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 346 | 347 | :param blocks: the network’s residual architecture 348 | 349 | :param include_top: if true, includes classification layers 350 | 351 | :param classes: number of classes to classify (include_top must be true) 352 | 353 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 354 | 355 | :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 356 | 357 | Usage: 358 | 359 | >>> import keras_resnet.models 360 | 361 | >>> shape, classes = (224, 224, 3), 1000 362 | 363 | >>> x = keras.layers.Input(shape) 364 | 365 | >>> model = keras_resnet.models.ResNet200(x, classes=classes) 366 | 367 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 368 | """ 369 | def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): 370 | if blocks is None: 371 | blocks = [3, 24, 36, 3] 372 | 373 | numerical_names = [False, True, True, False] 374 | 375 | super(ResNet3D200, self).__init__( 376 | inputs, 377 | blocks, 378 | numerical_names=numerical_names, 379 | block=keras_resnet.blocks.bottleneck_3d, 380 | include_top=include_top, 381 | classes=classes, 382 | freeze_bn=freeze_bn, 383 | *args, 384 | **kwargs 385 | ) 386 | -------------------------------------------------------------------------------- /keras_resnet/models/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.models 5 | ~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular residual models. 
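For example, the two-dimensional models can be used as feature extractors by passing `include_top=False`, in which case the model outputs one feature map per residual stage; a minimal sketch (`ResNet50` is the backwards-compatible alias for `ResNet2D50` defined at the end of this module):

>>> import keras.layers

>>> import keras_resnet.models

>>> x = keras.layers.Input((224, 224, 3))

>>> backbone = keras_resnet.models.ResNet50(x, include_top=False)

>>> c2, c3, c4, c5 = backbone.outputs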
8 | """ 9 | 10 | from ._1d import ( 11 | ResNet1D, 12 | ResNet1D18, 13 | ResNet1D34, 14 | ResNet1D50, 15 | ResNet1D101, 16 | ResNet1D152, 17 | ResNet1D200 18 | ) 19 | 20 | from ._2d import ( 21 | ResNet2D, 22 | ResNet2D18, 23 | ResNet2D34, 24 | ResNet2D50, 25 | ResNet2D101, 26 | ResNet2D152, 27 | ResNet2D200 28 | ) 29 | 30 | from ._3d import ( 31 | ResNet3D, 32 | ResNet3D18, 33 | ResNet3D34, 34 | ResNet3D50, 35 | ResNet3D101, 36 | ResNet3D152, 37 | ResNet3D200 38 | ) 39 | 40 | from ._feature_pyramid_2d import ( 41 | FPN2D, 42 | FPN2D18, 43 | FPN2D34, 44 | FPN2D50, 45 | FPN2D101, 46 | FPN2D152, 47 | FPN2D200 48 | ) 49 | 50 | from ._time_distributed_2d import ( 51 | TimeDistributedResNet, 52 | TimeDistributedResNet18, 53 | TimeDistributedResNet34, 54 | TimeDistributedResNet50, 55 | TimeDistributedResNet101, 56 | TimeDistributedResNet152, 57 | TimeDistributedResNet200 58 | ) 59 | 60 | # for backwards compatibility reasons 61 | ResNet = ResNet2D 62 | ResNet18 = ResNet2D18 63 | ResNet34 = ResNet2D34 64 | ResNet50 = ResNet2D50 65 | ResNet101 = ResNet2D101 66 | ResNet152 = ResNet2D152 67 | ResNet200 = ResNet2D200 68 | -------------------------------------------------------------------------------- /keras_resnet/models/_feature_pyramid_2d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.models._feature_pyramid_2d 5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular two-dimensional feature pyramid networks (FPNs). 8 | """ 9 | 10 | import keras.backend 11 | import keras.layers 12 | import keras.models 13 | import keras.regularizers 14 | 15 | import keras_resnet.blocks 16 | import keras_resnet.layers 17 | 18 | 19 | class FPN2D(keras.Model): 20 | def __init__( 21 | self, 22 | inputs, 23 | blocks, 24 | block, 25 | freeze_bn=True, 26 | numerical_names=None, 27 | *args, 28 | **kwargs 29 | ): 30 | if keras.backend.image_data_format() == "channels_last": 31 | axis = 3 32 | else: 33 | axis = 1 34 | 35 | if numerical_names is None: 36 | numerical_names = [True] * len(blocks) 37 | 38 | x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) 39 | x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) 40 | x = keras.layers.Activation("relu", name="conv1_relu")(x) 41 | x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) 42 | 43 | features = 64 44 | 45 | outputs = [] 46 | 47 | for stage_id, iterations in enumerate(blocks): 48 | for block_id in range(iterations): 49 | x = block( 50 | features, 51 | stage_id, 52 | block_id, 53 | numerical_name=(block_id > 0 and numerical_names[stage_id]), 54 | freeze_bn=freeze_bn 55 | )(x) 56 | 57 | features *= 2 58 | 59 | outputs.append(x) 60 | 61 | c2, c3, c4, c5 = outputs 62 | 63 | pyramid_5 = keras.layers.Conv2D( 64 | filters=256, 65 | kernel_size=1, 66 | strides=1, 67 | padding="same", 68 | name="c5_reduced" 69 | )(c5) 70 | 71 | upsampled_p5 = keras.layers.UpSampling2D( 72 | interpolation="bilinear", 73 | name="p5_upsampled", 74 | size=(2, 2) 75 | )(pyramid_5) 76 | 77 | pyramid_4 = keras.layers.Conv2D( 78 | filters=256, 79 | kernel_size=1, 80 | strides=1, 81 | padding="same", 82 | name="c4_reduced" 83 | )(c4) 84 | 85 | pyramid_4 = keras.layers.Add( 86 | name="p4_merged" 87 | )([upsampled_p5, pyramid_4]) 88 | 89 | upsampled_p4 = keras.layers.UpSampling2D( 90 | interpolation="bilinear", 91 | name="p4_upsampled", 92 | 
size=(2, 2) 93 | )(pyramid_4) 94 | 95 | pyramid_4 = keras.layers.Conv2D( 96 | filters=256, 97 | kernel_size=3, 98 | strides=1, 99 | padding="same", 100 | name="p4" 101 | )(pyramid_4) 102 | 103 | pyramid_3 = keras.layers.Conv2D( 104 | filters=256, 105 | kernel_size=1, 106 | strides=1, 107 | padding="same", 108 | name="c3_reduced" 109 | )(c3) 110 | 111 | pyramid_3 = keras.layers.Add( 112 | name="p3_merged" 113 | )([upsampled_p4, pyramid_3]) 114 | 115 | upsampled_p3 = keras.layers.UpSampling2D( 116 | interpolation="bilinear", 117 | name="p3_upsampled", 118 | size=(2, 2) 119 | )(pyramid_3) 120 | 121 | pyramid_3 = keras.layers.Conv2D( 122 | filters=256, 123 | kernel_size=3, 124 | strides=1, 125 | padding="same", 126 | name="p3" 127 | )(pyramid_3) 128 | 129 | pyramid_2 = keras.layers.Conv2D( 130 | filters=256, 131 | kernel_size=1, 132 | strides=1, 133 | padding="same", 134 | name="c2_reduced" 135 | )(c2) 136 | 137 | pyramid_2 = keras.layers.Add( 138 | name="p2_merged" 139 | )([upsampled_p3, pyramid_2]) 140 | 141 | pyramid_2 = keras.layers.Conv2D( 142 | filters=256, 143 | kernel_size=3, 144 | strides=1, 145 | padding="same", 146 | name="p2" 147 | )(pyramid_2) 148 | 149 | pyramid_6 = keras.layers.MaxPooling2D(strides=2, name="p6")(pyramid_5) 150 | 151 | outputs = [ 152 | pyramid_2, 153 | pyramid_3, 154 | pyramid_4, 155 | pyramid_5, 156 | pyramid_6 157 | ] 158 | 159 | super(FPN2D, self).__init__( 160 | inputs=inputs, 161 | outputs=outputs, 162 | *args, 163 | **kwargs 164 | ) 165 | 166 | 167 | class FPN2D50(FPN2D): 168 | def __init__(self, inputs, blocks=None, *args, **kwargs): 169 | if blocks is None: 170 | blocks = [3, 4, 6, 3] 171 | 172 | numerical_names = [False, False, False, False] 173 | 174 | super(FPN2D50, self).__init__( 175 | inputs, 176 | blocks, 177 | numerical_names=numerical_names, 178 | block=keras_resnet.blocks.bottleneck_2d, 179 | *args, 180 | **kwargs 181 | ) 182 | 183 | 184 | class FPN2D18(FPN2D): 185 | def __init__(self, inputs, blocks=None, *args, **kwargs): 186 | if blocks is None: 187 | blocks = [2, 2, 2, 2] 188 | 189 | super(FPN2D18, self).__init__( 190 | inputs, 191 | blocks, 192 | block=keras_resnet.blocks.basic_2d, 193 | *args, 194 | **kwargs 195 | ) 196 | 197 | 198 | class FPN2D34(FPN2D): 199 | def __init__(self, inputs, blocks=None, *args, **kwargs): 200 | if blocks is None: 201 | blocks = [3, 4, 6, 3] 202 | 203 | super(FPN2D34, self).__init__( 204 | inputs, 205 | blocks, 206 | block=keras_resnet.blocks.basic_2d, 207 | *args, 208 | **kwargs 209 | ) 210 | 211 | 212 | class FPN2D101(FPN2D): 213 | def __init__(self, inputs, blocks=None, *args, **kwargs): 214 | if blocks is None: 215 | blocks = [3, 4, 23, 3] 216 | 217 | numerical_names = [False, True, True, False] 218 | 219 | super(FPN2D101, self).__init__( 220 | inputs, 221 | blocks, 222 | numerical_names=numerical_names, 223 | block=keras_resnet.blocks.bottleneck_2d, 224 | *args, 225 | **kwargs 226 | ) 227 | 228 | 229 | class FPN2D152(FPN2D): 230 | def __init__(self, inputs, blocks=None, *args, **kwargs): 231 | if blocks is None: 232 | blocks = [3, 8, 36, 3] 233 | 234 | numerical_names = [False, True, True, False] 235 | 236 | super(FPN2D152, self).__init__( 237 | inputs, 238 | blocks, 239 | numerical_names=numerical_names, 240 | block=keras_resnet.blocks.bottleneck_2d, 241 | *args, 242 | **kwargs 243 | ) 244 | 245 | 246 | class FPN2D200(FPN2D): 247 | def __init__(self, inputs, blocks=None, *args, **kwargs): 248 | if blocks is None: 249 | blocks = [3, 24, 36, 3] 250 | 251 | numerical_names = [False, True, True, False] 252 | 
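# Added note: the numerical_names flags select, per stage, whether residual blocks are named with numeric suffixes ("b1", "b2", ...) instead of letters, mirroring the Caffe layer naming used by the deeper ResNet variants.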
253 | super(FPN2D200, self).__init__( 254 | inputs, 255 | blocks, 256 | numerical_names=numerical_names, 257 | block=keras_resnet.blocks.bottleneck_2d, 258 | *args, 259 | **kwargs 260 | ) 261 | -------------------------------------------------------------------------------- /keras_resnet/models/_time_distributed_2d.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | keras_resnet.models._time_distributed_2d 5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module implements popular time distributed two-dimensional residual networks. 8 | """ 9 | 10 | import keras.backend 11 | import keras.layers 12 | import keras.models 13 | import keras.regularizers 14 | 15 | import keras_resnet.blocks 16 | import keras_resnet.layers 17 | 18 | 19 | def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, freeze_bn=True, *args, **kwargs): 20 | """ 21 | Constructs a time distributed `keras.models.Model` object using the given block count. 22 | 23 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 24 | 25 | :param blocks: the network’s residual architecture 26 | 27 | :param block: a time distributed residual block (e.g. an instance of `keras_resnet.blocks.time_distributed_basic_2d`) 28 | 29 | :param include_top: if true, includes classification layers 30 | 31 | :param classes: number of classes to classify (include_top must be true) 32 | 33 | :param freeze_bn: if true, freezes BatchNormalization layers (ie. no updates are done in these layers) 34 | 35 | :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 36 | 37 | Usage: 38 | 39 | >>> import keras_resnet.blocks 40 | >>> import keras_resnet.models 41 | 42 | >>> shape, classes = (224, 224, 3), 1000 43 | 44 | >>> x = keras.layers.Input(shape) 45 | 46 | >>> blocks = [2, 2, 2, 2] 47 | 48 | >>> blocks = keras_resnet.blocks.time_distributed_basic_2d 49 | 50 | >>> y = keras_resnet.models.TimeDistributedResNet(x, classes, blocks, blocks) 51 | 52 | >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) 53 | 54 | >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) 55 | 56 | >>> model = keras.models.Model(x, y) 57 | 58 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 59 | """ 60 | if keras.backend.image_data_format() == "channels_last": 61 | axis = 3 62 | else: 63 | axis = 1 64 | 65 | x = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=3), name="padding_conv1")(inputs) 66 | x = keras.layers.TimeDistributed(keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False), name="conv1")(x) 67 | x = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn_conv1")(x) 68 | x = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="conv1_relu")(x) 69 | x = keras.layers.TimeDistributed(keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same"), name="pool1")(x) 70 | 71 | features = 64 72 | 73 | outputs = [] 74 | 75 | for stage_id, iterations in enumerate(blocks): 76 | for block_id in range(iterations): 77 | x = block(features, stage_id, block_id, numerical_name=(blocks[stage_id] > 6), freeze_bn=freeze_bn)(x) 78 | 79 | features *= 2 80 | outputs.append(x) 81 | 82 | if include_top: 83 | assert classes > 0 84 | 85 | x = keras.layers.TimeDistributed(keras.layers.GlobalAveragePooling2D(), 
name="pool5")(x) 86 | x = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"), name="fc1000")(x) 87 | 88 | return keras.models.Model(inputs=inputs, outputs=x, *args, **kwargs) 89 | else: 90 | # Else output each stages features 91 | return keras.models.Model(inputs=inputs, outputs=outputs, *args, **kwargs) 92 | 93 | 94 | def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): 95 | """ 96 | Constructs a time distributed `keras.models.Model` according to the ResNet18 specifications. 97 | 98 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 99 | 100 | :param blocks: the network’s residual architecture 101 | 102 | :param include_top: if true, includes classification layers 103 | 104 | :param classes: number of classes to classify (include_top must be true) 105 | 106 | :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 107 | 108 | Usage: 109 | 110 | >>> import keras_resnet.models 111 | 112 | >>> shape, classes = (224, 224, 3), 1000 113 | 114 | >>> x = keras.layers.Input(shape) 115 | 116 | >>> y = keras_resnet.models.TimeDistributedResNet18(x) 117 | 118 | >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) 119 | 120 | >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) 121 | 122 | >>> model = keras.models.Model(x, y) 123 | 124 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 125 | """ 126 | if blocks is None: 127 | blocks = [2, 2, 2, 2] 128 | 129 | return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_basic_2d, include_top=include_top, classes=classes, *args, **kwargs) 130 | 131 | 132 | def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): 133 | """ 134 | Constructs a time distributed `keras.models.Model` according to the ResNet34 specifications. 135 | 136 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 137 | 138 | :param blocks: the network’s residual architecture 139 | 140 | :param include_top: if true, includes classification layers 141 | 142 | :param classes: number of classes to classify (include_top must be true) 143 | 144 | :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 145 | 146 | Usage: 147 | 148 | >>> import keras_resnet.models 149 | 150 | >>> shape, classes = (224, 224, 3), 1000 151 | 152 | >>> x = keras.layers.Input(shape) 153 | 154 | >>> y = keras_resnet.models.TimeDistributedResNet34(x) 155 | 156 | >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) 157 | 158 | >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) 159 | 160 | >>> model = keras.models.Model(x, y) 161 | 162 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 163 | """ 164 | if blocks is None: 165 | blocks = [3, 4, 6, 3] 166 | 167 | return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_basic_2d, include_top=include_top, classes=classes, *args, **kwargs) 168 | 169 | 170 | def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): 171 | """ 172 | Constructs a time distributed `keras.models.Model` according to the ResNet50 specifications. 173 | 174 | :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) 175 | 176 | :param blocks: the network’s residual architecture 177 | 178 | :param include_top: if true, includes classification layers 179 | 180 | :param classes: number of classes to classify (include_top must be true) 181 | 182 | Usage: 183 | 184 | >>> import keras_resnet.models 185 | 186 | >>> shape, classes = (224, 224, 3), 1000 187 | 188 | >>> x = keras.layers.Input(shape) 189 | 190 | >>> y = keras_resnet.models.TimeDistributedResNet50(x) 191 | 192 | >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) 193 | 194 | >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) 195 | 196 | >>> model = keras.models.Model(x, y) 197 | 198 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 199 | """ 200 | if blocks is None: 201 | blocks = [3, 4, 6, 3] 202 | 203 | return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs) 204 | 205 | 206 | def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): 207 | """ 208 | Constructs a time distributed `keras.models.Model` according to the ResNet101 specifications. 209 | 210 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 211 | 212 | :param blocks: the network’s residual architecture 213 | 214 | :param include_top: if true, includes classification layers 215 | 216 | :param classes: number of classes to classify (include_top must be true) 217 | 218 | :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 219 | 220 | Usage: 221 | 222 | >>> import keras_resnet.models 223 | 224 | >>> shape, classes = (224, 224, 3), 1000 225 | 226 | >>> x = keras.layers.Input(shape) 227 | 228 | >>> y = keras_resnet.models.TimeDistributedResNet101(x) 229 | 230 | >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) 231 | 232 | >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) 233 | 234 | >>> model = keras.models.Model(x, y) 235 | 236 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 237 | """ 238 | if blocks is None: 239 | blocks = [3, 4, 23, 3] 240 | 241 | return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs) 242 | 243 | 244 | def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): 245 | """ 246 | Constructs a time distributed `keras.models.Model` according to the ResNet152 specifications. 247 | 248 | :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) 249 | 250 | :param blocks: the network’s residual architecture 251 | 252 | :param include_top: if true, includes classification layers 253 | 254 | :param classes: number of classes to classify (include_top must be true) 255 | 256 | :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 257 | 258 | Usage: 259 | 260 | >>> import keras_resnet.models 261 | 262 | >>> shape, classes = (224, 224, 3), 1000 263 | 264 | >>> x = keras.layers.Input(shape) 265 | 266 | >>> y = keras_resnet.models.TimeDistributedResNet152(x) 267 | 268 | >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) 269 | 270 | >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) 271 | 272 | >>> model = keras.models.Model(x, y) 273 | 274 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 275 | """ 276 | if blocks is None: 277 | blocks = [3, 8, 36, 3] 278 | 279 | return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs) 280 | 281 | 282 | def TimeDistributedResNet200(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): 283 | """ 284 | Constructs a time distributed `keras.models.Model` according to the ResNet200 specifications. 285 | 286 | :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) 287 | 288 | :param blocks: the network’s residual architecture 289 | 290 | :param include_top: if true, includes classification layers 291 | 292 | :param classes: number of classes to classify (include_top must be true) 293 | 294 | :return model: Time distributed ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) 295 | 296 | Usage: 297 | 298 | >>> import keras_resnet.models 299 | 300 | >>> shape, classes = (224, 224, 3), 1000 301 | 302 | >>> x = keras.layers.Input(shape) 303 | 304 | >>> y = keras_resnet.models.TimeDistributedResNet200(x) 305 | 306 | >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) 307 | 308 | >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) 309 | 310 | >>> model = keras.models.Model(x, y) 311 | 312 | >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) 313 | """ 314 | if blocks is None: 315 | blocks = [3, 24, 36, 3] 316 | 317 | return TimeDistributedResNet(inputs, blocks, block=keras_resnet.blocks.time_distributed_bottleneck_2d, include_top=include_top, classes=classes, *args, **kwargs) 318 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # ignore: 2 | # E501 line too long (98 > 79 characters) 3 | [tool:pytest] 4 | pep8ignore = E501 5 | 6 | [bdist_wheel] 7 | universal=1 8 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | setuptools.setup( 4 | author="Allen Goodman", 5 | author_email="allen.goodman@icloud.com", 6 | extras_require={ 7 | "benchmark": [ 8 | "click", 9 | "sklearn" 10 | ], 11 | "test": [ 12 | "pytest" 13 | ] 14 | }, 15 | install_requires=[ 16 | "keras>=2.2.4" 17 | ], 18 | license="MIT", 19 | name="keras-resnet", 20 | package_data={ 21 | "keras-resnet": [ 22 | 
"data/checkpoints/*/*.hdf5", 23 | "data/logs/*/*.csv" 24 | ] 25 | }, 26 | packages=setuptools.find_packages( 27 | exclude=[ 28 | "tests" 29 | ] 30 | ), 31 | url="https://github.com/broadinstitute/keras-resnet", 32 | version="0.2.0" 33 | ) 34 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/broadinstitute/keras-resnet/898a1ee417e940ff12bf73ad44c2aae88501771f/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import keras.layers 2 | import pytest 3 | 4 | 5 | @pytest.fixture(scope="module") 6 | def x(): 7 | shape = (224, 224, 3) 8 | 9 | return keras.layers.Input(shape) 10 | -------------------------------------------------------------------------------- /tests/test_block.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/broadinstitute/keras-resnet/898a1ee417e940ff12bf73ad44c2aae88501771f/tests/test_block.py -------------------------------------------------------------------------------- /tests/test_models.py: -------------------------------------------------------------------------------- 1 | import keras_resnet.models 2 | 3 | 4 | class TestResNet18: 5 | def test_constructor(self, x): 6 | model = keras_resnet.models.ResNet2D18(x) 7 | 8 | assert len(model.layers) == 87 9 | 10 | 11 | class TestResNet34: 12 | def test_constructor(self, x): 13 | model = keras_resnet.models.ResNet2D34(x) 14 | 15 | assert len(model.layers) == 159 16 | 17 | 18 | class TestResNet50: 19 | def test_constructor(self, x): 20 | model = keras_resnet.models.ResNet2D50(x) 21 | 22 | assert len(model.layers) == 191 23 | 24 | 25 | class TestResNet101: 26 | def test_constructor(self, x): 27 | model = keras_resnet.models.ResNet2D101(x) 28 | 29 | assert len(model.layers) == 378 30 | 31 | 32 | class TestResNet152: 33 | def test_constructor(self, x): 34 | model = keras_resnet.models.ResNet2D152(x) 35 | 36 | assert len(model.layers) == 565 37 | 38 | 39 | class TestResNet200: 40 | def test_constructor(self, x): 41 | model = keras_resnet.models.ResNet2D200(x) 42 | 43 | assert len(model.layers) == 741 44 | -------------------------------------------------------------------------------- /tools/export-caffe-weights.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import caffe 4 | import argparse 5 | 6 | 7 | def parse_args(): 8 | parser = argparse.ArgumentParser(description="Export caffe weights to h5 format.") 9 | parser.add_argument("prototxt", help="Path to prototxt file.") 10 | parser.add_argument("caffemodel", help="Path to weights file.") 11 | parser.add_argument("output", help="Path to output weights.") 12 | 13 | return parser.parse_args() 14 | 15 | 16 | if __name__ == "__main__": 17 | args = parse_args() 18 | 19 | net = caffe.Net(args.prototxt, caffe.TEST, weights=args.caffemodel) 20 | net.save_hdf5(args.output) 21 | 22 | print("done.") 23 | -------------------------------------------------------------------------------- /tools/import-caffe-weights.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import keras_resnet.models 4 | import keras 5 | 6 | import h5py 7 | import argparse 8 | import numpy as np 9 | 10 | 
11 | def convert_conv_weights(weights): 12 | return np.array(weights).transpose((2, 3, 1, 0)) 13 | 14 | 15 | def convert_dense_weights(weights, biases): 16 | return [np.array(weights).T, np.array(biases)] 17 | 18 | 19 | def create_model(resnet): 20 | valid = ["resnet50", "resnet101", "resnet152"] 21 | if resnet not in valid: 22 | raise ValueError("Invalid resnet argument (valid: {}) : '{}'".format(valid, resnet)) 23 | 24 | image = keras.layers.Input((None, None, 3)) 25 | if resnet == "resnet50": 26 | return keras_resnet.models.ResNet50(image) 27 | elif resnet == "resnet101": 28 | return keras_resnet.models.ResNet101(image) 29 | elif resnet == "resnet152": 30 | return keras_resnet.models.ResNet152(image) 31 | 32 | 33 | def parse_args(): 34 | parser = argparse.ArgumentParser(description="Import caffe weights from h5 format.") 35 | parser.add_argument("weights", help="Path to weights (.h5) file.") 36 | parser.add_argument("output", help="Path to output Keras model to.") 37 | parser.add_argument("resnet", help="ResNet type (one of 'resnet50', 'resnet101', 'resnet152').") 38 | 39 | return parser.parse_args() 40 | 41 | 42 | if __name__ == "__main__": 43 | args = parse_args() 44 | 45 | # first create the model 46 | model = create_model(args.resnet) 47 | 48 | # load the caffe weights 49 | weights = h5py.File(args.weights).get("data") 50 | 51 | # port each layer 52 | for index, l in enumerate(model.layers): 53 | if isinstance(l, keras.layers.Conv2D): 54 | l.set_weights([convert_conv_weights(weights.get(l.name).get("0"))]) 55 | elif isinstance(l, keras.layers.Dense): 56 | l.set_weights(convert_dense_weights(weights.get(l.name).get("0"), weights.get(l.name).get("1"))) 57 | elif isinstance(l, keras.layers.BatchNormalization): 58 | scale_name = l.name.replace("bn", "scale") 59 | bn_weights = weights.get(l.name) 60 | scale_weights = weights.get(scale_name) 61 | 62 | l.set_weights([ 63 | np.array(scale_weights.get("0")), # gamma 64 | np.array(scale_weights.get("1")), # beta 65 | np.array(bn_weights.get("0")), # mean 66 | np.array(bn_weights.get("1")), # variance 67 | ]) 68 | 69 | print("imported layer: {}/{}".format(index, len(model.layers)), end="\r") 70 | 71 | print("saving...") 72 | model.save(args.output) 73 | print("done.") 74 | --------------------------------------------------------------------------------
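The feature pyramid models defined in _feature_pyramid_2d.py ship without usage examples, so here is a minimal sketch (not taken from the package's docstrings) of how they are called. It assumes the TensorFlow backend with the default "channels_last" image data format; FPN2D50 builds the pyramid on a ResNet-50 backbone and returns a keras.Model whose outputs are the pyramid levels P2 through P6, each with 256 channels.

import keras.backend
import keras.layers

import keras_resnet.models

shape = (224, 224, 3)

x = keras.layers.Input(shape)

# Build the pyramid on a ResNet-50 backbone; the model's five outputs are the
# pyramid feature maps P2 through P6.
model = keras_resnet.models.FPN2D50(x)

for feature in model.outputs:
    print(feature.name, keras.backend.int_shape(feature))

Unlike the classification models, the pyramid constructors take no include_top or classes arguments; any downstream head (for example a detection subnetwork) is attached to these outputs by the caller.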