├── tests ├── __init__.py └── test_test.py ├── examples ├── __init__.py ├── demo_embedding.py ├── demo.py └── demo_graph.py ├── tb_chainer ├── src │ ├── __init__.py │ ├── resource_handle.proto │ ├── versions.proto │ ├── tensor_shape.proto │ ├── types.proto │ ├── graph.proto │ ├── event.proto │ ├── attr_value.proto │ ├── node_def.proto │ ├── tensor.proto │ ├── versions_pb2.py │ ├── graph_pb2.py │ ├── resource_handle_pb2.py │ ├── summary.proto │ ├── tensor_shape_pb2.py │ ├── node_def_pb2.py │ ├── types_pb2.py │ ├── tensor_pb2.py │ ├── event_pb2.py │ ├── attr_value_pb2.py │ └── summary_pb2.py ├── __init__.py ├── record_writer.py ├── embedding.py ├── ordered_set.py ├── utils.py ├── name_scope.py ├── crc32c.py ├── event_file_writer.py ├── graph.py ├── summary.py └── writer.py ├── compile.sh ├── screenshots ├── audio.png ├── graph.gif ├── image.png ├── scalar.png ├── text.png ├── histogram.png └── distribution.png ├── .gitignore ├── pyproject.toml ├── setup.py ├── .travis.yml └── README.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tb_chainer/src/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /compile.sh: -------------------------------------------------------------------------------- 1 | protoc tb_chainer/src/*.proto --python_out=. 
2 | -------------------------------------------------------------------------------- /screenshots/audio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/HEAD/screenshots/audio.png -------------------------------------------------------------------------------- /screenshots/graph.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/HEAD/screenshots/graph.gif -------------------------------------------------------------------------------- /screenshots/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/HEAD/screenshots/image.png -------------------------------------------------------------------------------- /screenshots/scalar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/HEAD/screenshots/scalar.png -------------------------------------------------------------------------------- /screenshots/text.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/HEAD/screenshots/text.png -------------------------------------------------------------------------------- /screenshots/histogram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/HEAD/screenshots/histogram.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | proto_src/ 2 | __pycache__ 3 | *.pyc 4 | runs/ 5 | *~ 6 | build/* 7 | dist/* 8 | tensorboard_chainer.egg-info/* 
-------------------------------------------------------------------------------- /screenshots/distribution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/HEAD/screenshots/distribution.png -------------------------------------------------------------------------------- /tb_chainer/__init__.py: -------------------------------------------------------------------------------- 1 | """A module for visualization with tensorboard 2 | """ 3 | 4 | from .writer import FileWriter, SummaryWriter 5 | from .record_writer import RecordWriter 6 | from .name_scope import name_scope, within_name_scope, register_functions 7 | from .graph import NodeName 8 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "tensorboard-chainer" 3 | packages = [ 4 | {include = "tb_chainer"} 5 | ] 6 | version = "0.5.3" 7 | description = "" 8 | authors = ["nekanat "] 9 | license = "MIT" 10 | 11 | [tool.poetry.dependencies] 12 | python = "^2.7 || ^3.6 || ^3.7" 13 | chainer = "*" 14 | tensorboard = "*" 15 | setuptools = "*" 16 | twine = "*" 17 | 18 | [tool.poetry.dev-dependencies] 19 | 20 | [build-system] 21 | requires = ["poetry>=0.12"] 22 | build-backend = "poetry.masonry.api" 23 | -------------------------------------------------------------------------------- /examples/demo_embedding.py: -------------------------------------------------------------------------------- 1 | from tb_chainer.embedding import add_embedding 2 | import keyword 3 | import numpy as np 4 | meta = [] 5 | while len(meta)<100: 6 | meta = meta+keyword.kwlist 7 | meta = meta[:100] 8 | 9 | for i, v in enumerate(meta): 10 | meta[i] = v+str(i) 11 | 12 | label_img = np.random.rand(100, 3, 10, 32) 13 | for i in range(100): 14 | label_img[i]*=i/100.0 15 | 16 | 
add_embedding(np.random.randn(100, 5), save_path='embedding1', metadata=meta, label_img=label_img) 17 | add_embedding(np.random.randn(100, 5), save_path='embedding2', label_img=label_img) 18 | add_embedding(np.random.randn(100, 5), save_path='embedding3', metadata=meta) 19 | 20 | #tensorboard --logdir embedding1 21 | -------------------------------------------------------------------------------- /tests/test_test.py: -------------------------------------------------------------------------------- 1 | def test_demo(): 2 | from examples import demo 3 | 4 | def test_demo_graph(): 5 | from examples import demo_graph 6 | 7 | # def test_demo_embedding(): 8 | # from examples import demo_embedding 9 | 10 | def test_name_scope(): 11 | import chainer 12 | import numpy as np 13 | from tb_chainer import name_scope 14 | with name_scope("test"): 15 | x = chainer.Variable(np.zeros((10, 10))) 16 | y = chainer.functions.activation.leaky_relu.leaky_relu(x) 17 | z = chainer.functions.activation.leaky_relu.leaky_relu(y) 18 | 19 | assert y.creator.name_scope == "test" 20 | assert z.creator.name_scope == '' 21 | assert y.node.name_scope == "test" 22 | assert x.node.name_scope == "test" 23 | assert not hasattr(z, "name_scope") 24 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from setuptools import setup, find_packages 5 | 6 | requirements = [ 7 | 'protobuf', 8 | 'six', 9 | 'moviepy' 10 | ] 11 | 12 | test_requirements = [ 13 | 'pytest', 14 | 'moviepy' 15 | ] 16 | 17 | setup( 18 | name='tensorboard-chainer', 19 | version='0.5.3', 20 | description='Log TensorBoard events with chainer', 21 | long_description=open('README.md').read(), 22 | long_description_content_type='text/markdown', 23 | author='nake nat', 24 | author_email='nakanat.stock@gmail.com', 25 | 
url='https://github.com/neka-nat/tensorboard-chainer', 26 | packages=find_packages(exclude=['docs', 'tests']), 27 | include_package_data=True, 28 | install_requires=requirements, 29 | license='MIT license', 30 | zip_safe=False, 31 | test_suite='tests', 32 | tests_require=test_requirements 33 | ) 34 | 35 | # python setup.py bdist_wheel --universal upload 36 | -------------------------------------------------------------------------------- /tb_chainer/src/resource_handle.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "ResourceHandle"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | // Protocol buffer representing a handle to a tensorflow resource. Handles are 10 | // not valid across executions, but can be serialized back and forth from within 11 | // a single run. 12 | message ResourceHandleProto { 13 | // Unique name for the device containing the resource. 14 | string device = 1; 15 | 16 | // Container in which this resource is placed. 17 | string container = 2; 18 | 19 | // Unique name of this resource. 20 | string name = 3; 21 | 22 | // Hash code for the type of the resource. Is only valid in the same device 23 | // and in the same execution. 24 | uint64 hash_code = 4; 25 | 26 | // For debug-only, the name of the type pointed to by this handle, if 27 | // available. 
28 | string maybe_type_name = 5; 29 | }; 30 | -------------------------------------------------------------------------------- /tb_chainer/src/versions.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "VersionsProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | // Version information for a piece of serialized data 10 | // 11 | // There are different types of versions for each type of data 12 | // (GraphDef, etc.), but they all have the same common shape 13 | // described here. 14 | // 15 | // Each consumer has "consumer" and "min_producer" versions (specified 16 | // elsewhere). A consumer is allowed to consume this data if 17 | // 18 | // producer >= min_producer 19 | // consumer >= min_consumer 20 | // consumer not in bad_consumers 21 | // 22 | message VersionDef { 23 | // The version of the code that produced this data. 24 | int32 producer = 1; 25 | 26 | // Any consumer below this version is not allowed to consume this data. 27 | int32 min_consumer = 2; 28 | 29 | // Specific consumer versions which are disallowed (e.g. due to bugs). 
30 | repeated int32 bad_consumers = 3; 31 | }; 32 | -------------------------------------------------------------------------------- /examples/demo.py: -------------------------------------------------------------------------------- 1 | import math 2 | import chainer 3 | import numpy as np 4 | from datetime import datetime 5 | from tb_chainer import utils, SummaryWriter 6 | 7 | vgg = chainer.links.VGG16Layers() 8 | writer = SummaryWriter('runs/'+datetime.now().strftime('%B%d %H:%M:%S')) 9 | sample_rate = 44100 10 | freqs = [262, 294, 330, 349, 392, 440, 440, 440, 440, 440, 440] 11 | for n_iter in range(100): 12 | M_global = np.random.rand(1) # value to keep 13 | writer.add_scalar('M_global', M_global[0], n_iter) 14 | x = np.random.rand(32, 3, 64, 64) # output from network 15 | if n_iter % 10 == 0: 16 | x = utils.make_grid(x) 17 | writer.add_image('Image', x, n_iter) 18 | x = np.zeros(sample_rate*2) 19 | for i in range(x.shape[0]): 20 | x[i] = np.cos(freqs[n_iter//10] * np.pi * float(i) / float(sample_rate)) # sound amplitude should in [-1, 1] 21 | writer.add_audio('Audio', x, n_iter) 22 | for name, param in vgg.namedparams(): 23 | writer.add_histogram(name, chainer.cuda.to_cpu(param.data), n_iter) 24 | writer.add_text('Text', 'text logged at step:'+str(n_iter), n_iter) 25 | writer.add_text('another Text', 'another text logged at step:'+str(n_iter), n_iter) 26 | 27 | video = np.random.rand(16, 3, 16, 64, 64) # (batchsize, channel, time, height, width) 28 | writer.add_video('Video', vid_tensor=video) 29 | 30 | writer.close() 31 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | # We don't actually use the Travis Python, but this keeps it organized. 
4 | - "2.7" 5 | - "3.6" 6 | 7 | install: 8 | - sudo add-apt-repository -y ppa:mc3man/trusty-media 9 | - sudo apt-get update 10 | - sudo apt-get install -y ffmpeg 11 | # We do this conditionally because it saves us some downloading if the 12 | # version is the same. 13 | - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then 14 | wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; 15 | else 16 | wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; 17 | fi 18 | - bash miniconda.sh -b -p $HOME/miniconda 19 | - export PATH="$HOME/miniconda/bin:$PATH" 20 | - hash -r 21 | - conda config --set always_yes yes --set changeps1 no 22 | - conda update -q conda 23 | # Useful for debugging any issues with conda 24 | - conda info -a 25 | 26 | # Replace dep1 dep2 ... with your dependencies 27 | - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION -c soumith 28 | - source activate test-environment 29 | - which python 30 | - conda list 31 | - pip install chainer 32 | - pip install pillow tensorflow 33 | - pip install codecov pytest-cov 34 | - pip install --upgrade pytest 35 | - python setup.py install 36 | 37 | script: 38 | - travis_wait 30 pytest --cov=./ 39 | after_success: 40 | - codecov -------------------------------------------------------------------------------- /examples/demo_graph.py: -------------------------------------------------------------------------------- 1 | import chainer 2 | import chainer.functions as F 3 | import chainer.links as L 4 | import numpy as np 5 | from datetime import datetime 6 | from tb_chainer import SummaryWriter, name_scope, within_name_scope 7 | 8 | np.random.seed(123) 9 | 10 | class MLP(chainer.Chain): 11 | def __init__(self, n_units, n_out): 12 | super(MLP, self).__init__() 13 | with self.init_scope(): 14 | self.l1 = L.Linear(None, n_units) # n_in -> n_units 15 | self.l2 = L.Linear(None, n_units) # n_units -> n_units 16 | self.l3 = L.Linear(None, 
n_out) # n_units -> n_out 17 | 18 | @within_name_scope('MLP') 19 | def __call__(self, x): 20 | with name_scope('linear1', self.l1.params()): 21 | h1 = F.relu(self.l1(x)) 22 | with name_scope('linear2', self.l2.params()): 23 | h2 = F.relu(self.l2(h1)) 24 | with name_scope('linear3', self.l3.params()): 25 | o = self.l3(h2) 26 | return o 27 | 28 | model = L.Classifier(MLP(1000, 10)) 29 | 30 | res = model(chainer.Variable(np.random.rand(1, 784).astype(np.float32)), 31 | chainer.Variable(np.random.rand(1).astype(np.int32))) 32 | 33 | writer = SummaryWriter('runs/'+datetime.now().strftime('%B%d %H:%M:%S')) 34 | writer.add_graph([res]) 35 | writer.add_all_variable_images([res], pattern='.*MLP.*') 36 | writer.add_all_parameter_histograms([res], pattern='.*MLP.*') 37 | 38 | writer.close() 39 | -------------------------------------------------------------------------------- /tb_chainer/record_writer.py: -------------------------------------------------------------------------------- 1 | """ 2 | To write tf_record into file. Here we use it for tensorboard's event writting. 3 | The code was borrow from https://github.com/TeamHG-Memex/tensorboard_logger 4 | """ 5 | 6 | import re 7 | import struct 8 | 9 | from .crc32c import crc32c 10 | 11 | _VALID_OP_NAME_START = re.compile('^[A-Za-z0-9.]') 12 | _VALID_OP_NAME_PART = re.compile('[A-Za-z0-9_.\\-/]+') 13 | 14 | 15 | class RecordWriter(object): 16 | def __init__(self, path, flush_secs=2): 17 | self._name_to_tf_name = {} 18 | self._tf_names = set() 19 | self.path = path 20 | self.flush_secs = flush_secs # TODO. flush every flush_secs, not every time. 
21 | self._writer = None 22 | self._writer = open(path, 'wb') 23 | 24 | def write(self, event_str): 25 | w = self._writer.write 26 | header = struct.pack('Q', len(event_str)) 27 | w(header) 28 | w(struct.pack('I', masked_crc32c(header))) 29 | w(event_str) 30 | w(struct.pack('I', masked_crc32c(event_str))) 31 | self._writer.flush() 32 | 33 | def __del__(self): 34 | if self._writer is not None: 35 | self._writer.close() 36 | 37 | 38 | def masked_crc32c(data): 39 | x = u32(crc32c(data)) 40 | return u32(((x >> 15) | u32(x << 17)) + 0xa282ead8) 41 | 42 | 43 | def u32(x): 44 | return x & 0xffffffff 45 | 46 | 47 | def make_valid_tf_name(name): 48 | if not _VALID_OP_NAME_START.match(name): 49 | # Must make it valid somehow, but don't want to remove stuff 50 | name = '.' + name 51 | return '_'.join(_VALID_OP_NAME_PART.findall(name)) 52 | 53 | -------------------------------------------------------------------------------- /tb_chainer/src/tensor_shape.proto: -------------------------------------------------------------------------------- 1 | // Protocol buffer representing the shape of tensors. 2 | 3 | syntax = "proto3"; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "TensorShapeProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | package tensorboard; 10 | 11 | // Dimensions of a tensor. 12 | message TensorShapeProto { 13 | // One dimension of the tensor. 14 | message Dim { 15 | // Size of the tensor in that dimension. 16 | // This value must be >= -1, but values of -1 are reserved for "unknown" 17 | // shapes (values of -1 mean "unknown" dimension). Certain wrappers 18 | // that work with TensorShapeProto may fail at runtime when deserializing 19 | // a TensorShapeProto containing a dim value of -1. 20 | int64 size = 1; 21 | 22 | // Optional name of the tensor dimension. 
23 | string name = 2; 24 | }; 25 | 26 | // Dimensions of the tensor, such as {"input", 30}, {"output", 40} 27 | // for a 30 x 40 2D tensor. If an entry has size -1, this 28 | // corresponds to a dimension of unknown size. The names are 29 | // optional. 30 | // 31 | // The order of entries in "dim" matters: It indicates the layout of the 32 | // values in the tensor in-memory representation. 33 | // 34 | // The first entry in "dim" is the outermost dimension used to layout the 35 | // values, the last entry is the innermost dimension. This matches the 36 | // in-memory layout of RowMajor Eigen tensors. 37 | // 38 | // If "dim.size()" > 0, "unknown_rank" must be false. 39 | repeated Dim dim = 2; 40 | 41 | // If true, the number of dimensions in the shape is unknown. 42 | // 43 | // If true, "dim.size()" must be 0. 44 | bool unknown_rank = 3; 45 | }; 46 | -------------------------------------------------------------------------------- /tb_chainer/src/types.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "TypesProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | // LINT.IfChange 10 | enum DataType { 11 | // Not a legal value for DataType. Used to indicate a DataType field 12 | // has not been set. 13 | DT_INVALID = 0; 14 | 15 | // Data types that all computation devices are expected to be 16 | // capable to support. 17 | DT_FLOAT = 1; 18 | DT_DOUBLE = 2; 19 | DT_INT32 = 3; 20 | DT_UINT8 = 4; 21 | DT_INT16 = 5; 22 | DT_INT8 = 6; 23 | DT_STRING = 7; 24 | DT_COMPLEX64 = 8; // Single-precision complex 25 | DT_INT64 = 9; 26 | DT_BOOL = 10; 27 | DT_QINT8 = 11; // Quantized int8 28 | DT_QUINT8 = 12; // Quantized uint8 29 | DT_QINT32 = 13; // Quantized int32 30 | DT_BFLOAT16 = 14; // Float32 truncated to 16 bits. Only for cast ops. 
31 | DT_QINT16 = 15; // Quantized int16 32 | DT_QUINT16 = 16; // Quantized uint16 33 | DT_UINT16 = 17; 34 | DT_COMPLEX128 = 18; // Double-precision complex 35 | DT_HALF = 19; 36 | DT_RESOURCE = 20; 37 | 38 | // TODO(josh11b): DT_GENERIC_PROTO = ??; 39 | // TODO(jeff,josh11b): DT_UINT64? DT_UINT32? 40 | 41 | // Do not use! These are only for parameters. Every enum above 42 | // should have a corresponding value below (verified by types_test). 43 | DT_FLOAT_REF = 101; 44 | DT_DOUBLE_REF = 102; 45 | DT_INT32_REF = 103; 46 | DT_UINT8_REF = 104; 47 | DT_INT16_REF = 105; 48 | DT_INT8_REF = 106; 49 | DT_STRING_REF = 107; 50 | DT_COMPLEX64_REF = 108; 51 | DT_INT64_REF = 109; 52 | DT_BOOL_REF = 110; 53 | DT_QINT8_REF = 111; 54 | DT_QUINT8_REF = 112; 55 | DT_QINT32_REF = 113; 56 | DT_BFLOAT16_REF = 114; 57 | DT_QINT16_REF = 115; 58 | DT_QUINT16_REF = 116; 59 | DT_UINT16_REF = 117; 60 | DT_COMPLEX128_REF = 118; 61 | DT_HALF_REF = 119; 62 | DT_RESOURCE_REF = 120; 63 | } 64 | // LINT.ThenChange(https://www.tensorflow.org/code/tensorflow/c/c_api.h,https://www.tensorflow.org/code/tensorflow/go/tensor.go) 65 | -------------------------------------------------------------------------------- /tb_chainer/src/graph.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "GraphProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | import "tb_chainer/src/node_def.proto"; 10 | //import "tensorflow/core/framework/function.proto"; 11 | import "tb_chainer/src/versions.proto"; 12 | 13 | // Represents the graph of operations 14 | message GraphDef { 15 | repeated NodeDef node = 1; 16 | 17 | // Compatibility versions of the graph. See core/public/version.h for version 18 | // history. 
The GraphDef version is distinct from the TensorFlow version, and 19 | // each release of TensorFlow will support a range of GraphDef versions. 20 | VersionDef versions = 4; 21 | 22 | // Deprecated single version field; use versions above instead. Since all 23 | // GraphDef changes before "versions" was introduced were forward 24 | // compatible, this field is entirely ignored. 25 | int32 version = 3 [deprecated = true]; 26 | 27 | // EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET. 28 | // 29 | // "library" provides user-defined functions. 30 | // 31 | // Naming: 32 | // * library.function.name are in a flat namespace. 33 | // NOTE: We may need to change it to be hierarchical to support 34 | // different orgs. E.g., 35 | // { "/google/nn", { ... }}, 36 | // { "/google/vision", { ... }} 37 | // { "/org_foo/module_bar", { ... }} 38 | // map named_lib; 39 | // * If node[i].op is the name of one function in "library", 40 | // node[i] is deemed as a function call. Otherwise, node[i].op 41 | // must be a primitive operation supported by the runtime. 42 | // 43 | // 44 | // Function call semantics: 45 | // 46 | // * The callee may start execution as soon as some of its inputs 47 | // are ready. The caller may want to use Tuple() mechanism to 48 | // ensure all inputs are ready in the same time. 49 | // 50 | // * The consumer of return values may start executing as soon as 51 | // the return values the consumer depends on are ready. The 52 | // consumer may want to use Tuple() mechanism to ensure the 53 | // consumer does not start until all return values of the callee 54 | // function are ready. 
55 | //FunctionDefLibrary library = 2; 56 | }; 57 | -------------------------------------------------------------------------------- /tb_chainer/src/event.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "EventProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.util"; 8 | 9 | import "tb_chainer/src/summary.proto"; 10 | 11 | // Protocol buffer representing an event that happened during 12 | // the execution of a Brain model. 13 | message Event { 14 | // Timestamp of the event. 15 | double wall_time = 1; 16 | 17 | // Global step of the event. 18 | int64 step = 2; 19 | 20 | oneof what { 21 | // An event file was started, with the specified version. 22 | // This is use to identify the contents of the record IO files 23 | // easily. Current version is "brain.Event:2". All versions 24 | // start with "brain.Event:". 25 | string file_version = 3; 26 | // An encoded version of a GraphDef. 27 | bytes graph_def = 4; 28 | // A summary was generated. 29 | Summary summary = 5; 30 | // The user output a log message. Not all messages are logged, only ones 31 | // generated via the Python tensorboard_logging module. 32 | LogMessage log_message = 6; 33 | // The state of the session which can be used for restarting after crashes. 34 | SessionLog session_log = 7; 35 | // The metadata returned by running a session.run() call. 36 | TaggedRunMetadata tagged_run_metadata = 8; 37 | // An encoded version of a MetaGraphDef. 38 | bytes meta_graph_def = 9; 39 | } 40 | } 41 | 42 | // Protocol buffer used for logging messages to the events file. 
43 | message LogMessage { 44 | enum Level { 45 | UNKNOWN = 0; 46 | DEBUG = 10; 47 | INFO = 20; 48 | WARN = 30; 49 | ERROR = 40; 50 | FATAL = 50; 51 | } 52 | Level level = 1; 53 | string message = 2; 54 | } 55 | 56 | // Protocol buffer used for logging session state. 57 | message SessionLog { 58 | enum SessionStatus { 59 | STATUS_UNSPECIFIED = 0; 60 | START = 1; 61 | STOP = 2; 62 | CHECKPOINT = 3; 63 | } 64 | 65 | SessionStatus status = 1; 66 | // This checkpoint_path contains both the path and filename. 67 | string checkpoint_path = 2; 68 | string msg = 3; 69 | } 70 | 71 | // For logging the metadata output for a single session.run() call. 72 | message TaggedRunMetadata { 73 | // Tag name associated with this metadata. 74 | string tag = 1; 75 | // Byte-encoded version of the `RunMetadata` proto in order to allow lazy 76 | // deserialization. 77 | bytes run_metadata = 2; 78 | } 79 | -------------------------------------------------------------------------------- /tb_chainer/src/attr_value.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "AttrValueProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | import "tb_chainer/src/tensor.proto"; 10 | import "tb_chainer/src/tensor_shape.proto"; 11 | import "tb_chainer/src/types.proto"; 12 | 13 | // Protocol buffer representing the value for an attr used to configure an Op. 14 | // Comment indicates the corresponding attr type. Only the field matching the 15 | // attr type may be filled. 
16 | message AttrValue { 17 | // LINT.IfChange 18 | message ListValue { 19 | repeated bytes s = 2; // "list(string)" 20 | repeated int64 i = 3 [packed = true]; // "list(int)" 21 | repeated float f = 4 [packed = true]; // "list(float)" 22 | repeated bool b = 5 [packed = true]; // "list(bool)" 23 | repeated DataType type = 6 [packed = true]; // "list(type)" 24 | repeated TensorShapeProto shape = 7; // "list(shape)" 25 | repeated TensorProto tensor = 8; // "list(tensor)" 26 | repeated NameAttrList func = 9; // "list(attr)" 27 | } 28 | // LINT.ThenChange(https://www.tensorflow.org/code/tensorflow/c/c_api.cc) 29 | 30 | oneof value { 31 | bytes s = 2; // "string" 32 | int64 i = 3; // "int" 33 | float f = 4; // "float" 34 | bool b = 5; // "bool" 35 | DataType type = 6; // "type" 36 | TensorShapeProto shape = 7; // "shape" 37 | TensorProto tensor = 8; // "tensor" 38 | ListValue list = 1; // any "list(...)" 39 | 40 | // "func" represents a function. func.name is a function's name or 41 | // a primitive op's name. func.attr.first is the name of an attr 42 | // defined for that function. func.attr.second is the value for 43 | // that attr in the instantiation. 44 | NameAttrList func = 10; 45 | 46 | // This is a placeholder only used in nodes defined inside a 47 | // function. It indicates the attr value will be supplied when 48 | // the function is instantiated. For example, let us suppose a 49 | // node "N" in function "FN". "N" has an attr "A" with value 50 | // placeholder = "foo". When FN is instantiated with attr "foo" 51 | // set to "bar", the instantiated node N's attr A will have been 52 | // given the value "bar". 53 | string placeholder = 9; 54 | } 55 | } 56 | 57 | // A list of attr names and their values. The whole list is attached 58 | // with a string name. E.g., MatMul[T=float]. 
59 | message NameAttrList { 60 | string name = 1; 61 | map attr = 2; 62 | } 63 | -------------------------------------------------------------------------------- /tb_chainer/src/node_def.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "NodeProto"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | import "tb_chainer/src/attr_value.proto"; 10 | 11 | message NodeDef { 12 | // The name given to this operator. Used for naming inputs, 13 | // logging, visualization, etc. Unique within a single GraphDef. 14 | // Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*". 15 | string name = 1; 16 | 17 | // The operation name. There may be custom parameters in attrs. 18 | // Op names starting with an underscore are reserved for internal use. 19 | string op = 2; 20 | 21 | // Each input is "node:src_output" with "node" being a string name and 22 | // "src_output" indicating which output tensor to use from "node". If 23 | // "src_output" is 0 the ":0" suffix can be omitted. Regular inputs 24 | // may optionally be followed by control inputs that have the format 25 | // "^node". 26 | repeated string input = 3; 27 | 28 | // A (possibly partial) specification for the device on which this 29 | // node should be placed. 
30 | // The expected syntax for this string is as follows: 31 | // 32 | // DEVICE_SPEC ::= PARTIAL_SPEC 33 | // 34 | // PARTIAL_SPEC ::= ("/" CONSTRAINT) * 35 | // CONSTRAINT ::= ("job:" JOB_NAME) 36 | // | ("replica:" [1-9][0-9]*) 37 | // | ("task:" [1-9][0-9]*) 38 | // | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") ) 39 | // 40 | // Valid values for this string include: 41 | // * "/job:worker/replica:0/task:1/gpu:3" (full specification) 42 | // * "/job:worker/gpu:3" (partial specification) 43 | // * "" (no specification) 44 | // 45 | // If the constraints do not resolve to a single device (or if this 46 | // field is empty or not present), the runtime will attempt to 47 | // choose a device automatically. 48 | string device = 4; 49 | 50 | // Operation-specific graph-construction-time configuration. 51 | // Note that this should include all attrs defined in the 52 | // corresponding OpDef, including those with a value matching 53 | // the default -- this allows the default to change and makes 54 | // NodeDefs easier to interpret on their own. However, if 55 | // an attr with a default is not specified in this list, the 56 | // default will be used. 57 | // The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and 58 | // one of the names from the corresponding OpDef's attr field). 59 | // The values must have a type matching the corresponding OpDef 60 | // attr's type field. 61 | // TODO(josh11b): Add some examples here showing best practices. 
62 | map attr = 5; 63 | }; 64 | -------------------------------------------------------------------------------- /tb_chainer/embedding.py: -------------------------------------------------------------------------------- 1 | import utils 2 | import numpy as np 3 | import os 4 | 5 | def make_tsv(metadata, save_path): 6 | metadata = [str(x) for x in metadata] 7 | with open(os.path.join(save_path, 'metadata.tsv'), 'w') as f: 8 | for x in metadata: 9 | f.write(x+'\n') 10 | 11 | 12 | # https://github.com/tensorflow/tensorboard/issues/44 image label will be squared 13 | def make_sprite(label_img, save_path): 14 | import math 15 | nrow = int(math.floor(math.sqrt(label_img.shape[0]))) 16 | xx = utils.make_grid(np.zeros((1,3,32,32)), padding=0) 17 | if xx.shape[2]==33: # https://github.com/pytorch/vision/issues/206 18 | sprite = utils.make_grid(label_img, nrow=nrow, padding=0) 19 | sprite = sprite[:,1:,1:] 20 | utils.save_image(sprite, os.path.join(save_path, 'sprite.png')) 21 | else: 22 | utils.save_image(label_img, os.path.join(save_path, 'sprite.png'), nrow=nrow, padding=0) 23 | 24 | def make_pbtxt(save_path, metadata, label_img): 25 | with open(os.path.join(save_path, 'projector_config.pbtxt'), 'w') as f: 26 | f.write('embeddings {\n') 27 | f.write('tensor_name: "embedding:0"\n') 28 | if metadata is not None: 29 | f.write('metadata_path: "metadata.tsv"\n') 30 | if label_img is not None: 31 | f.write('sprite {\n') 32 | f.write('image_path: "sprite.png"\n') 33 | f.write('single_image_dim: {}\n'.format(label_img.shape[3])) 34 | f.write('single_image_dim: {}\n'.format(label_img.shape[2])) 35 | f.write('}\n') 36 | f.write('}\n') 37 | 38 | 39 | ''' 40 | mat: torch tensor. mat.size(0) is the number of data. mat.size(1) is the cardinality of feature dimensions 41 | save_path: self-explained. 42 | metadata: a list of {int, string} of length equals mat.size(0) 43 | label_img: 4D torch tensor. label_img.size(0) equals mat.size(0). 
44 | 45 | ''' 46 | 47 | def add_embedding(mat, save_path, metadata=None, label_img=None): 48 | try: 49 | os.makedirs(save_path) 50 | except OSError: 51 | print('warning: dir exists') 52 | if metadata is not None: 53 | assert mat.shape[0]==len(metadata), '#labels should equal with #data points' 54 | make_tsv(metadata, save_path) 55 | if label_img is not None: 56 | assert mat.shape[0]==label_img.shape[0], '#images should equal with #data points' 57 | make_sprite(label_img, save_path) 58 | import tensorflow as tf 59 | tf.reset_default_graph() 60 | with tf.device('/cpu:0'): 61 | emb = tf.Variable(mat.tolist(), name="embedding") 62 | config = tf.ConfigProto() 63 | config.gpu_options.allow_growth = True 64 | with tf.Session(config=config) as sess: 65 | sess.run(emb.initializer) 66 | saver = tf.train.Saver() 67 | saver.save(sess, save_path=os.path.join(save_path, 'model.ckpt'), global_step=None, write_meta_graph=False) 68 | make_pbtxt(save_path, metadata, label_img) 69 | 70 | -------------------------------------------------------------------------------- /tb_chainer/src/tensor.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "TensorProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | import "tb_chainer/src/resource_handle.proto"; 10 | import "tb_chainer/src/tensor_shape.proto"; 11 | import "tb_chainer/src/types.proto"; 12 | 13 | // Protocol buffer representing a tensor. 14 | message TensorProto { 15 | DataType dtype = 1; 16 | 17 | // Shape of the tensor. TODO(touts): sort out the 0-rank issues. 18 | TensorShapeProto tensor_shape = 2; 19 | 20 | // Only one of the representations below is set, one of "tensor_contents" and 21 | // the "xxx_val" attributes. 
We are not using oneof because as oneofs cannot 22 | // contain repeated fields it would require another extra set of messages. 23 | 24 | // Version number. 25 | // 26 | // In version 0, if the "repeated xxx" representations contain only one 27 | // element, that element is repeated to fill the shape. This makes it easy 28 | // to represent a constant Tensor with a single value. 29 | int32 version_number = 3; 30 | 31 | // Serialized raw tensor content from either Tensor::AsProtoTensorContent or 32 | // memcpy in tensorflow::grpc::EncodeTensorToByteBuffer. This representation 33 | // can be used for all tensor types. The purpose of this representation is to 34 | // reduce serialization overhead during RPC call by avoiding serialization of 35 | // many repeated small items. 36 | bytes tensor_content = 4; 37 | 38 | // Type specific representations that make it easy to create tensor protos in 39 | // all languages. Only the representation corresponding to "dtype" can 40 | // be set. The values hold the flattened representation of the tensor in 41 | // row major order. 42 | 43 | // DT_HALF. Note that since protobuf has no int16 type, we'll have some 44 | // pointless zero padding for each value here. 45 | repeated int32 half_val = 13 [packed = true]; 46 | 47 | // DT_FLOAT. 48 | repeated float float_val = 5 [packed = true]; 49 | 50 | // DT_DOUBLE. 51 | repeated double double_val = 6 [packed = true]; 52 | 53 | // DT_INT32, DT_INT16, DT_INT8, DT_UINT8. 54 | repeated int32 int_val = 7 [packed = true]; 55 | 56 | // DT_STRING 57 | repeated bytes string_val = 8; 58 | 59 | // DT_COMPLEX64. scomplex_val(2*i) and scomplex_val(2*i+1) are real 60 | // and imaginary parts of i-th single precision complex. 61 | repeated float scomplex_val = 9 [packed = true]; 62 | 63 | // DT_INT64 64 | repeated int64 int64_val = 10 [packed = true]; 65 | 66 | // DT_BOOL 67 | repeated bool bool_val = 11 [packed = true]; 68 | 69 | // DT_COMPLEX128. 
dcomplex_val(2*i) and dcomplex_val(2*i+1) are real 70 | // and imaginary parts of i-th double precision complex. 71 | repeated double dcomplex_val = 12 [packed = true]; 72 | 73 | // DT_RESOURCE 74 | repeated ResourceHandleProto resource_handle_val = 14; 75 | }; 76 | -------------------------------------------------------------------------------- /tb_chainer/ordered_set.py: -------------------------------------------------------------------------------- 1 | import collections 2 | from weakref import proxy 3 | 4 | class Link(object): 5 | __slots__ = 'prev', 'next', 'key', '__weakref__' 6 | 7 | class OrderedSet(collections.MutableSet): 8 | 'Set the remembers the order elements were added' 9 | # Big-O running times for all methods are the same as for regular sets. 10 | # The internal self.__map dictionary maps keys to links in a doubly linked list. 11 | # The circular doubly linked list starts and ends with a sentinel element. 12 | # The sentinel element never gets deleted (this simplifies the algorithm). 13 | # The prev/next links are weakref proxies (to prevent circular references). 14 | # Individual links are kept alive by the hard reference in self.__map. 15 | # Those hard references disappear when a key is deleted from an OrderedSet. 
16 | 17 | def __init__(self, iterable=None): 18 | self.__root = root = Link() # sentinel node for doubly linked list 19 | root.prev = root.next = root 20 | self.__map = {} # key --> link 21 | if iterable is not None: 22 | self |= iterable 23 | 24 | def __len__(self): 25 | return len(self.__map) 26 | 27 | def __contains__(self, key): 28 | return key in self.__map 29 | 30 | def add(self, key): 31 | # Store new key in a new link at the end of the linked list 32 | if key not in self.__map: 33 | self.__map[key] = link = Link() 34 | root = self.__root 35 | last = root.prev 36 | link.prev, link.next, link.key = last, root, key 37 | last.next = root.prev = proxy(link) 38 | 39 | def discard(self, key): 40 | # Remove an existing item using self.__map to find the link which is 41 | # then removed by updating the links in the predecessor and successors. 42 | if key in self.__map: 43 | link = self.__map.pop(key) 44 | link.prev.next = link.next 45 | link.next.prev = link.prev 46 | 47 | def __iter__(self): 48 | # Traverse the linked list in order. 49 | root = self.__root 50 | curr = root.next 51 | while curr is not root: 52 | yield curr.key 53 | curr = curr.next 54 | 55 | def __reversed__(self): 56 | # Traverse the linked list in reverse order. 
57 | root = self.__root 58 | curr = root.prev 59 | while curr is not root: 60 | yield curr.key 61 | curr = curr.prev 62 | 63 | def pop(self, last=True): 64 | if not self: 65 | raise KeyError('set is empty') 66 | key = next(reversed(self)) if last else next(iter(self)) 67 | self.discard(key) 68 | return key 69 | 70 | def __repr__(self): 71 | if not self: 72 | return '%s()' % (self.__class__.__name__,) 73 | return '%s(%r)' % (self.__class__.__name__, list(self)) 74 | 75 | def __eq__(self, other): 76 | if isinstance(other, OrderedSet): 77 | return len(self) == len(other) and list(self) == list(other) 78 | return not self.isdisjoint(other) 79 | -------------------------------------------------------------------------------- /tb_chainer/src/versions_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | # source: tb_chainer/src/versions.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='tb_chainer/src/versions.proto', 20 | package='tensorboard', 21 | syntax='proto3', 22 | serialized_pb=_b('\n\x1dtb_chainer/src/versions.proto\x12\x0btensorboard\"K\n\nVersionDef\x12\x10\n\x08producer\x18\x01 \x01(\x05\x12\x14\n\x0cmin_consumer\x18\x02 \x01(\x05\x12\x15\n\rbad_consumers\x18\x03 \x03(\x05\x42/\n\x18org.tensorflow.frameworkB\x0eVersionsProtosP\x01\xf8\x01\x01\x62\x06proto3') 23 | ) 24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 25 | 26 | 27 | 28 | 29 | _VERSIONDEF = 
_descriptor.Descriptor( 30 | name='VersionDef', 31 | full_name='tensorboard.VersionDef', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | _descriptor.FieldDescriptor( 37 | name='producer', full_name='tensorboard.VersionDef.producer', index=0, 38 | number=1, type=5, cpp_type=1, label=1, 39 | has_default_value=False, default_value=0, 40 | message_type=None, enum_type=None, containing_type=None, 41 | is_extension=False, extension_scope=None, 42 | options=None), 43 | _descriptor.FieldDescriptor( 44 | name='min_consumer', full_name='tensorboard.VersionDef.min_consumer', index=1, 45 | number=2, type=5, cpp_type=1, label=1, 46 | has_default_value=False, default_value=0, 47 | message_type=None, enum_type=None, containing_type=None, 48 | is_extension=False, extension_scope=None, 49 | options=None), 50 | _descriptor.FieldDescriptor( 51 | name='bad_consumers', full_name='tensorboard.VersionDef.bad_consumers', index=2, 52 | number=3, type=5, cpp_type=1, label=3, 53 | has_default_value=False, default_value=[], 54 | message_type=None, enum_type=None, containing_type=None, 55 | is_extension=False, extension_scope=None, 56 | options=None), 57 | ], 58 | extensions=[ 59 | ], 60 | nested_types=[], 61 | enum_types=[ 62 | ], 63 | options=None, 64 | is_extendable=False, 65 | syntax='proto3', 66 | extension_ranges=[], 67 | oneofs=[ 68 | ], 69 | serialized_start=46, 70 | serialized_end=121, 71 | ) 72 | 73 | DESCRIPTOR.message_types_by_name['VersionDef'] = _VERSIONDEF 74 | 75 | VersionDef = _reflection.GeneratedProtocolMessageType('VersionDef', (_message.Message,), dict( 76 | DESCRIPTOR = _VERSIONDEF, 77 | __module__ = 'tb_chainer.src.versions_pb2' 78 | # @@protoc_insertion_point(class_scope:tensorboard.VersionDef) 79 | )) 80 | _sym_db.RegisterMessage(VersionDef) 81 | 82 | 83 | DESCRIPTOR.has_options = True 84 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), 
_b('\n\030org.tensorflow.frameworkB\016VersionsProtosP\001\370\001\001')) 85 | # @@protoc_insertion_point(module_scope) 86 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/neka-nat/tensorboard-chainer.svg?branch=master)](https://travis-ci.org/neka-nat/tensorboard-chainer) 2 | [![codecov](https://codecov.io/gh/neka-nat/tensorboard-chainer/branch/master/graph/badge.svg)](https://codecov.io/gh/neka-nat/tensorboard-chainer) 3 | 4 | [![Code Climate](https://codeclimate.com/github/neka-nat/tensorboard-chainer/badges/gpa.svg)](https://codeclimate.com/github/neka-nat/tensorboard-chainer) 5 | 6 | [![PyPI version](https://badge.fury.io/py/tensorboard-chainer.svg)](https://badge.fury.io/py/tensorboard-chainer) 7 | 8 | # tensorboard-chainer 9 | 10 | Write tensorboard events with simple command. 11 | including scalar, image, histogram, audio, text, graph and embedding. 12 | 13 | This is based on [tensorboard-pytorch](https://github.com/lanpa/tensorboard-pytorch). 14 | 15 | ## Usage 16 | 17 | Install tensorflow. 18 | 19 | ``` 20 | pip install tensorflow 21 | ``` 22 | 23 | Execute demo.py and tensorboard. 24 | Access "localhost:6006" in your browser. 25 | 26 | ``` 27 | cd examples 28 | python demo.py 29 | tensorboard --logdir runs 30 | ``` 31 | 32 | ## Scalar example 33 | 34 | ![graph](https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/master/screenshots/scalar.png) 35 | 36 | ## Histogram example 37 | 38 | ![graph](https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/master/screenshots/histogram.png) 39 | 40 | ## Graph example 41 | 42 | ![graph](https://raw.githubusercontent.com/neka-nat/tensorboard-chainer/master/screenshots/graph.gif) 43 | 44 | ## Name scope 45 | 46 | Like tensorflow, nodes in the graph can be grouped together in the namespace to make it easy to see. 
47 | 48 | ```python 49 | import chainer 50 | import chainer.functions as F 51 | import chainer.links as L 52 | from tb_chainer import name_scope, within_name_scope 53 | 54 | class MLP(chainer.Chain): 55 | def __init__(self, n_units, n_out): 56 | super(MLP, self).__init__() 57 | with self.init_scope(): 58 | self.l1 = L.Linear(None, n_units) # n_in -> n_units 59 | self.l2 = L.Linear(None, n_units) # n_units -> n_units 60 | self.l3 = L.Linear(None, n_out) # n_units -> n_out 61 | 62 | @within_name_scope('MLP') 63 | def __call__(self, x): 64 | with name_scope('linear1', self.l1.params()): 65 | h1 = F.relu(self.l1(x)) 66 | with name_scope('linear2', self.l2.params()): 67 | h2 = F.relu(self.l2(h1)) 68 | with name_scope('linear3', self.l3.params()): 69 | o = self.l3(h2) 70 | return o 71 | ``` 72 | 73 | How to save the logs using this model is shown below. 74 | `add_all_variable_images` is the function that saves the Variable's data in the model that matches the pattern as an images. 75 | `add_all_parameter_histograms` is the function that save histograms of the Parameter's data in the model that match the pattern. 
76 | 77 | ```python 78 | from datetime import datetime 79 | from tb_chainer import SummaryWriter 80 | 81 | model = L.Classifier(MLP(1000, 10)) 82 | 83 | res = model(chainer.Variable(np.random.rand(1, 784).astype(np.float32)), 84 | chainer.Variable(np.random.rand(1).astype(np.int32))) 85 | 86 | writer = SummaryWriter('runs/'+datetime.now().strftime('%B%d %H:%M:%S')) 87 | writer.add_graph([res]) 88 | writer.add_all_variable_images([res], pattern='.*MLP.*') 89 | writer.add_all_parameter_histograms([res], pattern='.*MLP.*') 90 | 91 | writer.close() 92 | ``` 93 | 94 | ## Reference 95 | 96 | * [tensorboard-pytorch](https://github.com/lanpa/tensorboard-pytorch) 97 | * [tensorboard_logger](https://github.com/TeamHG-Memex/tensorboard_logger) 98 | * [tfchain](https://github.com/mitmul/tfchain) -------------------------------------------------------------------------------- /tb_chainer/src/graph_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
2 | # source: tb_chainer/src/graph.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | from tb_chainer.src import node_def_pb2 as tb__chainer_dot_src_dot_node__def__pb2 17 | from tb_chainer.src import versions_pb2 as tb__chainer_dot_src_dot_versions__pb2 18 | 19 | 20 | DESCRIPTOR = _descriptor.FileDescriptor( 21 | name='tb_chainer/src/graph.proto', 22 | package='tensorboard', 23 | syntax='proto3', 24 | serialized_pb=_b('\n\x1atb_chainer/src/graph.proto\x12\x0btensorboard\x1a\x1dtb_chainer/src/node_def.proto\x1a\x1dtb_chainer/src/versions.proto\"n\n\x08GraphDef\x12\"\n\x04node\x18\x01 \x03(\x0b\x32\x14.tensorboard.NodeDef\x12)\n\x08versions\x18\x04 \x01(\x0b\x32\x17.tensorboard.VersionDef\x12\x13\n\x07version\x18\x03 \x01(\x05\x42\x02\x18\x01\x42,\n\x18org.tensorflow.frameworkB\x0bGraphProtosP\x01\xf8\x01\x01\x62\x06proto3') 25 | , 26 | dependencies=[tb__chainer_dot_src_dot_node__def__pb2.DESCRIPTOR,tb__chainer_dot_src_dot_versions__pb2.DESCRIPTOR,]) 27 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 28 | 29 | 30 | 31 | 32 | _GRAPHDEF = _descriptor.Descriptor( 33 | name='GraphDef', 34 | full_name='tensorboard.GraphDef', 35 | filename=None, 36 | file=DESCRIPTOR, 37 | containing_type=None, 38 | fields=[ 39 | _descriptor.FieldDescriptor( 40 | name='node', full_name='tensorboard.GraphDef.node', index=0, 41 | number=1, type=11, cpp_type=10, label=3, 42 | has_default_value=False, default_value=[], 43 | message_type=None, enum_type=None, containing_type=None, 44 | is_extension=False, extension_scope=None, 45 | options=None), 46 | 
_descriptor.FieldDescriptor( 47 | name='versions', full_name='tensorboard.GraphDef.versions', index=1, 48 | number=4, type=11, cpp_type=10, label=1, 49 | has_default_value=False, default_value=None, 50 | message_type=None, enum_type=None, containing_type=None, 51 | is_extension=False, extension_scope=None, 52 | options=None), 53 | _descriptor.FieldDescriptor( 54 | name='version', full_name='tensorboard.GraphDef.version', index=2, 55 | number=3, type=5, cpp_type=1, label=1, 56 | has_default_value=False, default_value=0, 57 | message_type=None, enum_type=None, containing_type=None, 58 | is_extension=False, extension_scope=None, 59 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), 60 | ], 61 | extensions=[ 62 | ], 63 | nested_types=[], 64 | enum_types=[ 65 | ], 66 | options=None, 67 | is_extendable=False, 68 | syntax='proto3', 69 | extension_ranges=[], 70 | oneofs=[ 71 | ], 72 | serialized_start=105, 73 | serialized_end=215, 74 | ) 75 | 76 | _GRAPHDEF.fields_by_name['node'].message_type = tb__chainer_dot_src_dot_node__def__pb2._NODEDEF 77 | _GRAPHDEF.fields_by_name['versions'].message_type = tb__chainer_dot_src_dot_versions__pb2._VERSIONDEF 78 | DESCRIPTOR.message_types_by_name['GraphDef'] = _GRAPHDEF 79 | 80 | GraphDef = _reflection.GeneratedProtocolMessageType('GraphDef', (_message.Message,), dict( 81 | DESCRIPTOR = _GRAPHDEF, 82 | __module__ = 'tb_chainer.src.graph_pb2' 83 | # @@protoc_insertion_point(class_scope:tensorboard.GraphDef) 84 | )) 85 | _sym_db.RegisterMessage(GraphDef) 86 | 87 | 88 | DESCRIPTOR.has_options = True 89 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\013GraphProtosP\001\370\001\001')) 90 | _GRAPHDEF.fields_by_name['version'].has_options = True 91 | _GRAPHDEF.fields_by_name['version']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) 92 | # @@protoc_insertion_point(module_scope) 93 | 
-------------------------------------------------------------------------------- /tb_chainer/src/resource_handle_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | # source: tb_chainer/src/resource_handle.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='tb_chainer/src/resource_handle.proto', 20 | package='tensorboard', 21 | syntax='proto3', 22 | serialized_pb=_b('\n$tb_chainer/src/resource_handle.proto\x12\x0btensorboard\"r\n\x13ResourceHandleProto\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t\x12\x11\n\tcontainer\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x11\n\thash_code\x18\x04 \x01(\x04\x12\x17\n\x0fmaybe_type_name\x18\x05 \x01(\tB/\n\x18org.tensorflow.frameworkB\x0eResourceHandleP\x01\xf8\x01\x01\x62\x06proto3') 23 | ) 24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 25 | 26 | 27 | 28 | 29 | _RESOURCEHANDLEPROTO = _descriptor.Descriptor( 30 | name='ResourceHandleProto', 31 | full_name='tensorboard.ResourceHandleProto', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | _descriptor.FieldDescriptor( 37 | name='device', full_name='tensorboard.ResourceHandleProto.device', index=0, 38 | number=1, type=9, cpp_type=9, label=1, 39 | has_default_value=False, default_value=_b("").decode('utf-8'), 40 | message_type=None, enum_type=None, containing_type=None, 41 | is_extension=False, extension_scope=None, 42 | options=None), 43 | 
_descriptor.FieldDescriptor( 44 | name='container', full_name='tensorboard.ResourceHandleProto.container', index=1, 45 | number=2, type=9, cpp_type=9, label=1, 46 | has_default_value=False, default_value=_b("").decode('utf-8'), 47 | message_type=None, enum_type=None, containing_type=None, 48 | is_extension=False, extension_scope=None, 49 | options=None), 50 | _descriptor.FieldDescriptor( 51 | name='name', full_name='tensorboard.ResourceHandleProto.name', index=2, 52 | number=3, type=9, cpp_type=9, label=1, 53 | has_default_value=False, default_value=_b("").decode('utf-8'), 54 | message_type=None, enum_type=None, containing_type=None, 55 | is_extension=False, extension_scope=None, 56 | options=None), 57 | _descriptor.FieldDescriptor( 58 | name='hash_code', full_name='tensorboard.ResourceHandleProto.hash_code', index=3, 59 | number=4, type=4, cpp_type=4, label=1, 60 | has_default_value=False, default_value=0, 61 | message_type=None, enum_type=None, containing_type=None, 62 | is_extension=False, extension_scope=None, 63 | options=None), 64 | _descriptor.FieldDescriptor( 65 | name='maybe_type_name', full_name='tensorboard.ResourceHandleProto.maybe_type_name', index=4, 66 | number=5, type=9, cpp_type=9, label=1, 67 | has_default_value=False, default_value=_b("").decode('utf-8'), 68 | message_type=None, enum_type=None, containing_type=None, 69 | is_extension=False, extension_scope=None, 70 | options=None), 71 | ], 72 | extensions=[ 73 | ], 74 | nested_types=[], 75 | enum_types=[ 76 | ], 77 | options=None, 78 | is_extendable=False, 79 | syntax='proto3', 80 | extension_ranges=[], 81 | oneofs=[ 82 | ], 83 | serialized_start=53, 84 | serialized_end=167, 85 | ) 86 | 87 | DESCRIPTOR.message_types_by_name['ResourceHandleProto'] = _RESOURCEHANDLEPROTO 88 | 89 | ResourceHandleProto = _reflection.GeneratedProtocolMessageType('ResourceHandleProto', (_message.Message,), dict( 90 | DESCRIPTOR = _RESOURCEHANDLEPROTO, 91 | __module__ = 'tb_chainer.src.resource_handle_pb2' 92 | # 
@@protoc_insertion_point(class_scope:tensorboard.ResourceHandleProto) 93 | )) 94 | _sym_db.RegisterMessage(ResourceHandleProto) 95 | 96 | 97 | DESCRIPTOR.has_options = True 98 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\016ResourceHandleP\001\370\001\001')) 99 | # @@protoc_insertion_point(module_scope) 100 | -------------------------------------------------------------------------------- /tb_chainer/utils.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | irange = range 4 | 5 | 6 | def make_grid(tensor, nrow=8, padding=2, 7 | normalize=False, range=None, scale_each=False, pad_value=0): 8 | """Make a grid of images. 9 | 10 | Args: 11 | tensor (Tensor or list): 4D mini-batch Tensor of shape (B x C x H x W) 12 | or a list of images all of the same size. 13 | nrows (int, optional): Number of rows in grid. Final grid size is 14 | (B / nrow, nrow). Default is 8. 15 | normalize (bool, optional): If True, shift the image to the range (0, 1), 16 | by subtracting the minimum and dividing by the maximum pixel value. 17 | range (tuple, optional): tuple (min, max) where min and max are numbers, 18 | then these numbers are used to normalize the image. By default, min and max 19 | are computed from the tensor. 20 | scale_each(bool, optional): If True, scale each image in the batch of 21 | images separately rather than the (min, max) over all images. 22 | pad_value(float, optional): Value for the padded pixels. 23 | """ 24 | # if list of tensors, convert to a 4D mini-batch Tensor 25 | if isinstance(tensor, list): 26 | tensorlist = tensor 27 | numImages = len(tensorlist) 28 | size = tuple([numImages] + tensorlist[0].shape) 29 | tensor = np.zeros(size) 30 | for i in irange(numImages): 31 | tensor[i] = tensorlist[i].copy() 32 | 33 | assert tensor.ndim < 5, "'tensor.ndim' must be less than 5. the given 'tensor.ndim' is %d." 
% tensor.ndim 34 | 35 | if tensor.ndim == 1: 36 | tensor = tensor.reshape((1, tensor.shape[0])) 37 | if tensor.ndim == 2: # single image H x W 38 | tensor = tensor.reshape((1, tensor.shape[0], tensor.shape[1])) 39 | if tensor.ndim == 3: # single image 40 | if tensor.shape[0] == 1: # if single-channel, convert to 3-channel 41 | tensor = np.concatenate((tensor, tensor, tensor), 0) 42 | return tensor 43 | if tensor.ndim == 4 and tensor.shape[1] == 1: # single-channel images 44 | tensor = np.concatenate((tensor, tensor, tensor), 1) 45 | 46 | if normalize is True: 47 | if range is not None: 48 | assert isinstance(range, tuple), \ 49 | "range has to be a tuple (min, max) if specified. min and max are numbers" 50 | 51 | def norm_ip(img, min, max): 52 | img = img.clip(min=min, max=max) 53 | img = (img - min) / (max - min) 54 | 55 | def norm_range(t, range): 56 | if range is not None: 57 | norm_ip(t, range[0], range[1]) 58 | else: 59 | norm_ip(t, t.min(), t.max()) 60 | 61 | if scale_each is True: 62 | for t in tensor: # loop over mini-batch dimension 63 | norm_range(t, range) 64 | else: 65 | norm_range(tensor, range) 66 | 67 | # make the mini-batch of images into a grid 68 | nmaps = tensor.shape[0] 69 | xmaps = min(nrow, nmaps) 70 | ymaps = int(math.ceil(float(nmaps) / xmaps)) 71 | height, width = int(tensor.shape[2] + padding), int(tensor.shape[3] + padding) 72 | grid = np.ones((3, height * ymaps + 1 + padding // 2, width * xmaps + 1 + padding // 2)) * pad_value 73 | k = 0 74 | for y in irange(ymaps): 75 | for x in irange(xmaps): 76 | if k >= nmaps: 77 | break 78 | grid[:, (y * height + 1 + padding // 2):((y + 1) * height + 1 + padding // 2 - padding), (x * width + 1 + padding // 2):((x + 1) * width + 1 + padding // 2 - padding)] = tensor[k] 79 | k += 1 80 | return grid 81 | 82 | 83 | def save_image(tensor, filename, nrow=8, padding=2, 84 | normalize=False, range=None, scale_each=False, pad_value=0): 85 | """Save a given Tensor into an image file. 
86 | 87 | Args: 88 | tensor (Tensor or list): Image to be saved. If given a mini-batch tensor, 89 | saves the tensor as a grid of images by calling ``make_grid``. 90 | **kwargs: Other arguments are documented in ``make_grid``. 91 | """ 92 | from PIL import Image 93 | grid = make_grid(tensor, nrow=nrow, padding=padding, pad_value=pad_value, 94 | normalize=normalize, range=range, scale_each=scale_each) 95 | ndarr = (grid * 255).clip(0, 255).astype(np.uint8).transpose((1, 2, 0)) 96 | im = Image.fromarray(ndarr) 97 | im.save(filename) 98 | -------------------------------------------------------------------------------- /tb_chainer/src/summary.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorboard; 4 | option cc_enable_arenas = true; 5 | option java_outer_classname = "SummaryProtos"; 6 | option java_multiple_files = true; 7 | option java_package = "org.tensorflow.framework"; 8 | 9 | import "tb_chainer/src/tensor.proto"; 10 | 11 | // Metadata associated with a series of Summary data 12 | message SummaryDescription { 13 | // Hint on how plugins should process the data in this series. 14 | // Supported values include "scalar", "histogram", "image", "audio" 15 | string type_hint = 1; 16 | } 17 | 18 | // Serialization format for histogram module in 19 | // core/lib/histogram/histogram.h 20 | message HistogramProto { 21 | double min = 1; 22 | double max = 2; 23 | double num = 3; 24 | double sum = 4; 25 | double sum_squares = 5; 26 | 27 | // Parallel arrays encoding the bucket boundaries and the bucket values. 28 | // bucket(i) is the count for the bucket i. The range for 29 | // a bucket is: 30 | // i == 0: -DBL_MAX .. bucket_limit(0) 31 | // i != 0: bucket_limit(i-1) .. 
bucket_limit(i) 32 | repeated double bucket_limit = 6 [packed = true]; 33 | repeated double bucket = 7 [packed = true]; 34 | }; 35 | 36 | // A SummaryMetadata encapsulates information on which plugins are able to make 37 | // use of a certain summary value. 38 | message SummaryMetadata { 39 | message PluginData { 40 | // The name of the plugin this data pertains to. 41 | string plugin_name = 1; 42 | 43 | // The content to store for the plugin. The best practice is for this JSON 44 | // string to be the canonical JSON serialization of a protocol buffer 45 | // defined by the plugin. Converting that protobuf to and from JSON is the 46 | // responsibility of the plugin code, and is not enforced by 47 | // TensorFlow/TensorBoard. 48 | string content = 2; 49 | } 50 | 51 | // A list of plugin data. A single summary value instance may be used by more 52 | // than 1 plugin. 53 | repeated PluginData plugin_data = 1; 54 | }; 55 | 56 | // A Summary is a set of named values to be displayed by the 57 | // visualizer. 58 | // 59 | // Summaries are produced regularly during training, as controlled by 60 | // the "summary_interval_secs" attribute of the training operation. 61 | // Summaries are also produced at the end of an evaluation. 62 | message Summary { 63 | message Image { 64 | // Dimensions of the image. 65 | int32 height = 1; 66 | int32 width = 2; 67 | // Valid colorspace values are 68 | // 1 - grayscale 69 | // 2 - grayscale + alpha 70 | // 3 - RGB 71 | // 4 - RGBA 72 | // 5 - DIGITAL_YUV 73 | // 6 - BGRA 74 | int32 colorspace = 3; 75 | // Image data in encoded format. All image formats supported by 76 | // image_codec::CoderUtil can be stored here. 77 | bytes encoded_image_string = 4; 78 | } 79 | 80 | message Audio { 81 | // Sample rate of the audio in Hz. 82 | float sample_rate = 1; 83 | // Number of channels of audio. 84 | int64 num_channels = 2; 85 | // Length of the audio in frames (samples per channel). 
86 | int64 length_frames = 3; 87 | // Encoded audio data and its associated RFC 2045 content type (e.g. 88 | // "audio/wav"). 89 | bytes encoded_audio_string = 4; 90 | string content_type = 5; 91 | } 92 | 93 | message Value { 94 | // Name of the node that output this summary; in general, the name of a 95 | // TensorSummary node. If the node in question has multiple outputs, then 96 | // a ":\d+" suffix will be appended, like "some_op:13". 97 | // Might not be set for legacy summaries (i.e. those not using the tensor 98 | // value field) 99 | string node_name = 7; 100 | 101 | // Tag name for the data. Will only be used by legacy summaries 102 | // (ie. those not using the tensor value field) 103 | // For legacy summaries, will be used as the title of the graph 104 | // in the visualizer. 105 | // 106 | // Tag is usually "op_name:value_name", where "op_name" itself can have 107 | // structure to indicate grouping. 108 | string tag = 1; 109 | SummaryMetadata metadata = 9; 110 | // Value associated with the tag. 111 | oneof value { 112 | float simple_value = 2; 113 | bytes obsolete_old_style_histogram = 3; 114 | Image image = 4; 115 | HistogramProto histo = 5; 116 | Audio audio = 6; 117 | TensorProto tensor = 8; 118 | } 119 | } 120 | 121 | // Set of values for the summary. 
122 | repeated Value value = 1; 123 | } 124 | -------------------------------------------------------------------------------- /tb_chainer/name_scope.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import inspect 3 | import chainer 4 | from chainer import function_node 5 | from chainer import variable 6 | import functools 7 | from types import MethodType 8 | if sys.version_info >= (3, 0): 9 | def method_wraper(f): 10 | def wrapper(*args, **kwargs): 11 | return f(*args, **kwargs) 12 | return functools.wraps(f)(wrapper) 13 | gen_method = lambda m, i, c: method_wraper(m) 14 | else: 15 | gen_method = lambda m, i, c: MethodType(m, i, c) 16 | 17 | def _copy_method(f, c): 18 | g = gen_method(f, None, c) 19 | return g 20 | 21 | def _init_with_name_scope(self, *args, **kargs): 22 | self.name_scope = kargs['_name_scope'] 23 | org_init = kargs['_org_init'] 24 | retain_data = kargs['_retain_data'] 25 | del kargs['_name_scope'] 26 | del kargs['_org_init'] 27 | del kargs['_retain_data'] 28 | org_init(self, *args, **kargs) 29 | if retain_data and isinstance(self, variable.VariableNode): 30 | self.retain_data() 31 | 32 | def _new_with_name_scope(cls, *args, **kargs): 33 | ns = kargs['_name_scope'] 34 | org_new = kargs['_org_new'] 35 | self = org_new(cls) 36 | self.name_scope = ns 37 | return self 38 | 39 | _org_classes = [] 40 | _copy_org_inits = [] 41 | 42 | def register_functions(funcs): 43 | """Register function nodes to use name_scope. 
44 | Args: 45 | funcs (list): List of function nodes 46 | """ 47 | global _org_classes, _copy_org_inits 48 | _org_classes.extend(funcs) 49 | for c in _org_classes: 50 | if c == variable.VariableNode: 51 | _copy_org_inits.append(_copy_method(c.__init__, c)) 52 | else: 53 | _copy_org_inits.append(c.__new__) 54 | 55 | register_functions([function_node.FunctionNode, variable.VariableNode]) 56 | 57 | class name_scope(object): 58 | """Class that creates hierarchical names for operations and variables. 59 | Args: 60 | name (str): Name for setting namespace. 61 | values (list): Variable in the namespace. 62 | retain_data (bool): Hold the data in the variable. 63 | Example: 64 | You can set namespace using "with" statement. 65 | In the following example, no namespace is set for the variable 'X', but 66 | the variable 'Y' and the relu function are set to the namespace "test". 67 | 68 | x = chainer.Variable(...) 69 | with name_scope('test'): 70 | y = F.relu(x) 71 | """ 72 | stack = [] 73 | def __init__(self, name, values=[], retain_data=True): 74 | self.stack.append(name) 75 | self._org_inits = [] 76 | self._retain_data = retain_data 77 | for v in values: 78 | v.node.name_scope = '/'.join(self.stack) 79 | 80 | def __enter__(self): 81 | for idx, c in enumerate(_org_classes): 82 | if c == variable.VariableNode: 83 | self._org_inits.append(c.__init__) 84 | c.__init__ = gen_method(functools.partial(_init_with_name_scope, 85 | _name_scope='/'.join(self.stack), 86 | _org_init=_copy_org_inits[idx], 87 | _retain_data=self._retain_data), 88 | None, c) 89 | else: 90 | self._org_inits.append(c.__new__) 91 | c.__new__ = classmethod(functools.partial(_new_with_name_scope, 92 | _name_scope='/'.join(self.stack), 93 | _org_new=_copy_org_inits[idx])) 94 | return self 95 | 96 | def __exit__(self, exec_type, exec_value, traceback): 97 | for idx, c in enumerate(_org_classes): 98 | if c == variable.VariableNode: 99 | c.__init__ = self._org_inits[idx] 100 | else: 101 | c.__new__ = 
classmethod(functools.partial(_new_with_name_scope, 102 | _name_scope='/'.join(self.stack[:-1]), 103 | _org_new=_copy_org_inits[idx])) 104 | self.stack.pop(-1) 105 | 106 | def within_name_scope(name, retain_data=True): 107 | """Decorator for link class methods. 108 | Args: 109 | name (str): Name for setting namespace. 110 | retain_data (bool): Hold the data in the variable. 111 | """ 112 | def decorator(func): 113 | @functools.wraps(func) 114 | def wrapper(self, *args, **kwargs): 115 | with name_scope(name, self.params(), retain_data=retain_data): 116 | res = func(self, *args, **kwargs) 117 | return res 118 | return wrapper 119 | return decorator 120 | -------------------------------------------------------------------------------- /tb_chainer/src/tensor_shape_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | # source: tb_chainer/src/tensor_shape.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='tb_chainer/src/tensor_shape.proto', 20 | package='tensorboard', 21 | syntax='proto3', 22 | serialized_pb=_b('\n!tb_chainer/src/tensor_shape.proto\x12\x0btensorboard\"{\n\x10TensorShapeProto\x12.\n\x03\x64im\x18\x02 \x03(\x0b\x32!.tensorboard.TensorShapeProto.Dim\x12\x14\n\x0cunknown_rank\x18\x03 \x01(\x08\x1a!\n\x03\x44im\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\tB2\n\x18org.tensorflow.frameworkB\x11TensorShapeProtosP\x01\xf8\x01\x01\x62\x06proto3') 
23 | ) 24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 25 | 26 | 27 | 28 | 29 | _TENSORSHAPEPROTO_DIM = _descriptor.Descriptor( 30 | name='Dim', 31 | full_name='tensorboard.TensorShapeProto.Dim', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | _descriptor.FieldDescriptor( 37 | name='size', full_name='tensorboard.TensorShapeProto.Dim.size', index=0, 38 | number=1, type=3, cpp_type=2, label=1, 39 | has_default_value=False, default_value=0, 40 | message_type=None, enum_type=None, containing_type=None, 41 | is_extension=False, extension_scope=None, 42 | options=None), 43 | _descriptor.FieldDescriptor( 44 | name='name', full_name='tensorboard.TensorShapeProto.Dim.name', index=1, 45 | number=2, type=9, cpp_type=9, label=1, 46 | has_default_value=False, default_value=_b("").decode('utf-8'), 47 | message_type=None, enum_type=None, containing_type=None, 48 | is_extension=False, extension_scope=None, 49 | options=None), 50 | ], 51 | extensions=[ 52 | ], 53 | nested_types=[], 54 | enum_types=[ 55 | ], 56 | options=None, 57 | is_extendable=False, 58 | syntax='proto3', 59 | extension_ranges=[], 60 | oneofs=[ 61 | ], 62 | serialized_start=140, 63 | serialized_end=173, 64 | ) 65 | 66 | _TENSORSHAPEPROTO = _descriptor.Descriptor( 67 | name='TensorShapeProto', 68 | full_name='tensorboard.TensorShapeProto', 69 | filename=None, 70 | file=DESCRIPTOR, 71 | containing_type=None, 72 | fields=[ 73 | _descriptor.FieldDescriptor( 74 | name='dim', full_name='tensorboard.TensorShapeProto.dim', index=0, 75 | number=2, type=11, cpp_type=10, label=3, 76 | has_default_value=False, default_value=[], 77 | message_type=None, enum_type=None, containing_type=None, 78 | is_extension=False, extension_scope=None, 79 | options=None), 80 | _descriptor.FieldDescriptor( 81 | name='unknown_rank', full_name='tensorboard.TensorShapeProto.unknown_rank', index=1, 82 | number=3, type=8, cpp_type=7, label=1, 83 | has_default_value=False, default_value=False, 84 | 
message_type=None, enum_type=None, containing_type=None, 85 | is_extension=False, extension_scope=None, 86 | options=None), 87 | ], 88 | extensions=[ 89 | ], 90 | nested_types=[_TENSORSHAPEPROTO_DIM, ], 91 | enum_types=[ 92 | ], 93 | options=None, 94 | is_extendable=False, 95 | syntax='proto3', 96 | extension_ranges=[], 97 | oneofs=[ 98 | ], 99 | serialized_start=50, 100 | serialized_end=173, 101 | ) 102 | 103 | _TENSORSHAPEPROTO_DIM.containing_type = _TENSORSHAPEPROTO 104 | _TENSORSHAPEPROTO.fields_by_name['dim'].message_type = _TENSORSHAPEPROTO_DIM 105 | DESCRIPTOR.message_types_by_name['TensorShapeProto'] = _TENSORSHAPEPROTO 106 | 107 | TensorShapeProto = _reflection.GeneratedProtocolMessageType('TensorShapeProto', (_message.Message,), dict( 108 | 109 | Dim = _reflection.GeneratedProtocolMessageType('Dim', (_message.Message,), dict( 110 | DESCRIPTOR = _TENSORSHAPEPROTO_DIM, 111 | __module__ = 'tb_chainer.src.tensor_shape_pb2' 112 | # @@protoc_insertion_point(class_scope:tensorboard.TensorShapeProto.Dim) 113 | )) 114 | , 115 | DESCRIPTOR = _TENSORSHAPEPROTO, 116 | __module__ = 'tb_chainer.src.tensor_shape_pb2' 117 | # @@protoc_insertion_point(class_scope:tensorboard.TensorShapeProto) 118 | )) 119 | _sym_db.RegisterMessage(TensorShapeProto) 120 | _sym_db.RegisterMessage(TensorShapeProto.Dim) 121 | 122 | 123 | DESCRIPTOR.has_options = True 124 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\021TensorShapeProtosP\001\370\001\001')) 125 | # @@protoc_insertion_point(module_scope) 126 | -------------------------------------------------------------------------------- /tb_chainer/crc32c.py: -------------------------------------------------------------------------------- 1 | import array 2 | 3 | 4 | CRC_TABLE = ( 5 | 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 6 | 0xc79a971f, 0x35f1141c, 0x26a1e7e8, 0xd4ca64eb, 7 | 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, 8 | 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 
0x5e133c24, 9 | 0x105ec76f, 0xe235446c, 0xf165b798, 0x030e349b, 10 | 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, 11 | 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 12 | 0x5d1d08bf, 0xaf768bbc, 0xbc267848, 0x4e4dfb4b, 13 | 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, 14 | 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 15 | 0xaa64d611, 0x580f5512, 0x4b5fa6e6, 0xb93425e5, 16 | 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, 17 | 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 18 | 0xf779deae, 0x05125dad, 0x1642ae59, 0xe4292d5a, 19 | 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, 20 | 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 21 | 0x417b1dbc, 0xb3109ebf, 0xa0406d4b, 0x522bee48, 22 | 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, 23 | 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 24 | 0x0c38d26c, 0xfe53516f, 0xed03a29b, 0x1f682198, 25 | 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, 26 | 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 27 | 0xdbfc821c, 0x2997011f, 0x3ac7f2eb, 0xc8ac71e8, 28 | 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, 29 | 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 30 | 0xa65c047d, 0x5437877e, 0x4767748a, 0xb50cf789, 31 | 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, 32 | 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 33 | 0x7198540d, 0x83f3d70e, 0x90a324fa, 0x62c8a7f9, 34 | 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, 35 | 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 36 | 0x3cdb9bdd, 0xceb018de, 0xdde0eb2a, 0x2f8b6829, 37 | 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, 38 | 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 39 | 0x082f63b7, 0xfa44e0b4, 0xe9141340, 0x1b7f9043, 40 | 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, 41 | 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 42 | 0x55326b08, 0xa759e80b, 0xb4091bff, 0x466298fc, 43 | 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, 44 | 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 45 | 0xa24bb5a6, 0x502036a5, 0x4370c551, 0xb11b4652, 46 | 0x65d122b9, 
0x97baa1ba, 0x84ea524e, 0x7681d14d, 47 | 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 48 | 0xef087a76, 0x1d63f975, 0x0e330a81, 0xfc588982, 49 | 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, 50 | 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 51 | 0x38cc2a06, 0xcaa7a905, 0xd9f75af1, 0x2b9cd9f2, 52 | 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, 53 | 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 54 | 0x0417b1db, 0xf67c32d8, 0xe52cc12c, 0x1747422f, 55 | 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, 56 | 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 57 | 0xd3d3e1ab, 0x21b862a8, 0x32e8915c, 0xc083125f, 58 | 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, 59 | 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 60 | 0x9e902e7b, 0x6cfbad78, 0x7fab5e8c, 0x8dc0dd8f, 61 | 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, 62 | 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 63 | 0x69e9f0d5, 0x9b8273d6, 0x88d28022, 0x7ab90321, 64 | 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, 65 | 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 66 | 0x34f4f86a, 0xc69f7b69, 0xd5cf889d, 0x27a40b9e, 67 | 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, 68 | 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, 69 | ) 70 | 71 | 72 | CRC_INIT = 0 73 | 74 | _MASK = 0xFFFFFFFF 75 | 76 | 77 | def crc_update(crc, data): 78 | """Update CRC-32C checksum with data. 79 | 80 | Args: 81 | crc: 32-bit checksum to update as long. 82 | data: byte array, string or iterable over bytes. 83 | 84 | Returns: 85 | 32-bit updated CRC-32C as long. 86 | """ 87 | 88 | if type(data) != array.array or data.itemsize != 1: 89 | buf = array.array("B", data) 90 | else: 91 | buf = data 92 | 93 | crc ^= _MASK 94 | for b in buf: 95 | table_index = (crc ^ b) & 0xff 96 | crc = (CRC_TABLE[table_index] ^ (crc >> 8)) & _MASK 97 | return crc ^ _MASK 98 | 99 | 100 | def crc_finalize(crc): 101 | """Finalize CRC-32C checksum. 102 | 103 | This function should be called as last step of crc calculation. 
104 | 105 | Args: 106 | crc: 32-bit checksum as long. 107 | 108 | Returns: 109 | finalized 32-bit checksum as long 110 | """ 111 | return crc & _MASK 112 | 113 | 114 | def crc32c(data): 115 | """Compute CRC-32C checksum of the data. 116 | 117 | Args: 118 | data: byte array, string or iterable over bytes. 119 | 120 | Returns: 121 | 32-bit CRC-32C checksum of data as long. 122 | """ 123 | return crc_finalize(crc_update(CRC_INIT, data)) 124 | -------------------------------------------------------------------------------- /tb_chainer/src/node_def_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | # source: tb_chainer/src/node_def.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | from tb_chainer.src import attr_value_pb2 as tb__chainer_dot_src_dot_attr__value__pb2 17 | 18 | 19 | DESCRIPTOR = _descriptor.FileDescriptor( 20 | name='tb_chainer/src/node_def.proto', 21 | package='tensorboard', 22 | syntax='proto3', 23 | serialized_pb=_b('\n\x1dtb_chainer/src/node_def.proto\x12\x0btensorboard\x1a\x1ftb_chainer/src/attr_value.proto\"\xb5\x01\n\x07NodeDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\n\n\x02op\x18\x02 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12\x0e\n\x06\x64\x65vice\x18\x04 \x01(\t\x12,\n\x04\x61ttr\x18\x05 \x03(\x0b\x32\x1e.tensorboard.NodeDef.AttrEntry\x1a\x43\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 
\x01(\x0b\x32\x16.tensorboard.AttrValue:\x02\x38\x01\x42*\n\x18org.tensorflow.frameworkB\tNodeProtoP\x01\xf8\x01\x01\x62\x06proto3') 24 | , 25 | dependencies=[tb__chainer_dot_src_dot_attr__value__pb2.DESCRIPTOR,]) 26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 27 | 28 | 29 | 30 | 31 | _NODEDEF_ATTRENTRY = _descriptor.Descriptor( 32 | name='AttrEntry', 33 | full_name='tensorboard.NodeDef.AttrEntry', 34 | filename=None, 35 | file=DESCRIPTOR, 36 | containing_type=None, 37 | fields=[ 38 | _descriptor.FieldDescriptor( 39 | name='key', full_name='tensorboard.NodeDef.AttrEntry.key', index=0, 40 | number=1, type=9, cpp_type=9, label=1, 41 | has_default_value=False, default_value=_b("").decode('utf-8'), 42 | message_type=None, enum_type=None, containing_type=None, 43 | is_extension=False, extension_scope=None, 44 | options=None), 45 | _descriptor.FieldDescriptor( 46 | name='value', full_name='tensorboard.NodeDef.AttrEntry.value', index=1, 47 | number=2, type=11, cpp_type=10, label=1, 48 | has_default_value=False, default_value=None, 49 | message_type=None, enum_type=None, containing_type=None, 50 | is_extension=False, extension_scope=None, 51 | options=None), 52 | ], 53 | extensions=[ 54 | ], 55 | nested_types=[], 56 | enum_types=[ 57 | ], 58 | options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), 59 | is_extendable=False, 60 | syntax='proto3', 61 | extension_ranges=[], 62 | oneofs=[ 63 | ], 64 | serialized_start=194, 65 | serialized_end=261, 66 | ) 67 | 68 | _NODEDEF = _descriptor.Descriptor( 69 | name='NodeDef', 70 | full_name='tensorboard.NodeDef', 71 | filename=None, 72 | file=DESCRIPTOR, 73 | containing_type=None, 74 | fields=[ 75 | _descriptor.FieldDescriptor( 76 | name='name', full_name='tensorboard.NodeDef.name', index=0, 77 | number=1, type=9, cpp_type=9, label=1, 78 | has_default_value=False, default_value=_b("").decode('utf-8'), 79 | message_type=None, enum_type=None, containing_type=None, 80 | is_extension=False, extension_scope=None, 
81 | options=None), 82 | _descriptor.FieldDescriptor( 83 | name='op', full_name='tensorboard.NodeDef.op', index=1, 84 | number=2, type=9, cpp_type=9, label=1, 85 | has_default_value=False, default_value=_b("").decode('utf-8'), 86 | message_type=None, enum_type=None, containing_type=None, 87 | is_extension=False, extension_scope=None, 88 | options=None), 89 | _descriptor.FieldDescriptor( 90 | name='input', full_name='tensorboard.NodeDef.input', index=2, 91 | number=3, type=9, cpp_type=9, label=3, 92 | has_default_value=False, default_value=[], 93 | message_type=None, enum_type=None, containing_type=None, 94 | is_extension=False, extension_scope=None, 95 | options=None), 96 | _descriptor.FieldDescriptor( 97 | name='device', full_name='tensorboard.NodeDef.device', index=3, 98 | number=4, type=9, cpp_type=9, label=1, 99 | has_default_value=False, default_value=_b("").decode('utf-8'), 100 | message_type=None, enum_type=None, containing_type=None, 101 | is_extension=False, extension_scope=None, 102 | options=None), 103 | _descriptor.FieldDescriptor( 104 | name='attr', full_name='tensorboard.NodeDef.attr', index=4, 105 | number=5, type=11, cpp_type=10, label=3, 106 | has_default_value=False, default_value=[], 107 | message_type=None, enum_type=None, containing_type=None, 108 | is_extension=False, extension_scope=None, 109 | options=None), 110 | ], 111 | extensions=[ 112 | ], 113 | nested_types=[_NODEDEF_ATTRENTRY, ], 114 | enum_types=[ 115 | ], 116 | options=None, 117 | is_extendable=False, 118 | syntax='proto3', 119 | extension_ranges=[], 120 | oneofs=[ 121 | ], 122 | serialized_start=80, 123 | serialized_end=261, 124 | ) 125 | 126 | _NODEDEF_ATTRENTRY.fields_by_name['value'].message_type = tb__chainer_dot_src_dot_attr__value__pb2._ATTRVALUE 127 | _NODEDEF_ATTRENTRY.containing_type = _NODEDEF 128 | _NODEDEF.fields_by_name['attr'].message_type = _NODEDEF_ATTRENTRY 129 | DESCRIPTOR.message_types_by_name['NodeDef'] = _NODEDEF 130 | 131 | NodeDef = 
_reflection.GeneratedProtocolMessageType('NodeDef', (_message.Message,), dict( 132 | 133 | AttrEntry = _reflection.GeneratedProtocolMessageType('AttrEntry', (_message.Message,), dict( 134 | DESCRIPTOR = _NODEDEF_ATTRENTRY, 135 | __module__ = 'tb_chainer.src.node_def_pb2' 136 | # @@protoc_insertion_point(class_scope:tensorboard.NodeDef.AttrEntry) 137 | )) 138 | , 139 | DESCRIPTOR = _NODEDEF, 140 | __module__ = 'tb_chainer.src.node_def_pb2' 141 | # @@protoc_insertion_point(class_scope:tensorboard.NodeDef) 142 | )) 143 | _sym_db.RegisterMessage(NodeDef) 144 | _sym_db.RegisterMessage(NodeDef.AttrEntry) 145 | 146 | 147 | DESCRIPTOR.has_options = True 148 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\tNodeProtoP\001\370\001\001')) 149 | _NODEDEF_ATTRENTRY.has_options = True 150 | _NODEDEF_ATTRENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) 151 | # @@protoc_insertion_point(module_scope) 152 | -------------------------------------------------------------------------------- /tb_chainer/event_file_writer.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """Writes events to disk in a logdir.""" 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | import logging 22 | import os.path 23 | import socket 24 | import threading 25 | import time 26 | 27 | import six 28 | 29 | from .src import event_pb2 30 | from .record_writer import RecordWriter 31 | 32 | 33 | def directory_check(path): 34 | '''Initialize the directory for log files.''' 35 | # If the direcotry does not exist, create it! 36 | if not os.path.exists(path): 37 | os.makedirs(path) 38 | 39 | 40 | class EventsWriter(object): 41 | '''Writes `Event` protocol buffers to an event file.''' 42 | 43 | def __init__(self, file_prefix): 44 | ''' 45 | Events files have a name of the form 46 | '/some/file/path/events.out.tfevents.[timestamp].[hostname]' 47 | ''' 48 | self._file_prefix = file_prefix + ".out.tfevents." \ 49 | + str(time.time())[:10] + "." + socket.gethostname() 50 | 51 | # Open(Create) the log file with the particular form of name. 52 | logging.basicConfig(filename=self._file_prefix) 53 | 54 | self._num_outstanding_events = 0 55 | 56 | self._py_recordio_writer = RecordWriter(self._file_prefix) 57 | 58 | # Initialize an event instance. 59 | self._event = event_pb2.Event() 60 | 61 | self._event.wall_time = time.time() 62 | 63 | self.write_event(self._event) 64 | 65 | def write_event(self, event): 66 | '''Append "event" to the file.''' 67 | 68 | # Check if event is of type event_pb2.Event proto. 
69 | if not isinstance(event, event_pb2.Event): 70 | raise TypeError("Expected an event_pb2.Event proto, " 71 | " but got %s" % type(event)) 72 | return self._write_serialized_event(event.SerializeToString()) 73 | 74 | def _write_serialized_event(self, event_str): 75 | self._num_outstanding_events += 1 76 | self._py_recordio_writer.write(event_str) 77 | 78 | def flush(self): 79 | '''Flushes the event file to disk.''' 80 | self._num_outstanding_events = 0 81 | return True 82 | 83 | def close(self): 84 | '''Call self.flush().''' 85 | return_value = self.flush() 86 | return return_value 87 | 88 | 89 | class EventFileWriter(object): 90 | """Writes `Event` protocol buffers to an event file. 91 | The `EventFileWriter` class creates an event file in the specified directory, 92 | and asynchronously writes Event protocol buffers to the file. The Event file 93 | is encoded using the tfrecord format, which is similar to RecordIO. 94 | @@__init__ 95 | @@add_event 96 | @@flush 97 | @@close 98 | """ 99 | 100 | def __init__(self, logdir, max_queue=10, flush_secs=120): 101 | """Creates a `EventFileWriter` and an event file to write to. 102 | On construction the summary writer creates a new event file in `logdir`. 103 | This event file will contain `Event` protocol buffers, which are written to 104 | disk via the add_event method. 105 | The other arguments to the constructor control the asynchronous writes to 106 | the event file: 107 | * `flush_secs`: How often, in seconds, to flush the added summaries 108 | and events to disk. 109 | * `max_queue`: Maximum number of summaries or events pending to be 110 | written to disk before one of the 'add' calls block. 111 | Args: 112 | logdir: A string. Directory where event file will be written. 113 | max_queue: Integer. Size of the queue for pending events and summaries. 114 | flush_secs: Number. How often, in seconds, to flush the 115 | pending events and summaries to disk. 
116 | """ 117 | self._logdir = logdir 118 | directory_check(self._logdir) 119 | self._event_queue = six.moves.queue.Queue(max_queue) 120 | self._ev_writer = EventsWriter(os.path.join(self._logdir, "events")) 121 | self._closed = False 122 | self._worker = _EventLoggerThread(self._event_queue, self._ev_writer, 123 | flush_secs) 124 | 125 | self._worker.start() 126 | 127 | def get_logdir(self): 128 | """Returns the directory where event file will be written.""" 129 | return self._logdir 130 | 131 | def reopen(self): 132 | """Reopens the EventFileWriter. 133 | Can be called after `close()` to add more events in the same directory. 134 | The events will go into a new events file. 135 | Does nothing if the EventFileWriter was not closed. 136 | """ 137 | if self._closed: 138 | self._closed = False 139 | 140 | def add_event(self, event): 141 | """Adds an event to the event file. 142 | Args: 143 | event: An `Event` protocol buffer. 144 | """ 145 | if not self._closed: 146 | self._event_queue.put(event) 147 | 148 | def flush(self): 149 | """Flushes the event file to disk. 150 | Call this method to make sure that all pending events have been written to 151 | disk. 152 | """ 153 | self._event_queue.join() 154 | self._ev_writer.flush() 155 | 156 | def close(self): 157 | """Flushes the event file to disk and close the file. 158 | Call this method when you do not need the summary writer anymore. 159 | """ 160 | self.flush() 161 | self._ev_writer.close() 162 | self._closed = True 163 | 164 | 165 | class _EventLoggerThread(threading.Thread): 166 | """Thread that logs events.""" 167 | 168 | def __init__(self, queue, ev_writer, flush_secs): 169 | """Creates an _EventLoggerThread. 170 | Args: 171 | queue: A Queue from which to dequeue events. 172 | ev_writer: An event writer. Used to log brain events for 173 | the visualizer. 174 | flush_secs: How often, in seconds, to flush the 175 | pending file to disk. 
176 | """ 177 | threading.Thread.__init__(self) 178 | self.daemon = True 179 | self._queue = queue 180 | self._ev_writer = ev_writer 181 | self._flush_secs = flush_secs 182 | # The first event will be flushed immediately. 183 | self._next_event_flush_time = 0 184 | 185 | def run(self): 186 | while True: 187 | event = self._queue.get() 188 | try: 189 | self._ev_writer.write_event(event) 190 | # Flush the event writer every so often. 191 | now = time.time() 192 | if now > self._next_event_flush_time: 193 | self._ev_writer.flush() 194 | # Do it again in two minutes. 195 | self._next_event_flush_time = now + self._flush_secs 196 | finally: 197 | self._queue.task_done() 198 | -------------------------------------------------------------------------------- /tb_chainer/graph.py: -------------------------------------------------------------------------------- 1 | from .src.graph_pb2 import GraphDef 2 | from .src.node_def_pb2 import NodeDef 3 | from .src.versions_pb2 import VersionDef 4 | from .src.attr_value_pb2 import AttrValue 5 | from .src.tensor_shape_pb2 import TensorShapeProto 6 | from .src import types_pb2 as dt 7 | from .ordered_set import OrderedSet 8 | import heapq 9 | from collections import defaultdict 10 | import numpy as np 11 | import chainer.variable 12 | import chainer.function_node 13 | import chainer.computational_graph as c 14 | 15 | 16 | def build_computational_graph( 17 | outputs, remove_split=True, variable_style='default', 18 | function_style='default', rankdir='TB', remove_variable=False, 19 | show_name=True): 20 | """Builds a graph of functions and variables backward-reachable from outputs. 21 | Args: 22 | outputs (:class:`~chainer.Variable`, \ 23 | :class:`~chainer.variable.VariableNode`, \ 24 | :class:`~chainer.FunctionNode`, or :class:`list`): node(s) from which 25 | the graph is constructed. 
26 | Each element of outputs must be either :class:`~chainer.Variable` 27 | object, :class:`~chainer.variable.VariableNode` object, or 28 | :class:`~chainer.FunctionNode` object. 29 | remove_split(bool): It must be ``True``. This argument is left for 30 | backward compatibility. 31 | variable_style(dict or 'default'): Dot node style for variable. 32 | Possible keys are 'shape', 'color', 'fillcolor', 'style' etc. 33 | If the special value ``'default'`` is specified, the default 34 | configuration will be used. 35 | function_style(dict or 'default'): Dot node style for function. 36 | Possible keys are 'shape', 'color', 'fillcolor', 'style' etc. 37 | If the special value ``'default'`` is specified, the default 38 | configuration will be used. 39 | rankdir (str): Direction of the graph that must be 40 | TB (top to bottom), BT (bottom to top), LR (left to right) 41 | or RL (right to left). 42 | remove_variable (bool): If ``True``, :class:`VariableNode`\\ s are 43 | removed from the resulting computational graph. Only 44 | :class:`FunctionNode`\\ s are shown in the output. 45 | show_name (bool): If ``True``, the ``name`` attribute of each node is 46 | added to the label of the node. Default is ``True``. 47 | Returns: 48 | ComputationalGraph: A graph consisting of nodes and edges that 49 | are backward-reachable from at least one of ``outputs``. 50 | If ``unchain_backward`` was called in some variable in the 51 | computational graph before this function, backward step is 52 | stopped at this variable. 53 | For example, suppose that computational graph is as follows:: 54 | |--> f ---> y 55 | x --+ 56 | |--> g ---> z 57 | Let ``outputs = [y, z]``. 58 | Then the full graph is emitted. 59 | Next, let ``outputs = [y]``. Note that ``z`` and ``g`` 60 | are not backward-reachable from ``y``. 61 | The resulting graph would be following:: 62 | x ---> f ---> y 63 | See :class:`TestGraphBuilder` for details. 64 | .. 
note:: 65 | The default configuration for ``variable_style`` is 66 | ``{'shape': 'octagon', 'fillcolor': '#E0E0E0', 'style': 'filled'}`` and 67 | the default configuration for ``function_style`` is 68 | ``{'shape': 'record', 'fillcolor': '#6495ED', 'style': 'filled'}``. 69 | .. note:: 70 | The default behavior of :class:`~chainer.ComputationalGraph` has been 71 | changed from v1.23.0, so that it ouputs the richest representation of 72 | a graph as default, namely, styles are set and names of functions and 73 | variables are shown. To reproduce the same result as previous versions 74 | (<= v1.22.0), please specify `variable_style=None`, 75 | `function_style=None`, and `show_name=False` explicitly. 76 | """ 77 | if not remove_split: 78 | raise ValueError('remove_split=False is not supported anymore') 79 | 80 | output_types = ( 81 | chainer.variable.Variable, chainer.variable.VariableNode, 82 | chainer.function_node.FunctionNode) 83 | 84 | if isinstance(outputs, output_types): 85 | outputs = [outputs] 86 | else: 87 | if not all(isinstance(o, output_types) for o in outputs): 88 | raise TypeError( 89 | 'element of outputs must be either Variable, VariableNode, ' 90 | ' or FunctionNode.') 91 | 92 | cands = [] 93 | seen_edges = OrderedSet() 94 | nodes = OrderedSet() 95 | push_count = [0] 96 | 97 | def add_cand(cand): 98 | heapq.heappush(cands, (-cand.rank, push_count[0], cand)) 99 | push_count[0] += 1 100 | 101 | for o in outputs: 102 | if isinstance(o, chainer.variable.Variable): 103 | o = o.node 104 | add_cand(o) 105 | nodes.add(o) 106 | 107 | while cands: 108 | _, _, cand = heapq.heappop(cands) 109 | if isinstance(cand, chainer.variable.VariableNode): 110 | creator = cand.creator_node 111 | if creator is not None and (creator, cand) not in seen_edges: 112 | add_cand(creator) 113 | seen_edges.add((creator, cand)) 114 | nodes.add(creator) 115 | nodes.add(cand) 116 | elif isinstance(cand, chainer.function_node.FunctionNode): 117 | for input_ in cand.inputs: 118 | if 
input_ is not cand and (input_, cand) not in seen_edges: 119 | add_cand(input_) 120 | seen_edges.add((input_, cand)) 121 | nodes.add(input_) 122 | nodes.add(cand) 123 | return c.ComputationalGraph( 124 | list(nodes), list(seen_edges), variable_style, 125 | function_style, rankdir, remove_variable, show_name) 126 | 127 | 128 | def convert_dtype(dtype): 129 | if dtype == np.float32: 130 | return dt.DT_FLOAT 131 | elif dtype == np.float64: 132 | return dt.DT_DOUBLE 133 | elif dtype == np.int32: 134 | return dt.DT_INT32 135 | elif dtype == np.uint8: 136 | return dt.DT_UINT8 137 | elif dtype == np.int16: 138 | return dt.DT_INT16 139 | elif dtype == np.int8: 140 | return dt.DT_INT8 141 | elif dtype == np.dtype('S1'): 142 | return dt.DT_STRING 143 | else: 144 | raise ValueError('Unsupported type.') 145 | 146 | class NodeName: 147 | """Class that creates the node's name from the list of nodes on the network. 148 | Give unique names to unique nodes on the network. 149 | Attributes: 150 | name_to_id :A dictionary in which the key is the object name and the value 151 | is list of the object IDs. 152 | """ 153 | def __init__(self, nodes): 154 | self.name_to_id = defaultdict(list) 155 | for n in nodes: 156 | name = NodeName.base_name(n) 157 | if not id(n) in self.name_to_id[name]: 158 | self.name_to_id[name].append(id(n)) 159 | 160 | @staticmethod 161 | def base_name(obj): 162 | name_scope = (obj.name_scope + '/') if hasattr(obj, 'name_scope') else '' 163 | if hasattr(obj, '_variable') and obj._variable is not None: 164 | if isinstance(obj._variable(), chainer.Parameter): 165 | return name_scope + (('Parameter_' + obj.name) if obj.name is not None else 'Parameter') 166 | if isinstance(obj, chainer.variable.VariableNode): 167 | return name_scope + 'Variable_' + obj.label 168 | return name_scope + obj.label 169 | 170 | def name(self, obj): 171 | """Return the name of the object. 
172 | Args: 173 | obj :A object on the network 174 | """ 175 | bn = NodeName.base_name(obj) 176 | if len(self.name_to_id[bn]) == 1: 177 | return bn 178 | else: 179 | return bn + '_' + str(self.name_to_id[bn].index(id(obj))) 180 | 181 | def make_list_of_nodes(fn): 182 | list_of_nodes = [] 183 | g = build_computational_graph(fn) 184 | node_name = NodeName(g.nodes) 185 | for n in g.nodes: 186 | inputs = [] 187 | for e1, e2 in g.edges: 188 | if e2 == n: 189 | inputs.append(node_name.name(e1)) 190 | attr_shape = [] 191 | if hasattr(n, 'shape'): 192 | attr_shape = list(n.shape) 193 | dtype = dt.DT_INVALID 194 | if hasattr(n, 'dtype'): 195 | dtype = convert_dtype(n.dtype) 196 | list_of_nodes.append({'name': node_name.name(n), 197 | 'op': n.__class__.__name__, 198 | 'inputs': inputs, 199 | 'attr.shape': attr_shape, 200 | 'attr.dtype': dtype}) 201 | return list_of_nodes 202 | 203 | def make_attr(shape, dtype): 204 | dim_list = [TensorShapeProto.Dim(size=s) for s in shape] 205 | if len(dim_list) == 0: 206 | return None 207 | return {'shape': AttrValue(shape=TensorShapeProto(dim=dim_list)), 208 | 'dtype': AttrValue(type=dtype)} 209 | 210 | def graph(lastVar): 211 | nodes = [] 212 | list_of_nodes = make_list_of_nodes(lastVar) 213 | for node in list_of_nodes: 214 | nodes.append(NodeDef(name=node['name'], op=node['op'], 215 | input=node['inputs'], 216 | attr=make_attr(node['attr.shape'], node['attr.dtype']))) 217 | return GraphDef(node=nodes, versions=VersionDef(producer=22)) 218 | -------------------------------------------------------------------------------- /tb_chainer/src/types_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
2 | # source: tb_chainer/src/types.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf.internal import enum_type_wrapper 7 | from google.protobuf import descriptor as _descriptor 8 | from google.protobuf import message as _message 9 | from google.protobuf import reflection as _reflection 10 | from google.protobuf import symbol_database as _symbol_database 11 | from google.protobuf import descriptor_pb2 12 | # @@protoc_insertion_point(imports) 13 | 14 | _sym_db = _symbol_database.Default() 15 | 16 | 17 | 18 | 19 | DESCRIPTOR = _descriptor.FileDescriptor( 20 | name='tb_chainer/src/types.proto', 21 | package='tensorboard', 22 | syntax='proto3', 23 | serialized_pb=_b('\n\x1atb_chainer/src/types.proto\x12\x0btensorboard*\xc2\x05\n\x08\x44\x61taType\x12\x0e\n\nDT_INVALID\x10\x00\x12\x0c\n\x08\x44T_FLOAT\x10\x01\x12\r\n\tDT_DOUBLE\x10\x02\x12\x0c\n\x08\x44T_INT32\x10\x03\x12\x0c\n\x08\x44T_UINT8\x10\x04\x12\x0c\n\x08\x44T_INT16\x10\x05\x12\x0b\n\x07\x44T_INT8\x10\x06\x12\r\n\tDT_STRING\x10\x07\x12\x10\n\x0c\x44T_COMPLEX64\x10\x08\x12\x0c\n\x08\x44T_INT64\x10\t\x12\x0b\n\x07\x44T_BOOL\x10\n\x12\x0c\n\x08\x44T_QINT8\x10\x0b\x12\r\n\tDT_QUINT8\x10\x0c\x12\r\n\tDT_QINT32\x10\r\x12\x0f\n\x0b\x44T_BFLOAT16\x10\x0e\x12\r\n\tDT_QINT16\x10\x0f\x12\x0e\n\nDT_QUINT16\x10\x10\x12\r\n\tDT_UINT16\x10\x11\x12\x11\n\rDT_COMPLEX128\x10\x12\x12\x0b\n\x07\x44T_HALF\x10\x13\x12\x0f\n\x0b\x44T_RESOURCE\x10\x14\x12\x10\n\x0c\x44T_FLOAT_REF\x10\x65\x12\x11\n\rDT_DOUBLE_REF\x10\x66\x12\x10\n\x0c\x44T_INT32_REF\x10g\x12\x10\n\x0c\x44T_UINT8_REF\x10h\x12\x10\n\x0c\x44T_INT16_REF\x10i\x12\x0f\n\x0b\x44T_INT8_REF\x10j\x12\x11\n\rDT_STRING_REF\x10k\x12\x14\n\x10\x44T_COMPLEX64_REF\x10l\x12\x10\n\x0c\x44T_INT64_REF\x10m\x12\x0f\n\x0b\x44T_BOOL_REF\x10n\x12\x10\n\x0c\x44T_QINT8_REF\x10o\x12\x11\n\rDT_QUINT8_REF\x10p\x12\x11\n\rDT_QINT32_REF\x10q\x12\x13\n\x0f\x44T_BFLOAT16_REF\x10r\x12\x11\n\rDT_QINT16_REF\x10s\x12\x12\n\x0e\x4
4T_QUINT16_REF\x10t\x12\x11\n\rDT_UINT16_REF\x10u\x12\x15\n\x11\x44T_COMPLEX128_REF\x10v\x12\x0f\n\x0b\x44T_HALF_REF\x10w\x12\x13\n\x0f\x44T_RESOURCE_REF\x10xB,\n\x18org.tensorflow.frameworkB\x0bTypesProtosP\x01\xf8\x01\x01\x62\x06proto3') 24 | ) 25 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 26 | 27 | _DATATYPE = _descriptor.EnumDescriptor( 28 | name='DataType', 29 | full_name='tensorboard.DataType', 30 | filename=None, 31 | file=DESCRIPTOR, 32 | values=[ 33 | _descriptor.EnumValueDescriptor( 34 | name='DT_INVALID', index=0, number=0, 35 | options=None, 36 | type=None), 37 | _descriptor.EnumValueDescriptor( 38 | name='DT_FLOAT', index=1, number=1, 39 | options=None, 40 | type=None), 41 | _descriptor.EnumValueDescriptor( 42 | name='DT_DOUBLE', index=2, number=2, 43 | options=None, 44 | type=None), 45 | _descriptor.EnumValueDescriptor( 46 | name='DT_INT32', index=3, number=3, 47 | options=None, 48 | type=None), 49 | _descriptor.EnumValueDescriptor( 50 | name='DT_UINT8', index=4, number=4, 51 | options=None, 52 | type=None), 53 | _descriptor.EnumValueDescriptor( 54 | name='DT_INT16', index=5, number=5, 55 | options=None, 56 | type=None), 57 | _descriptor.EnumValueDescriptor( 58 | name='DT_INT8', index=6, number=6, 59 | options=None, 60 | type=None), 61 | _descriptor.EnumValueDescriptor( 62 | name='DT_STRING', index=7, number=7, 63 | options=None, 64 | type=None), 65 | _descriptor.EnumValueDescriptor( 66 | name='DT_COMPLEX64', index=8, number=8, 67 | options=None, 68 | type=None), 69 | _descriptor.EnumValueDescriptor( 70 | name='DT_INT64', index=9, number=9, 71 | options=None, 72 | type=None), 73 | _descriptor.EnumValueDescriptor( 74 | name='DT_BOOL', index=10, number=10, 75 | options=None, 76 | type=None), 77 | _descriptor.EnumValueDescriptor( 78 | name='DT_QINT8', index=11, number=11, 79 | options=None, 80 | type=None), 81 | _descriptor.EnumValueDescriptor( 82 | name='DT_QUINT8', index=12, number=12, 83 | options=None, 84 | type=None), 85 | 
_descriptor.EnumValueDescriptor( 86 | name='DT_QINT32', index=13, number=13, 87 | options=None, 88 | type=None), 89 | _descriptor.EnumValueDescriptor( 90 | name='DT_BFLOAT16', index=14, number=14, 91 | options=None, 92 | type=None), 93 | _descriptor.EnumValueDescriptor( 94 | name='DT_QINT16', index=15, number=15, 95 | options=None, 96 | type=None), 97 | _descriptor.EnumValueDescriptor( 98 | name='DT_QUINT16', index=16, number=16, 99 | options=None, 100 | type=None), 101 | _descriptor.EnumValueDescriptor( 102 | name='DT_UINT16', index=17, number=17, 103 | options=None, 104 | type=None), 105 | _descriptor.EnumValueDescriptor( 106 | name='DT_COMPLEX128', index=18, number=18, 107 | options=None, 108 | type=None), 109 | _descriptor.EnumValueDescriptor( 110 | name='DT_HALF', index=19, number=19, 111 | options=None, 112 | type=None), 113 | _descriptor.EnumValueDescriptor( 114 | name='DT_RESOURCE', index=20, number=20, 115 | options=None, 116 | type=None), 117 | _descriptor.EnumValueDescriptor( 118 | name='DT_FLOAT_REF', index=21, number=101, 119 | options=None, 120 | type=None), 121 | _descriptor.EnumValueDescriptor( 122 | name='DT_DOUBLE_REF', index=22, number=102, 123 | options=None, 124 | type=None), 125 | _descriptor.EnumValueDescriptor( 126 | name='DT_INT32_REF', index=23, number=103, 127 | options=None, 128 | type=None), 129 | _descriptor.EnumValueDescriptor( 130 | name='DT_UINT8_REF', index=24, number=104, 131 | options=None, 132 | type=None), 133 | _descriptor.EnumValueDescriptor( 134 | name='DT_INT16_REF', index=25, number=105, 135 | options=None, 136 | type=None), 137 | _descriptor.EnumValueDescriptor( 138 | name='DT_INT8_REF', index=26, number=106, 139 | options=None, 140 | type=None), 141 | _descriptor.EnumValueDescriptor( 142 | name='DT_STRING_REF', index=27, number=107, 143 | options=None, 144 | type=None), 145 | _descriptor.EnumValueDescriptor( 146 | name='DT_COMPLEX64_REF', index=28, number=108, 147 | options=None, 148 | type=None), 149 | 
_descriptor.EnumValueDescriptor( 150 | name='DT_INT64_REF', index=29, number=109, 151 | options=None, 152 | type=None), 153 | _descriptor.EnumValueDescriptor( 154 | name='DT_BOOL_REF', index=30, number=110, 155 | options=None, 156 | type=None), 157 | _descriptor.EnumValueDescriptor( 158 | name='DT_QINT8_REF', index=31, number=111, 159 | options=None, 160 | type=None), 161 | _descriptor.EnumValueDescriptor( 162 | name='DT_QUINT8_REF', index=32, number=112, 163 | options=None, 164 | type=None), 165 | _descriptor.EnumValueDescriptor( 166 | name='DT_QINT32_REF', index=33, number=113, 167 | options=None, 168 | type=None), 169 | _descriptor.EnumValueDescriptor( 170 | name='DT_BFLOAT16_REF', index=34, number=114, 171 | options=None, 172 | type=None), 173 | _descriptor.EnumValueDescriptor( 174 | name='DT_QINT16_REF', index=35, number=115, 175 | options=None, 176 | type=None), 177 | _descriptor.EnumValueDescriptor( 178 | name='DT_QUINT16_REF', index=36, number=116, 179 | options=None, 180 | type=None), 181 | _descriptor.EnumValueDescriptor( 182 | name='DT_UINT16_REF', index=37, number=117, 183 | options=None, 184 | type=None), 185 | _descriptor.EnumValueDescriptor( 186 | name='DT_COMPLEX128_REF', index=38, number=118, 187 | options=None, 188 | type=None), 189 | _descriptor.EnumValueDescriptor( 190 | name='DT_HALF_REF', index=39, number=119, 191 | options=None, 192 | type=None), 193 | _descriptor.EnumValueDescriptor( 194 | name='DT_RESOURCE_REF', index=40, number=120, 195 | options=None, 196 | type=None), 197 | ], 198 | containing_type=None, 199 | options=None, 200 | serialized_start=44, 201 | serialized_end=750, 202 | ) 203 | _sym_db.RegisterEnumDescriptor(_DATATYPE) 204 | 205 | DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) 206 | DT_INVALID = 0 207 | DT_FLOAT = 1 208 | DT_DOUBLE = 2 209 | DT_INT32 = 3 210 | DT_UINT8 = 4 211 | DT_INT16 = 5 212 | DT_INT8 = 6 213 | DT_STRING = 7 214 | DT_COMPLEX64 = 8 215 | DT_INT64 = 9 216 | DT_BOOL = 10 217 | DT_QINT8 = 11 218 | 
DT_QUINT8 = 12 219 | DT_QINT32 = 13 220 | DT_BFLOAT16 = 14 221 | DT_QINT16 = 15 222 | DT_QUINT16 = 16 223 | DT_UINT16 = 17 224 | DT_COMPLEX128 = 18 225 | DT_HALF = 19 226 | DT_RESOURCE = 20 227 | DT_FLOAT_REF = 101 228 | DT_DOUBLE_REF = 102 229 | DT_INT32_REF = 103 230 | DT_UINT8_REF = 104 231 | DT_INT16_REF = 105 232 | DT_INT8_REF = 106 233 | DT_STRING_REF = 107 234 | DT_COMPLEX64_REF = 108 235 | DT_INT64_REF = 109 236 | DT_BOOL_REF = 110 237 | DT_QINT8_REF = 111 238 | DT_QUINT8_REF = 112 239 | DT_QINT32_REF = 113 240 | DT_BFLOAT16_REF = 114 241 | DT_QINT16_REF = 115 242 | DT_QUINT16_REF = 116 243 | DT_UINT16_REF = 117 244 | DT_COMPLEX128_REF = 118 245 | DT_HALF_REF = 119 246 | DT_RESOURCE_REF = 120 247 | 248 | 249 | DESCRIPTOR.enum_types_by_name['DataType'] = _DATATYPE 250 | 251 | 252 | DESCRIPTOR.has_options = True 253 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\013TypesProtosP\001\370\001\001')) 254 | # @@protoc_insertion_point(module_scope) 255 | -------------------------------------------------------------------------------- /tb_chainer/src/tensor_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
2 | # source: tb_chainer/src/tensor.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | from tb_chainer.src import resource_handle_pb2 as tb__chainer_dot_src_dot_resource__handle__pb2 17 | from tb_chainer.src import tensor_shape_pb2 as tb__chainer_dot_src_dot_tensor__shape__pb2 18 | from tb_chainer.src import types_pb2 as tb__chainer_dot_src_dot_types__pb2 19 | 20 | 21 | DESCRIPTOR = _descriptor.FileDescriptor( 22 | name='tb_chainer/src/tensor.proto', 23 | package='tensorboard', 24 | syntax='proto3', 25 | serialized_pb=_b('\n\x1btb_chainer/src/tensor.proto\x12\x0btensorboard\x1a$tb_chainer/src/resource_handle.proto\x1a!tb_chainer/src/tensor_shape.proto\x1a\x1atb_chainer/src/types.proto\"\xa6\x03\n\x0bTensorProto\x12$\n\x05\x64type\x18\x01 \x01(\x0e\x32\x15.tensorboard.DataType\x12\x33\n\x0ctensor_shape\x18\x02 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12\x16\n\x0eversion_number\x18\x03 \x01(\x05\x12\x16\n\x0etensor_content\x18\x04 \x01(\x0c\x12\x14\n\x08half_val\x18\r \x03(\x05\x42\x02\x10\x01\x12\x15\n\tfloat_val\x18\x05 \x03(\x02\x42\x02\x10\x01\x12\x16\n\ndouble_val\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x13\n\x07int_val\x18\x07 \x03(\x05\x42\x02\x10\x01\x12\x12\n\nstring_val\x18\x08 \x03(\x0c\x12\x18\n\x0cscomplex_val\x18\t \x03(\x02\x42\x02\x10\x01\x12\x15\n\tint64_val\x18\n \x03(\x03\x42\x02\x10\x01\x12\x14\n\x08\x62ool_val\x18\x0b \x03(\x08\x42\x02\x10\x01\x12\x18\n\x0c\x64\x63omplex_val\x18\x0c \x03(\x01\x42\x02\x10\x01\x12=\n\x13resource_handle_val\x18\x0e \x03(\x0b\x32 
.tensorboard.ResourceHandleProtoB-\n\x18org.tensorflow.frameworkB\x0cTensorProtosP\x01\xf8\x01\x01\x62\x06proto3') 26 | , 27 | dependencies=[tb__chainer_dot_src_dot_resource__handle__pb2.DESCRIPTOR,tb__chainer_dot_src_dot_tensor__shape__pb2.DESCRIPTOR,tb__chainer_dot_src_dot_types__pb2.DESCRIPTOR,]) 28 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 29 | 30 | 31 | 32 | 33 | _TENSORPROTO = _descriptor.Descriptor( 34 | name='TensorProto', 35 | full_name='tensorboard.TensorProto', 36 | filename=None, 37 | file=DESCRIPTOR, 38 | containing_type=None, 39 | fields=[ 40 | _descriptor.FieldDescriptor( 41 | name='dtype', full_name='tensorboard.TensorProto.dtype', index=0, 42 | number=1, type=14, cpp_type=8, label=1, 43 | has_default_value=False, default_value=0, 44 | message_type=None, enum_type=None, containing_type=None, 45 | is_extension=False, extension_scope=None, 46 | options=None), 47 | _descriptor.FieldDescriptor( 48 | name='tensor_shape', full_name='tensorboard.TensorProto.tensor_shape', index=1, 49 | number=2, type=11, cpp_type=10, label=1, 50 | has_default_value=False, default_value=None, 51 | message_type=None, enum_type=None, containing_type=None, 52 | is_extension=False, extension_scope=None, 53 | options=None), 54 | _descriptor.FieldDescriptor( 55 | name='version_number', full_name='tensorboard.TensorProto.version_number', index=2, 56 | number=3, type=5, cpp_type=1, label=1, 57 | has_default_value=False, default_value=0, 58 | message_type=None, enum_type=None, containing_type=None, 59 | is_extension=False, extension_scope=None, 60 | options=None), 61 | _descriptor.FieldDescriptor( 62 | name='tensor_content', full_name='tensorboard.TensorProto.tensor_content', index=3, 63 | number=4, type=12, cpp_type=9, label=1, 64 | has_default_value=False, default_value=_b(""), 65 | message_type=None, enum_type=None, containing_type=None, 66 | is_extension=False, extension_scope=None, 67 | options=None), 68 | _descriptor.FieldDescriptor( 69 | name='half_val', 
full_name='tensorboard.TensorProto.half_val', index=4, 70 | number=13, type=5, cpp_type=1, label=3, 71 | has_default_value=False, default_value=[], 72 | message_type=None, enum_type=None, containing_type=None, 73 | is_extension=False, extension_scope=None, 74 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 75 | _descriptor.FieldDescriptor( 76 | name='float_val', full_name='tensorboard.TensorProto.float_val', index=5, 77 | number=5, type=2, cpp_type=6, label=3, 78 | has_default_value=False, default_value=[], 79 | message_type=None, enum_type=None, containing_type=None, 80 | is_extension=False, extension_scope=None, 81 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 82 | _descriptor.FieldDescriptor( 83 | name='double_val', full_name='tensorboard.TensorProto.double_val', index=6, 84 | number=6, type=1, cpp_type=5, label=3, 85 | has_default_value=False, default_value=[], 86 | message_type=None, enum_type=None, containing_type=None, 87 | is_extension=False, extension_scope=None, 88 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 89 | _descriptor.FieldDescriptor( 90 | name='int_val', full_name='tensorboard.TensorProto.int_val', index=7, 91 | number=7, type=5, cpp_type=1, label=3, 92 | has_default_value=False, default_value=[], 93 | message_type=None, enum_type=None, containing_type=None, 94 | is_extension=False, extension_scope=None, 95 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 96 | _descriptor.FieldDescriptor( 97 | name='string_val', full_name='tensorboard.TensorProto.string_val', index=8, 98 | number=8, type=12, cpp_type=9, label=3, 99 | has_default_value=False, default_value=[], 100 | message_type=None, enum_type=None, containing_type=None, 101 | is_extension=False, extension_scope=None, 102 | options=None), 103 | _descriptor.FieldDescriptor( 104 | name='scomplex_val', 
full_name='tensorboard.TensorProto.scomplex_val', index=9, 105 | number=9, type=2, cpp_type=6, label=3, 106 | has_default_value=False, default_value=[], 107 | message_type=None, enum_type=None, containing_type=None, 108 | is_extension=False, extension_scope=None, 109 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 110 | _descriptor.FieldDescriptor( 111 | name='int64_val', full_name='tensorboard.TensorProto.int64_val', index=10, 112 | number=10, type=3, cpp_type=2, label=3, 113 | has_default_value=False, default_value=[], 114 | message_type=None, enum_type=None, containing_type=None, 115 | is_extension=False, extension_scope=None, 116 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 117 | _descriptor.FieldDescriptor( 118 | name='bool_val', full_name='tensorboard.TensorProto.bool_val', index=11, 119 | number=11, type=8, cpp_type=7, label=3, 120 | has_default_value=False, default_value=[], 121 | message_type=None, enum_type=None, containing_type=None, 122 | is_extension=False, extension_scope=None, 123 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 124 | _descriptor.FieldDescriptor( 125 | name='dcomplex_val', full_name='tensorboard.TensorProto.dcomplex_val', index=12, 126 | number=12, type=1, cpp_type=5, label=3, 127 | has_default_value=False, default_value=[], 128 | message_type=None, enum_type=None, containing_type=None, 129 | is_extension=False, extension_scope=None, 130 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 131 | _descriptor.FieldDescriptor( 132 | name='resource_handle_val', full_name='tensorboard.TensorProto.resource_handle_val', index=13, 133 | number=14, type=11, cpp_type=10, label=3, 134 | has_default_value=False, default_value=[], 135 | message_type=None, enum_type=None, containing_type=None, 136 | is_extension=False, extension_scope=None, 137 | options=None), 138 | ], 139 | extensions=[ 140 | ], 141 | 
nested_types=[], 142 | enum_types=[ 143 | ], 144 | options=None, 145 | is_extendable=False, 146 | syntax='proto3', 147 | extension_ranges=[], 148 | oneofs=[ 149 | ], 150 | serialized_start=146, 151 | serialized_end=568, 152 | ) 153 | 154 | _TENSORPROTO.fields_by_name['dtype'].enum_type = tb__chainer_dot_src_dot_types__pb2._DATATYPE 155 | _TENSORPROTO.fields_by_name['tensor_shape'].message_type = tb__chainer_dot_src_dot_tensor__shape__pb2._TENSORSHAPEPROTO 156 | _TENSORPROTO.fields_by_name['resource_handle_val'].message_type = tb__chainer_dot_src_dot_resource__handle__pb2._RESOURCEHANDLEPROTO 157 | DESCRIPTOR.message_types_by_name['TensorProto'] = _TENSORPROTO 158 | 159 | TensorProto = _reflection.GeneratedProtocolMessageType('TensorProto', (_message.Message,), dict( 160 | DESCRIPTOR = _TENSORPROTO, 161 | __module__ = 'tb_chainer.src.tensor_pb2' 162 | # @@protoc_insertion_point(class_scope:tensorboard.TensorProto) 163 | )) 164 | _sym_db.RegisterMessage(TensorProto) 165 | 166 | 167 | DESCRIPTOR.has_options = True 168 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\014TensorProtosP\001\370\001\001')) 169 | _TENSORPROTO.fields_by_name['half_val'].has_options = True 170 | _TENSORPROTO.fields_by_name['half_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 171 | _TENSORPROTO.fields_by_name['float_val'].has_options = True 172 | _TENSORPROTO.fields_by_name['float_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 173 | _TENSORPROTO.fields_by_name['double_val'].has_options = True 174 | _TENSORPROTO.fields_by_name['double_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 175 | _TENSORPROTO.fields_by_name['int_val'].has_options = True 176 | _TENSORPROTO.fields_by_name['int_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 177 | 
_TENSORPROTO.fields_by_name['scomplex_val'].has_options = True 178 | _TENSORPROTO.fields_by_name['scomplex_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 179 | _TENSORPROTO.fields_by_name['int64_val'].has_options = True 180 | _TENSORPROTO.fields_by_name['int64_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 181 | _TENSORPROTO.fields_by_name['bool_val'].has_options = True 182 | _TENSORPROTO.fields_by_name['bool_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 183 | _TENSORPROTO.fields_by_name['dcomplex_val'].has_options = True 184 | _TENSORPROTO.fields_by_name['dcomplex_val']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 185 | # @@protoc_insertion_point(module_scope) 186 | -------------------------------------------------------------------------------- /tb_chainer/summary.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """## Generation of summaries. 
17 | ### Class for writing Summaries 18 | @@FileWriter 19 | @@FileWriterCache 20 | ### Summary Ops 21 | @@tensor_summary 22 | @@scalar 23 | @@histogram 24 | @@audio 25 | @@image 26 | @@merge 27 | @@merge_all 28 | ## Utilities 29 | @@get_summary_description 30 | """ 31 | 32 | from __future__ import absolute_import 33 | from __future__ import division 34 | from __future__ import print_function 35 | 36 | import logging 37 | import re as _re 38 | import bisect 39 | from six import StringIO 40 | from six.moves import range 41 | from PIL import Image 42 | import numpy as np 43 | import chainer.cuda 44 | try: 45 | import cupy 46 | except ImportError: 47 | print('Not found cupy.') 48 | # pylint: disable=unused-import 49 | from .src.summary_pb2 import Summary 50 | from .src.summary_pb2 import HistogramProto 51 | from .src.summary_pb2 import SummaryMetadata 52 | from .src.tensor_pb2 import TensorProto 53 | from .src.tensor_shape_pb2 import TensorShapeProto 54 | 55 | _INVALID_TAG_CHARACTERS = _re.compile(r'[^-/\w\.]') 56 | 57 | 58 | def _clean_tag(name): 59 | # In the past, the first argument to summary ops was a tag, which allowed 60 | # arbitrary characters. Now we are changing the first argument to be the node 61 | # name. This has a number of advantages (users of summary ops now can 62 | # take advantage of the tf name scope system) but risks breaking existing 63 | # usage, because a much smaller set of characters are allowed in node names. 64 | # This function replaces all illegal characters with _s, and logs a warning. 65 | # It also strips leading slashes from the name. 66 | if name is not None: 67 | new_name = _INVALID_TAG_CHARACTERS.sub('_', name) 68 | new_name = new_name.lstrip('/') # Remove leading slashes 69 | if new_name != name: 70 | logging.info( 71 | 'Summary name %s is illegal; using %s instead.' 
% 72 | (name, new_name)) 73 | name = new_name 74 | return name 75 | 76 | 77 | def scalar(name, scalar, collections=None): 78 | """Outputs a `Summary` protocol buffer containing a single scalar value. 79 | The generated Summary has a Tensor.proto containing the input Tensor. 80 | Args: 81 | name: A name for the generated node. Will also serve as the series name in 82 | TensorBoard. 83 | tensor: A real numeric Tensor containing a single value. 84 | collections: Optional list of graph collections keys. The new summary op is 85 | added to these collections. Defaults to `[GraphKeys.SUMMARIES]`. 86 | Returns: 87 | A scalar `Tensor` of type `string`. Which contains a `Summary` protobuf. 88 | Raises: 89 | ValueError: If tensor has the wrong shape or type. 90 | """ 91 | name = _clean_tag(name) 92 | if not isinstance(scalar, float): 93 | # try conversion, if failed then need handle by user. 94 | scalar = float(scalar) 95 | return Summary(value=[Summary.Value(tag=name, simple_value=scalar)]) 96 | 97 | 98 | def histogram(name, values, bins, collections=None): 99 | # pylint: disable=line-too-long 100 | """Outputs a `Summary` protocol buffer with a histogram. 101 | The generated 102 | [`Summary`](https://www.tensorflow.org/code/tensorflow/core/framework/summary.proto) 103 | has one summary value containing a histogram for `values`. 104 | This op reports an `InvalidArgument` error if any value is not finite. 105 | Args: 106 | name: A name for the generated node. Will also serve as a series name in 107 | TensorBoard. 108 | values: A real numeric `Tensor`. Any shape. Values to use to 109 | build the histogram. 110 | collections: Optional list of graph collections keys. The new summary op is 111 | added to these collections. Defaults to `[GraphKeys.SUMMARIES]`. 112 | Returns: 113 | A scalar `Tensor` of type `string`. The serialized `Summary` protocol 114 | buffer. 
115 | """ 116 | name = _clean_tag(name) 117 | hist = make_histogram(values.astype(float), bins) 118 | return Summary(value=[Summary.Value(tag=name, histo=hist)]) 119 | 120 | 121 | 122 | def make_histogram(values, bins): 123 | """Convert values into a histogram proto using logic from histogram.cc.""" 124 | values = values.reshape(-1) 125 | counts, limits = np.histogram(values, bins=bins) 126 | limits = limits[1:] 127 | 128 | sum_sq = values.dot(values) 129 | return HistogramProto(min=values.min(), 130 | max=values.max(), 131 | num=len(values), 132 | sum=values.sum(), 133 | sum_squares=sum_sq, 134 | bucket_limit=limits, 135 | bucket=counts) 136 | 137 | 138 | def image(tag, tensor): 139 | """Outputs a `Summary` protocol buffer with images. 140 | The summary has up to `max_images` summary values containing images. The 141 | images are built from `tensor` which must be 3-D with shape `[height, width, 142 | channels]` and where `channels` can be: 143 | * 1: `tensor` is interpreted as Grayscale. 144 | * 3: `tensor` is interpreted as RGB. 145 | * 4: `tensor` is interpreted as RGBA. 146 | The `name` in the outputted Summary.Value protobufs is generated based on the 147 | name, with a suffix depending on the max_outputs setting: 148 | * If `max_outputs` is 1, the summary value tag is '*name*/image'. 149 | * If `max_outputs` is greater than 1, the summary value tags are 150 | generated sequentially as '*name*/image/0', '*name*/image/1', etc. 151 | Args: 152 | tag: A name for the generated node. Will also serve as a series name in 153 | TensorBoard. 154 | tensor: A 3-D `uint8` or `float32` `Tensor` of shape `[height, width, 155 | channels]` where `channels` is 1, 3, or 4. 156 | Returns: 157 | A scalar `Tensor` of type `string`. The serialized `Summary` protocol 158 | buffer. 
159 | """ 160 | tag = _clean_tag(tag) 161 | assert isinstance(tensor, np.ndarray) or isinstance(tensor, cupy.ndarray), 'input tensor should be one of numpy.ndarray, cupy.ndarray' 162 | if not isinstance(tensor, np.ndarray): 163 | assert tensor.ndim<4 and tensor.ndim>1, 'input tensor should be 3 dimensional.' 164 | if tensor.ndim==2: 165 | tensor = cupy.expand_dims(tensor, 0) 166 | tensor = chainer.cuda.to_cpu(cupy.transpose(tensor, (1,2,0))) 167 | else: 168 | if tensor.ndim==2: 169 | tensor = np.expand_dims(tensor, 0) 170 | tensor = np.transpose(tensor, (1,2,0)) 171 | tensor = tensor.astype(np.float32) 172 | tensor = (tensor*255).astype(np.uint8) 173 | image = make_image(tensor) 174 | return Summary(value=[Summary.Value(tag=tag, image=image)]) 175 | 176 | 177 | def make_image(tensor): 178 | """Convert an numpy representation image to Image protobuf""" 179 | height, width, channel = tensor.shape 180 | image = Image.fromarray(tensor) 181 | import io 182 | output = io.BytesIO() 183 | image.save(output, format='PNG') 184 | image_string = output.getvalue() 185 | output.close() 186 | return Summary.Image(height=height, 187 | width=width, 188 | colorspace=channel, 189 | encoded_image_string=image_string) 190 | 191 | def video(tag, tensor, fps): 192 | tag = _clean_tag(tag) 193 | assert isinstance(tensor, np.ndarray) or isinstance(tensor, cupy.ndarray), 'input tensor should be one of numpy.ndarray, cupy.ndarray' 194 | if isinstance(tensor, np.ndarray): 195 | xp = np 196 | else: 197 | xp = cupy 198 | 199 | assert tensor.ndim==5, 'input tensor should be 5 dimensional. (batch, channels, time, height, width)' 200 | 201 | b, c, t, h, w = tensor.shape 202 | 203 | if tensor.dtype == xp.uint8: 204 | tensor = xp.float32(tensor) / 255. 
205 | 206 | def is_power2(num): 207 | return num != 0 and ((num & (num - 1)) == 0) 208 | 209 | # pad to power of 2 210 | while not is_power2(tensor.shape[0]): 211 | tensor = xp.concatenate((tensor, xp.zeros(shape=(1, c, t, h, w))), axis=0) 212 | 213 | b = tensor.shape[0] 214 | n_rows = 2**(int(xp.log(b) / xp.log(2)) // 2) 215 | n_cols = b // n_rows 216 | 217 | tensor = np.reshape(tensor, newshape=(n_rows, n_cols, c, t, h, w)) 218 | tensor = np.transpose(tensor, axes=(3, 0, 4, 1, 5, 2)) 219 | tensor = np.reshape(tensor, newshape=(t, n_rows * h, n_cols * w, c)) 220 | tensor = tensor.astype(xp.float32) 221 | tensor = (tensor * 255).astype(xp.uint8) 222 | 223 | tensor = chainer.cuda.to_cpu(tensor) 224 | video = make_video(tensor, fps) 225 | 226 | return Summary(value=[Summary.Value(tag=tag, image=video)]) 227 | 228 | def make_video(tensor, fps): 229 | try: 230 | import moviepy.editor as mpy 231 | except ImportError: 232 | print('add_video needs package moviepy') 233 | return 234 | import tempfile 235 | 236 | t, h, w, c = tensor.shape 237 | 238 | # encode sequence of images into gif string 239 | clip = mpy.ImageSequenceClip(list(tensor), fps=fps) 240 | with tempfile.NamedTemporaryFile() as f: 241 | filename = f.name + '.gif' 242 | 243 | clip.write_gif(filename, verbose=True) 244 | with open(filename, 'rb') as f: 245 | tensor_string = f.read() 246 | return Summary.Image(height=h, width=w, colorspace=c, encoded_image_string=tensor_string) 247 | 248 | def audio(tag, tensor, sample_rate=44100): 249 | tensor = tensor.squeeze() 250 | assert tensor.ndim==1, 'input tensor should be 1 dimensional.' 
251 | tensor_list = [int(32767.0*x) for x in tensor] 252 | import io 253 | import wave 254 | import struct 255 | fio = io.BytesIO() 256 | Wave_write = wave.open(fio, 'wb') 257 | Wave_write.setnchannels(1) 258 | Wave_write.setsampwidth(2) 259 | Wave_write.setframerate(sample_rate) 260 | tensor_enc = b'' 261 | for v in tensor_list: 262 | tensor_enc += struct.pack(', sess.graph) 67 | ``` 68 | Args: 69 | event_writer: An EventWriter. Implements add_event method. 70 | graph: A `Graph` object, such as `sess.graph`. 71 | graph_def: DEPRECATED: Use the `graph` argument instead. 72 | """ 73 | self.event_writer = event_writer 74 | # For storing used tags for session.run() outputs. 75 | self._session_run_tags = {} 76 | # TODO(zihaolucky). pass this an empty graph to check whether it's necessary. 77 | # currently we don't support graph in MXNet using tensorboard. 78 | 79 | def add_summary(self, summary, global_step=None): 80 | """Adds a `Summary` protocol buffer to the event file. 81 | This method wraps the provided summary in an `Event` protocol buffer 82 | and adds it to the event file. 83 | You can pass the result of evaluating any summary op, using 84 | [`Session.run()`](client.md#Session.run) or 85 | [`Tensor.eval()`](framework.md#Tensor.eval), to this 86 | function. Alternatively, you can pass a `tf.Summary` protocol 87 | buffer that you populate with your own data. The latter is 88 | commonly done to report evaluation results in event files. 89 | Args: 90 | summary: A `Summary` protocol buffer, optionally serialized as a string. 91 | global_step: Number. Optional global step value to record with the 92 | summary. 93 | """ 94 | if isinstance(summary, bytes):  # callers may pass a pre-serialized Summary; parse it back into a message 95 | summ = summary_pb2.Summary() 96 | summ.ParseFromString(summary) 97 | summary = summ 98 | event = event_pb2.Event(summary=summary) 99 | self._add_event(event, global_step) 100 | 101 | def add_graph(self, graph): 102 | """Adds a `Graph` protocol buffer to the event file. 103 | """ 104 | event = event_pb2.Event(graph_def=graph.SerializeToString()) 105 | self._add_event(event, None) 106 | 107 | def add_session_log(self, session_log, global_step=None): 108 | """Adds a `SessionLog` protocol buffer to the event file. 109 | This method wraps the provided session in an `Event` protocol buffer 110 | and adds it to the event file. 111 | Args: 112 | session_log: A `SessionLog` protocol buffer. 113 | global_step: Number. Optional global step value to record with the 114 | summary. 115 | """ 116 | event = event_pb2.Event(session_log=session_log) 117 | self._add_event(event, global_step) 118 | 119 | def _add_event(self, event, step): 120 | event.wall_time = time.time()  # every event is stamped with the current wall-clock time before writing 121 | if step is not None: 122 | event.step = int(step) 123 | self.event_writer.add_event(event) 124 | 125 | 126 | class FileWriter(SummaryToEventTransformer): 127 | """Writes `Summary` protocol buffers to event files. 128 | The `FileWriter` class provides a mechanism to create an event file in a 129 | given directory and add summaries and events to it. The class updates the 130 | file contents asynchronously. This allows a training program to call methods 131 | to add data to the file directly from the training loop, without slowing down 132 | training. 133 | @@__init__ 134 | @@add_summary 135 | @@add_session_log 136 | @@add_event 137 | @@add_graph 138 | @@add_run_metadata 139 | @@get_logdir 140 | @@flush 141 | @@close 142 | """ 143 | 144 | def __init__(self, 145 | logdir, 146 | graph=None, 147 | max_queue=10, 148 | flush_secs=120, 149 | graph_def=None): 150 | """Creates a `FileWriter` and an event file. 151 | On construction the summary writer creates a new event file in `logdir`. 152 | This event file will contain `Event` protocol buffers constructed when you 153 | call one of the following functions: `add_summary()`, `add_session_log()`, 154 | `add_event()`, or `add_graph()`. 155 | If you pass a `Graph` to the constructor it is added to 156 | the event file. (This is equivalent to calling `add_graph()` later). 157 | TensorBoard will pick the graph from the file and display it graphically so 158 | you can interactively explore the graph you built. You will usually pass 159 | the graph from the session in which you launched it: 160 | ```python 161 | ...create a graph... 162 | # Launch the graph in a session. 163 | sess = tf.Session() 164 | # Create a summary writer, add the 'graph' to the event file. 165 | writer = tf.summary.FileWriter(, sess.graph) 166 | ``` 167 | The other arguments to the constructor control the asynchronous writes to 168 | the event file: 169 | * `flush_secs`: How often, in seconds, to flush the added summaries 170 | and events to disk. 171 | * `max_queue`: Maximum number of summaries or events pending to be 172 | written to disk before one of the 'add' calls block. 173 | Args: 174 | logdir: A string. Directory where event file will be written. 175 | graph: A `Graph` object, such as `sess.graph`. 176 | max_queue: Integer. Size of the queue for pending events and summaries. 177 | flush_secs: Number. How often, in seconds, to flush the 178 | pending events and summaries to disk. 179 | graph_def: DEPRECATED: Use the `graph` argument instead. 180 | """ 181 | event_writer = EventFileWriter(logdir, max_queue, flush_secs) 182 | super(FileWriter, self).__init__(event_writer, graph, graph_def) 183 | 184 | def get_logdir(self): 185 | """Returns the directory where event file will be written.""" 186 | return self.event_writer.get_logdir() 187 | 188 | def add_event(self, event): 189 | """Adds an event to the event file. 190 | Args: 191 | event: An `Event` protocol buffer. 192 | """ 193 | self.event_writer.add_event(event) 194 | 195 | def flush(self): 196 | """Flushes the event file to disk. 197 | Call this method to make sure that all pending events have been written to 198 | disk. 199 | """ 200 | self.event_writer.flush() 201 | 202 | def close(self): 203 | """Flushes the event file to disk and close the file.
204 | Call this method when you do not need the summary writer anymore. 205 | """ 206 | self.event_writer.close() 207 | 208 | def reopen(self): 209 | """Reopens the EventFileWriter. 210 | Can be called after `close()` to add more events in the same directory. 211 | The events will go into a new events file. 212 | Does nothing if the EventFileWriter was not closed. 213 | """ 214 | self.event_writer.reopen() 215 | 216 | 217 | class SummaryWriter(object): 218 | """Writes `Summary` directly to event files. 219 | The `SummaryWriter` class provides a high-level api to create an event file in a 220 | given directory and add summaries and events to it. The class updates the 221 | file contents asynchronously. This allows a training program to call methods 222 | to add data to the file directly from the training loop, without slowing down 223 | training. 224 | """ 225 | def __init__(self, log_dir): 226 | self.file_writer = FileWriter(logdir=log_dir) 227 | v = 1E-12 228 | buckets = [] 229 | neg_buckets = [] 230 | while v < 1E20:  # build geometrically spaced (x1.1) bucket edges in ±[1e-12, 1e20]; used when bins='tensorflow' 231 | buckets.append(v) 232 | neg_buckets.append(-v) 233 | v *= 1.1 234 | self.default_bins = neg_buckets[::-1] + [0] + buckets 235 | self.text_tags = [] 236 | def add_scalar(self, name, scalar_value, global_step=None): 237 | self.file_writer.add_summary(scalar(name, scalar_value), global_step) 238 | 239 | def add_histogram(self, name, values, global_step=None, bins='tensorflow'): 240 | if bins=='tensorflow': 241 | bins = self.default_bins 242 | self.file_writer.add_summary(histogram(name, values, bins), global_step) 243 | 244 | def add_image(self, tag, img_tensor, global_step=None): 245 | self.file_writer.add_summary(image(tag, img_tensor), global_step) 246 | def add_audio(self, tag, snd_tensor, global_step=None): 247 | self.file_writer.add_summary(audio(tag, snd_tensor), global_step) 248 | def add_video(self, tag, vid_tensor, global_step=None, fps=4): 249 | self.file_writer.add_summary(video(tag, vid_tensor, fps), global_step) 250 | def add_text(self, tag, text_string, global_step=None): 251 | self.file_writer.add_summary(text(tag, text_string), global_step) 252 | if tag not in self.text_tags:  # keep a running index of text tags; dumped to tensors.json for the text plugin 253 | self.text_tags.append(tag) 254 | extensionDIR = self.file_writer.get_logdir()+'/plugins/tensorboard_text/' 255 | if not os.path.exists(extensionDIR): 256 | os.makedirs(extensionDIR) 257 | with open(extensionDIR + 'tensors.json', 'w') as fp: 258 | json.dump(self.text_tags, fp) 259 | def add_graph(self, last_var): 260 | self.file_writer.add_graph(graph(last_var)) 261 | 262 | def add_all_parameter_histograms(self, last_var, global_step=None, pattern='.*'): 263 | cp = re.compile(pattern) 264 | g = build_computational_graph(last_var) 265 | names = NodeName(g.nodes) 266 | for n in g.nodes: 267 | if isinstance(n, chainer.variable.VariableNode) and \ 268 | isinstance(n._variable(), chainer.Parameter) and \ 269 | cp.match(names.name(n)): 270 | data = chainer.cuda.to_cpu(n._variable().data) 271 | self.add_histogram(names.name(n), data, global_step) 272 | 273 | def add_all_variable_images(self, last_var, exclude_params=True, global_step=None, pattern='.*'): 274 | cp = re.compile(pattern) 275 | g = build_computational_graph(last_var) 276 | names = NodeName(g.nodes) 277 | for n in g.nodes: 278 | if isinstance(n, chainer.variable.VariableNode) and \ 279 | (exclude_params and not isinstance(n._variable(), chainer.Parameter)) and \ 280 | n.data is not None and \ 281 | cp.match(names.name(n)): 282 | data = chainer.cuda.to_cpu(n.data) 283 | assert data.ndim < 5, "'variable.data' must be less than 5. the given 'variable.data.ndim' is %d."
% data.ndim 284 | if data.ndim == 4: 285 | for i, d in enumerate(data): 286 | img = make_grid(np.expand_dims(d, 1) if d.shape[0] != 3 else d) 287 | self.add_image(names.name(n) + '/' + str(i), img, global_step) 288 | else: 289 | img = make_grid(np.expand_dims(data, 1) if data.shape[0] != 3 else data) 290 | self.add_image(names.name(n), img, global_step) 291 | 292 | def close(self): 293 | self.file_writer.flush() 294 | self.file_writer.close() 295 | 296 | def __del__(self): 297 | if self.file_writer is not None: 298 | self.file_writer.close() 299 | 300 | -------------------------------------------------------------------------------- /tb_chainer/src/event_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | # source: tb_chainer/src/event.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | from tb_chainer.src import summary_pb2 as tb__chainer_dot_src_dot_summary__pb2 17 | 18 | 19 | DESCRIPTOR = _descriptor.FileDescriptor( 20 | name='tb_chainer/src/event.proto', 21 | package='tensorboard', 22 | syntax='proto3', 23 | serialized_pb=_b('\n\x1atb_chainer/src/event.proto\x12\x0btensorboard\x1a\x1ctb_chainer/src/summary.proto\"\xbf\x02\n\x05\x45vent\x12\x11\n\twall_time\x18\x01 \x01(\x01\x12\x0c\n\x04step\x18\x02 \x01(\x03\x12\x16\n\x0c\x66ile_version\x18\x03 \x01(\tH\x00\x12\x13\n\tgraph_def\x18\x04 \x01(\x0cH\x00\x12\'\n\x07summary\x18\x05 \x01(\x0b\x32\x14.tensorboard.SummaryH\x00\x12.\n\x0blog_message\x18\x06 
\x01(\x0b\x32\x17.tensorboard.LogMessageH\x00\x12.\n\x0bsession_log\x18\x07 \x01(\x0b\x32\x17.tensorboard.SessionLogH\x00\x12=\n\x13tagged_run_metadata\x18\x08 \x01(\x0b\x32\x1e.tensorboard.TaggedRunMetadataH\x00\x12\x18\n\x0emeta_graph_def\x18\t \x01(\x0cH\x00\x42\x06\n\x04what\"\x96\x01\n\nLogMessage\x12,\n\x05level\x18\x01 \x01(\x0e\x32\x1d.tensorboard.LogMessage.Level\x12\x0f\n\x07message\x18\x02 \x01(\t\"I\n\x05Level\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\n\x12\x08\n\x04INFO\x10\x14\x12\x08\n\x04WARN\x10\x1e\x12\t\n\x05\x45RROR\x10(\x12\t\n\x05\x46\x41TAL\x10\x32\"\xb7\x01\n\nSessionLog\x12\x35\n\x06status\x18\x01 \x01(\x0e\x32%.tensorboard.SessionLog.SessionStatus\x12\x17\n\x0f\x63heckpoint_path\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\"L\n\rSessionStatus\x12\x16\n\x12STATUS_UNSPECIFIED\x10\x00\x12\t\n\x05START\x10\x01\x12\x08\n\x04STOP\x10\x02\x12\x0e\n\nCHECKPOINT\x10\x03\"6\n\x11TaggedRunMetadata\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12\x14\n\x0crun_metadata\x18\x02 \x01(\x0c\x42\'\n\x13org.tensorflow.utilB\x0b\x45ventProtosP\x01\xf8\x01\x01\x62\x06proto3') 24 | , 25 | dependencies=[tb__chainer_dot_src_dot_summary__pb2.DESCRIPTOR,]) 26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 27 | 28 | 29 | 30 | _LOGMESSAGE_LEVEL = _descriptor.EnumDescriptor( 31 | name='Level', 32 | full_name='tensorboard.LogMessage.Level', 33 | filename=None, 34 | file=DESCRIPTOR, 35 | values=[ 36 | _descriptor.EnumValueDescriptor( 37 | name='UNKNOWN', index=0, number=0, 38 | options=None, 39 | type=None), 40 | _descriptor.EnumValueDescriptor( 41 | name='DEBUG', index=1, number=10, 42 | options=None, 43 | type=None), 44 | _descriptor.EnumValueDescriptor( 45 | name='INFO', index=2, number=20, 46 | options=None, 47 | type=None), 48 | _descriptor.EnumValueDescriptor( 49 | name='WARN', index=3, number=30, 50 | options=None, 51 | type=None), 52 | _descriptor.EnumValueDescriptor( 53 | name='ERROR', index=4, number=40, 54 | options=None, 55 | type=None), 56 | 
_descriptor.EnumValueDescriptor( 57 | name='FATAL', index=5, number=50, 58 | options=None, 59 | type=None), 60 | ], 61 | containing_type=None, 62 | options=None, 63 | serialized_start=473, 64 | serialized_end=546, 65 | ) 66 | _sym_db.RegisterEnumDescriptor(_LOGMESSAGE_LEVEL) 67 | 68 | _SESSIONLOG_SESSIONSTATUS = _descriptor.EnumDescriptor( 69 | name='SessionStatus', 70 | full_name='tensorboard.SessionLog.SessionStatus', 71 | filename=None, 72 | file=DESCRIPTOR, 73 | values=[ 74 | _descriptor.EnumValueDescriptor( 75 | name='STATUS_UNSPECIFIED', index=0, number=0, 76 | options=None, 77 | type=None), 78 | _descriptor.EnumValueDescriptor( 79 | name='START', index=1, number=1, 80 | options=None, 81 | type=None), 82 | _descriptor.EnumValueDescriptor( 83 | name='STOP', index=2, number=2, 84 | options=None, 85 | type=None), 86 | _descriptor.EnumValueDescriptor( 87 | name='CHECKPOINT', index=3, number=3, 88 | options=None, 89 | type=None), 90 | ], 91 | containing_type=None, 92 | options=None, 93 | serialized_start=656, 94 | serialized_end=732, 95 | ) 96 | _sym_db.RegisterEnumDescriptor(_SESSIONLOG_SESSIONSTATUS) 97 | 98 | 99 | _EVENT = _descriptor.Descriptor( 100 | name='Event', 101 | full_name='tensorboard.Event', 102 | filename=None, 103 | file=DESCRIPTOR, 104 | containing_type=None, 105 | fields=[ 106 | _descriptor.FieldDescriptor( 107 | name='wall_time', full_name='tensorboard.Event.wall_time', index=0, 108 | number=1, type=1, cpp_type=5, label=1, 109 | has_default_value=False, default_value=float(0), 110 | message_type=None, enum_type=None, containing_type=None, 111 | is_extension=False, extension_scope=None, 112 | options=None), 113 | _descriptor.FieldDescriptor( 114 | name='step', full_name='tensorboard.Event.step', index=1, 115 | number=2, type=3, cpp_type=2, label=1, 116 | has_default_value=False, default_value=0, 117 | message_type=None, enum_type=None, containing_type=None, 118 | is_extension=False, extension_scope=None, 119 | options=None), 120 | 
_descriptor.FieldDescriptor( 121 | name='file_version', full_name='tensorboard.Event.file_version', index=2, 122 | number=3, type=9, cpp_type=9, label=1, 123 | has_default_value=False, default_value=_b("").decode('utf-8'), 124 | message_type=None, enum_type=None, containing_type=None, 125 | is_extension=False, extension_scope=None, 126 | options=None), 127 | _descriptor.FieldDescriptor( 128 | name='graph_def', full_name='tensorboard.Event.graph_def', index=3, 129 | number=4, type=12, cpp_type=9, label=1, 130 | has_default_value=False, default_value=_b(""), 131 | message_type=None, enum_type=None, containing_type=None, 132 | is_extension=False, extension_scope=None, 133 | options=None), 134 | _descriptor.FieldDescriptor( 135 | name='summary', full_name='tensorboard.Event.summary', index=4, 136 | number=5, type=11, cpp_type=10, label=1, 137 | has_default_value=False, default_value=None, 138 | message_type=None, enum_type=None, containing_type=None, 139 | is_extension=False, extension_scope=None, 140 | options=None), 141 | _descriptor.FieldDescriptor( 142 | name='log_message', full_name='tensorboard.Event.log_message', index=5, 143 | number=6, type=11, cpp_type=10, label=1, 144 | has_default_value=False, default_value=None, 145 | message_type=None, enum_type=None, containing_type=None, 146 | is_extension=False, extension_scope=None, 147 | options=None), 148 | _descriptor.FieldDescriptor( 149 | name='session_log', full_name='tensorboard.Event.session_log', index=6, 150 | number=7, type=11, cpp_type=10, label=1, 151 | has_default_value=False, default_value=None, 152 | message_type=None, enum_type=None, containing_type=None, 153 | is_extension=False, extension_scope=None, 154 | options=None), 155 | _descriptor.FieldDescriptor( 156 | name='tagged_run_metadata', full_name='tensorboard.Event.tagged_run_metadata', index=7, 157 | number=8, type=11, cpp_type=10, label=1, 158 | has_default_value=False, default_value=None, 159 | message_type=None, enum_type=None, 
containing_type=None, 160 | is_extension=False, extension_scope=None, 161 | options=None), 162 | _descriptor.FieldDescriptor( 163 | name='meta_graph_def', full_name='tensorboard.Event.meta_graph_def', index=8, 164 | number=9, type=12, cpp_type=9, label=1, 165 | has_default_value=False, default_value=_b(""), 166 | message_type=None, enum_type=None, containing_type=None, 167 | is_extension=False, extension_scope=None, 168 | options=None), 169 | ], 170 | extensions=[ 171 | ], 172 | nested_types=[], 173 | enum_types=[ 174 | ], 175 | options=None, 176 | is_extendable=False, 177 | syntax='proto3', 178 | extension_ranges=[], 179 | oneofs=[ 180 | _descriptor.OneofDescriptor( 181 | name='what', full_name='tensorboard.Event.what', 182 | index=0, containing_type=None, fields=[]), 183 | ], 184 | serialized_start=74, 185 | serialized_end=393, 186 | ) 187 | 188 | 189 | _LOGMESSAGE = _descriptor.Descriptor( 190 | name='LogMessage', 191 | full_name='tensorboard.LogMessage', 192 | filename=None, 193 | file=DESCRIPTOR, 194 | containing_type=None, 195 | fields=[ 196 | _descriptor.FieldDescriptor( 197 | name='level', full_name='tensorboard.LogMessage.level', index=0, 198 | number=1, type=14, cpp_type=8, label=1, 199 | has_default_value=False, default_value=0, 200 | message_type=None, enum_type=None, containing_type=None, 201 | is_extension=False, extension_scope=None, 202 | options=None), 203 | _descriptor.FieldDescriptor( 204 | name='message', full_name='tensorboard.LogMessage.message', index=1, 205 | number=2, type=9, cpp_type=9, label=1, 206 | has_default_value=False, default_value=_b("").decode('utf-8'), 207 | message_type=None, enum_type=None, containing_type=None, 208 | is_extension=False, extension_scope=None, 209 | options=None), 210 | ], 211 | extensions=[ 212 | ], 213 | nested_types=[], 214 | enum_types=[ 215 | _LOGMESSAGE_LEVEL, 216 | ], 217 | options=None, 218 | is_extendable=False, 219 | syntax='proto3', 220 | extension_ranges=[], 221 | oneofs=[ 222 | ], 223 | 
serialized_start=396, 224 | serialized_end=546, 225 | ) 226 | 227 | 228 | _SESSIONLOG = _descriptor.Descriptor( 229 | name='SessionLog', 230 | full_name='tensorboard.SessionLog', 231 | filename=None, 232 | file=DESCRIPTOR, 233 | containing_type=None, 234 | fields=[ 235 | _descriptor.FieldDescriptor( 236 | name='status', full_name='tensorboard.SessionLog.status', index=0, 237 | number=1, type=14, cpp_type=8, label=1, 238 | has_default_value=False, default_value=0, 239 | message_type=None, enum_type=None, containing_type=None, 240 | is_extension=False, extension_scope=None, 241 | options=None), 242 | _descriptor.FieldDescriptor( 243 | name='checkpoint_path', full_name='tensorboard.SessionLog.checkpoint_path', index=1, 244 | number=2, type=9, cpp_type=9, label=1, 245 | has_default_value=False, default_value=_b("").decode('utf-8'), 246 | message_type=None, enum_type=None, containing_type=None, 247 | is_extension=False, extension_scope=None, 248 | options=None), 249 | _descriptor.FieldDescriptor( 250 | name='msg', full_name='tensorboard.SessionLog.msg', index=2, 251 | number=3, type=9, cpp_type=9, label=1, 252 | has_default_value=False, default_value=_b("").decode('utf-8'), 253 | message_type=None, enum_type=None, containing_type=None, 254 | is_extension=False, extension_scope=None, 255 | options=None), 256 | ], 257 | extensions=[ 258 | ], 259 | nested_types=[], 260 | enum_types=[ 261 | _SESSIONLOG_SESSIONSTATUS, 262 | ], 263 | options=None, 264 | is_extendable=False, 265 | syntax='proto3', 266 | extension_ranges=[], 267 | oneofs=[ 268 | ], 269 | serialized_start=549, 270 | serialized_end=732, 271 | ) 272 | 273 | 274 | _TAGGEDRUNMETADATA = _descriptor.Descriptor( 275 | name='TaggedRunMetadata', 276 | full_name='tensorboard.TaggedRunMetadata', 277 | filename=None, 278 | file=DESCRIPTOR, 279 | containing_type=None, 280 | fields=[ 281 | _descriptor.FieldDescriptor( 282 | name='tag', full_name='tensorboard.TaggedRunMetadata.tag', index=0, 283 | number=1, type=9, cpp_type=9, 
label=1, 284 | has_default_value=False, default_value=_b("").decode('utf-8'), 285 | message_type=None, enum_type=None, containing_type=None, 286 | is_extension=False, extension_scope=None, 287 | options=None), 288 | _descriptor.FieldDescriptor( 289 | name='run_metadata', full_name='tensorboard.TaggedRunMetadata.run_metadata', index=1, 290 | number=2, type=12, cpp_type=9, label=1, 291 | has_default_value=False, default_value=_b(""), 292 | message_type=None, enum_type=None, containing_type=None, 293 | is_extension=False, extension_scope=None, 294 | options=None), 295 | ], 296 | extensions=[ 297 | ], 298 | nested_types=[], 299 | enum_types=[ 300 | ], 301 | options=None, 302 | is_extendable=False, 303 | syntax='proto3', 304 | extension_ranges=[], 305 | oneofs=[ 306 | ], 307 | serialized_start=734, 308 | serialized_end=788, 309 | ) 310 | 311 | _EVENT.fields_by_name['summary'].message_type = tb__chainer_dot_src_dot_summary__pb2._SUMMARY 312 | _EVENT.fields_by_name['log_message'].message_type = _LOGMESSAGE 313 | _EVENT.fields_by_name['session_log'].message_type = _SESSIONLOG 314 | _EVENT.fields_by_name['tagged_run_metadata'].message_type = _TAGGEDRUNMETADATA 315 | _EVENT.oneofs_by_name['what'].fields.append( 316 | _EVENT.fields_by_name['file_version']) 317 | _EVENT.fields_by_name['file_version'].containing_oneof = _EVENT.oneofs_by_name['what'] 318 | _EVENT.oneofs_by_name['what'].fields.append( 319 | _EVENT.fields_by_name['graph_def']) 320 | _EVENT.fields_by_name['graph_def'].containing_oneof = _EVENT.oneofs_by_name['what'] 321 | _EVENT.oneofs_by_name['what'].fields.append( 322 | _EVENT.fields_by_name['summary']) 323 | _EVENT.fields_by_name['summary'].containing_oneof = _EVENT.oneofs_by_name['what'] 324 | _EVENT.oneofs_by_name['what'].fields.append( 325 | _EVENT.fields_by_name['log_message']) 326 | _EVENT.fields_by_name['log_message'].containing_oneof = _EVENT.oneofs_by_name['what'] 327 | _EVENT.oneofs_by_name['what'].fields.append( 328 | 
_EVENT.fields_by_name['session_log']) 329 | _EVENT.fields_by_name['session_log'].containing_oneof = _EVENT.oneofs_by_name['what'] 330 | _EVENT.oneofs_by_name['what'].fields.append( 331 | _EVENT.fields_by_name['tagged_run_metadata']) 332 | _EVENT.fields_by_name['tagged_run_metadata'].containing_oneof = _EVENT.oneofs_by_name['what'] 333 | _EVENT.oneofs_by_name['what'].fields.append( 334 | _EVENT.fields_by_name['meta_graph_def']) 335 | _EVENT.fields_by_name['meta_graph_def'].containing_oneof = _EVENT.oneofs_by_name['what'] 336 | _LOGMESSAGE.fields_by_name['level'].enum_type = _LOGMESSAGE_LEVEL 337 | _LOGMESSAGE_LEVEL.containing_type = _LOGMESSAGE 338 | _SESSIONLOG.fields_by_name['status'].enum_type = _SESSIONLOG_SESSIONSTATUS 339 | _SESSIONLOG_SESSIONSTATUS.containing_type = _SESSIONLOG 340 | DESCRIPTOR.message_types_by_name['Event'] = _EVENT 341 | DESCRIPTOR.message_types_by_name['LogMessage'] = _LOGMESSAGE 342 | DESCRIPTOR.message_types_by_name['SessionLog'] = _SESSIONLOG 343 | DESCRIPTOR.message_types_by_name['TaggedRunMetadata'] = _TAGGEDRUNMETADATA 344 | 345 | Event = _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), dict( 346 | DESCRIPTOR = _EVENT, 347 | __module__ = 'tb_chainer.src.event_pb2' 348 | # @@protoc_insertion_point(class_scope:tensorboard.Event) 349 | )) 350 | _sym_db.RegisterMessage(Event) 351 | 352 | LogMessage = _reflection.GeneratedProtocolMessageType('LogMessage', (_message.Message,), dict( 353 | DESCRIPTOR = _LOGMESSAGE, 354 | __module__ = 'tb_chainer.src.event_pb2' 355 | # @@protoc_insertion_point(class_scope:tensorboard.LogMessage) 356 | )) 357 | _sym_db.RegisterMessage(LogMessage) 358 | 359 | SessionLog = _reflection.GeneratedProtocolMessageType('SessionLog', (_message.Message,), dict( 360 | DESCRIPTOR = _SESSIONLOG, 361 | __module__ = 'tb_chainer.src.event_pb2' 362 | # @@protoc_insertion_point(class_scope:tensorboard.SessionLog) 363 | )) 364 | _sym_db.RegisterMessage(SessionLog) 365 | 366 | TaggedRunMetadata = 
_reflection.GeneratedProtocolMessageType('TaggedRunMetadata', (_message.Message,), dict( 367 | DESCRIPTOR = _TAGGEDRUNMETADATA, 368 | __module__ = 'tb_chainer.src.event_pb2' 369 | # @@protoc_insertion_point(class_scope:tensorboard.TaggedRunMetadata) 370 | )) 371 | _sym_db.RegisterMessage(TaggedRunMetadata) 372 | 373 | 374 | DESCRIPTOR.has_options = True 375 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023org.tensorflow.utilB\013EventProtosP\001\370\001\001')) 376 | # @@protoc_insertion_point(module_scope) 377 | -------------------------------------------------------------------------------- /tb_chainer/src/attr_value_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | # source: tb_chainer/src/attr_value.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | from tb_chainer.src import tensor_pb2 as tb__chainer_dot_src_dot_tensor__pb2 17 | from tb_chainer.src import tensor_shape_pb2 as tb__chainer_dot_src_dot_tensor__shape__pb2 18 | from tb_chainer.src import types_pb2 as tb__chainer_dot_src_dot_types__pb2 19 | 20 | 21 | DESCRIPTOR = _descriptor.FileDescriptor( 22 | name='tb_chainer/src/attr_value.proto', 23 | package='tensorboard', 24 | syntax='proto3', 25 | serialized_pb=_b('\n\x1ftb_chainer/src/attr_value.proto\x12\x0btensorboard\x1a\x1btb_chainer/src/tensor.proto\x1a!tb_chainer/src/tensor_shape.proto\x1a\x1atb_chainer/src/types.proto\"\xaf\x04\n\tAttrValue\x12\x0b\n\x01s\x18\x02 
\x01(\x0cH\x00\x12\x0b\n\x01i\x18\x03 \x01(\x03H\x00\x12\x0b\n\x01\x66\x18\x04 \x01(\x02H\x00\x12\x0b\n\x01\x62\x18\x05 \x01(\x08H\x00\x12%\n\x04type\x18\x06 \x01(\x0e\x32\x15.tensorboard.DataTypeH\x00\x12.\n\x05shape\x18\x07 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProtoH\x00\x12*\n\x06tensor\x18\x08 \x01(\x0b\x32\x18.tensorboard.TensorProtoH\x00\x12\x30\n\x04list\x18\x01 \x01(\x0b\x32 .tensorboard.AttrValue.ListValueH\x00\x12)\n\x04\x66unc\x18\n \x01(\x0b\x32\x19.tensorboard.NameAttrListH\x00\x12\x15\n\x0bplaceholder\x18\t \x01(\tH\x00\x1a\xed\x01\n\tListValue\x12\t\n\x01s\x18\x02 \x03(\x0c\x12\r\n\x01i\x18\x03 \x03(\x03\x42\x02\x10\x01\x12\r\n\x01\x66\x18\x04 \x03(\x02\x42\x02\x10\x01\x12\r\n\x01\x62\x18\x05 \x03(\x08\x42\x02\x10\x01\x12\'\n\x04type\x18\x06 \x03(\x0e\x32\x15.tensorboard.DataTypeB\x02\x10\x01\x12,\n\x05shape\x18\x07 \x03(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12(\n\x06tensor\x18\x08 \x03(\x0b\x32\x18.tensorboard.TensorProto\x12\'\n\x04\x66unc\x18\t \x03(\x0b\x32\x19.tensorboard.NameAttrListB\x07\n\x05value\"\x94\x01\n\x0cNameAttrList\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\x04\x61ttr\x18\x02 \x03(\x0b\x32#.tensorboard.NameAttrList.AttrEntry\x1a\x43\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorboard.AttrValue:\x02\x38\x01\x42\x30\n\x18org.tensorflow.frameworkB\x0f\x41ttrValueProtosP\x01\xf8\x01\x01\x62\x06proto3') 26 | , 27 | dependencies=[tb__chainer_dot_src_dot_tensor__pb2.DESCRIPTOR,tb__chainer_dot_src_dot_tensor__shape__pb2.DESCRIPTOR,tb__chainer_dot_src_dot_types__pb2.DESCRIPTOR,]) 28 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 29 | 30 | 31 | 32 | 33 | _ATTRVALUE_LISTVALUE = _descriptor.Descriptor( 34 | name='ListValue', 35 | full_name='tensorboard.AttrValue.ListValue', 36 | filename=None, 37 | file=DESCRIPTOR, 38 | containing_type=None, 39 | fields=[ 40 | _descriptor.FieldDescriptor( 41 | name='s', full_name='tensorboard.AttrValue.ListValue.s', index=0, 42 | number=2, type=12, 
cpp_type=9, label=3, 43 | has_default_value=False, default_value=[], 44 | message_type=None, enum_type=None, containing_type=None, 45 | is_extension=False, extension_scope=None, 46 | options=None), 47 | _descriptor.FieldDescriptor( 48 | name='i', full_name='tensorboard.AttrValue.ListValue.i', index=1, 49 | number=3, type=3, cpp_type=2, label=3, 50 | has_default_value=False, default_value=[], 51 | message_type=None, enum_type=None, containing_type=None, 52 | is_extension=False, extension_scope=None, 53 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 54 | _descriptor.FieldDescriptor( 55 | name='f', full_name='tensorboard.AttrValue.ListValue.f', index=2, 56 | number=4, type=2, cpp_type=6, label=3, 57 | has_default_value=False, default_value=[], 58 | message_type=None, enum_type=None, containing_type=None, 59 | is_extension=False, extension_scope=None, 60 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 61 | _descriptor.FieldDescriptor( 62 | name='b', full_name='tensorboard.AttrValue.ListValue.b', index=3, 63 | number=5, type=8, cpp_type=7, label=3, 64 | has_default_value=False, default_value=[], 65 | message_type=None, enum_type=None, containing_type=None, 66 | is_extension=False, extension_scope=None, 67 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 68 | _descriptor.FieldDescriptor( 69 | name='type', full_name='tensorboard.AttrValue.ListValue.type', index=4, 70 | number=6, type=14, cpp_type=8, label=3, 71 | has_default_value=False, default_value=[], 72 | message_type=None, enum_type=None, containing_type=None, 73 | is_extension=False, extension_scope=None, 74 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 75 | _descriptor.FieldDescriptor( 76 | name='shape', full_name='tensorboard.AttrValue.ListValue.shape', index=5, 77 | number=7, type=11, cpp_type=10, label=3, 78 | has_default_value=False, default_value=[], 79 | 
message_type=None, enum_type=None, containing_type=None, 80 | is_extension=False, extension_scope=None, 81 | options=None), 82 | _descriptor.FieldDescriptor( 83 | name='tensor', full_name='tensorboard.AttrValue.ListValue.tensor', index=6, 84 | number=8, type=11, cpp_type=10, label=3, 85 | has_default_value=False, default_value=[], 86 | message_type=None, enum_type=None, containing_type=None, 87 | is_extension=False, extension_scope=None, 88 | options=None), 89 | _descriptor.FieldDescriptor( 90 | name='func', full_name='tensorboard.AttrValue.ListValue.func', index=7, 91 | number=9, type=11, cpp_type=10, label=3, 92 | has_default_value=False, default_value=[], 93 | message_type=None, enum_type=None, containing_type=None, 94 | is_extension=False, extension_scope=None, 95 | options=None), 96 | ], 97 | extensions=[ 98 | ], 99 | nested_types=[], 100 | enum_types=[ 101 | ], 102 | options=None, 103 | is_extendable=False, 104 | syntax='proto3', 105 | extension_ranges=[], 106 | oneofs=[ 107 | ], 108 | serialized_start=454, 109 | serialized_end=691, 110 | ) 111 | 112 | _ATTRVALUE = _descriptor.Descriptor( 113 | name='AttrValue', 114 | full_name='tensorboard.AttrValue', 115 | filename=None, 116 | file=DESCRIPTOR, 117 | containing_type=None, 118 | fields=[ 119 | _descriptor.FieldDescriptor( 120 | name='s', full_name='tensorboard.AttrValue.s', index=0, 121 | number=2, type=12, cpp_type=9, label=1, 122 | has_default_value=False, default_value=_b(""), 123 | message_type=None, enum_type=None, containing_type=None, 124 | is_extension=False, extension_scope=None, 125 | options=None), 126 | _descriptor.FieldDescriptor( 127 | name='i', full_name='tensorboard.AttrValue.i', index=1, 128 | number=3, type=3, cpp_type=2, label=1, 129 | has_default_value=False, default_value=0, 130 | message_type=None, enum_type=None, containing_type=None, 131 | is_extension=False, extension_scope=None, 132 | options=None), 133 | _descriptor.FieldDescriptor( 134 | name='f', 
full_name='tensorboard.AttrValue.f', index=2, 135 | number=4, type=2, cpp_type=6, label=1, 136 | has_default_value=False, default_value=float(0), 137 | message_type=None, enum_type=None, containing_type=None, 138 | is_extension=False, extension_scope=None, 139 | options=None), 140 | _descriptor.FieldDescriptor( 141 | name='b', full_name='tensorboard.AttrValue.b', index=3, 142 | number=5, type=8, cpp_type=7, label=1, 143 | has_default_value=False, default_value=False, 144 | message_type=None, enum_type=None, containing_type=None, 145 | is_extension=False, extension_scope=None, 146 | options=None), 147 | _descriptor.FieldDescriptor( 148 | name='type', full_name='tensorboard.AttrValue.type', index=4, 149 | number=6, type=14, cpp_type=8, label=1, 150 | has_default_value=False, default_value=0, 151 | message_type=None, enum_type=None, containing_type=None, 152 | is_extension=False, extension_scope=None, 153 | options=None), 154 | _descriptor.FieldDescriptor( 155 | name='shape', full_name='tensorboard.AttrValue.shape', index=5, 156 | number=7, type=11, cpp_type=10, label=1, 157 | has_default_value=False, default_value=None, 158 | message_type=None, enum_type=None, containing_type=None, 159 | is_extension=False, extension_scope=None, 160 | options=None), 161 | _descriptor.FieldDescriptor( 162 | name='tensor', full_name='tensorboard.AttrValue.tensor', index=6, 163 | number=8, type=11, cpp_type=10, label=1, 164 | has_default_value=False, default_value=None, 165 | message_type=None, enum_type=None, containing_type=None, 166 | is_extension=False, extension_scope=None, 167 | options=None), 168 | _descriptor.FieldDescriptor( 169 | name='list', full_name='tensorboard.AttrValue.list', index=7, 170 | number=1, type=11, cpp_type=10, label=1, 171 | has_default_value=False, default_value=None, 172 | message_type=None, enum_type=None, containing_type=None, 173 | is_extension=False, extension_scope=None, 174 | options=None), 175 | _descriptor.FieldDescriptor( 176 | name='func', 
full_name='tensorboard.AttrValue.func', index=8, 177 | number=10, type=11, cpp_type=10, label=1, 178 | has_default_value=False, default_value=None, 179 | message_type=None, enum_type=None, containing_type=None, 180 | is_extension=False, extension_scope=None, 181 | options=None), 182 | _descriptor.FieldDescriptor( 183 | name='placeholder', full_name='tensorboard.AttrValue.placeholder', index=9, 184 | number=9, type=9, cpp_type=9, label=1, 185 | has_default_value=False, default_value=_b("").decode('utf-8'), 186 | message_type=None, enum_type=None, containing_type=None, 187 | is_extension=False, extension_scope=None, 188 | options=None), 189 | ], 190 | extensions=[ 191 | ], 192 | nested_types=[_ATTRVALUE_LISTVALUE, ], 193 | enum_types=[ 194 | ], 195 | options=None, 196 | is_extendable=False, 197 | syntax='proto3', 198 | extension_ranges=[], 199 | oneofs=[ 200 | _descriptor.OneofDescriptor( 201 | name='value', full_name='tensorboard.AttrValue.value', 202 | index=0, containing_type=None, fields=[]), 203 | ], 204 | serialized_start=141, 205 | serialized_end=700, 206 | ) 207 | 208 | 209 | _NAMEATTRLIST_ATTRENTRY = _descriptor.Descriptor( 210 | name='AttrEntry', 211 | full_name='tensorboard.NameAttrList.AttrEntry', 212 | filename=None, 213 | file=DESCRIPTOR, 214 | containing_type=None, 215 | fields=[ 216 | _descriptor.FieldDescriptor( 217 | name='key', full_name='tensorboard.NameAttrList.AttrEntry.key', index=0, 218 | number=1, type=9, cpp_type=9, label=1, 219 | has_default_value=False, default_value=_b("").decode('utf-8'), 220 | message_type=None, enum_type=None, containing_type=None, 221 | is_extension=False, extension_scope=None, 222 | options=None), 223 | _descriptor.FieldDescriptor( 224 | name='value', full_name='tensorboard.NameAttrList.AttrEntry.value', index=1, 225 | number=2, type=11, cpp_type=10, label=1, 226 | has_default_value=False, default_value=None, 227 | message_type=None, enum_type=None, containing_type=None, 228 | is_extension=False, 
extension_scope=None, 229 | options=None), 230 | ], 231 | extensions=[ 232 | ], 233 | nested_types=[], 234 | enum_types=[ 235 | ], 236 | options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), 237 | is_extendable=False, 238 | syntax='proto3', 239 | extension_ranges=[], 240 | oneofs=[ 241 | ], 242 | serialized_start=784, 243 | serialized_end=851, 244 | ) 245 | 246 | _NAMEATTRLIST = _descriptor.Descriptor( 247 | name='NameAttrList', 248 | full_name='tensorboard.NameAttrList', 249 | filename=None, 250 | file=DESCRIPTOR, 251 | containing_type=None, 252 | fields=[ 253 | _descriptor.FieldDescriptor( 254 | name='name', full_name='tensorboard.NameAttrList.name', index=0, 255 | number=1, type=9, cpp_type=9, label=1, 256 | has_default_value=False, default_value=_b("").decode('utf-8'), 257 | message_type=None, enum_type=None, containing_type=None, 258 | is_extension=False, extension_scope=None, 259 | options=None), 260 | _descriptor.FieldDescriptor( 261 | name='attr', full_name='tensorboard.NameAttrList.attr', index=1, 262 | number=2, type=11, cpp_type=10, label=3, 263 | has_default_value=False, default_value=[], 264 | message_type=None, enum_type=None, containing_type=None, 265 | is_extension=False, extension_scope=None, 266 | options=None), 267 | ], 268 | extensions=[ 269 | ], 270 | nested_types=[_NAMEATTRLIST_ATTRENTRY, ], 271 | enum_types=[ 272 | ], 273 | options=None, 274 | is_extendable=False, 275 | syntax='proto3', 276 | extension_ranges=[], 277 | oneofs=[ 278 | ], 279 | serialized_start=703, 280 | serialized_end=851, 281 | ) 282 | 283 | _ATTRVALUE_LISTVALUE.fields_by_name['type'].enum_type = tb__chainer_dot_src_dot_types__pb2._DATATYPE 284 | _ATTRVALUE_LISTVALUE.fields_by_name['shape'].message_type = tb__chainer_dot_src_dot_tensor__shape__pb2._TENSORSHAPEPROTO 285 | _ATTRVALUE_LISTVALUE.fields_by_name['tensor'].message_type = tb__chainer_dot_src_dot_tensor__pb2._TENSORPROTO 286 | _ATTRVALUE_LISTVALUE.fields_by_name['func'].message_type = 
_NAMEATTRLIST 287 | _ATTRVALUE_LISTVALUE.containing_type = _ATTRVALUE 288 | _ATTRVALUE.fields_by_name['type'].enum_type = tb__chainer_dot_src_dot_types__pb2._DATATYPE 289 | _ATTRVALUE.fields_by_name['shape'].message_type = tb__chainer_dot_src_dot_tensor__shape__pb2._TENSORSHAPEPROTO 290 | _ATTRVALUE.fields_by_name['tensor'].message_type = tb__chainer_dot_src_dot_tensor__pb2._TENSORPROTO 291 | _ATTRVALUE.fields_by_name['list'].message_type = _ATTRVALUE_LISTVALUE 292 | _ATTRVALUE.fields_by_name['func'].message_type = _NAMEATTRLIST 293 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 294 | _ATTRVALUE.fields_by_name['s']) 295 | _ATTRVALUE.fields_by_name['s'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 296 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 297 | _ATTRVALUE.fields_by_name['i']) 298 | _ATTRVALUE.fields_by_name['i'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 299 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 300 | _ATTRVALUE.fields_by_name['f']) 301 | _ATTRVALUE.fields_by_name['f'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 302 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 303 | _ATTRVALUE.fields_by_name['b']) 304 | _ATTRVALUE.fields_by_name['b'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 305 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 306 | _ATTRVALUE.fields_by_name['type']) 307 | _ATTRVALUE.fields_by_name['type'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 308 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 309 | _ATTRVALUE.fields_by_name['shape']) 310 | _ATTRVALUE.fields_by_name['shape'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 311 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 312 | _ATTRVALUE.fields_by_name['tensor']) 313 | _ATTRVALUE.fields_by_name['tensor'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 314 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 315 | _ATTRVALUE.fields_by_name['list']) 316 | 
_ATTRVALUE.fields_by_name['list'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 317 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 318 | _ATTRVALUE.fields_by_name['func']) 319 | _ATTRVALUE.fields_by_name['func'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 320 | _ATTRVALUE.oneofs_by_name['value'].fields.append( 321 | _ATTRVALUE.fields_by_name['placeholder']) 322 | _ATTRVALUE.fields_by_name['placeholder'].containing_oneof = _ATTRVALUE.oneofs_by_name['value'] 323 | _NAMEATTRLIST_ATTRENTRY.fields_by_name['value'].message_type = _ATTRVALUE 324 | _NAMEATTRLIST_ATTRENTRY.containing_type = _NAMEATTRLIST 325 | _NAMEATTRLIST.fields_by_name['attr'].message_type = _NAMEATTRLIST_ATTRENTRY 326 | DESCRIPTOR.message_types_by_name['AttrValue'] = _ATTRVALUE 327 | DESCRIPTOR.message_types_by_name['NameAttrList'] = _NAMEATTRLIST 328 | 329 | AttrValue = _reflection.GeneratedProtocolMessageType('AttrValue', (_message.Message,), dict( 330 | 331 | ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), dict( 332 | DESCRIPTOR = _ATTRVALUE_LISTVALUE, 333 | __module__ = 'tb_chainer.src.attr_value_pb2' 334 | # @@protoc_insertion_point(class_scope:tensorboard.AttrValue.ListValue) 335 | )) 336 | , 337 | DESCRIPTOR = _ATTRVALUE, 338 | __module__ = 'tb_chainer.src.attr_value_pb2' 339 | # @@protoc_insertion_point(class_scope:tensorboard.AttrValue) 340 | )) 341 | _sym_db.RegisterMessage(AttrValue) 342 | _sym_db.RegisterMessage(AttrValue.ListValue) 343 | 344 | NameAttrList = _reflection.GeneratedProtocolMessageType('NameAttrList', (_message.Message,), dict( 345 | 346 | AttrEntry = _reflection.GeneratedProtocolMessageType('AttrEntry', (_message.Message,), dict( 347 | DESCRIPTOR = _NAMEATTRLIST_ATTRENTRY, 348 | __module__ = 'tb_chainer.src.attr_value_pb2' 349 | # @@protoc_insertion_point(class_scope:tensorboard.NameAttrList.AttrEntry) 350 | )) 351 | , 352 | DESCRIPTOR = _NAMEATTRLIST, 353 | __module__ = 'tb_chainer.src.attr_value_pb2' 354 | # 
@@protoc_insertion_point(class_scope:tensorboard.NameAttrList) 355 | )) 356 | _sym_db.RegisterMessage(NameAttrList) 357 | _sym_db.RegisterMessage(NameAttrList.AttrEntry) 358 | 359 | 360 | DESCRIPTOR.has_options = True 361 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\017AttrValueProtosP\001\370\001\001')) 362 | _ATTRVALUE_LISTVALUE.fields_by_name['i'].has_options = True 363 | _ATTRVALUE_LISTVALUE.fields_by_name['i']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 364 | _ATTRVALUE_LISTVALUE.fields_by_name['f'].has_options = True 365 | _ATTRVALUE_LISTVALUE.fields_by_name['f']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 366 | _ATTRVALUE_LISTVALUE.fields_by_name['b'].has_options = True 367 | _ATTRVALUE_LISTVALUE.fields_by_name['b']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 368 | _ATTRVALUE_LISTVALUE.fields_by_name['type'].has_options = True 369 | _ATTRVALUE_LISTVALUE.fields_by_name['type']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 370 | _NAMEATTRLIST_ATTRENTRY.has_options = True 371 | _NAMEATTRLIST_ATTRENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) 372 | # @@protoc_insertion_point(module_scope) 373 | -------------------------------------------------------------------------------- /tb_chainer/src/summary_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
2 | # source: tb_chainer/src/summary.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | from tb_chainer.src import tensor_pb2 as tb__chainer_dot_src_dot_tensor__pb2 17 | 18 | 19 | DESCRIPTOR = _descriptor.FileDescriptor( 20 | name='tb_chainer/src/summary.proto', 21 | package='tensorboard', 22 | syntax='proto3', 23 | serialized_pb=_b('\n\x1ctb_chainer/src/summary.proto\x12\x0btensorboard\x1a\x1btb_chainer/src/tensor.proto\"\'\n\x12SummaryDescription\x12\x11\n\ttype_hint\x18\x01 \x01(\t\"\x87\x01\n\x0eHistogramProto\x12\x0b\n\x03min\x18\x01 \x01(\x01\x12\x0b\n\x03max\x18\x02 \x01(\x01\x12\x0b\n\x03num\x18\x03 \x01(\x01\x12\x0b\n\x03sum\x18\x04 \x01(\x01\x12\x13\n\x0bsum_squares\x18\x05 \x01(\x01\x12\x18\n\x0c\x62ucket_limit\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x12\n\x06\x62ucket\x18\x07 \x03(\x01\x42\x02\x10\x01\"\x83\x01\n\x0fSummaryMetadata\x12<\n\x0bplugin_data\x18\x01 \x03(\x0b\x32\'.tensorboard.SummaryMetadata.PluginData\x1a\x32\n\nPluginData\x12\x13\n\x0bplugin_name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\"\xe4\x04\n\x07Summary\x12)\n\x05value\x18\x01 \x03(\x0b\x32\x1a.tensorboard.Summary.Value\x1aX\n\x05Image\x12\x0e\n\x06height\x18\x01 \x01(\x05\x12\r\n\x05width\x18\x02 \x01(\x05\x12\x12\n\ncolorspace\x18\x03 \x01(\x05\x12\x1c\n\x14\x65ncoded_image_string\x18\x04 \x01(\x0c\x1a}\n\x05\x41udio\x12\x13\n\x0bsample_rate\x18\x01 \x01(\x02\x12\x14\n\x0cnum_channels\x18\x02 \x01(\x03\x12\x15\n\rlength_frames\x18\x03 \x01(\x03\x12\x1c\n\x14\x65ncoded_audio_string\x18\x04 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x05 
\x01(\t\x1a\xd4\x02\n\x05Value\x12\x11\n\tnode_name\x18\x07 \x01(\t\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12.\n\x08metadata\x18\t \x01(\x0b\x32\x1c.tensorboard.SummaryMetadata\x12\x16\n\x0csimple_value\x18\x02 \x01(\x02H\x00\x12&\n\x1cobsolete_old_style_histogram\x18\x03 \x01(\x0cH\x00\x12+\n\x05image\x18\x04 \x01(\x0b\x32\x1a.tensorboard.Summary.ImageH\x00\x12,\n\x05histo\x18\x05 \x01(\x0b\x32\x1b.tensorboard.HistogramProtoH\x00\x12+\n\x05\x61udio\x18\x06 \x01(\x0b\x32\x1a.tensorboard.Summary.AudioH\x00\x12*\n\x06tensor\x18\x08 \x01(\x0b\x32\x18.tensorboard.TensorProtoH\x00\x42\x07\n\x05valueB.\n\x18org.tensorflow.frameworkB\rSummaryProtosP\x01\xf8\x01\x01\x62\x06proto3') 24 | , 25 | dependencies=[tb__chainer_dot_src_dot_tensor__pb2.DESCRIPTOR,]) 26 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 27 | 28 | 29 | 30 | 31 | _SUMMARYDESCRIPTION = _descriptor.Descriptor( 32 | name='SummaryDescription', 33 | full_name='tensorboard.SummaryDescription', 34 | filename=None, 35 | file=DESCRIPTOR, 36 | containing_type=None, 37 | fields=[ 38 | _descriptor.FieldDescriptor( 39 | name='type_hint', full_name='tensorboard.SummaryDescription.type_hint', index=0, 40 | number=1, type=9, cpp_type=9, label=1, 41 | has_default_value=False, default_value=_b("").decode('utf-8'), 42 | message_type=None, enum_type=None, containing_type=None, 43 | is_extension=False, extension_scope=None, 44 | options=None), 45 | ], 46 | extensions=[ 47 | ], 48 | nested_types=[], 49 | enum_types=[ 50 | ], 51 | options=None, 52 | is_extendable=False, 53 | syntax='proto3', 54 | extension_ranges=[], 55 | oneofs=[ 56 | ], 57 | serialized_start=74, 58 | serialized_end=113, 59 | ) 60 | 61 | 62 | _HISTOGRAMPROTO = _descriptor.Descriptor( 63 | name='HistogramProto', 64 | full_name='tensorboard.HistogramProto', 65 | filename=None, 66 | file=DESCRIPTOR, 67 | containing_type=None, 68 | fields=[ 69 | _descriptor.FieldDescriptor( 70 | name='min', full_name='tensorboard.HistogramProto.min', index=0, 71 | number=1, type=1, 
cpp_type=5, label=1, 72 | has_default_value=False, default_value=float(0), 73 | message_type=None, enum_type=None, containing_type=None, 74 | is_extension=False, extension_scope=None, 75 | options=None), 76 | _descriptor.FieldDescriptor( 77 | name='max', full_name='tensorboard.HistogramProto.max', index=1, 78 | number=2, type=1, cpp_type=5, label=1, 79 | has_default_value=False, default_value=float(0), 80 | message_type=None, enum_type=None, containing_type=None, 81 | is_extension=False, extension_scope=None, 82 | options=None), 83 | _descriptor.FieldDescriptor( 84 | name='num', full_name='tensorboard.HistogramProto.num', index=2, 85 | number=3, type=1, cpp_type=5, label=1, 86 | has_default_value=False, default_value=float(0), 87 | message_type=None, enum_type=None, containing_type=None, 88 | is_extension=False, extension_scope=None, 89 | options=None), 90 | _descriptor.FieldDescriptor( 91 | name='sum', full_name='tensorboard.HistogramProto.sum', index=3, 92 | number=4, type=1, cpp_type=5, label=1, 93 | has_default_value=False, default_value=float(0), 94 | message_type=None, enum_type=None, containing_type=None, 95 | is_extension=False, extension_scope=None, 96 | options=None), 97 | _descriptor.FieldDescriptor( 98 | name='sum_squares', full_name='tensorboard.HistogramProto.sum_squares', index=4, 99 | number=5, type=1, cpp_type=5, label=1, 100 | has_default_value=False, default_value=float(0), 101 | message_type=None, enum_type=None, containing_type=None, 102 | is_extension=False, extension_scope=None, 103 | options=None), 104 | _descriptor.FieldDescriptor( 105 | name='bucket_limit', full_name='tensorboard.HistogramProto.bucket_limit', index=5, 106 | number=6, type=1, cpp_type=5, label=3, 107 | has_default_value=False, default_value=[], 108 | message_type=None, enum_type=None, containing_type=None, 109 | is_extension=False, extension_scope=None, 110 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 111 | 
_descriptor.FieldDescriptor( 112 | name='bucket', full_name='tensorboard.HistogramProto.bucket', index=6, 113 | number=7, type=1, cpp_type=5, label=3, 114 | has_default_value=False, default_value=[], 115 | message_type=None, enum_type=None, containing_type=None, 116 | is_extension=False, extension_scope=None, 117 | options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), 118 | ], 119 | extensions=[ 120 | ], 121 | nested_types=[], 122 | enum_types=[ 123 | ], 124 | options=None, 125 | is_extendable=False, 126 | syntax='proto3', 127 | extension_ranges=[], 128 | oneofs=[ 129 | ], 130 | serialized_start=116, 131 | serialized_end=251, 132 | ) 133 | 134 | 135 | _SUMMARYMETADATA_PLUGINDATA = _descriptor.Descriptor( 136 | name='PluginData', 137 | full_name='tensorboard.SummaryMetadata.PluginData', 138 | filename=None, 139 | file=DESCRIPTOR, 140 | containing_type=None, 141 | fields=[ 142 | _descriptor.FieldDescriptor( 143 | name='plugin_name', full_name='tensorboard.SummaryMetadata.PluginData.plugin_name', index=0, 144 | number=1, type=9, cpp_type=9, label=1, 145 | has_default_value=False, default_value=_b("").decode('utf-8'), 146 | message_type=None, enum_type=None, containing_type=None, 147 | is_extension=False, extension_scope=None, 148 | options=None), 149 | _descriptor.FieldDescriptor( 150 | name='content', full_name='tensorboard.SummaryMetadata.PluginData.content', index=1, 151 | number=2, type=9, cpp_type=9, label=1, 152 | has_default_value=False, default_value=_b("").decode('utf-8'), 153 | message_type=None, enum_type=None, containing_type=None, 154 | is_extension=False, extension_scope=None, 155 | options=None), 156 | ], 157 | extensions=[ 158 | ], 159 | nested_types=[], 160 | enum_types=[ 161 | ], 162 | options=None, 163 | is_extendable=False, 164 | syntax='proto3', 165 | extension_ranges=[], 166 | oneofs=[ 167 | ], 168 | serialized_start=335, 169 | serialized_end=385, 170 | ) 171 | 172 | _SUMMARYMETADATA = _descriptor.Descriptor( 173 | 
name='SummaryMetadata', 174 | full_name='tensorboard.SummaryMetadata', 175 | filename=None, 176 | file=DESCRIPTOR, 177 | containing_type=None, 178 | fields=[ 179 | _descriptor.FieldDescriptor( 180 | name='plugin_data', full_name='tensorboard.SummaryMetadata.plugin_data', index=0, 181 | number=1, type=11, cpp_type=10, label=3, 182 | has_default_value=False, default_value=[], 183 | message_type=None, enum_type=None, containing_type=None, 184 | is_extension=False, extension_scope=None, 185 | options=None), 186 | ], 187 | extensions=[ 188 | ], 189 | nested_types=[_SUMMARYMETADATA_PLUGINDATA, ], 190 | enum_types=[ 191 | ], 192 | options=None, 193 | is_extendable=False, 194 | syntax='proto3', 195 | extension_ranges=[], 196 | oneofs=[ 197 | ], 198 | serialized_start=254, 199 | serialized_end=385, 200 | ) 201 | 202 | 203 | _SUMMARY_IMAGE = _descriptor.Descriptor( 204 | name='Image', 205 | full_name='tensorboard.Summary.Image', 206 | filename=None, 207 | file=DESCRIPTOR, 208 | containing_type=None, 209 | fields=[ 210 | _descriptor.FieldDescriptor( 211 | name='height', full_name='tensorboard.Summary.Image.height', index=0, 212 | number=1, type=5, cpp_type=1, label=1, 213 | has_default_value=False, default_value=0, 214 | message_type=None, enum_type=None, containing_type=None, 215 | is_extension=False, extension_scope=None, 216 | options=None), 217 | _descriptor.FieldDescriptor( 218 | name='width', full_name='tensorboard.Summary.Image.width', index=1, 219 | number=2, type=5, cpp_type=1, label=1, 220 | has_default_value=False, default_value=0, 221 | message_type=None, enum_type=None, containing_type=None, 222 | is_extension=False, extension_scope=None, 223 | options=None), 224 | _descriptor.FieldDescriptor( 225 | name='colorspace', full_name='tensorboard.Summary.Image.colorspace', index=2, 226 | number=3, type=5, cpp_type=1, label=1, 227 | has_default_value=False, default_value=0, 228 | message_type=None, enum_type=None, containing_type=None, 229 | is_extension=False, 
extension_scope=None, 230 | options=None), 231 | _descriptor.FieldDescriptor( 232 | name='encoded_image_string', full_name='tensorboard.Summary.Image.encoded_image_string', index=3, 233 | number=4, type=12, cpp_type=9, label=1, 234 | has_default_value=False, default_value=_b(""), 235 | message_type=None, enum_type=None, containing_type=None, 236 | is_extension=False, extension_scope=None, 237 | options=None), 238 | ], 239 | extensions=[ 240 | ], 241 | nested_types=[], 242 | enum_types=[ 243 | ], 244 | options=None, 245 | is_extendable=False, 246 | syntax='proto3', 247 | extension_ranges=[], 248 | oneofs=[ 249 | ], 250 | serialized_start=442, 251 | serialized_end=530, 252 | ) 253 | 254 | _SUMMARY_AUDIO = _descriptor.Descriptor( 255 | name='Audio', 256 | full_name='tensorboard.Summary.Audio', 257 | filename=None, 258 | file=DESCRIPTOR, 259 | containing_type=None, 260 | fields=[ 261 | _descriptor.FieldDescriptor( 262 | name='sample_rate', full_name='tensorboard.Summary.Audio.sample_rate', index=0, 263 | number=1, type=2, cpp_type=6, label=1, 264 | has_default_value=False, default_value=float(0), 265 | message_type=None, enum_type=None, containing_type=None, 266 | is_extension=False, extension_scope=None, 267 | options=None), 268 | _descriptor.FieldDescriptor( 269 | name='num_channels', full_name='tensorboard.Summary.Audio.num_channels', index=1, 270 | number=2, type=3, cpp_type=2, label=1, 271 | has_default_value=False, default_value=0, 272 | message_type=None, enum_type=None, containing_type=None, 273 | is_extension=False, extension_scope=None, 274 | options=None), 275 | _descriptor.FieldDescriptor( 276 | name='length_frames', full_name='tensorboard.Summary.Audio.length_frames', index=2, 277 | number=3, type=3, cpp_type=2, label=1, 278 | has_default_value=False, default_value=0, 279 | message_type=None, enum_type=None, containing_type=None, 280 | is_extension=False, extension_scope=None, 281 | options=None), 282 | _descriptor.FieldDescriptor( 283 | 
name='encoded_audio_string', full_name='tensorboard.Summary.Audio.encoded_audio_string', index=3, 284 | number=4, type=12, cpp_type=9, label=1, 285 | has_default_value=False, default_value=_b(""), 286 | message_type=None, enum_type=None, containing_type=None, 287 | is_extension=False, extension_scope=None, 288 | options=None), 289 | _descriptor.FieldDescriptor( 290 | name='content_type', full_name='tensorboard.Summary.Audio.content_type', index=4, 291 | number=5, type=9, cpp_type=9, label=1, 292 | has_default_value=False, default_value=_b("").decode('utf-8'), 293 | message_type=None, enum_type=None, containing_type=None, 294 | is_extension=False, extension_scope=None, 295 | options=None), 296 | ], 297 | extensions=[ 298 | ], 299 | nested_types=[], 300 | enum_types=[ 301 | ], 302 | options=None, 303 | is_extendable=False, 304 | syntax='proto3', 305 | extension_ranges=[], 306 | oneofs=[ 307 | ], 308 | serialized_start=532, 309 | serialized_end=657, 310 | ) 311 | 312 | _SUMMARY_VALUE = _descriptor.Descriptor( 313 | name='Value', 314 | full_name='tensorboard.Summary.Value', 315 | filename=None, 316 | file=DESCRIPTOR, 317 | containing_type=None, 318 | fields=[ 319 | _descriptor.FieldDescriptor( 320 | name='node_name', full_name='tensorboard.Summary.Value.node_name', index=0, 321 | number=7, type=9, cpp_type=9, label=1, 322 | has_default_value=False, default_value=_b("").decode('utf-8'), 323 | message_type=None, enum_type=None, containing_type=None, 324 | is_extension=False, extension_scope=None, 325 | options=None), 326 | _descriptor.FieldDescriptor( 327 | name='tag', full_name='tensorboard.Summary.Value.tag', index=1, 328 | number=1, type=9, cpp_type=9, label=1, 329 | has_default_value=False, default_value=_b("").decode('utf-8'), 330 | message_type=None, enum_type=None, containing_type=None, 331 | is_extension=False, extension_scope=None, 332 | options=None), 333 | _descriptor.FieldDescriptor( 334 | name='metadata', full_name='tensorboard.Summary.Value.metadata', 
index=2, 335 | number=9, type=11, cpp_type=10, label=1, 336 | has_default_value=False, default_value=None, 337 | message_type=None, enum_type=None, containing_type=None, 338 | is_extension=False, extension_scope=None, 339 | options=None), 340 | _descriptor.FieldDescriptor( 341 | name='simple_value', full_name='tensorboard.Summary.Value.simple_value', index=3, 342 | number=2, type=2, cpp_type=6, label=1, 343 | has_default_value=False, default_value=float(0), 344 | message_type=None, enum_type=None, containing_type=None, 345 | is_extension=False, extension_scope=None, 346 | options=None), 347 | _descriptor.FieldDescriptor( 348 | name='obsolete_old_style_histogram', full_name='tensorboard.Summary.Value.obsolete_old_style_histogram', index=4, 349 | number=3, type=12, cpp_type=9, label=1, 350 | has_default_value=False, default_value=_b(""), 351 | message_type=None, enum_type=None, containing_type=None, 352 | is_extension=False, extension_scope=None, 353 | options=None), 354 | _descriptor.FieldDescriptor( 355 | name='image', full_name='tensorboard.Summary.Value.image', index=5, 356 | number=4, type=11, cpp_type=10, label=1, 357 | has_default_value=False, default_value=None, 358 | message_type=None, enum_type=None, containing_type=None, 359 | is_extension=False, extension_scope=None, 360 | options=None), 361 | _descriptor.FieldDescriptor( 362 | name='histo', full_name='tensorboard.Summary.Value.histo', index=6, 363 | number=5, type=11, cpp_type=10, label=1, 364 | has_default_value=False, default_value=None, 365 | message_type=None, enum_type=None, containing_type=None, 366 | is_extension=False, extension_scope=None, 367 | options=None), 368 | _descriptor.FieldDescriptor( 369 | name='audio', full_name='tensorboard.Summary.Value.audio', index=7, 370 | number=6, type=11, cpp_type=10, label=1, 371 | has_default_value=False, default_value=None, 372 | message_type=None, enum_type=None, containing_type=None, 373 | is_extension=False, extension_scope=None, 374 | options=None), 
375 | _descriptor.FieldDescriptor( 376 | name='tensor', full_name='tensorboard.Summary.Value.tensor', index=8, 377 | number=8, type=11, cpp_type=10, label=1, 378 | has_default_value=False, default_value=None, 379 | message_type=None, enum_type=None, containing_type=None, 380 | is_extension=False, extension_scope=None, 381 | options=None), 382 | ], 383 | extensions=[ 384 | ], 385 | nested_types=[], 386 | enum_types=[ 387 | ], 388 | options=None, 389 | is_extendable=False, 390 | syntax='proto3', 391 | extension_ranges=[], 392 | oneofs=[ 393 | _descriptor.OneofDescriptor( 394 | name='value', full_name='tensorboard.Summary.Value.value', 395 | index=0, containing_type=None, fields=[]), 396 | ], 397 | serialized_start=660, 398 | serialized_end=1000, 399 | ) 400 | 401 | _SUMMARY = _descriptor.Descriptor( 402 | name='Summary', 403 | full_name='tensorboard.Summary', 404 | filename=None, 405 | file=DESCRIPTOR, 406 | containing_type=None, 407 | fields=[ 408 | _descriptor.FieldDescriptor( 409 | name='value', full_name='tensorboard.Summary.value', index=0, 410 | number=1, type=11, cpp_type=10, label=3, 411 | has_default_value=False, default_value=[], 412 | message_type=None, enum_type=None, containing_type=None, 413 | is_extension=False, extension_scope=None, 414 | options=None), 415 | ], 416 | extensions=[ 417 | ], 418 | nested_types=[_SUMMARY_IMAGE, _SUMMARY_AUDIO, _SUMMARY_VALUE, ], 419 | enum_types=[ 420 | ], 421 | options=None, 422 | is_extendable=False, 423 | syntax='proto3', 424 | extension_ranges=[], 425 | oneofs=[ 426 | ], 427 | serialized_start=388, 428 | serialized_end=1000, 429 | ) 430 | 431 | _SUMMARYMETADATA_PLUGINDATA.containing_type = _SUMMARYMETADATA 432 | _SUMMARYMETADATA.fields_by_name['plugin_data'].message_type = _SUMMARYMETADATA_PLUGINDATA 433 | _SUMMARY_IMAGE.containing_type = _SUMMARY 434 | _SUMMARY_AUDIO.containing_type = _SUMMARY 435 | _SUMMARY_VALUE.fields_by_name['metadata'].message_type = _SUMMARYMETADATA 436 | 
_SUMMARY_VALUE.fields_by_name['image'].message_type = _SUMMARY_IMAGE 437 | _SUMMARY_VALUE.fields_by_name['histo'].message_type = _HISTOGRAMPROTO 438 | _SUMMARY_VALUE.fields_by_name['audio'].message_type = _SUMMARY_AUDIO 439 | _SUMMARY_VALUE.fields_by_name['tensor'].message_type = tb__chainer_dot_src_dot_tensor__pb2._TENSORPROTO 440 | _SUMMARY_VALUE.containing_type = _SUMMARY 441 | _SUMMARY_VALUE.oneofs_by_name['value'].fields.append( 442 | _SUMMARY_VALUE.fields_by_name['simple_value']) 443 | _SUMMARY_VALUE.fields_by_name['simple_value'].containing_oneof = _SUMMARY_VALUE.oneofs_by_name['value'] 444 | _SUMMARY_VALUE.oneofs_by_name['value'].fields.append( 445 | _SUMMARY_VALUE.fields_by_name['obsolete_old_style_histogram']) 446 | _SUMMARY_VALUE.fields_by_name['obsolete_old_style_histogram'].containing_oneof = _SUMMARY_VALUE.oneofs_by_name['value'] 447 | _SUMMARY_VALUE.oneofs_by_name['value'].fields.append( 448 | _SUMMARY_VALUE.fields_by_name['image']) 449 | _SUMMARY_VALUE.fields_by_name['image'].containing_oneof = _SUMMARY_VALUE.oneofs_by_name['value'] 450 | _SUMMARY_VALUE.oneofs_by_name['value'].fields.append( 451 | _SUMMARY_VALUE.fields_by_name['histo']) 452 | _SUMMARY_VALUE.fields_by_name['histo'].containing_oneof = _SUMMARY_VALUE.oneofs_by_name['value'] 453 | _SUMMARY_VALUE.oneofs_by_name['value'].fields.append( 454 | _SUMMARY_VALUE.fields_by_name['audio']) 455 | _SUMMARY_VALUE.fields_by_name['audio'].containing_oneof = _SUMMARY_VALUE.oneofs_by_name['value'] 456 | _SUMMARY_VALUE.oneofs_by_name['value'].fields.append( 457 | _SUMMARY_VALUE.fields_by_name['tensor']) 458 | _SUMMARY_VALUE.fields_by_name['tensor'].containing_oneof = _SUMMARY_VALUE.oneofs_by_name['value'] 459 | _SUMMARY.fields_by_name['value'].message_type = _SUMMARY_VALUE 460 | DESCRIPTOR.message_types_by_name['SummaryDescription'] = _SUMMARYDESCRIPTION 461 | DESCRIPTOR.message_types_by_name['HistogramProto'] = _HISTOGRAMPROTO 462 | DESCRIPTOR.message_types_by_name['SummaryMetadata'] = _SUMMARYMETADATA 
463 | DESCRIPTOR.message_types_by_name['Summary'] = _SUMMARY 464 | 465 | SummaryDescription = _reflection.GeneratedProtocolMessageType('SummaryDescription', (_message.Message,), dict( 466 | DESCRIPTOR = _SUMMARYDESCRIPTION, 467 | __module__ = 'tb_chainer.src.summary_pb2' 468 | # @@protoc_insertion_point(class_scope:tensorboard.SummaryDescription) 469 | )) 470 | _sym_db.RegisterMessage(SummaryDescription) 471 | 472 | HistogramProto = _reflection.GeneratedProtocolMessageType('HistogramProto', (_message.Message,), dict( 473 | DESCRIPTOR = _HISTOGRAMPROTO, 474 | __module__ = 'tb_chainer.src.summary_pb2' 475 | # @@protoc_insertion_point(class_scope:tensorboard.HistogramProto) 476 | )) 477 | _sym_db.RegisterMessage(HistogramProto) 478 | 479 | SummaryMetadata = _reflection.GeneratedProtocolMessageType('SummaryMetadata', (_message.Message,), dict( 480 | 481 | PluginData = _reflection.GeneratedProtocolMessageType('PluginData', (_message.Message,), dict( 482 | DESCRIPTOR = _SUMMARYMETADATA_PLUGINDATA, 483 | __module__ = 'tb_chainer.src.summary_pb2' 484 | # @@protoc_insertion_point(class_scope:tensorboard.SummaryMetadata.PluginData) 485 | )) 486 | , 487 | DESCRIPTOR = _SUMMARYMETADATA, 488 | __module__ = 'tb_chainer.src.summary_pb2' 489 | # @@protoc_insertion_point(class_scope:tensorboard.SummaryMetadata) 490 | )) 491 | _sym_db.RegisterMessage(SummaryMetadata) 492 | _sym_db.RegisterMessage(SummaryMetadata.PluginData) 493 | 494 | Summary = _reflection.GeneratedProtocolMessageType('Summary', (_message.Message,), dict( 495 | 496 | Image = _reflection.GeneratedProtocolMessageType('Image', (_message.Message,), dict( 497 | DESCRIPTOR = _SUMMARY_IMAGE, 498 | __module__ = 'tb_chainer.src.summary_pb2' 499 | # @@protoc_insertion_point(class_scope:tensorboard.Summary.Image) 500 | )) 501 | , 502 | 503 | Audio = _reflection.GeneratedProtocolMessageType('Audio', (_message.Message,), dict( 504 | DESCRIPTOR = _SUMMARY_AUDIO, 505 | __module__ = 'tb_chainer.src.summary_pb2' 506 | # 
@@protoc_insertion_point(class_scope:tensorboard.Summary.Audio) 507 | )) 508 | , 509 | 510 | Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( 511 | DESCRIPTOR = _SUMMARY_VALUE, 512 | __module__ = 'tb_chainer.src.summary_pb2' 513 | # @@protoc_insertion_point(class_scope:tensorboard.Summary.Value) 514 | )) 515 | , 516 | DESCRIPTOR = _SUMMARY, 517 | __module__ = 'tb_chainer.src.summary_pb2' 518 | # @@protoc_insertion_point(class_scope:tensorboard.Summary) 519 | )) 520 | _sym_db.RegisterMessage(Summary) 521 | _sym_db.RegisterMessage(Summary.Image) 522 | _sym_db.RegisterMessage(Summary.Audio) 523 | _sym_db.RegisterMessage(Summary.Value) 524 | 525 | 526 | DESCRIPTOR.has_options = True 527 | DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\rSummaryProtosP\001\370\001\001')) 528 | _HISTOGRAMPROTO.fields_by_name['bucket_limit'].has_options = True 529 | _HISTOGRAMPROTO.fields_by_name['bucket_limit']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 530 | _HISTOGRAMPROTO.fields_by_name['bucket'].has_options = True 531 | _HISTOGRAMPROTO.fields_by_name['bucket']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 532 | # @@protoc_insertion_point(module_scope) 533 | --------------------------------------------------------------------------------