├── ops.pyc ├── images ├── 1.png ├── 63.png └── model.png ├── checkpoint ├── checkpoint ├── Model.cpkt.meta └── Model.cpkt.index ├── image_helpers.pyc ├── README.md ├── image_helpers.py ├── ops.py └── .ipynb_checkpoints └── Image Completion-checkpoint.ipynb /ops.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avhirupc/Semantic-Image-Completion/HEAD/ops.pyc -------------------------------------------------------------------------------- /images/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avhirupc/Semantic-Image-Completion/HEAD/images/1.png -------------------------------------------------------------------------------- /images/63.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avhirupc/Semantic-Image-Completion/HEAD/images/63.png -------------------------------------------------------------------------------- /images/model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avhirupc/Semantic-Image-Completion/HEAD/images/model.png -------------------------------------------------------------------------------- /checkpoint/checkpoint: -------------------------------------------------------------------------------- 1 | model_checkpoint_path: "Model.cpkt" 2 | all_model_checkpoint_paths: "Model.cpkt" 3 | -------------------------------------------------------------------------------- /image_helpers.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avhirupc/Semantic-Image-Completion/HEAD/image_helpers.pyc -------------------------------------------------------------------------------- /checkpoint/Model.cpkt.meta: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avhirupc/Semantic-Image-Completion/HEAD/checkpoint/Model.cpkt.meta -------------------------------------------------------------------------------- /checkpoint/Model.cpkt.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avhirupc/Semantic-Image-Completion/HEAD/checkpoint/Model.cpkt.index -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Semantic Inpainting using DCGANs 2 | * * * 3 | This is an experimental TensorFlow implementation of semantic inpainting of corrupted images using DCGANs, based on the paper [Semantic Image Inpainting with Perceptual and Contextual Losses](https://arxiv.org/abs/1607.07539). Brandon Amos's blog post on [Image Completion](https://bamos.github.io/2016/08/09/deep-completion/) was a major help. One major difference from that work is the training method: I use the Adam optimizer instead of plain gradient descent. 
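In short, the completion objective combines a contextual loss (the L1 distance between the generated image and the corrupted image over the known pixels) with a perceptual loss (the generator's adversarial loss); the final result keeps the original pixels wherever they are known and fills the hole with the generator's output. The snippet below is only a minimal NumPy sketch of that masking, loss and blending logic under the settings used in the notebook (64x64 images with a centred square hole); the function names and the `lam` weight are illustrative, since the notebook simply adds the two loss terms and minimises them with `tf.train.AdamOptimizer`.

```python
import numpy as np

def center_mask(image_shape=(64, 64, 3), scale=0.25):
    # 1 on the known border pixels, 0 on the missing centre square.
    mask = np.ones(image_shape, dtype=np.float32)
    low, high = int(image_shape[0] * scale), int(image_shape[0] * (1.0 - scale))
    mask[low:high, low:high, :] = 0.0
    return mask

def completion_loss(generated, corrupted, mask, adversarial_loss, lam=1.0):
    # Contextual loss: L1 distance on the known pixels only,
    # plus the (weighted) perceptual/adversarial term.
    contextual = np.sum(np.abs(mask * generated - mask * corrupted))
    return contextual + lam * adversarial_loss

def blend(generated, corrupted, mask):
    # Keep the real pixels where available, fill the hole from the generator.
    return mask * corrupted + (1.0 - mask) * generated
```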
4 | * * * * 5 | ## Requirements 6 | * TensorFlow 7 | * glob 8 | * Python 3 9 | 10 | * * * * 11 | ## Dataset 12 | * I have used the Celebrity Faces dataset [CelebA](http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html). Download the aligned version and extract it in the same directory as the code. 13 | 14 | * * * * 15 | ## Model Architecture 16 | 17 | ![alt-text](images/model.png) 18 | 19 | * * * 20 | ## Few Results after Partial Training 21 | 22 | >Note: Due to the unavailability of a GPU, I did not train the model for long. These results are after an hour of training. 23 | 24 | 25 | ![alt-text](images/1.png) ![alt-text](images/63.png) -------------------------------------------------------------------------------- /image_helpers.py: -------------------------------------------------------------------------------- 1 | import scipy.misc 2 | import numpy as np 3 | from PIL import Image 4 | from glob import glob 5 | import os 6 | 7 | # Helpers for image handling 8 | def get_image(image_path, image_size, is_crop=True): 9 | return transform(imread(image_path), image_size, is_crop) 10 | 11 | def save_images(images, image_path): 12 | for imgindex in range(images.shape[0]): 13 | scipy.misc.imsave(image_path+str(imgindex)+'.jpg',images[imgindex]) 14 | 15 | def imread(path): 16 | return scipy.misc.imread(path).astype(np.float) 17 | 18 | def transform(image, npx=64, is_crop=True): 19 | # npx : # of pixels width/height of image 20 | if is_crop: 21 | cropped_image = center_crop(image, npx) 22 | else: 23 | cropped_image = image 24 | return np.array(cropped_image)/127.5 - 1. 25 | 26 | def center_crop(x, crop_h, crop_w=None, resize_w=64): 27 | if crop_w is None: 28 | crop_w = crop_h 29 | h, w = x.shape[:2] 30 | j = int(round((h - crop_h)/2.)) 31 | i = int(round((w - crop_w)/2.)) 32 | return scipy.misc.imresize(x[j:j+crop_h, i:i+crop_w], [resize_w, resize_w]) 33 | 34 | #def imsave(images, size, path): 35 | # return scipy.misc.imsave(path, merge(images, size)) 36 | 37 | def inverse_transform(images): 38 | return (images+1.)/2. 
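# Illustrative usage (an assumption, not part of the original module): with the notebook's
# settings, get_image() center-crops a CelebA photo to image_size x image_size (108 there),
# resizes the crop to 64x64 and scales pixel values to [-1, 1]; inverse_transform() maps
# values back to [0, 1] for viewing or saving. The file name below is hypothetical.
#
#   face = get_image('img_align_celeba/000001.jpg', 108, is_crop=True)  # (64, 64, 3), in [-1, 1]
#   view = inverse_transform(face)                                      # back to [0, 1]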
39 | 40 | def merge(images, size): 41 | h, w = images.shape[1], images.shape[2] 42 | img = np.zeros((h * size[0], w * size[1], 3)) 43 | 44 | for idx, image in enumerate(images): 45 | i = idx % size[1] 46 | j = idx // size[1] 47 | img[j*h:j*h+h, i*w:i*w+w, :] = image 48 | 49 | return img 50 | 51 | 52 | def convert_to_lower_resolution(): 53 | images=glob(os.path.join('cars_train/cars_train','*.jpg')) 54 | i=0 55 | size=108,108 56 | for image in images: 57 | 58 | im=Image.open(image) 59 | im_resized=im.resize(size,Image.ANTIALIAS) 60 | im_resized.save("cars_train/"+str(i)+'.jpg') 61 | i+=1 -------------------------------------------------------------------------------- /ops.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | import tensorflow as tf 4 | 5 | from tensorflow.python.framework import ops 6 | 7 | #from utils import * 8 | 9 | try: 10 | image_summary = tf.image_summary 11 | scalar_summary = tf.scalar_summary 12 | histogram_summary = tf.histogram_summary 13 | merge_summary = tf.merge_summary 14 | SummaryWriter = tf.train.SummaryWriter 15 | except: 16 | image_summary = tf.summary.image 17 | scalar_summary = tf.summary.scalar 18 | histogram_summary = tf.summary.histogram 19 | merge_summary = tf.summary.merge 20 | SummaryWriter = tf.summary.FileWriter 21 | 22 | class batch_norm(object): 23 | def __init__(self, epsilon=1e-5, momentum = 0.9, name="batch_norm"): 24 | with tf.variable_scope(name): 25 | self.epsilon = epsilon 26 | self.momentum = momentum 27 | self.name = name 28 | 29 | def __call__(self, x, train=True): 30 | return tf.contrib.layers.batch_norm(x, 31 | decay=self.momentum, 32 | updates_collections=None, 33 | epsilon=self.epsilon, 34 | scale=True, 35 | is_training=train, 36 | scope=self.name) 37 | 38 | 39 | def binary_cross_entropy(preds, targets, name=None): 40 | """Computes binary cross entropy given `preds`. 41 | For brevity, let `x = preds`, `z = targets`. The logistic loss is 42 | loss(x, z) = - sum_i (z[i] * log(x[i]) + (1 - z[i]) * log(1 - x[i])) 43 | Args: 44 | preds: A `Tensor` of type `float32` or `float64`. 45 | targets: A `Tensor` of the same type and shape as `preds`. 46 | """ 47 | eps = 1e-12 48 | with ops.op_scope([preds, targets], name, "bce_loss") as name: 49 | preds = ops.convert_to_tensor(preds, name="preds") 50 | targets = ops.convert_to_tensor(targets, name="targets") 51 | return tf.reduce_mean(-(targets * tf.log(preds + eps) + 52 | (1. - targets) * tf.log(1.
- preds + eps))) 53 | 54 | def conv_cond_concat(x, y): 55 | """Concatenate conditioning vector on feature map axis.""" 56 | x_shapes = x.get_shape() 57 | y_shapes = y.get_shape() 58 | return tf.concat(3, [x, y*tf.ones([x_shapes[0], x_shapes[1], x_shapes[2], y_shapes[3]])]) 59 | 60 | def conv2d(input_, output_dim, 61 | k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, 62 | name="conv2d"): 63 | with tf.variable_scope(name): 64 | w = tf.get_variable('w', [k_h, k_w, input_.get_shape()[-1], output_dim], 65 | initializer=tf.truncated_normal_initializer(stddev=stddev)) 66 | conv = tf.nn.conv2d(input_, w, strides=[1, d_h, d_w, 1], padding='SAME') 67 | 68 | biases = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.0)) 69 | conv = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape()) 70 | 71 | return conv 72 | 73 | def deconv2d(input_, output_shape, 74 | k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, 75 | name="deconv2d", with_w=False): 76 | with tf.variable_scope(name): 77 | # filter : [height, width, output_channels, in_channels] 78 | w = tf.get_variable('w', [k_h, k_w, output_shape[-1], input_.get_shape()[-1]], 79 | initializer=tf.random_normal_initializer(stddev=stddev)) 80 | 81 | try: 82 | deconv = tf.nn.conv2d_transpose(input_, w, output_shape=output_shape, 83 | strides=[1, d_h, d_w, 1]) 84 | 85 | # Support for versions of TensorFlow before 0.7.0 86 | except AttributeError: 87 | deconv = tf.nn.deconv2d(input_, w, output_shape=output_shape, 88 | strides=[1, d_h, d_w, 1]) 89 | 90 | biases = tf.get_variable('biases', [output_shape[-1]], initializer=tf.constant_initializer(0.0)) 91 | deconv = tf.reshape(tf.nn.bias_add(deconv, biases), deconv.get_shape()) 92 | 93 | if with_w: 94 | return deconv, w, biases 95 | else: 96 | return deconv 97 | 98 | 99 | def lrelu(x, leak=0.2, name="lrelu"): 100 | return tf.maximum(x, leak*x) 101 | 102 | def linear(input_, output_size, scope=None, stddev=0.02, bias_start=0.0, with_w=False): 103 | shape = input_.get_shape().as_list() 104 | 105 | with tf.variable_scope(scope or "Linear"): 106 | matrix = tf.get_variable("Matrix", [shape[1], output_size], tf.float32, 107 | tf.random_normal_initializer(stddev=stddev)) 108 | bias = tf.get_variable("bias", [output_size], 109 | initializer=tf.constant_initializer(bias_start)) 110 | if with_w: 111 | return tf.matmul(input_, matrix) + bias, matrix, bias 112 | else: 113 | return tf.matmul(input_, matrix) + bias -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Completion-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Semantic Inpainting using DCGANs\n", 8 | "* * *\n", 9 | "\n", 10 | "This is an experimental TensorFlow implementation of semantic inpainting of corrupted images using DCGANs, based on the paper [Semantic Image Inpainting with Perceptual and Contextual Losses](https://arxiv.org/abs/1607.07539). 
Brandon Amos's blog post on [Image Completion](https://bamos.github.io/2016/08/09/deep-completion/) was a major help. One major difference from that work is the training method: I use the Adam optimizer instead of plain gradient descent.\n", 11 | "* * * *\n", 12 | "## Requirements\n", 13 | "* TensorFlow\n", 14 | "* glob\n", 15 | "* Python 3\n", 16 | "* * *\n", 17 | "## Dataset\n", 18 | "* I have used the Celebrity Faces dataset [CelebA](http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html). Download the aligned version and extract it in the same directory as the code.\n" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "metadata": { 25 | "collapsed": false 26 | }, 27 | "outputs": [], 28 | "source": [ 29 | "import numpy as np\n", 30 | "import tensorflow as tf\n", 31 | "import os,time\n", 32 | "from glob import glob\n", 33 | "\n", 34 | "from ops import batch_norm,linear,conv2d,deconv2d,lrelu\n", 35 | "from image_helpers import *" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "metadata": {}, 41 | "source": [ 42 | "### Model Definition\n", 43 | "\n", 44 | "![alt text](images/model.png)" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 95, 50 | "metadata": { 51 | "collapsed": true 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "#Parameter Definition\n", 56 | "is_crop=True\n", 57 | "batch_size=64\n", 58 | "image_size=108\n", 59 | "sample_size=64\n", 60 | "image_shape=[64,64,3]\n", 61 | "\n", 62 | "z_dim=100\n", 63 | "\n", 64 | "gf_dim=64\n", 65 | "df_dim=64\n", 66 | "\n", 67 | "learning_rate=0.0002\n", 68 | "beta1=0.5" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 96, 74 | "metadata": { 75 | "collapsed": false 76 | }, 77 | "outputs": [], 78 | "source": [ 79 | "#Batch Normalisation objects\n", 80 | "d_bn1 = batch_norm(name='d_bn1')\n", 81 | "d_bn2 = batch_norm(name='d_bn2')\n", 82 | "d_bn3 = batch_norm(name='d_bn3')\n", 83 | "\n", 84 | "g_bn0 = batch_norm(name='g_bn0')\n", 85 | "g_bn1 = batch_norm(name='g_bn1')\n", 86 | "g_bn2 = batch_norm(name='g_bn2')\n", 87 | "g_bn3 = batch_norm(name='g_bn3')" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": 97, 93 | "metadata": { 94 | "collapsed": false 95 | }, 96 | "outputs": [], 97 | "source": [ 98 | "def discriminator(image,reuse=False):\n", 99 | " if reuse:\n", 100 | " tf.get_variable_scope().reuse_variables()\n", 101 | " \n", 102 | " h0=lrelu(conv2d(image,df_dim,name='d_h0_conv'))\n", 103 | " h1=lrelu(d_bn1(conv2d(h0,df_dim*2,name='d_h1_conv')))\n", 104 | " h2=lrelu(d_bn2(conv2d(h1,df_dim*4,name='d_h2_conv')))\n", 105 | " h3=lrelu(d_bn3(conv2d(h2,df_dim*8,name='d_h3_conv')))\n", 106 | " h4=linear(tf.reshape(h3,[batch_size,-1]),1,'d_h3_lin')\n", 107 | " \n", 108 | " return tf.nn.sigmoid(h4),h4" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 98, 114 | "metadata": { 115 | "collapsed": false 116 | }, 117 | "outputs": [], 118 | "source": [ 119 | "def generator(z):\n", 120 | " z_=linear(z,gf_dim*8*4*4,'g_h0_lin')\n", 121 | " h0=tf.nn.relu(g_bn0(tf.reshape(z_,[-1,4,4,gf_dim*8])))\n", 122 | " h1=tf.nn.relu(g_bn1(deconv2d(h0,[batch_size,8,8,gf_dim*4],name='g_h1')))\n", 123 | " h2=tf.nn.relu(g_bn2(deconv2d(h1,[batch_size,16,16,gf_dim*2],name='g_h2')))\n", 124 | " h3=tf.nn.relu(g_bn3(deconv2d(h2,[batch_size,32,32,gf_dim*1],name='g_h3')))\n", 125 | " h4 = deconv2d(h3, [batch_size, 64, 64, 3], name='g_h4')\n", 126 | " \n", 127 | " return tf.nn.tanh(h4)" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 99, 133 | "metadata": {
134 | "collapsed": false 135 | }, 136 | "outputs": [], 137 | "source": [ 138 | "#Building model\n", 139 | "images=tf.placeholder(tf.float32,[batch_size]+image_shape,name='real_images')\n", 140 | "sample_images=tf.placeholder(tf.float32,[sample_size]+image_shape,name=\"sample_images\")\n", 141 | "z=tf.placeholder(tf.float32,[None,z_dim])\n", 142 | "\n", 143 | "G=generator(z)\n", 144 | "D,D_logits=discriminator(images)\n", 145 | "D_,D_logits_=discriminator(G,reuse=True)\n", 146 | "\n", 147 | "#cost fn\n", 148 | "d_loss_real=tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(D_logits,tf.ones_like(D)))\n", 149 | "d_loss_fake=tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(D_logits_,tf.zeros_like(D_)))\n", 150 | "d_loss=d_loss_real+d_loss_fake\n", 151 | "\n", 152 | "g_loss=tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(D_logits_,tf.ones_like(D_)))\n", 153 | "#For image completion\n", 154 | "mask=tf.placeholder(tf.float32,[None]+image_shape,name=\"mask\")\n", 155 | "\n", 156 | "contextual_loss=tf.reduce_sum(tf.contrib.layers.flatten(tf.abs(tf.mul(mask,G)-tf.mul(mask,images))))\n", 157 | "perceptual_loss=g_loss\n", 158 | "\n", 159 | "complete_loss=contextual_loss+perceptual_loss" 160 | ] 161 | }, 162 | { 163 | "cell_type": "code", 164 | "execution_count": 100, 165 | "metadata": { 166 | "collapsed": false 167 | }, 168 | "outputs": [], 169 | "source": [ 170 | "#Optimizers\n", 171 | "t_vars=tf.trainable_variables()\n", 172 | "\n", 173 | "d_vars=[var for var in t_vars if 'd_' in var.name]\n", 174 | "g_vars=[var for var in t_vars if 'g_' in var.name]\n", 175 | "\n", 176 | "d_optim=tf.train.AdamOptimizer(learning_rate,beta1=beta1).minimize(d_loss,var_list=d_vars)\n", 177 | "g_optim=tf.train.AdamOptimizer(learning_rate,beta1=beta1).minimize(g_loss,var_list=g_vars)\n", 178 | "complete_optim=tf.train.AdamOptimizer(learning_rate,beta1=beta1).minimize(complete_loss,var_list=g_vars)" 179 | ] 180 | }, 181 | { 182 | "cell_type": "markdown", 183 | "metadata": {}, 184 | "source": [ 185 | "## Data Points" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": 101, 191 | "metadata": { 192 | "collapsed": false 193 | }, 194 | "outputs": [], 195 | "source": [ 196 | "sess=tf.Session()\n", 197 | "sess.run(tf.global_variables_initializer())\n", 198 | "\n", 199 | "saver=tf.train.Saver()" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": 102, 205 | "metadata": { 206 | "collapsed": false 207 | }, 208 | "outputs": [], 209 | "source": [ 210 | "#DATASET \n", 211 | "data=glob(os.path.join('img_align_celeba/','*.jpg'))\n", 212 | "\n", 213 | "sample_z=np.random.uniform(-1,1,size=(sample_size,z_dim))\n", 214 | "sample_files=data[0:sample_size]\n", 215 | "sample=[get_image(sample_file,image_size,is_crop) for sample_file in sample_files]\n", 216 | "sample_images=np.reshape(np.array(sample).astype(np.float32),[sample_size]+image_shape)\n" 217 | ] 218 | }, 219 | { 220 | "cell_type": "markdown", 221 | "metadata": {}, 222 | "source": [ 223 | "## Training the model" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": 103, 229 | "metadata": { 230 | "collapsed": false 231 | }, 232 | "outputs": [ 233 | { 234 | "name": "stdout", 235 | "output_type": "stream", 236 | "text": [ 237 | "1 282487.0 7.2893 0.00100419\n", 238 | "2 276528.0 0.0993834 2.89487\n", 239 | "3 274097.0 1.12919 0.519172\n", 240 | "4 246674.0 1.34889 0.378413\n", 241 | "5 256940.0 0.111176 3.92779\n", 242 | "[Sample] d_loss: 0.11600902, g_loss: 5.41791153\n", 243 | "6 249439.0 
1.29371 0.538651\n", 244 | "7 246668.0 0.28509 4.2912\n", 245 | "8 228919.0 1.40452 0.440909\n", 246 | "9 244363.0 0.73556 15.9906\n", 247 | "10 236065.0 0.0146455 12.1339\n", 248 | "[Sample] d_loss: 0.17829044, g_loss: 13.38587952\n", 249 | "11 248971.0 0.0783687 3.24196\n", 250 | "12 241467.0 4.29185 0.0225754\n", 251 | "13 239579.0 1.54853 26.4669\n", 252 | "14 242600.0 0.151014 25.2905\n", 253 | "15 220674.0 0.0228339 16.4789\n", 254 | "[Sample] d_loss: 0.09336887, g_loss: 18.01052094\n", 255 | "16 236997.0 0.0429531 5.2789\n", 256 | "17 227873.0 4.27723 0.0832719\n", 257 | "18 231394.0 1.18376 24.6325\n", 258 | "19 227557.0 0.474589 25.0789\n", 259 | "20 227269.0 0.0977529 16.9231\n", 260 | "[Sample] d_loss: 0.07977862, g_loss: 18.91727066\n", 261 | "21 229288.0 0.0904242 7.49572\n", 262 | "22 227796.0 2.97267 0.512974\n", 263 | "23 243101.0 0.312964 14.9833\n", 264 | "24 234674.0 0.685183 15.013\n", 265 | "25 244656.0 0.212154 7.82399\n", 266 | "[Sample] d_loss: 0.14891370, g_loss: 10.20151997\n", 267 | "26 228032.0 2.20547 0.859715\n", 268 | "27 218550.0 0.784921 13.7666\n", 269 | "28 224967.0 0.215524 8.14993\n", 270 | "29 233930.0 1.02506 1.54343\n", 271 | "30 221013.0 0.202823 5.29911\n", 272 | "[Sample] d_loss: 0.17596623, g_loss: 6.98355961\n", 273 | "31 233331.0 0.240918 4.31386\n", 274 | "32 221469.0 1.63559 0.867338\n", 275 | "33 243092.0 0.89673 11.5971\n", 276 | "34 232857.0 0.176128 4.57618\n", 277 | "35 233030.0 2.75673 0.224795\n", 278 | "[Sample] d_loss: 1.92172289, g_loss: 0.49400747\n", 279 | "36 216841.0 3.64848 21.3311\n", 280 | "37 226292.0 0.912832 18.7106\n", 281 | "38 237308.0 0.0992048 11.1966\n", 282 | "39 230912.0 0.0646105 3.83531\n", 283 | "40 221903.0 1.5059 0.402398\n", 284 | "[Sample] d_loss: 0.97079611, g_loss: 0.80373305\n", 285 | "41 220147.0 0.398372 13.7659\n", 286 | "42 233183.0 0.442206 14.4466\n", 287 | "43 221478.0 0.115688 8.85882\n", 288 | "44 227511.0 0.103556 3.29797\n", 289 | "45 225740.0 0.316684 1.58457\n", 290 | "[Sample] d_loss: 0.24566384, g_loss: 2.27758265\n", 291 | "46 221256.0 0.052991 6.1553\n", 292 | "47 218603.0 0.0858537 6.03024\n", 293 | "48 226705.0 0.102655 3.63272\n", 294 | "49 233635.0 0.358139 1.5445\n", 295 | "50 232327.0 0.150035 2.79923\n", 296 | "[Sample] d_loss: 0.53596234, g_loss: 3.24253750\n", 297 | "51 234514.0 0.233379 3.13415\n", 298 | "52 232832.0 0.26823 2.26006\n", 299 | "53 222596.0 0.330167 2.0539\n", 300 | "54 223363.0 0.299028 7.12743\n", 301 | "55 228575.0 0.0561099 7.84102\n", 302 | "[Sample] d_loss: 0.53555936, g_loss: 8.23306084\n", 303 | "56 227016.0 0.143323 4.58196\n", 304 | "57 230780.0 0.283975 2.77148\n", 305 | "58 230692.0 0.128337 8.43642\n", 306 | "59 217837.0 0.123963 3.59217\n", 307 | "60 234979.0 0.471197 1.31431\n", 308 | "[Sample] d_loss: 0.31983742, g_loss: 1.69221103\n", 309 | "61 232261.0 0.235006 13.6215\n", 310 | "62 229534.0 0.240922 12.777\n", 311 | "63 246775.0 0.0259529 10.1607\n", 312 | "64 232175.0 0.109241 6.28344\n", 313 | "65 223427.0 0.136213 3.65215\n", 314 | "[Sample] d_loss: 0.08389595, g_loss: 4.43322706\n", 315 | "66 231858.0 0.141451 3.11864\n", 316 | "67 233260.0 0.100814 7.95506\n", 317 | "68 229460.0 0.140937 8.82961\n", 318 | "69 228820.0 0.044231 6.68642\n", 319 | "70 231867.0 0.0391057 5.28831\n", 320 | "[Sample] d_loss: 0.06474630, g_loss: 5.89106798\n", 321 | "71 218485.0 0.0381183 4.97443\n", 322 | "72 240378.0 0.0414664 4.84758\n", 323 | "73 237909.0 0.0316792 4.8363\n", 324 | "74 225482.0 0.0500597 5.042\n", 325 | "75 229679.0 0.0375784 4.85953\n", 326 | 
"[Sample] d_loss: 0.06993507, g_loss: 5.25527048\n", 327 | "76 229876.0 0.0591319 3.37589\n", 328 | "77 243249.0 0.0781012 3.23792\n", 329 | "78 242992.0 0.0586319 3.61272\n", 330 | "79 226477.0 0.186252 2.37231\n", 331 | "80 227364.0 0.552157 11.5508\n", 332 | "[Sample] d_loss: 1.29886222, g_loss: 12.28591442\n", 333 | "81 234229.0 0.048188 9.57236\n", 334 | "82 240537.0 0.0134968 6.89563\n", 335 | "83 232499.0 0.0642662 3.8377\n", 336 | "84 233121.0 0.120957 2.86964\n", 337 | "85 242130.0 0.0207775 4.3802\n", 338 | "[Sample] d_loss: 0.05711756, g_loss: 5.11109400\n", 339 | "86 228492.0 0.0332307 4.22022\n", 340 | "87 233015.0 0.0767635 3.22753\n", 341 | "88 238375.0 0.09974 3.09681\n", 342 | "89 237020.0 0.304276 1.92144\n", 343 | "90 238323.0 0.262102 2.04917\n", 344 | "[Sample] d_loss: 0.36901647, g_loss: 2.30276966\n", 345 | "91 244171.0 0.314343 10.4446\n", 346 | "92 227545.0 0.0930861 10.2484\n", 347 | "93 234162.0 0.670985 0.967371\n", 348 | "94 239705.0 0.268768 7.71297\n", 349 | "95 249262.0 0.159713 3.97782\n", 350 | "[Sample] d_loss: 0.56368059, g_loss: 5.05822468\n", 351 | "96 247300.0 1.02076 2.36702\n", 352 | "97 230482.0 1.07567 13.0539\n", 353 | "98 250850.0 0.0403265 7.90645\n", 354 | "99 233118.0 0.209202 2.7842\n", 355 | "100 240843.0 0.362583 1.80479\n", 356 | "[Sample] d_loss: 0.40569168, g_loss: 1.82191539\n", 357 | "101 238528.0 0.073882 8.59026\n", 358 | "102 247490.0 0.154018 10.2599\n", 359 | "103 227374.0 0.0304791 8.15466\n", 360 | "104 237288.0 0.0117367 6.69026\n", 361 | "105 227734.0 0.0378877 5.03174\n", 362 | "[Sample] d_loss: 0.11802465, g_loss: 5.18348503\n", 363 | "106 233267.0 0.0735894 4.84565\n", 364 | "107 242341.0 0.0144858 6.22992\n", 365 | "108 238907.0 0.0219373 6.86812\n", 366 | "109 218899.0 0.042041 4.906\n", 367 | "110 231020.0 0.0303419 4.32999\n", 368 | "[Sample] d_loss: 0.05061100, g_loss: 5.01278400\n", 369 | "111 224129.0 0.0272198 4.95109\n", 370 | "112 215290.0 0.0161069 5.1942\n", 371 | "113 219428.0 0.0152109 5.16986\n", 372 | "114 228589.0 0.00783775 5.417\n", 373 | "115 226036.0 0.00965136 5.4604\n", 374 | "[Sample] d_loss: 0.05278186, g_loss: 5.65951967\n", 375 | "116 232838.0 0.00963528 5.43493\n", 376 | "117 229123.0 0.0242478 4.84256\n", 377 | "118 223836.0 0.0146208 4.93775\n", 378 | "119 229561.0 0.00900754 5.32163\n", 379 | "120 237225.0 0.019878 5.31551\n", 380 | "[Sample] d_loss: 0.09606962, g_loss: 5.70008469\n", 381 | "121 234889.0 0.00995109 5.78568\n", 382 | "122 230902.0 0.00981983 5.56307\n", 383 | "123 227099.0 0.0307825 4.4313\n", 384 | "124 223199.0 0.0304973 4.23355\n", 385 | "125 238231.0 0.0100031 5.27072\n", 386 | "[Sample] d_loss: 0.07589614, g_loss: 4.75178766\n", 387 | "126 232674.0 0.0109544 5.15324\n", 388 | "127 232122.0 0.0124768 5.62787\n", 389 | "128 239068.0 0.00577762 5.96274\n", 390 | "129 228096.0 0.0190791 5.0168\n", 391 | "130 227326.0 0.0173474 5.35883\n", 392 | "[Sample] d_loss: 0.08644515, g_loss: 5.01514482\n", 393 | "131 225977.0 0.0325693 4.46277\n", 394 | "132 216371.0 0.034312 5.03701\n", 395 | "133 222484.0 0.0163619 5.00208\n", 396 | "134 232366.0 0.0115113 5.22872\n", 397 | "135 241162.0 0.00764891 5.46183\n", 398 | "[Sample] d_loss: 0.07085460, g_loss: 5.79302406\n", 399 | "136 229574.0 0.0174105 5.45416\n", 400 | "137 226597.0 0.0170864 5.64671\n", 401 | "138 238275.0 0.0150278 5.16325\n", 402 | "139 233610.0 0.0112422 5.11028\n", 403 | "140 228893.0 0.0110473 5.22609\n", 404 | "[Sample] d_loss: 0.06534805, g_loss: 5.67438078\n", 405 | "141 236861.0 0.0140983 5.32736\n", 406 | "142 
227393.0 0.0131771 5.90189\n", 407 | "143 235779.0 0.00848797 5.78246\n", 408 | "144 230493.0 0.00692687 5.88537\n", 409 | "145 242716.0 0.0110878 5.29196\n", 410 | "[Sample] d_loss: 0.08790626, g_loss: 5.80489397\n", 411 | "146 231225.0 0.0122205 5.11008\n", 412 | "147 238659.0 0.0117037 5.77645\n", 413 | "148 230638.0 0.0175756 5.61163\n", 414 | "149 220145.0 0.0278739 4.64126\n", 415 | "150 226944.0 0.0158838 4.76763\n", 416 | "[Sample] d_loss: 0.03432536, g_loss: 5.33688068\n", 417 | "151 232705.0 0.00881153 5.39108\n", 418 | "152 224537.0 0.0122822 5.68102\n", 419 | "153 228229.0 0.00735766 6.04768\n", 420 | "154 228388.0 0.00517893 5.75304\n", 421 | "155 238058.0 0.00882215 5.21119\n", 422 | "[Sample] d_loss: 0.04109722, g_loss: 5.27304268\n", 423 | "156 242456.0 0.013323 4.87104\n", 424 | "157 225957.0 0.01703 5.2169\n", 425 | "158 232924.0 0.00694908 5.58453\n", 426 | "159 231190.0 0.0120224 5.32643\n", 427 | "160 232224.0 0.0254301 5.13455\n", 428 | "[Sample] d_loss: 0.03752626, g_loss: 5.19616318\n", 429 | "161 233386.0 0.0114367 4.80176\n", 430 | "162 234069.0 0.0107298 4.98471\n", 431 | "163 235582.0 0.00837972 5.37866\n", 432 | "164 234374.0 0.0446169 5.53277\n", 433 | "165 245869.0 0.0129017 4.80986\n", 434 | "[Sample] d_loss: 0.04730193, g_loss: 5.91464090\n", 435 | "166 221256.0 0.0163708 5.28049\n", 436 | "167 229175.0 0.0199265 4.69881\n", 437 | "168 222870.0 0.0166284 4.85229\n", 438 | "169 232317.0 0.00842492 5.09982\n", 439 | "170 227150.0 0.0259353 4.66713\n", 440 | "[Sample] d_loss: 0.04824191, g_loss: 5.00397587\n", 441 | "171 233823.0 0.0238737 4.99047\n", 442 | "172 235648.0 0.0155936 5.40203\n", 443 | "173 238560.0 0.00894511 5.7128\n", 444 | "174 229837.0 0.0211374 4.93182\n", 445 | "175 239050.0 0.0333094 4.34906\n", 446 | "[Sample] d_loss: 0.11382335, g_loss: 4.91633129\n", 447 | "176 238777.0 0.023757 4.65059\n", 448 | "177 234549.0 0.0234066 4.3521\n", 449 | "178 243782.0 0.0352357 5.12954\n", 450 | "179 227239.0 0.17978 2.26344\n", 451 | "180 236638.0 0.0121121 5.23738\n", 452 | "[Sample] d_loss: 0.06577131, g_loss: 5.40220451\n", 453 | "181 251268.0 0.0172349 5.97721\n", 454 | "182 242239.0 0.036469 4.96705\n", 455 | "183 234031.0 0.0262188 5.1254\n", 456 | "184 221232.0 0.0773363 3.52267\n", 457 | "185 221862.0 0.0325552 6.59387\n", 458 | "[Sample] d_loss: 0.15868407, g_loss: 7.59856272\n", 459 | "186 243207.0 0.0648476 4.45383\n", 460 | "187 239173.0 0.027962 4.09254\n", 461 | "188 230761.0 0.0281782 4.12391\n", 462 | "189 239185.0 0.0126938 4.79153\n", 463 | "190 244615.0 0.0146423 5.00414\n", 464 | "[Sample] d_loss: 0.07880122, g_loss: 6.54589844\n", 465 | "191 248247.0 0.00847159 5.40055\n", 466 | "192 240515.0 0.026969 4.26755\n", 467 | "193 236745.0 0.0998925 2.85351\n", 468 | "194 237708.0 0.0573707 3.82132\n", 469 | "195 247277.0 0.0408176 3.58679\n", 470 | "[Sample] d_loss: 0.21264750, g_loss: 4.63467312\n", 471 | "196 246981.0 0.0785196 3.13357\n", 472 | "197 233499.0 3.76655 0.0827683\n", 473 | "198 237561.0 4.90307 30.8296\n", 474 | "199 250424.0 0.52979 26.5839\n", 475 | "200 234164.0 0.0186628 18.3486\n", 476 | "[Sample] d_loss: 0.15513425, g_loss: 17.83559799\n", 477 | "201 232475.0 0.0157238 6.64334\n", 478 | "202 234208.0 3.18346 0.227658\n", 479 | "203 227974.0 0.863236 19.1933\n", 480 | "204 240140.0 1.16967 21.0508\n", 481 | "205 233598.0 0.141638 13.3817\n", 482 | "[Sample] d_loss: 0.65094686, g_loss: 13.64598274\n", 483 | "206 232207.0 0.24277 8.10804\n", 484 | "207 234332.0 0.0454663 7.67026\n", 485 | "208 223944.0 0.0274578 
6.72822\n", 486 | "209 223066.0 0.0218023 6.62184\n", 487 | "210 236240.0 0.0666431 5.78545\n", 488 | "[Sample] d_loss: 0.21489657, g_loss: 5.92019272\n", 489 | "211 226682.0 0.104861 5.09795\n", 490 | "212 228393.0 0.0354797 5.00646\n", 491 | "213 226480.0 0.049423 4.81701\n", 492 | "214 235252.0 0.0227832 4.63974\n", 493 | "215 224402.0 0.0704613 4.38986\n", 494 | "[Sample] d_loss: 0.14497535, g_loss: 4.62863350\n", 495 | "216 217486.0 0.0218949 4.87069\n", 496 | "217 216059.0 0.0339529 5.24329\n", 497 | "218 224107.0 0.0149704 5.64122\n", 498 | "219 218933.0 0.0127757 5.20033\n", 499 | "220 234791.0 0.00814328 5.82185\n", 500 | "[Sample] d_loss: 0.14541656, g_loss: 5.69943142\n", 501 | "221 228876.0 0.0135209 5.75629\n", 502 | "222 218893.0 0.0356815 4.68274\n", 503 | "223 234191.0 0.025902 4.10312\n", 504 | "224 228151.0 0.0417658 4.03452\n", 505 | "225 236395.0 0.0207397 4.40349\n", 506 | "[Sample] d_loss: 0.19967143, g_loss: 4.58780384\n", 507 | "226 233003.0 0.034534 4.67472\n", 508 | "227 235322.0 0.0396185 4.7476\n", 509 | "228 228168.0 0.0178029 4.64536\n", 510 | "229 227191.0 0.0482604 4.0262\n", 511 | "230 226153.0 0.0236118 4.64213\n", 512 | "[Sample] d_loss: 0.17000815, g_loss: 4.73817110\n", 513 | "231 230057.0 0.0196168 4.76789\n", 514 | "232 224689.0 0.0327076 4.1422\n", 515 | "233 236112.0 0.0594067 4.13156\n", 516 | "234 224057.0 0.0358456 4.3645\n", 517 | "235 243267.0 0.0194016 4.64152\n", 518 | "[Sample] d_loss: 0.16593489, g_loss: 4.59318209\n", 519 | "236 233658.0 0.0317861 4.4482\n", 520 | "237 233907.0 0.0294461 4.51846\n", 521 | "238 234301.0 0.0309488 4.22815\n", 522 | "239 229132.0 0.0273762 4.84214\n", 523 | "240 230384.0 0.034775 4.82667\n", 524 | "[Sample] d_loss: 0.21976697, g_loss: 4.76398659\n", 525 | "241 227761.0 0.0561302 4.00227\n", 526 | "242 227735.0 0.0388394 4.13304\n", 527 | "243 227644.0 0.0615722 4.99376\n", 528 | "244 219833.0 0.02863 5.09435\n", 529 | "245 229947.0 0.0252947 5.16666\n", 530 | "[Sample] d_loss: 0.16339926, g_loss: 5.42608356\n", 531 | "246 232544.0 0.0431259 4.42563\n", 532 | "247 231965.0 0.0561339 5.01132\n", 533 | "248 241051.0 0.022199 5.65404\n", 534 | "249 236994.0 0.0151434 5.73013\n", 535 | "250 224802.0 0.0376041 4.56005\n", 536 | "[Sample] d_loss: 0.08151779, g_loss: 4.96111298\n", 537 | "251 228488.0 0.0270349 4.38578\n", 538 | "252 233242.0 0.0187593 4.80222\n", 539 | "253 237141.0 0.0176837 5.26819\n", 540 | "254 236231.0 0.00765318 6.06183\n", 541 | "255 214626.0 0.039808 4.87397\n", 542 | "[Sample] d_loss: 0.04781735, g_loss: 5.29637814\n", 543 | "256 230600.0 0.0237795 4.79164\n", 544 | "257 242342.0 0.0140439 5.38596\n", 545 | "258 231235.0 0.0160225 5.6932\n", 546 | "259 224695.0 0.010161 6.27275\n", 547 | "260 226567.0 0.00903921 6.05928\n", 548 | "[Sample] d_loss: 0.05506280, g_loss: 6.80773401\n", 549 | "261 231709.0 0.0120723 6.1225\n", 550 | "262 236434.0 0.00530667 7.00091\n", 551 | "263 231345.0 0.0119392 6.38939\n", 552 | "264 232193.0 0.0170099 6.02789\n", 553 | "265 230570.0 0.0173669 6.03008\n", 554 | "[Sample] d_loss: 0.05970846, g_loss: 6.65830946\n", 555 | "266 241510.0 0.0116628 6.33305\n", 556 | "267 236238.0 0.0217201 5.94722\n", 557 | "268 225370.0 0.00849996 6.67379\n", 558 | "269 232619.0 0.00875042 6.60828\n", 559 | "270 232028.0 0.00872602 6.8441\n", 560 | "[Sample] d_loss: 0.04462141, g_loss: 6.80883694\n", 561 | "271 244737.0 0.0101465 6.41539\n", 562 | "272 233551.0 0.00834047 6.50659\n", 563 | "273 230720.0 0.0135347 6.46788\n", 564 | "274 228724.0 0.0076012 7.13802\n", 565 | "275 
232588.0 0.0049731 6.90587\n", 566 | "[Sample] d_loss: 0.03411392, g_loss: 6.99442482\n", 567 | "276 238740.0 0.00439329 6.68919\n", 568 | "277 225808.0 0.00651315 6.39027\n", 569 | "278 230533.0 0.00664583 6.45124\n", 570 | "279 227631.0 0.00901968 6.08823\n", 571 | "280 244889.0 0.0068897 6.08488\n", 572 | "[Sample] d_loss: 0.05213916, g_loss: 6.06281948\n", 573 | "281 228423.0 0.0159711 5.71435\n", 574 | "282 232857.0 0.00987732 6.29269\n", 575 | "283 235713.0 0.00718818 5.89121\n", 576 | "284 229673.0 0.00825992 6.41971\n", 577 | "285 227430.0 0.00930632 6.52186\n", 578 | "[Sample] d_loss: 0.05738930, g_loss: 6.77016640\n", 579 | "286 222880.0 0.00964625 6.24607\n", 580 | "287 239041.0 0.00692302 6.66637\n", 581 | "288 226469.0 0.00916264 6.74069\n", 582 | "289 223526.0 0.0083958 7.05017\n", 583 | "290 222067.0 0.00745634 6.54768\n", 584 | "[Sample] d_loss: 0.04481876, g_loss: 6.89225626\n", 585 | "291 242108.0 0.00417067 6.56804\n", 586 | "292 227266.0 0.00855611 5.97746\n", 587 | "293 226375.0 0.00809982 5.86793\n", 588 | "294 224373.0 0.022894 5.58134\n", 589 | "295 223920.0 0.0123834 6.11103\n", 590 | "[Sample] d_loss: 0.04447897, g_loss: 6.30993271\n", 591 | "296 236501.0 0.00616622 5.95452\n", 592 | "297 239469.0 0.00486474 6.31477\n", 593 | "298 222471.0 0.01576 6.54476\n", 594 | "299 240584.0 0.00467833 6.55898\n", 595 | "300 238486.0 0.00528898 6.81481\n", 596 | "[Sample] d_loss: 0.03745259, g_loss: 7.19942045\n", 597 | "301 224714.0 0.00664556 6.7361\n", 598 | "302 226595.0 0.0116915 6.75714\n", 599 | "303 228254.0 0.00693907 6.5623\n", 600 | "304 234693.0 0.00505667 6.40266\n", 601 | "305 221397.0 0.00652016 5.97942\n", 602 | "[Sample] d_loss: 0.03662929, g_loss: 6.35684109\n", 603 | "306 229638.0 0.00842832 6.01297\n", 604 | "307 226824.0 0.0053036 6.23335\n", 605 | "308 233790.0 0.00348787 7.01063\n", 606 | "309 229991.0 0.00908346 6.22502\n", 607 | "310 230525.0 0.00385921 6.71605\n", 608 | "[Sample] d_loss: 0.05208379, g_loss: 7.07441711\n", 609 | "311 226536.0 0.00704353 6.57535\n", 610 | "312 227179.0 0.00547935 6.41332\n", 611 | "313 250212.0 0.00750827 5.82572\n", 612 | "314 234616.0 0.0117486 5.9731\n", 613 | "315 230237.0 0.0133015 6.64978\n", 614 | "[Sample] d_loss: 0.05998197, g_loss: 7.16182518\n", 615 | "316 228343.0 0.00534785 7.08888\n", 616 | "317 225581.0 0.0157808 6.26036\n", 617 | "318 236806.0 0.00949105 6.57611\n", 618 | "319 232100.0 0.00472928 7.41062\n", 619 | "320 231944.0 0.00337311 7.23025\n", 620 | "[Sample] d_loss: 0.02167877, g_loss: 7.32331753\n", 621 | "321 221511.0 0.00559661 7.0422\n", 622 | "322 223953.0 0.00968856 6.54792\n", 623 | "323 219441.0 0.0111878 6.31454\n", 624 | "324 225638.0 0.00953885 6.15228\n", 625 | "325 214925.0 0.00346153 6.94129\n", 626 | "[Sample] d_loss: 0.02705349, g_loss: 7.47104883\n", 627 | "326 223848.0 0.00326469 7.36999\n", 628 | "327 237949.0 0.00311648 6.71274\n", 629 | "328 236584.0 0.00306652 6.91049\n", 630 | "329 225408.0 0.00453587 6.55525\n", 631 | "330 231171.0 0.00934839 5.90724\n", 632 | "[Sample] d_loss: 0.03794486, g_loss: 6.75169849\n", 633 | "331 225152.0 0.012217 6.51665\n", 634 | "332 230556.0 0.00483899 6.39285\n", 635 | "333 230084.0 0.00272052 7.00029\n", 636 | "334 223528.0 0.00461105 6.46103\n", 637 | "335 224546.0 0.00504601 6.71855\n", 638 | "[Sample] d_loss: 0.03703976, g_loss: 7.29901505\n", 639 | "336 231152.0 0.00511401 6.85701\n", 640 | "337 239254.0 0.00500541 6.68827\n", 641 | "338 229720.0 0.00470691 6.61704\n", 642 | "339 230719.0 0.00571903 6.00766\n", 643 | "340 227765.0 
0.00575892 7.02052\n", 644 | "[Sample] d_loss: 0.04576585, g_loss: 7.30345726\n", 645 | "341 222227.0 0.0097072 7.04605\n", 646 | "342 240589.0 0.00422425 6.60751\n", 647 | "343 231894.0 0.00696187 6.46882\n", 648 | "344 229089.0 0.00427696 6.55492\n", 649 | "345 229901.0 0.00593136 6.52651\n", 650 | "[Sample] d_loss: 0.04528400, g_loss: 7.29823494\n", 651 | "346 227550.0 0.00810985 6.94126\n", 652 | "347 225892.0 0.00799331 6.15584\n", 653 | "348 219861.0 0.00569321 6.8965\n", 654 | "349 227115.0 0.00653193 6.03038\n", 655 | "350 229437.0 0.00563783 6.22979\n", 656 | "[Sample] d_loss: 0.04784630, g_loss: 6.74322796\n", 657 | "351 230905.0 0.00641609 5.80035\n", 658 | "352 233912.0 0.00630432 5.98079\n", 659 | "353 242256.0 0.00503092 6.39165\n", 660 | "354 224562.0 0.00897856 6.77515\n", 661 | "355 240421.0 0.00533586 6.87426\n", 662 | "[Sample] d_loss: 0.06678400, g_loss: 7.56421757\n", 663 | "356 232685.0 0.00419753 6.74777\n", 664 | "357 223149.0 0.0120203 7.05257\n", 665 | "358 230422.0 0.00362152 7.04082\n", 666 | "359 228160.0 0.0116201 6.6046\n", 667 | "360 227745.0 0.00373293 6.82082\n", 668 | "[Sample] d_loss: 0.02574606, g_loss: 7.34196663\n", 669 | "361 230275.0 0.00525981 6.35709\n", 670 | "362 244093.0 0.00519018 6.48038\n", 671 | "363 235597.0 0.0039971 6.64699\n", 672 | "364 226262.0 0.00949493 6.13839\n", 673 | "365 236849.0 0.00457394 6.21822\n", 674 | "[Sample] d_loss: 0.03232419, g_loss: 6.53604078\n", 675 | "366 224912.0 0.00998274 5.41324\n", 676 | "367 221642.0 0.0158585 5.44584\n", 677 | "368 228092.0 0.0132993 5.72031\n", 678 | "369 231961.0 0.00926836 5.59224\n", 679 | "370 230446.0 0.0156366 5.76043\n", 680 | "[Sample] d_loss: 0.05859080, g_loss: 6.51921749\n", 681 | "371 229604.0 0.00957838 5.63284\n", 682 | "372 228373.0 0.018297 5.00355\n", 683 | "373 232860.0 0.0586759 3.77599\n", 684 | "374 249124.0 0.0452311 3.73321\n", 685 | "375 242459.0 0.0975956 2.91956\n", 686 | "[Sample] d_loss: 0.45042205, g_loss: 4.19774866\n", 687 | "376 229526.0 0.182092 2.59648\n", 688 | "377 234532.0 0.141931 7.53892\n", 689 | "378 247905.0 0.0364125 8.7273\n", 690 | "379 227781.0 0.0180163 6.67734\n", 691 | "380 244264.0 0.134298 4.32576\n", 692 | "[Sample] d_loss: 0.09217034, g_loss: 5.42544746\n", 693 | "381 236407.0 0.00794402 7.82291\n", 694 | "382 243881.0 0.00293721 9.8204\n", 695 | "383 232229.0 0.0115984 9.46745\n", 696 | "384 236722.0 0.0143918 7.44666\n", 697 | "385 242641.0 0.0121682 5.56875\n", 698 | "[Sample] d_loss: 0.03963409, g_loss: 7.38958311\n", 699 | "386 242344.0 0.0132331 5.31705\n", 700 | "387 238078.0 0.00671329 6.00698\n", 701 | "388 227904.0 0.013077 5.2371\n", 702 | "389 225122.0 0.00907737 5.66986\n", 703 | "390 225619.0 0.026194 5.22994\n", 704 | "[Sample] d_loss: 0.02320081, g_loss: 7.06579494\n", 705 | "391 235780.0 0.010019 5.65837\n", 706 | "392 230970.0 0.0260254 5.04433\n", 707 | "393 225584.0 0.0153327 5.01628\n", 708 | "394 231767.0 0.00903254 5.44734\n", 709 | "395 223767.0 0.0316662 5.07014\n", 710 | "[Sample] d_loss: 0.07280695, g_loss: 6.65995741\n", 711 | "396 233457.0 0.0105404 6.07634\n", 712 | "397 239050.0 0.0292042 4.57349\n", 713 | "398 227220.0 0.0166133 5.57333\n", 714 | "399 228879.0 0.0386154 4.38414\n", 715 | "400 230961.0 0.130367 5.91494\n", 716 | "[Sample] d_loss: 0.48303351, g_loss: 8.57436943\n", 717 | "401 251261.0 0.0485235 7.48811\n", 718 | "402 235816.0 0.0513761 5.30405\n", 719 | "403 231016.0 0.0499712 4.41322\n", 720 | "404 244146.0 0.0153289 5.27378\n", 721 | "405 238806.0 0.00900818 5.96332\n", 722 | "[Sample] 
d_loss: 0.15898013, g_loss: 7.82029533\n", 723 | "406 229355.0 0.0402518 4.73577\n", 724 | "407 241944.0 0.0303526 4.48466\n", 725 | "408 230988.0 0.0200604 4.89748\n", 726 | "409 248028.0 0.00590469 6.38302\n", 727 | "410 231242.0 0.0247886 5.31105\n", 728 | "[Sample] d_loss: 0.13028352, g_loss: 6.54356670\n", 729 | "411 245610.0 0.00773092 5.59995\n", 730 | "412 232449.0 0.0767833 3.69628\n", 731 | "413 239457.0 0.030232 4.92369\n", 732 | "414 245647.0 0.0838391 4.60188\n", 733 | "415 240851.0 0.0455272 4.64956\n", 734 | "[Sample] d_loss: 0.21393555, g_loss: 6.36150360\n", 735 | "416 241551.0 0.0351407 4.38862\n", 736 | "417 255684.0 0.0420684 5.30632\n", 737 | "418 238775.0 0.150728 2.48056\n", 738 | "419 235866.0 0.0259643 6.38019\n", 739 | "420 234560.0 0.0269685 7.99852\n", 740 | "[Sample] d_loss: 0.21031801, g_loss: 8.79819870\n", 741 | "421 231761.0 0.0224057 8.06307\n", 742 | "422 236712.0 0.0619987 5.24119\n", 743 | "423 239279.0 0.0307862 4.39839\n", 744 | "424 255368.0 0.525198 4.89987\n", 745 | "425 238345.0 2.88381 11.9246\n", 746 | "[Sample] d_loss: 2.85928965, g_loss: 13.85167885\n", 747 | "426 244172.0 0.248095 8.63135\n", 748 | "427 235127.0 0.291439 3.95281\n", 749 | "428 257956.0 0.0910518 9.64591\n", 750 | "429 252064.0 0.120828 12.2587\n", 751 | "430 227278.0 0.0268803 10.4078\n", 752 | "[Sample] d_loss: 0.01734102, g_loss: 10.02555561\n", 753 | "431 240899.0 0.00569455 7.07338\n", 754 | "432 225555.0 0.00434344 7.20843\n", 755 | "433 233632.0 0.00682019 5.91998\n", 756 | "434 231891.0 0.00571175 6.68227\n", 757 | "435 228809.0 0.00498165 7.40532\n", 758 | "[Sample] d_loss: 0.00722294, g_loss: 8.10242176\n", 759 | "436 235955.0 0.00644644 7.55073\n", 760 | "437 241113.0 0.00532984 6.94808\n", 761 | "438 230223.0 0.0135218 5.34129\n", 762 | "439 233142.0 0.0108817 5.3288\n", 763 | "440 228034.0 0.00952262 5.37383\n", 764 | "[Sample] d_loss: 0.01330117, g_loss: 5.76603603\n", 765 | "441 231402.0 0.00821999 5.51816\n", 766 | "442 227568.0 0.00408015 6.18706\n", 767 | "443 228192.0 0.00829474 5.68073\n", 768 | "444 218651.0 0.0237908 5.39958\n", 769 | "445 225750.0 0.0124995 4.77973\n", 770 | "[Sample] d_loss: 0.01857344, g_loss: 5.06382370\n", 771 | "446 235589.0 0.00758902 5.32547\n", 772 | "447 233694.0 0.0110533 6.06763\n", 773 | "448 226417.0 0.00624357 6.48237\n", 774 | "449 238758.0 0.0035881 6.20237\n", 775 | "450 240990.0 0.00392423 5.92283\n", 776 | "[Sample] d_loss: 0.01650183, g_loss: 6.27635145\n", 777 | "451 232324.0 0.00412346 5.84415\n", 778 | "452 242203.0 0.00412027 5.92891\n", 779 | "453 221197.0 0.0135743 6.02693\n", 780 | "454 226812.0 0.00651309 6.07469\n", 781 | "455 239609.0 0.00385881 6.0784\n", 782 | "[Sample] d_loss: 0.01295040, g_loss: 6.57402611\n", 783 | "456 225997.0 0.00427819 6.6498\n", 784 | "457 230986.0 0.00424453 6.12105\n", 785 | "458 227776.0 0.00432798 6.14206\n", 786 | "459 238730.0 0.00472092 6.11902\n", 787 | "460 241677.0 0.00357627 6.0541\n", 788 | "[Sample] d_loss: 0.01488798, g_loss: 6.25869131\n", 789 | "461 237075.0 0.0049489 5.72847\n", 790 | "462 244164.0 0.00539957 5.78004\n", 791 | "463 225896.0 0.00545449 6.13207\n", 792 | "464 234774.0 0.00411047 6.06359\n", 793 | "465 228435.0 0.00397428 6.38309\n", 794 | "[Sample] d_loss: 0.02083703, g_loss: 6.80359650\n", 795 | "466 229067.0 0.00358915 6.57241\n", 796 | "467 233783.0 0.00949546 6.33603\n", 797 | "468 233402.0 0.00234101 7.05441\n", 798 | "469 231492.0 0.00766266 6.60424\n", 799 | "470 239787.0 0.00391098 6.7012\n", 800 | "[Sample] d_loss: 0.01686109, g_loss: 
6.90075827\n", 801 | "471 231652.0 0.00333893 6.43238\n", 802 | "472 219755.0 0.00497335 6.02615\n", 803 | "473 226189.0 0.0115621 5.88204\n", 804 | "474 234905.0 0.00633552 5.92082\n", 805 | "475 239756.0 0.00672242 5.72087\n", 806 | "[Sample] d_loss: 0.01657334, g_loss: 6.32542896\n", 807 | "476 238571.0 0.00530748 6.33439\n", 808 | "477 243502.0 0.00465349 6.63239\n", 809 | "478 232169.0 0.00376534 6.99403\n", 810 | "479 222980.0 0.00352129 7.08596\n", 811 | "480 236069.0 0.0031136 6.53782\n", 812 | "[Sample] d_loss: 0.01569846, g_loss: 7.41158009\n", 813 | "481 232192.0 0.00242 6.41409\n", 814 | "482 228513.0 0.0109052 6.61619\n", 815 | "483 234972.0 0.00871294 5.92578\n", 816 | "484 228919.0 0.00869686 5.4675\n", 817 | "485 233592.0 0.00493189 5.97714\n", 818 | "[Sample] d_loss: 0.00965434, g_loss: 6.47797871\n", 819 | "486 237099.0 0.00324196 6.09975\n", 820 | "487 234611.0 0.00283145 6.26349\n", 821 | "488 234069.0 0.00348271 6.39802\n", 822 | "489 231441.0 0.002699 6.30102\n", 823 | "490 241489.0 0.0023989 6.54204\n", 824 | "[Sample] d_loss: 0.01195450, g_loss: 7.13307381\n", 825 | "491 239964.0 0.0016464 6.68549\n", 826 | "492 230367.0 0.00231924 6.82873\n", 827 | "493 232170.0 0.00441406 6.88877\n", 828 | "494 230853.0 0.00745805 6.34587\n", 829 | "495 230504.0 0.00378106 6.78877\n", 830 | "[Sample] d_loss: 0.01029651, g_loss: 7.29813671\n", 831 | "496 235587.0 0.00184417 6.87682\n", 832 | "497 236726.0 0.0011928 7.37907\n", 833 | "498 240503.0 0.00184366 6.79634\n", 834 | "499 223160.0 0.00225777 6.55724\n", 835 | "500 241666.0 0.00305745 6.62385\n", 836 | "[Sample] d_loss: 0.01095224, g_loss: 6.89898205\n", 837 | "501 233296.0 0.00245379 6.40481\n", 838 | "502 235243.0 0.00330725 6.56732\n", 839 | "503 232420.0 0.00534135 6.49388\n", 840 | "504 241018.0 0.00165398 6.91947\n", 841 | "505 230548.0 0.00246502 7.16578\n", 842 | "[Sample] d_loss: 0.01184070, g_loss: 7.62761497\n", 843 | "506 234954.0 0.00104935 7.47584\n", 844 | "507 225812.0 0.0046939 7.36188\n", 845 | "508 233534.0 0.00153489 7.2661\n", 846 | "509 225678.0 0.0022783 7.1272\n", 847 | "510 230591.0 0.00539551 6.98715\n", 848 | "[Sample] d_loss: 0.01055986, g_loss: 7.12112188\n", 849 | "511 217440.0 0.00936802 6.90634\n", 850 | "512 232867.0 0.00261165 6.7012\n", 851 | "513 234356.0 0.00327527 6.8992\n", 852 | "514 236871.0 0.00230774 6.84718\n", 853 | "515 223101.0 0.00209308 7.12661\n", 854 | "[Sample] d_loss: 0.00679285, g_loss: 7.62021589\n", 855 | "516 229152.0 0.00168031 7.17106\n", 856 | "517 234952.0 0.0010182 7.75602\n", 857 | "518 232390.0 0.00121826 7.371\n", 858 | "519 221567.0 0.00198914 6.98283\n", 859 | "520 228544.0 0.00128378 7.25912\n", 860 | "[Sample] d_loss: 0.00779736, g_loss: 7.41330338\n", 861 | "521 232821.0 0.00174804 7.22761\n", 862 | "522 240278.0 0.00122231 7.24541\n", 863 | "523 223778.0 0.00375986 7.07968\n", 864 | "524 223159.0 0.00311893 6.90097\n", 865 | "525 235256.0 0.00259103 7.35689\n", 866 | "[Sample] d_loss: 0.00924281, g_loss: 7.59811592\n", 867 | "526 226965.0 0.00421429 7.35439\n", 868 | "527 230836.0 0.00373219 7.71545\n", 869 | "528 236153.0 0.00162868 7.6636\n", 870 | "529 224777.0 0.00116469 7.71395\n", 871 | "530 224451.0 0.00423772 7.50927\n", 872 | "[Sample] d_loss: 0.00709648, g_loss: 8.08582783\n", 873 | "531 225316.0 0.00303864 7.8389\n", 874 | "532 225797.0 0.00200309 7.36431\n", 875 | "533 238335.0 0.00148096 7.25667\n", 876 | "534 226151.0 0.00181822 7.06185\n", 877 | "535 238360.0 0.00425675 6.01023\n", 878 | "[Sample] d_loss: 0.00844006, g_loss: 6.44500446\n", 
879 | "536 219348.0 0.00329617 6.7834\n", 880 | "537 224939.0 0.00290666 7.54935\n", 881 | "538 218319.0 0.00503667 7.64497\n", 882 | "539 218329.0 0.000921919 7.98389\n", 883 | "540 228556.0 0.00124708 8.06391\n", 884 | "[Sample] d_loss: 0.00554754, g_loss: 8.15587902\n", 885 | "541 224579.0 0.00116042 7.97102\n", 886 | "542 228131.0 0.0013109 7.55577\n", 887 | "543 229988.0 0.00165463 7.47293\n", 888 | "544 226960.0 0.00214207 7.31126\n", 889 | "545 234161.0 0.00198805 7.54495\n", 890 | "[Sample] d_loss: 0.00612833, g_loss: 7.87354469\n", 891 | "546 239810.0 0.00148918 7.38165\n", 892 | "547 233033.0 0.00209961 7.21658\n", 893 | "548 225831.0 0.00192223 7.40458\n", 894 | "549 229106.0 0.00158761 7.93108\n", 895 | "550 237197.0 0.00108034 7.80758\n", 896 | "[Sample] d_loss: 0.00687567, g_loss: 7.95410442\n", 897 | "551 235458.0 0.00150188 7.62764\n", 898 | "552 236869.0 0.0023154 7.18325\n" 899 | ] 900 | }, 901 | { 902 | "ename": "KeyboardInterrupt", 903 | "evalue": "", 904 | "output_type": "error", 905 | "traceback": [ 906 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 907 | "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", 908 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 29\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[0mfd\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m{\u001b[0m\u001b[0mz\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mbatch_z\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mimages\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mbatch_images\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m---> 31\u001b[0;31m \u001b[0msess\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mg_optim\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0md_optim\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfd\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 32\u001b[0m \u001b[0msess\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mcomplete_optim\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m{\u001b[0m\u001b[0mz\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mbatch_z\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mimages\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mbatch_images\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mmask\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mmask_\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 33\u001b[0m \u001b[0mc_loss\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mdloss\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mgloss\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0msess\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mcomplete_loss\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0md_loss\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mg_loss\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m{\u001b[0m\u001b[0mz\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mbatch_z\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mimages\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mbatch_images\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mmask\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mmask_\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 909 | 
"\u001b[0;32mC:\\Users\\zc440z0ac\\AppData\\Local\\Continuum\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 764\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 765\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 766\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 767\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 768\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 910 | "\u001b[0;32mC:\\Users\\zc440z0ac\\AppData\\Local\\Continuum\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 962\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[1;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 963\u001b[0m results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m--> 964\u001b[0;31m feed_dict_string, options, run_metadata)\n\u001b[0m\u001b[1;32m 965\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 966\u001b[0m \u001b[0mresults\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 911 | "\u001b[0;32mC:\\Users\\zc440z0ac\\AppData\\Local\\Continuum\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1012\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 1013\u001b[0m return self._do_call(_run_fn, self._session, feed_dict, fetch_list,\n\u001b[0;32m-> 1014\u001b[0;31m target_list, options, run_metadata)\n\u001b[0m\u001b[1;32m 1015\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 1016\u001b[0m return self._do_call(_prun_fn, self._session, handle, feed_dict,\n", 912 | "\u001b[0;32mC:\\Users\\zc440z0ac\\AppData\\Local\\Continuum\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m 1019\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 1020\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1021\u001b[0;31m \u001b[1;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1022\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[1;32mas\u001b[0m 
\u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 1023\u001b[0m \u001b[0mmessage\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 913 | "\u001b[0;32mC:\\Users\\zc440z0ac\\AppData\\Local\\Continuum\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(session, feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m 1001\u001b[0m return tf_session.TF_Run(session, options,\n\u001b[1;32m 1002\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1003\u001b[0;31m status, run_metadata)\n\u001b[0m\u001b[1;32m 1004\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m 1005\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msession\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 914 | "\u001b[0;31mKeyboardInterrupt\u001b[0m: " 915 | ] 916 | } 917 | ], 918 | "source": [ 919 | "#Training\n", 920 | "counter=1\n", 921 | "start_time=time.time()\n", 922 | "for epoch in range(1):\n", 923 | " np.random.shuffle(data)\n", 924 | " batchidxs=int(len(data)/batch_size)\n", 925 | " \n", 926 | " for idx in range(batchidxs):\n", 927 | " #try:\n", 928 | " batch_files=data[idx*batch_size:(idx+1)*batch_size]\n", 929 | " batch=[get_image(batch_file,image_size,is_crop=is_crop) for batch_file in batch_files]\n", 930 | " batch_images=np.reshape(np.array(batch).astype(np.float32),[batch_size]+image_shape)\n", 931 | "\n", 932 | " batch_z=np.random.uniform(-1,1,[batch_size,z_dim]).astype(np.float32)\n", 933 | "\n", 934 | " #mask\n", 935 | " scale=0.25\n", 936 | " mask_=np.ones([batch_size]+image_shape).astype(np.float32)\n", 937 | " l=int(64*scale)\n", 938 | " u=int(64*(1.0-scale))\n", 939 | " mask_[:,l:u,l:u,:]=0.0\n", 940 | " #inverse mask\n", 941 | " scale=0.25\n", 942 | " imask_=np.zeros([batch_size]+image_shape).astype(np.float32)\n", 943 | " l=int(64*scale)\n", 944 | " u=int(64*(1.0-scale))\n", 945 | " imask_[:,l:u,l:u,:]=1.0\n", 946 | " \n", 947 | " \n", 948 | " fd={z:batch_z,images:batch_images}\n", 949 | " sess.run([g_optim,d_optim],feed_dict=fd)\n", 950 | " sess.run([complete_optim],feed_dict={z:batch_z,images:batch_images,mask:mask_})\n", 951 | " c_loss,dloss,gloss=sess.run([complete_loss,d_loss,g_loss],feed_dict={z:batch_z,images:batch_images,mask:mask_})\n", 952 | " print(counter,c_loss,dloss,gloss)\n", 953 | " \n", 954 | " if np.mod(counter,5)==0:\n", 955 | " sample_generated,dl,gl=sess.run([G,d_loss,g_loss],feed_dict={z:sample_z,images:sample_images})\n", 956 | " original_part=np.multiply(sample_images,mask_)\n", 957 | " generated_part=np.multiply(sample_generated,imask_)\n", 958 | " total=np.add(original_part,generated_part)\n", 959 | " save_images(total,'samples\\\\')\n", 960 | " print('[Sample] d_loss: %.8f, g_loss: %.8f' % (dl, gl))\n", 961 | " counter+=1 \n", 962 | " #except:\n", 963 | " # continue" 964 | ] 965 | }, 966 | { 967 | "cell_type": "markdown", 968 | "metadata": {}, 969 | "source": [ 970 | "## Saving the 
model" 971 | ] 972 | }, 973 | { 974 | "cell_type": "code", 975 | "execution_count": 106, 976 | "metadata": { 977 | "collapsed": false 978 | }, 979 | "outputs": [ 980 | { 981 | "data": { 982 | "text/plain": [ 983 | "'checkpoint\\\\image_completion.chk'" 984 | ] 985 | }, 986 | "execution_count": 106, 987 | "metadata": {}, 988 | "output_type": "execute_result" 989 | } 990 | ], 991 | "source": [ 992 | "saver.save(sess,\"checkpoint\\\\image_completion.chk\")" 993 | ] 994 | }, 995 | { 996 | "cell_type": "code", 997 | "execution_count": null, 998 | "metadata": { 999 | "collapsed": false 1000 | }, 1001 | "outputs": [], 1002 | "source": [ 1003 | "saver.restore(sess, \"checkpoint\\\\image_completion.chk\")\n", 1004 | "print(sess.run(tf.global_variables()))" 1005 | ] 1006 | }, 1007 | { 1008 | "cell_type": "markdown", 1009 | "metadata": { 1010 | "collapsed": false 1011 | }, 1012 | "source": [] 1013 | }, 1014 | { 1015 | "cell_type": "code", 1016 | "execution_count": null, 1017 | "metadata": { 1018 | "collapsed": false 1019 | }, 1020 | "outputs": [], 1021 | "source": [] 1022 | }, 1023 | { 1024 | "cell_type": "code", 1025 | "execution_count": null, 1026 | "metadata": { 1027 | "collapsed": true 1028 | }, 1029 | "outputs": [], 1030 | "source": [] 1031 | } 1032 | ], 1033 | "metadata": { 1034 | "anaconda-cloud": {}, 1035 | "kernelspec": { 1036 | "display_name": "Python [tensorflow]", 1037 | "language": "python", 1038 | "name": "Python [tensorflow]" 1039 | }, 1040 | "language_info": { 1041 | "codemirror_mode": { 1042 | "name": "ipython", 1043 | "version": 2 1044 | }, 1045 | "file_extension": ".py", 1046 | "mimetype": "text/x-python", 1047 | "name": "python", 1048 | "nbconvert_exporter": "python", 1049 | "pygments_lexer": "ipython2", 1050 | "version": "2.7.12" 1051 | } 1052 | }, 1053 | "nbformat": 4, 1054 | "nbformat_minor": 1 1055 | } 1056 | --------------------------------------------------------------------------------