├── FlowersImageClassify ├── batch_get_data.py ├── datasets_name.txt ├── flower_photos │ └── 数据集下载.txt ├── flowers_photos_independent │ ├── test1-1.jpg │ ├── test2-1.jpg │ ├── test3-1.jpg │ ├── test4-1.jpg │ ├── test4-2.jpg │ └── test5-1.jpg ├── log_cnn.txt ├── log_lenet.txt ├── main.py ├── models │ ├── cnn.py │ └── lenet.py ├── read_img.py ├── result │ ├── model_pic │ │ ├── train_acc_100_epoches.png │ │ ├── train_acc_10_epoches.png │ │ ├── train_acc_5_epoches.png │ │ ├── train_loss_100_epoches.png │ │ ├── train_loss_10_epoches.png │ │ ├── train_loss_5_epoches.png │ │ ├── val_acc_100_epoches.png │ │ ├── val_acc_10_epoches.png │ │ ├── val_acc_5_epoches.png │ │ ├── val_loss_100_epoches.png │ │ ├── val_loss_10_epoches.png │ │ └── val_loss_5_epoches.png │ ├── model_save │ │ ├── checkpoint │ │ ├── save_net.ckpt-75.data-00000-of-00001 │ │ ├── save_net.ckpt-75.index │ │ ├── save_net.ckpt-75.meta │ │ └── save_net.ckpt-80.data-00000-of-00001 │ └── model_variable │ │ ├── train_acces_100_epoches.npy │ │ ├── train_acces_10_epoches.npy │ │ ├── train_losses_100_epoches.npy │ │ ├── train_losses_10_epoches.npy │ │ ├── val_acces_100_epoches.npy │ │ ├── val_acces_10_epoches.npy │ │ ├── val_losses_100_epoches.npy │ │ └── val_losses_10_epoches.npy └── run.py └── README.md /FlowersImageClassify/batch_get_data.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Jun 26 21:23:56 2019 4 | 5 | @author: ZQQ 6 | """ 7 | 8 | import numpy as np 9 | 10 | # 定义一个函数,按批次取数据 11 | def minibatches(inputs=None, targets=None, batch_size=None, shuffle=False): 12 | assert len(inputs) == len(targets) 13 | if shuffle: 14 | indices = np.arange(len(inputs)) 15 | np.random.shuffle(indices) 16 | for start_idx in range(0, len(inputs) - batch_size + 1, batch_size): 17 | if shuffle: 18 | excerpt = indices[start_idx:start_idx + batch_size] 19 | else: 20 | excerpt = slice(start_idx, start_idx + batch_size) 21 | yield inputs[excerpt], targets[excerpt] 22 | -------------------------------------------------------------------------------- /FlowersImageClassify/datasets_name.txt: -------------------------------------------------------------------------------- 1 | flower_photos/tulipsflower_photos/tulips\9976515506_d496c5e72c.jpg_4 2 | -------------------------------------------------------------------------------- /FlowersImageClassify/flower_photos/数据集下载.txt: -------------------------------------------------------------------------------- 1 | https://blog.csdn.net/AugustMe/article/details/94166164 -------------------------------------------------------------------------------- /FlowersImageClassify/flowers_photos_independent/test1-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/flowers_photos_independent/test1-1.jpg -------------------------------------------------------------------------------- /FlowersImageClassify/flowers_photos_independent/test2-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/flowers_photos_independent/test2-1.jpg -------------------------------------------------------------------------------- /FlowersImageClassify/flowers_photos_independent/test3-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/flowers_photos_independent/test3-1.jpg -------------------------------------------------------------------------------- /FlowersImageClassify/flowers_photos_independent/test4-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/flowers_photos_independent/test4-1.jpg -------------------------------------------------------------------------------- /FlowersImageClassify/flowers_photos_independent/test4-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/flowers_photos_independent/test4-2.jpg -------------------------------------------------------------------------------- /FlowersImageClassify/flowers_photos_independent/test5-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/flowers_photos_independent/test5-1.jpg -------------------------------------------------------------------------------- /FlowersImageClassify/log_cnn.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/log_cnn.txt -------------------------------------------------------------------------------- /FlowersImageClassify/log_lenet.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/log_lenet.txt -------------------------------------------------------------------------------- /FlowersImageClassify/main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sun Jun 30 20:18:08 2019 4 | 5 | @author: ZQQ 6 | """ 7 | 8 | # -*- coding: utf-8 -*- 9 | """ 10 | Created on Wed Jun 26 10:54:57 2019 11 | 12 | @author: ZQQ 13 | """ 14 | 15 | import time 16 | from read_img import read_img, shuffle_data 17 | import tensorflow as tf 18 | from models import lenet 19 | from batch_get_data import minibatches 20 | 21 | start_time = time.time() 22 | 23 | # 数据:http://download.tensorflow.org/example_images/flower_photos.tgz 24 | # 花总共有五类,分别放在5个文件夹下。 25 | path = 'flower_photos/' # 设置图片路径 26 | 27 | # 设置超参数,准备将所有的图片resize成100*100 28 | w = 100 # 宽度 29 | h = 100 # 高度 30 | c = 3 # 图片通道数 31 | 32 | # step1: 加载数据集 33 | print('step1:load the datasets...') 34 | 35 | data, label = read_img(path) # 调用read_img()函数,读取图片数据和对应的标签 36 | x_train, y_train, x_val, y_val = shuffle_data(data,label) # 调用shuffle_data()函数,打乱数据集,并划分数据集 37 | 38 | # step2: 构建模型并开始训练、测试 39 | print('step2: build the model and training...') 40 | 41 | 42 | 43 | # 占位符 44 | x = tf.placeholder(tf.float32, shape=[None, w, h, c], name='x') 45 | y_ = tf.placeholder(tf.int32, shape=[None, ], name='y_') 46 | 47 | logits, pred = lenet.LeNet(x) # pred 是经过softmax处理过的 48 | loss = tf.losses.sparse_softmax_cross_entropy(labels=y_, logits=logits) 49 | #train_op = tf.train.AdadeltaOptimizer(learning_rate=0.001).minimize(loss) 50 | train_op = tf.train.AdamOptimizer(learning_rate=0.001).minimize(loss) # 和上面的优化器不同 51 | correct_prediction = tf.equal(tf.cast(tf.argmax(logits, 1), tf.int32), y_) 52 | acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) 53 | 54 | num_epoches = 10 55 | batch_size = 64 56 | 57 | train_losses = [] 58 | train_acces = [] 59 | val_losses = [] 60 | val_acces = [] 61 | sess = tf.InteractiveSession() 62 | sess.run(tf.global_variables_initializer()) 63 | saver = tf.train.Saver() 64 | 65 | with open("log_lenet.txt", "w") as log_f: 66 | for epoch in range(num_epoches): 67 | train_loss, train_acc, n_batch = 0, 0 , 0 68 | for x_train_a, y_train_a in minibatches(x_train, y_train, batch_size, shuffle=True): 69 | _, err, ac = sess.run([train_op, loss, acc], feed_dict={x: x_train_a, y_: y_train_a}) 70 | train_loss += err 71 | train_acc += ac 72 | n_batch += 1 73 | 74 | train_losses.append(train_loss / n_batch) 75 | train_acces.append(train_acc / n_batch) 76 | print('Epoch: %d - train loss: %.4f - train acc: %.4f' % (epoch, (train_loss / n_batch), (train_acc / n_batch))) 77 | 78 | log_f.write('Epoch: %d - train loss: %.4f - train acc: %.4f' % (epoch, (train_loss / n_batch), (train_acc / n_batch))) 79 | log_f.write('\n') 80 | log_f.flush() 81 | 82 | # validation 83 | val_loss, val_acc, n_batch = 0, 0 , 0 84 | for x_val_a, y_val_a in minibatches(x_val, y_val, batch_size, shuffle=False): 85 | err, ac = sess.run([loss, acc], feed_dict={x: x_val_a, y_: y_val_a}) 86 | val_loss += err 87 | val_acc += ac 88 | n_batch += 1 89 | 90 | val_losses.append(val_loss / n_batch) 91 | val_acces.append(val_acc / n_batch) 92 | #print('Epoch: %d' % epoch, '- validation loss: %.4f' % val_loss, '- validation acc: %.4f' % val_acc) # 没有除n_batch,错误额警示自己 93 | print('Epoch: %d - val loss: %.4f - val acc: %.4f' % (epoch,(val_loss / n_batch), (val_acc / n_batch))) # 为了体现两种输出,上面那种居然忘了除n_batch,找了好长时间bug!!! 94 | 95 | log_f.write('Epoch: %d - val loss: %.4f - val acc: %.4f' % (epoch,(val_loss / n_batch), (val_acc / n_batch))) 96 | log_f.write('\n') 97 | log_f.flush() 98 | 99 | # if epoch % 5 == 0: 100 | # saver.save(sess, "result/model_save/save_net.ckpt",epoch) 101 | # print('Trained Model Saved.') 102 | 103 | sess.close() 104 | 105 | end_time = time.time() 106 | time = end_time - start_time 107 | print('run time:',time) -------------------------------------------------------------------------------- /FlowersImageClassify/models/cnn.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Jun 26 16:38:15 2019 4 | 5 | @author: ZQQ 6 | 7 | tools:Pycharm 8 | 9 | 用tensorflo框架搭建一个卷积神经网络 10 | 参考: 11 | https://www.cnblogs.com/ansang/p/9164805.html 12 | 13 | 数据:http://download.tensorflow.org/example_images/flower_photos.tgz 14 | 15 | """ 16 | 17 | import tensorflow as tf 18 | 19 | # 定义批量标准化函数,有效防止了梯度消失和爆炸,还加速了收敛 20 | #def batch_norm(x, momentum=0.9, epsilon=1e-5, train=True, name='bn'): 21 | # return tf.layers.batch_normalization(x, 22 | # momentum=momentum, 23 | # epsilon=epsilon, 24 | # scale=True, 25 | # training=train, 26 | # name=name) 27 | 28 | def simple_net(x): 29 | ### 卷积,池化 30 | # 第一层卷积层(100->50) 31 | conv1 = tf.layers.conv2d(inputs=x, 32 | filters=32, 33 | #kernel_size=[3,3], # kernel_size = [5,5] 换不同的核大小,查看效果 34 | kernel_size = [5,5], 35 | padding="same", 36 | activation=tf.nn.relu, 37 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01)) # padding="same",卷积层不改变图片大小 38 | #conv1 = batch_norm(conv1, name="pw_bn1") # 加入批量标准化 39 | pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2,2], strides=2) # 池化层图片大小缩小一半 40 | 41 | # 第二个卷积层(50->25) 42 | conv2 = tf.layers.conv2d(inputs=pool1, 43 | filters=64, 44 | kernel_size=[5,5], 45 | padding="same", 46 | activation=tf.nn.relu, 47 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01)) 48 | #conv2 = batch_norm(conv2, name="pw_bn2") 49 | pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2,2], strides=2) 50 | 51 | # 第三个卷积层(25->12) 52 | conv3 = tf.layers.conv2d(inputs=pool2, 53 | filters=128, 54 | kernel_size=[3,3], 55 | padding="same", 56 | activation=tf.nn.relu, 57 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01)) 58 | #conv3 = batch_norm(conv3, name="pw_bn3") 59 | pool3 = tf.layers.max_pooling2d(inputs=conv3, 60 | pool_size=[2,2], 61 | strides=2) 62 | 63 | # 第四个卷积层(12->6) 64 | conv4 = tf.layers.conv2d(inputs=pool3, 65 | filters=128, 66 | kernel_size=[3,3], 67 | padding="same", 68 | activation=tf.nn.relu, 69 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01)) 70 | #conv4 = batch_norm(conv4, name="pw_bn4") 71 | pool4 = tf.layers.max_pooling2d(inputs=conv4, 72 | pool_size=[2,2], 73 | strides=2) 74 | 75 | rel = tf.reshape(pool4,[-1, 6 * 6 * 128]) 76 | 77 | # 防止过拟合,加入dropout 78 | #dropout = tf.layers.dropout(inputs=rel, rate=0.5) 79 | 80 | ### 全连接层 81 | dense1 = tf.layers.dense(inputs=rel, 82 | units=1024, 83 | activation=tf.nn.relu, 84 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01), 85 | kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003)) 86 | 87 | dense2 = tf.layers.dense(inputs=dense1, 88 | units=512, 89 | activation=tf.nn.relu, 90 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01), 91 | kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003)) 92 | 93 | logits = tf.layers.dense(inputs=dense2, 94 | units=5, # 5个类 95 | activation=None, 96 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01), 97 | kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003)) 98 | 99 | pred = tf.nn.softmax(logits, name='prob') # softmax处理 100 | return logits, pred 101 | 102 | ### 四个卷积层,两个全连接层,一个softmax层组成。 103 | ### 在每一层的卷积后面加入batch_normalization, relu, 池化 104 | ### batch_normalization层很好用,加了它之后,有效防止了梯度消失和爆炸,还加速了收敛。 105 | -------------------------------------------------------------------------------- /FlowersImageClassify/models/lenet.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Jun 26 16:38:15 2019 4 | 5 | @author: ZQQ 6 | 7 | tools:Pycharm 8 | 9 | 用tensorflo框架搭建一个卷积神经网络 10 | 参考: 11 | 12 | https://www.cnblogs.com/ansang/p/9164805.html 13 | 14 | 数据:http://download.tensorflow.org/example_images/flower_photos.tgz 15 | 16 | """ 17 | 18 | import tensorflow as tf 19 | 20 | # 定义批量标准化函数,有效防止了梯度消失和爆炸,还加速了收敛 21 | #def batch_norm(x, momentum=0.9, epsilon=1e-5, train=True, name='bn'): 22 | # return tf.layers.batch_normalization(x, 23 | # momentum=momentum, 24 | # epsilon=epsilon, 25 | # scale=True, 26 | # training=train, 27 | # name=name) 28 | 29 | def LeNet(x): 30 | ### 卷积,池化 31 | # 第一层卷积层(100->50) 32 | conv1 = tf.layers.conv2d(inputs=x, 33 | filters=32, 34 | #kernel_size=[3,3], # kernel_size = [5,5] 换不同的核大小,查看效果 35 | kernel_size = [5,5], 36 | padding="same", 37 | activation=tf.nn.relu, 38 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01)) # padding="same",卷积层不改变图片大小 39 | #conv1 = batch_norm(conv1, name="pw_bn1") # 加入批量标准化 40 | pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2,2], strides=2) # 池化层图片大小缩小一半 41 | 42 | # 第二个卷积层(50->25) 43 | conv2 = tf.layers.conv2d(inputs=pool1, 44 | filters=64, 45 | kernel_size=[5,5], 46 | padding="same", 47 | activation=tf.nn.relu, 48 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01)) 49 | #conv2 = batch_norm(conv2, name="pw_bn2") 50 | pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2,2], strides=2) 51 | 52 | # 第三个卷积层(25->12) 53 | conv3 = tf.layers.conv2d(inputs=pool2, 54 | filters=128, 55 | kernel_size=[3,3], 56 | padding="same", 57 | activation=tf.nn.relu, 58 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01)) 59 | #conv3 = batch_norm(conv3, name="pw_bn3") 60 | pool3 = tf.layers.max_pooling2d(inputs=conv3, 61 | pool_size=[2,2], 62 | strides=2) 63 | 64 | rel = tf.reshape(pool3,[-1, 12 * 12 * 128]) 65 | 66 | # 防止过拟合,加入dropout 67 | #dropout = tf.layers.dropout(inputs=rel, rate=0.5) 68 | 69 | ### 全连接层 70 | dense1 = tf.layers.dense(inputs=rel, 71 | units=1024, 72 | activation=tf.nn.relu, 73 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01), 74 | kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003)) 75 | 76 | dense2 = tf.layers.dense(inputs=dense1, 77 | units=512, 78 | activation=tf.nn.relu, 79 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01), 80 | kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003)) 81 | 82 | logits = tf.layers.dense(inputs=dense2, 83 | units=5, # 5个类 84 | activation=None, 85 | kernel_initializer=tf.truncated_normal_initializer(stddev=0.01), 86 | kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003)) 87 | 88 | pred = tf.nn.softmax(logits, name='prob') # softmax处理 89 | return logits, pred 90 | 91 | ### 四个卷积层,两个全连接层,一个softmax层组成。 92 | ### 在每一层的卷积后面加入batch_normalization, relu, 池化 93 | ### batch_normalization层很好用,加了它之后,有效防止了梯度消失和爆炸,还加速了收敛。 94 | -------------------------------------------------------------------------------- /FlowersImageClassify/read_img.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Jun 26 09:23:46 2019 4 | 5 | @author: ZQQ 6 | """ 7 | 8 | import numpy as np 9 | import os 10 | import glob 11 | from skimage import transform,io 12 | import random 13 | 14 | # 定义超参数 15 | w = 100 16 | h = 100 17 | 18 | #### 定义读取图片的函数:read_img() 19 | def read_img(path): 20 | data_list = [path + x for x in os.listdir(path) if os.path.isdir(path + x)] # 所有图片分类目录 21 | imgs = [] # 定义一个imgs空列表,存放遍历读取的图片 22 | labels = [] # 定义一个labels空列表,存放图片标签 23 | for idx, folder in enumerate(data_list): # 遍历每个文件夹中的图片,idx表示 24 | for im in glob.glob(folder + '/*.jpg'): # *:匹配0个或多个字符 25 | print('reading the images:%s' % (im)) 26 | img = io.imread(im) 27 | img = transform.resize(img, (w, h)) # 将所有图片的尺寸统一为:100*100(宽度*高度) 28 | with open('datasets_name.txt','w') as f: 29 | f.write(folder+im+'_'+str(idx)+'\n') 30 | imgs.append(img) # 遍历后更改尺寸后的图片添加到imgs列表中 31 | labels.append(idx) # 遍历后更改尺寸后的图片标签添加到labels列表中 32 | return np.asarray(imgs, np.float32), np.asarray(labels, np.int32) # np.float32是类型 后面两个变量是没有进行np.asarray 33 | 34 | # np.asarray 和 np.array 35 | # np.array与np.asarray功能是一样的,都是将输入转为矩阵格式。 36 | # 主要区别在于 np.array(默认情况下)将会copy该对象,而 np.asarray除非必要,否则不会copy该对象。 37 | 38 | ### 定义随机打乱数据集的函数:shuffle_data() 39 | def shuffle_data(data,label): 40 | # 打乱顺序 41 | data_size = data.shape[0] # 数据集个数 42 | arr = np.arange(data_size) # 生成0到datasize个数 43 | np.random.shuffle(arr) # 随机打乱arr数组 44 | data = data[arr] # 将data以arr索引重新组合 45 | label = label[arr] # 将label以arr索引重新组合 46 | 47 | # # 打乱数据顺序的另一种方法,当然还有其他的方法 48 | # index = [i for i in range(len(data))] 49 | # random.shuffle(index) 50 | # data = data[index] 51 | # label = label[index] 52 | 53 | # 将所有数据分为训练集和验证集 54 | ratio = 0.8 # 训练集比例 55 | num = np.int(len(data) * ratio) 56 | x_train = data[:num] 57 | y_train = label[:num] 58 | x_val = data[num:] 59 | y_val = label[num:] 60 | 61 | return x_train, y_train, x_val, y_val 62 | 63 | #path = 'flower_photos/' # 所有图片的总路径(目录) 64 | #data, label = read_img(path) 65 | -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/train_acc_100_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/train_acc_100_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/train_acc_10_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/train_acc_10_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/train_acc_5_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/train_acc_5_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/train_loss_100_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/train_loss_100_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/train_loss_10_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/train_loss_10_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/train_loss_5_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/train_loss_5_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/val_acc_100_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/val_acc_100_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/val_acc_10_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/val_acc_10_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/val_acc_5_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/val_acc_5_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/val_loss_100_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/val_loss_100_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/val_loss_10_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/val_loss_10_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_pic/val_loss_5_epoches.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_pic/val_loss_5_epoches.png -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_save/checkpoint: -------------------------------------------------------------------------------- 1 | model_checkpoint_path: "save_net.ckpt-95" 2 | all_model_checkpoint_paths: "save_net.ckpt-75" 3 | all_model_checkpoint_paths: "save_net.ckpt-80" 4 | all_model_checkpoint_paths: "save_net.ckpt-85" 5 | all_model_checkpoint_paths: "save_net.ckpt-90" 6 | all_model_checkpoint_paths: "save_net.ckpt-95" 7 | -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_save/save_net.ckpt-75.data-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_save/save_net.ckpt-75.data-00000-of-00001 -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_save/save_net.ckpt-75.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_save/save_net.ckpt-75.index -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_save/save_net.ckpt-75.meta: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_save/save_net.ckpt-75.meta -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_save/save_net.ckpt-80.data-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_save/save_net.ckpt-80.data-00000-of-00001 -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/train_acces_100_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/train_acces_100_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/train_acces_10_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/train_acces_10_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/train_losses_100_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/train_losses_100_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/train_losses_10_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/train_losses_10_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/val_acces_100_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/val_acces_100_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/val_acces_10_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/val_acces_10_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/val_losses_100_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/val_losses_100_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/result/model_variable/val_losses_10_epoches.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AugustMe/TensorFlow-Learn/f925649ba577b269a0aefbffd81f0340becdc0d4/FlowersImageClassify/result/model_variable/val_losses_10_epoches.npy -------------------------------------------------------------------------------- /FlowersImageClassify/run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Jun 26 10:54:57 2019 4 | 5 | @author: ZQQ 6 | """ 7 | 8 | import time 9 | from read_img import read_img, shuffle_data 10 | import tensorflow as tf 11 | from models import cnn 12 | from batch_get_data import minibatches 13 | 14 | start_time = time.time() 15 | 16 | # 数据:http://download.tensorflow.org/example_images/flower_photos.tgz 17 | # 花总共有五类,分别放在5个文件夹下。 18 | path = 'flower_photos/' # 设置图片路径 19 | 20 | # 设置超参数,准备将所有的图片resize成100*100 21 | w = 100 # 宽度 22 | h = 100 # 高度 23 | c = 3 # 图片通道数 24 | 25 | # step1: 加载数据集 26 | print('step1:load the datasets...') 27 | 28 | data, label = read_img(path) # 调用read_img()函数,读取图片数据和对应的标签 29 | x_train, y_train, x_val, y_val = shuffle_data(data,label) # 调用shuffle_data()函数,打乱数据集,并划分数据集 30 | 31 | # step2: 构建模型并开始训练、测试 32 | print('step2: build the model and training...') 33 | 34 | def start_run(num_epoches, batch_size): 35 | 36 | # 占位符 37 | x = tf.placeholder(tf.float32, shape=[None, w, h, c], name='x') 38 | y_ = tf.placeholder(tf.int32, shape=[None, ], name='y_') 39 | 40 | logits, pred = cnn.simple_net(x) # pred 是经过softmax处理过的 41 | loss = tf.losses.sparse_softmax_cross_entropy(labels=y_, logits=logits) 42 | #train_op = tf.train.AdadeltaOptimizer(learning_rate=0.001).minimize(loss) 43 | train_op = tf.train.AdamOptimizer(learning_rate=0.001).minimize(loss) # 和上面的优化器不同 44 | correct_prediction = tf.equal(tf.cast(tf.argmax(logits, 1), tf.int32), y_) 45 | acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) 46 | 47 | #num_epoches = 10 48 | #batch_size = 64 49 | train_losses = [] 50 | train_acces = [] 51 | val_losses = [] 52 | val_acces = [] 53 | sess = tf.InteractiveSession() 54 | sess.run(tf.global_variables_initializer()) 55 | saver = tf.train.Saver() 56 | 57 | with open("log.txt", "w") as log_f: 58 | for epoch in range(num_epoches): 59 | train_loss, train_acc, n_batch = 0, 0 , 0 60 | for x_train_a, y_train_a in minibatches(x_train, y_train, batch_size, shuffle=True): 61 | _, err, ac = sess.run([train_op, loss, acc], feed_dict={x: x_train_a, y_: y_train_a}) 62 | train_loss += err 63 | train_acc += ac 64 | n_batch += 1 65 | 66 | train_losses.append(train_loss / n_batch) 67 | train_acces.append(train_acc / n_batch) 68 | print('Epoch: %d - train loss: %.4f - train acc: %.4f' % (epoch, (train_loss / n_batch), (train_acc / n_batch))) 69 | 70 | log_f.write('Epoch: %d - train loss: %.4f - train acc: %.4f' % (epoch, (train_loss / n_batch), (train_acc / n_batch))) 71 | log_f.write('\n') 72 | log_f.flush() 73 | 74 | # validation 75 | val_loss, val_acc, n_batch = 0, 0 , 0 76 | for x_val_a, y_val_a in minibatches(x_val, y_val, batch_size, shuffle=False): 77 | err, ac = sess.run([loss, acc], feed_dict={x: x_val_a, y_: y_val_a}) 78 | val_loss += err 79 | val_acc += ac 80 | n_batch += 1 81 | 82 | val_losses.append(val_loss / n_batch) 83 | val_acces.append(val_acc / n_batch) 84 | #print('Epoch: %d' % epoch, '- validation loss: %.4f' % val_loss, '- validation acc: %.4f' % val_acc) # 没有除n_batch,错误额警示自己 85 | print('Epoch: %d - val loss: %.4f - val acc: %.4f' % (epoch,(val_loss / n_batch), (val_acc / n_batch))) # 为了体现两种输出,上面那种居然忘了除n_batch,找了好长时间bug!!! 86 | 87 | log_f.write('Epoch: %d - val loss: %.4f - val acc: %.4f' % (epoch,(val_loss / n_batch), (val_acc / n_batch))) 88 | log_f.write('\n') 89 | log_f.flush() 90 | 91 | if epoch % 5 == 0: 92 | saver.save(sess, "result/model_save/save_net.ckpt",epoch) 93 | print('Trained Model Saved.') 94 | 95 | sess.close() 96 | return train_losses, train_acces, val_losses, val_acces 97 | 98 | num_epoches = 100 99 | batch_size = 64 100 | train_losses, train_acces, val_losses, val_acces = start_run(num_epoches, batch_size) # 调用函数 101 | 102 | # 保存训练和测试过程中产生的变量 103 | import numpy as np 104 | 105 | train_losses_ = np.array(train_losses).reshape(len(train_losses),1) # 先将list类型保存为numpy 106 | np.save('result/model_variable/train_losses_%d_epoches' %num_epoches,train_losses_) # 保存为npy类型 107 | 108 | train_acces_ = np.array(train_acces).reshape(len(train_acces),1) # 先将list类型保存为numpy 109 | np.save('result/model_variable/train_acces_%d_epoches' %num_epoches,train_acces_) # 保存为npy类型 110 | 111 | val_losses_ = np.array(val_losses).reshape(len(val_losses),1) # 先将list类型保存为numpy 112 | np.save('result/model_variable/val_losses_%d_epoches' %num_epoches,val_losses_) # 保存为npy类型 113 | 114 | val_acces_ = np.array(val_acces).reshape(len(val_acces),1) # 先将list类型保存为numpy 115 | np.save('result/model_variable/val_acces_%d_epoches' %num_epoches,val_acces_) # 保存为npy类型 116 | 117 | # 绘制损失函数图、训练/测试过程图 118 | import matplotlib.pyplot as plt 119 | 120 | plt.figure(1) 121 | plt.title('train loss') 122 | plt.plot(np.arange(len(train_losses)),train_losses,label='train loss') 123 | plt.savefig('result/model_pic/train_loss_%d_epoches' %num_epoches, dpi=600) 124 | plt.legend() 125 | plt.show() 126 | 127 | plt.figure(2) 128 | plt.title('train acc') 129 | plt.plot(np.arange(len(train_acces)),train_acces,label='train acc') 130 | plt.savefig('result/model_pic/train_acc_%d_epoches' %num_epoches, dpi=600) 131 | plt.legend() 132 | plt.show() 133 | 134 | plt.figure(3) 135 | plt.title('val loss') 136 | plt.plot(np.arange(len(val_losses)),val_losses,label='val loss') 137 | plt.savefig('result/model_pic/val_loss_%d_epoches' %num_epoches, dpi=600) 138 | plt.legend() 139 | plt.show() 140 | 141 | plt.figure(4) 142 | plt.title('val acc') 143 | plt.plot(np.arange(len(val_acces)),val_acces,label='val acc') 144 | plt.savefig('result/model_pic/val_acc_%d_epoches' %num_epoches, dpi=600) 145 | plt.legend() 146 | plt.show() 147 | 148 | end_time = time.time() 149 | time = end_time - start_time 150 | print('run time:',time) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 基于TensorFlow实现图片分类 2 | 3 | 给我个star再fork啊,谢谢 4 | 5 | 结合项目介绍:https://blog.csdn.net/AugustMe 6 | --------------------------------------------------------------------------------