
吳裕雄 Python Neural Networks (6)

 

import numpy as np
np.random.randint(0, 49, 3)   # quick sanity check: three random integers in [0, 49)

## required libraries
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Conv2D, MaxPooling2D

### MNIST dataset (the input_data reader ships with TensorFlow 1.x)
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("./MNIST_data", one_hot=False)
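If the TF 1.x tutorial reader above is unavailable (it was removed in TensorFlow 2), roughly the same data can be loaded through keras.datasets. A minimal sketch, not the author's original path: this loader returns uint8 images of shape (N, 28, 28) with values 0-255, so they are flattened and scaled here to match the reader's output.

```python
# alternative loader (sketch, assuming keras.datasets is available)
from keras.datasets import mnist as keras_mnist

(alt_train_X, alt_train_Y), (alt_test_X, alt_test_Y) = keras_mnist.load_data()
# flatten to (N, 784) and scale to [0, 1] to mirror the input_data reader
alt_train_X = alt_train_X.reshape(-1, 784).astype('float32') / 255.0
alt_test_X = alt_test_X.reshape(-1, 784).astype('float32') / 255.0
```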

## Establish train and test datasets
train_X, train_Y = mnist.train.images, mnist.train.labels
test_X, test_Y = mnist.test.images, mnist.test.labels

print(train_X.shape, train_Y.shape, test_X.shape, test_Y.shape)   # (55000, 784) (55000,) (10000, 784) (10000,)

train_Y[80]   # => 3

import matplotlib.pyplot as plt
%matplotlib inline
plt.imshow(np.reshape(train_X[80],(28,28)),cmap='gray')
plt.show()

from keras.utils import np_utils   # one-hot encoding utilities
n_classes = 10
train_y = np_utils.to_categorical(train_Y, n_classes)
test_y = np_utils.to_categorical(test_Y, n_classes)

print(train_y.shape,test_y.shape)

train_y[0]                        # one-hot vector for the first training example
np.argmax(train_y[0], axis=0)     # => 7 (the original integer label)
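To make the round trip explicit, here is a minimal sketch (numpy only; the variable names are illustrative) of what to_categorical and argmax do to a single label:

```python
import numpy as np

label = 7                        # an integer class label, as in train_Y
n_classes = 10

# one-hot encode: a length-10 vector with a 1 at index `label`
one_hot = np.zeros(n_classes)
one_hot[label] = 1.0             # equivalent to keras.utils.to_categorical(label, 10)

# argmax recovers the integer label from the one-hot vector
recovered = int(np.argmax(one_hot))
assert recovered == label
```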

Drop_prob = 0.2
from keras.layers import Activation, Flatten
### Define the model as a Sequential (layer-by-layer) model ###
model = Sequential()

### CONVOLUTION LAYER 1 ###
model.add(Conv2D(filters=32, kernel_size=(3, 3), input_shape=(28, 28, 1), strides=(1, 1), padding='same'))
model.add(Activation("relu"))

### POOLING LAYER 1 ###
model.add(MaxPooling2D(pool_size=(2, 2), padding='same'))
model.add(Dropout(Drop_prob))

### CONVOLUTION LAYER 2 ###
# input_shape is only needed on the first layer; Keras infers (14, 14, 32) here
model.add(Conv2D(filters=64, kernel_size=(3, 3), strides=(1, 1), padding='same'))
model.add(Activation("relu"))

### POOLING LAYER 2 ###
model.add(MaxPooling2D(pool_size=(2, 2), padding='same'))
model.add(Dropout(Drop_prob))

### CONVOLUTION LAYER 3 ###
# Keras infers the (7, 7, 64) input shape from the previous layer
model.add(Conv2D(filters=128, kernel_size=(3, 3), strides=(1, 1), padding='same'))
model.add(Activation("relu"))

### POOLING LAYER 3 ###
model.add(MaxPooling2D(pool_size=(2, 2), padding='same'))
model.add(Flatten())
model.add(Dropout(Drop_prob))
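At this point the spatial dimensions have been halved three times (with padding='same', each 2x2 pooling rounds up): 28x28 -> 14x14 -> 7x7 -> 4x4, so Flatten produces a 4*4*128 = 2048-dimensional vector. A quick sketch of that arithmetic (the pooled_dim helper is just for illustration):

```python
import math

def pooled_dim(d, pool=2):
    # MaxPooling2D with padding='same' and the default stride (== pool_size)
    # rounds the output size up
    return math.ceil(d / pool)

d = 28
for layer in range(3):
    d = pooled_dim(d)          # 28 -> 14 -> 7 -> 4
print(d, d * d * 128)          # 4 2048  (length of the flattened vector)
```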

### FULLY CONNECTED (FC) LAYER 1 ###
model.add(Dense(units=128, activation="relu"))
model.add(Dropout(0.5))

### FULLY CONNECTED (FC) LAYER 2 ###
model.add(Dense(units=512, activation="relu"))
model.add(Dropout(0.5))

### FULLY CONNECTED (FC) OUTPUT LAYER: softmax over the n_classes digits ###
model.add(Dense(units=n_classes, activation="softmax"))
model.summary()

model.summary() lists the trainable parameters of each layer; for example, the second convolution layer has 3*3*32*64 + 64 = 18,496 parameters (kernel weights plus one bias per filter).
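A small sketch of that per-layer arithmetic (kernel height x kernel width x input channels x filters, plus one bias per filter; the helper name is illustrative):

```python
def conv2d_params(kernel_h, kernel_w, in_channels, filters):
    # weights for every (kernel_h x kernel_w x in_channels) patch per filter, plus biases
    return kernel_h * kernel_w * in_channels * filters + filters

print(conv2d_params(3, 3, 1, 32))     # conv layer 1:   320
print(conv2d_params(3, 3, 32, 64))    # conv layer 2: 18496
print(conv2d_params(3, 3, 64, 128))   # conv layer 3: 73856
```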

from keras.optimizers import Adam
## reshape the flat 784-pixel vectors into 28x28x1 images (both splits, so evaluation works later)
train_X = np.reshape(train_X, (train_X.shape[0], 28, 28, 1))
test_X = np.reshape(test_X, (test_X.shape[0], 28, 28, 1))
## compile
model.compile(optimizer=Adam(), loss="categorical_crossentropy", metrics=['accuracy'])
## train
model.fit(train_X, train_y, epochs=100, batch_size=256, verbose=1)

evaluation = model.evaluate(test_X, test_y, batch_size=256, verbose=0)
print("loss: %.4f, accuracy: %.4f" % (evaluation[0], evaluation[1]))