Using Keras


1 Data

http://pan.baidu.com/s/1qCdS6
From http://blog.csdn.net/u012162613/article/details/45397033

2 Using Keras (Jupyter notebook)

Reference:
https://elitedatascience.com/keras-tutorial-deep-learning-in-python#step-4

mnist_folder = '/home/hx133330/data/mnist'
import os
import numpy as np
import cv2
from PIL import Image
len(os.listdir(mnist_folder))
42000
data = np.empty((42000,1,28,28),dtype = np.float32)
label = np.empty((42000,),dtype = np.uint8)
imgs = os.listdir(mnist_folder)
imgs[0]
'5.552.jpg'
# File names have the form '<label>.<index>.jpg': the label is the part before
# the first dot, and cv2.imread(..., 0) loads each image as a 28x28 grayscale array.
for i in range(len(imgs)):
    img = cv2.imread(os.path.join(mnist_folder,imgs[i]),0)
    data[i,:,:,:] = img
    label[i] = int(imgs[i].split('.')[0])
data.shape
(42000, 1, 28, 28)
print label[0:10]
[5 7 7 0 5 3 2 6 3 1]
from matplotlib import pyplot as plt
plt.imshow(data[0,0,:,:])
plt.show()

(plot of the first image, a handwritten 5)

data /= 255  # scale pixel values from [0, 255] to [0, 1]
from keras.utils import np_utils
label = np_utils.to_categorical(label, 10)
label[0]
array([ 0.,  0.,  0.,  0.,  0.,  1.,  0.,  0.,  0.,  0.])
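To go back from a one-hot row to the integer class (handy later when inspecting predictions), np.argmax inverts to_categorical:

np.argmax(label[0])   # -> 5, the original integer label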
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
help(Convolution2D)
Help on class Convolution2D in module keras.layers.convolutional:

class Convolution2D(keras.engine.topology.Layer)
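The help output above is truncated and does not show the constructor. In the Keras 1.x API used in this post, the first three positional arguments of Convolution2D are the number of filters and the kernel height and width, so the layer added below could equivalently be written with keywords (a sketch, assuming the Keras 1.x parameter names nb_filter, nb_row, nb_col):

Convolution2D(nb_filter=32, nb_row=3, nb_col=3, activation='relu', input_shape=(28, 28, 1))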

X_train = data.reshape(data.shape[0], 28, 28, 1)
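os.listdir gives the files in no particular order; if a fixed shuffle of the training set is wanted (for example before splitting off a validation slice), X_train and label can be permuted together. A minimal sketch using the arrays built above:

shuffle_idx = np.random.permutation(X_train.shape[0])  # one random ordering of all 42000 indices
X_train = X_train[shuffle_idx]
label = label[shuffle_idx]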
model = Sequential()
model.add(Convolution2D(32, 3, 3, activation='relu', input_shape=(28,28,1)))
print model.output_shape
(None, 26, 26, 32)
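The spatial size drops from 28 to 26 because the default border_mode is 'valid': a 3x3 kernel fits in 28 - 3 + 1 = 26 positions along each axis, and the 32 filters become the 32 output channels.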
model.add(Convolution2D(32, 3, 3, activation='relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Dropout(0.25))
print model.output_shape
(None, 12, 12, 32)
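Same arithmetic again: the second 3x3 convolution reduces 26 to 24, and the 2x2 max pooling halves that to 12.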
model.add(Flatten())
print model.output_shape
(None, 4608)
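Flatten simply unrolls the 12x12x32 feature maps into a vector of 12 * 12 * 32 = 4608 values.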
model.add(Dense(128, activation='relu'))
print model.output_shape
(None, 128)
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))
print model.output_shape
(None, 10)
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
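categorical_crossentropy expects one-hot targets, which is why label was converted with to_categorical above.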
model.fit(X_train, label, 
          batch_size=32, nb_epoch=10, verbose=1)
Epoch 1/10
16960/42000 [===========>..................] - ETA: 19s - loss: 0.4346 - acc: 0.8631 
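The call above fits on all 42000 images, so the printed accuracy is measured on the training data itself. A rough sketch of how a held-out estimate could be obtained with the same arrays, assuming the Keras 1.x fit/evaluate API:

model.fit(X_train, label, batch_size=32, nb_epoch=10, verbose=1,
          validation_split=0.1)   # hold out the last 10% of samples for validation
score = model.evaluate(X_train, label, verbose=0)
print score                        # [loss, accuracy] for the data passed to evaluate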
