import keras
from keras.models import Sequential
import numpy as np
import pandas as pd
from keras.layers import Dense, Dropout
import random
import matplotlib.pyplot as plt
from tensorflow.examples.tutorials.mnist import input_data
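# note: tensorflow.examples.tutorials.mnist is only available in TensorFlow 1.x (it was removed in TF 2.x)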
from tkinter import filedialog
import tkinter.messagebox  # needed for the message/dialog boxes
file_path = filedialog.askdirectory()
mnist = input_data.read_data_sets(file_path, validation_size=0)
# randomly pick one handwritten digit from the training set and plot it
num = random.randint(0, len(mnist.train.images) - 1)  # randint is inclusive on both ends
img = mnist.train.images[num]
plt.imshow(img.reshape((28, 28)), cmap='Greys_r')
plt.show()
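# optionally, print the label of the sampled digit for reference
# (labels are plain integers here because read_data_sets defaults to one_hot=False)
print(mnist.train.labels[num])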
x_train = mnist.train.images
y_train = mnist.train.labels
x_test = mnist.test.images
y_test = mnist.test.labels
# reshaping x_train, y_train, x_test and y_test to conform to MLP input and output dimensions
x_train = np.reshape(x_train, (x_train.shape[0], -1))
x_test = np.reshape(x_test, (x_test.shape[0], -1))
y_train = pd.get_dummies(y_train)
y_test = pd.get_dummies(y_test)
# performing one-hot encoding on target variables for train and test
y_train = np.array(y_train)
y_test = np.array(y_test)
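# an equivalent approach: keras.utils.to_categorical produces the same one-hot arrays directly
# y_train = keras.utils.to_categorical(mnist.train.labels, num_classes=10)
# y_test = keras.utils.to_categorical(mnist.test.labels, num_classes=10)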
# defining model with one input layer [784 neurons], 1 hidden layer [784 neurons] with dropout rate 0.4 and 1 output layer [10 neurons]
model=Sequential()
model.add(Dense(784, input_dim=784, activation='relu'))
model.add(Dropout(rate=0.4))  # the dropout layer must be added to the model to take effect
model.add(Dense(10, input_dim=784, activation='softmax'))
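# optionally, print a summary of the layers and parameter counts
model.summary()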
# compiling model using the adam optimiser and accuracy as metric
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# fitting model and performing validation
model.fit(x_train, y_train, epochs=20, batch_size=200, validation_data=(x_test, y_test))
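# optionally, report the final loss and accuracy on the held-out test set
score = model.evaluate(x_test, y_test, batch_size=200)
print('test loss:', score[0], 'test accuracy:', score[1])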
y_test1 = pd.DataFrame(model.predict(x_test, batch_size=200))
y_pre = y_test1.idxmax(axis = 1)
result = pd.DataFrame({'test': np.argmax(y_test, axis=1), 'pre': y_pre})  # y_test is one-hot, so take argmax to recover integer labels
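# a quick sanity check: the fraction of predictions that match the true labels
print('prediction accuracy:', (result['pre'] == result['test']).mean())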
tkinter.messagebox.showinfo('Message', 'Completed!')