Deep Learning: Keras + Reuters

Copyright notice: this is the blogger's original article and may not be reproduced without permission. https://blog.csdn.net/DreamHome_S/article/details/82954011

Classifying newswires: a multiclass classification example

# -*- coding: utf-8 -*-

"""
@Date: 2018/10/6

@Author: dreamhomes

@Summary: Reuters newswire topic classification (46 classes) with a densely connected Keras network
"""
from keras.datasets import reuters
from keras.utils.np_utils import to_categorical
from keras import models
from keras import layers

import numpy as np
import matplotlib.pyplot as plt

# Train 8982 Test 2246
(train_data, train_labels), (test_data,
                             test_labels) = reuters.load_data(num_words=10000)
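# num_words=10000 keeps only the 10,000 most frequent words in the training
# data; rarer words are discarded so the input vectors stay a manageable size.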
# print(len(train_data), len(test_data))
# print(train_labels)

word_index = reuters.get_word_index()

reverse_word_index = dict([(value, key)
                           for (key, value) in word_index.items()])

# decoded_newswire = ' '.join([reverse_word_index.get(i - 3, '?')
#                            for i in train_data[0]])
# print(decoded_newswire)
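# The offset of 3 in the decode snippet above is needed because indices 0, 1
# and 2 are reserved for "padding", "start of sequence" and "unknown".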


def vectorize_sequences(sequences, dimension=10000):
    """
    one hot encoding - Turn lists to vectors/tensors
    :param sequences:
    :param dimension:
    :return:
    """
    results = np.zeros((len(sequences), dimension))
    for i, sequence in enumerate(sequences):
        results[i, sequence] = 1.
    return results
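# Quick sanity check of the encoding on a hypothetical toy input: indices 0
# and 2 of a 4-dimensional vector are set to 1, everything else stays 0.
# print(vectorize_sequences([[0, 2]], dimension=4))  # -> [[1. 0. 1. 0.]]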


x_train = vectorize_sequences(train_data)
x_test = vectorize_sequences(test_data)

one_hot_train_labels = to_categorical(train_labels)
one_hot_test_labels = to_categorical(test_labels)
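# Equivalent alternative (not used here): keep the labels as integers and
# compile the model with loss='sparse_categorical_crossentropy' instead of
# one-hot encoding them with to_categorical.
# y_train = np.array(train_labels)
# y_test = np.array(test_labels)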

model = models.Sequential()
model.add(layers.Dense(64, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(46, activation='softmax'))
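# The last layer has 46 units because the Reuters dataset has 46 mutually
# exclusive topics; softmax turns the output into a probability distribution
# over those topics.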

model.compile(
    optimizer='rmsprop',
    # categorical_crossentropy matches the one-hot labels and 46-way softmax
    # output (binary_crossentropy is for binary / multi-label problems)
    loss='categorical_crossentropy',
    metrics=['accuracy'])

x_val = x_train[:1000]
partial_x_train = x_train[1000:]

y_val = one_hot_train_labels[:1000]
partial_y_train = one_hot_train_labels[1000:]
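# Hold out the first 1,000 training samples as a validation set so that loss
# and accuracy on unseen data can be monitored during training.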

history = model.fit(
    partial_x_train,
    partial_y_train,
    epochs=20,
    batch_size=512,
    validation_data=(
        x_val,
        y_val))


# Plot the training and validation loss and accuracy curves.
# Note: with newer tf.keras versions the history keys are 'accuracy' and
# 'val_accuracy' rather than 'acc' and 'val_acc'.
history_dict = history.history
loss_values = history_dict['loss']
val_loss_values = history_dict['val_loss']
acc_values = history_dict['acc']
val_acc_values = history_dict['val_acc']

epochs = range(1, len(history_dict['acc']) + 1)

plt.figure()
plt.plot(epochs, loss_values, 'bo', label='Training loss')
plt.plot(epochs, val_loss_values, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()

plt.figure()
plt.plot(epochs, acc_values, 'bo', label='Training acc')
plt.plot(epochs, val_acc_values, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

result = model.evaluate(x_test, one_hot_test_labels)
print(model.metrics_names)
print(result)
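After evaluating, the trained model can also generate topic predictions for new newswires. A minimal sketch (assuming the model and x_test defined above): predict returns a 46-way probability distribution for each input, and np.argmax picks the most likely topic index.

predictions = model.predict(x_test)
print(predictions[0].shape)       # (46,): one probability per topic
print(np.sum(predictions[0]))     # ~1.0, since softmax outputs a distribution
print(np.argmax(predictions[0]))  # most likely topic for the first test newswire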
