"""Trains a simple deep NN on the downscaled MNIST dataset.

Based on:
https://raw.githubusercontent.com/keras-team/keras/master/examples/mnist_mlp.py
"""
from __future__ import print_function

import keras  # needed: keras.utils / keras.constraints are referenced below
from keras import regularizers
from keras.datasets import mnist
from keras.layers import Dense, Dropout, MaxPooling2D, Flatten, AveragePooling2D
from keras.models import Sequential
from keras.optimizers import RMSprop, SGD, Adam
    23 (x_train, y_train), (x_test, y_test) = mnist.load_data()
    25 x_train = x_train.reshape(60000,28,28,1)
    26 x_test = x_test.reshape(10000, 28,28,1)
    27 x_train = x_train.astype(
'float32')
    28 x_test = x_test.astype(
'float32')
    31 print(x_train.shape, 
'train samples')
    32 print(x_test.shape, 
'test samples')
    35 y_train = keras.utils.to_categorical(y_train, num_classes)
    36 y_test = keras.utils.to_categorical(y_test, num_classes)
    62 model.add(AveragePooling2D(pool_size=(3,3), input_shape=(28,28,1), data_format = 
"channels_last"))
    64 model.add(
Dense(100, activation=
'relu', 
    66                 kernel_constraint=keras.constraints.NonNeg(),kernel_regularizer=regularizers.l2(0.001)
    68 model.add(
Dense(num_classes, activation=
'relu', 
    70                 kernel_constraint=keras.constraints.NonNeg(),kernel_regularizer=regularizers.l2(0.001)
    74 model.compile(loss=
'categorical_hinge',
    75               optimizer=Adam(lr=0.001),
    78 history = model.fit(x_train_new, y_train,
    79                     batch_size=batch_size,
    82                     validation_data=(x_test_new, y_test))
    83 score = model.evaluate(x_test_new, y_test, verbose=0)
    84 print(
'Test loss:', score[0])
    85 print(
'Test accuracy:', score[1])
    88 model.save_weights(
'dnn_spikey.h5')
    89 json_string = model.to_json()
    90 with open(
'dnn_spikey.json', 
'w') 
as file:
    91     file.write(json_string)