import tensorflow as tf
from tensorflow.keras.datasets import mnist
import cv2
import argparse
import numpy as np
#from resnet import ResNet
#from callbacks.epochcheckpoint import EpochCheckpoint
#from callbacks.trainingmonitor import TrainingMonitor
import os
import sklearn
from tensorflow.keras import backend as K
from sklearn.preprocessing import LabelBinarizer
from pysim.my_resnet import my_ResNet
from pysim import config
from pysim.clr_callback import CyclicLR
import matplotlib.pyplot as plt

1. Problems with conventional learning rates

  • We do not know the optimal initial learning rate for our model
  • A monotonically decreasing learning rate can cause our model to get "stuck" in plateaus of the loss landscape

Instead, we can use cyclical learning rates to oscillate our learning rate between an upper and a lower bound, which allows us to:

  • Choose the initial learning rate with more freedom
  • Break out of saddle points and local minima

2. The cyclical learning rate

Cyclical learning rates take a different approach. They are characterized by three choices:

  • Define a minimum learning rate
  • Define a maximum learning rate
  • Let the learning rate oscillate cyclically between the two bounds

In practice, applying cyclical learning rates leads to faster convergence, achieved with fewer experiments and hyperparameter updates. Used in combination with a reasonable initial learning rate, a cyclical learning rate leaves very little fine-tuning of the hyperparameters for the model.

3. Die "dreieckige" Politik

4. Die "dreieckige2" Politik

5. Die "exp_range" Politik

To use the implementation, we first have to define a few parameters:

  • Batch size: Number of training examples used in a single forward and backward pass of the model during training.
  • Batch/Iteration: Number of weight updates per epoch (i.e., the total number of training examples divided by the batch size).
  • Cycle: Number of iterations it takes for our learning rate to go from the lower bound, ascend to the upper bound, and then descend back to the lower bound again.
  • Step size: Number of iterations in a half cycle. Leslie Smith, the creator of CLRs, recommends a step_size of (2-8) * training_iterations_in_epoch. In practice, a multiplier of 4 or 8 works well in most situations.

    NUM_CLR_CYCLES = NUM_EPOCHS / STEP_SIZE / 2
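
The config.* values referenced below come from pysim/config.py, which is not shown in this notebook. A plausible sketch of that module (every value here is an assumption, chosen only to be consistent with the "triangular" log line and the 96 epochs in the training output below):

# pysim/config.py -- assumed contents, not the original file
CLR_METHOD = "triangular"  # matches the "[INFO] using 'triangular' method" output
MIN_LR = 1e-7              # lower bound of the cycle (assumed)
MAX_LR = 1e-2              # upper bound of the cycle (assumed)
BATCH_SIZE = 64            # assumed
STEP_SIZE = 8              # half-cycle length, in epochs (assumed)
NUM_EPOCHS = 96            # matches the training log below
TRAINING_PLOT_PATH = "output/training_plot.png"  # assumed path
CLR_PLOT_PATH = "output/clr_plot.png"            # assumed path

# With these values: NUM_CLR_CYCLES = 96 / 8 / 2 = 6 full cycles.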

(trainX, trainy), (testX, testy) = mnist.load_data()
print(trainX.shape, trainy.shape, testX.shape, testy.shape)
# keep a 100-sample subset so the demo trains quickly
trainX, trainy, testX, testy = trainX[:100], trainy[:100], testX[:100], testy[:100]
# resize images to dimension required by model
trainX = np.array([cv2.resize(image, (32, 32)) for image in trainX])
testX  = np.array([cv2.resize(image, (32, 32)) for image in testX])
# scale images between 0-1 floats
trainX = trainX.astype("float32")/255.
testX  = testX.astype("float32")/255.
# reshape (batch, size1, size2, 1)
trainX = np.reshape(trainX, (len(trainX), 32, 32, 1))
testX  = np.reshape(testX,  (len(testX), 32, 32, 1))
print(trainX.shape, trainy.shape, testX.shape, testy.shape)
# binarizer of y-label
lb = LabelBinarizer()
trainy = lb.fit_transform(trainy)
testy = lb.transform(testy)
(60000, 28, 28) (60000,) (10000, 28, 28) (10000,)
(100, 32, 32, 1) (100,) (100, 32, 32, 1) (100,)
model = my_ResNet(10)
optimizer = tf.keras.optimizers.SGD(learning_rate=0.001)  # `lr` is deprecated; the CLR callback will override this value anyway
loss = tf.keras.losses.CategoricalCrossentropy()  # labels are one-hot over 10 classes, so categorical (not binary) cross-entropy is appropriate
model.compile(optimizer = optimizer, loss = loss, metrics = ["accuracy"])
print("[INFO] using '{}' method".format(config.CLR_METHOD))
clr = CyclicLR(mode=config.CLR_METHOD,
               base_lr=config.MIN_LR,
               max_lr=config.MAX_LR,
               step_size=config.STEP_SIZE * (trainX.shape[0] // config.BATCH_SIZE))
[INFO] using 'triangular' method
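Under the hood, the callback adjusts the optimizer's learning rate after every batch (not every epoch) and appends each value to its history dict, which is what the CLR plot at the end of this notebook reads. A minimal sketch of such a callback for the triangular policy, assuming pysim.clr_callback mirrors the reference Keras implementation:

class MinimalCyclicLR(tf.keras.callbacks.Callback):
    # Sketch only -- not the pysim implementation.
    def __init__(self, base_lr, max_lr, step_size):
        super().__init__()
        self.base_lr = base_lr
        self.max_lr = max_lr
        self.step_size = step_size
        self.iteration = 0
        self.history = {"lr": []}

    def clr(self):
        # Same triangular formula as sketched in section 3.
        cycle = np.floor(1 + self.iteration / (2 * self.step_size))
        x = np.abs(self.iteration / self.step_size - 2 * cycle + 1)
        return self.base_lr + (self.max_lr - self.base_lr) * max(0.0, 1 - x)

    def on_train_batch_end(self, batch, logs=None):
        # Advance the iteration counter, set the new LR, and record it.
        self.iteration += 1
        new_lr = self.clr()
        tf.keras.backend.set_value(self.model.optimizer.learning_rate, new_lr)
        self.history["lr"].append(float(new_lr))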
H = model.fit(trainX, trainy,
              batch_size=config.BATCH_SIZE,
              validation_data=(testX, testy),
              steps_per_epoch=trainX.shape[0] // config.BATCH_SIZE,  # integer division; a float here is an error
              epochs=config.NUM_EPOCHS,
              callbacks=[clr],
              verbose=1)
Epoch 1/96
2/1 [======================================] - 0s 176ms/step - loss: 0.3111 - accuracy: 0.4800 - val_loss: 0.3462 - val_accuracy: 0.0200
Epoch 2/96
2/1 [======================================] - 1s 277ms/step - loss: 0.3111 - accuracy: 0.4500 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 3/96
2/1 [======================================] - 0s 182ms/step - loss: 0.3114 - accuracy: 0.4900 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 4/96
2/1 [======================================] - 0s 165ms/step - loss: 0.3103 - accuracy: 0.5000 - val_loss: 0.3462 - val_accuracy: 0.0200
Epoch 5/96
2/1 [======================================] - 0s 218ms/step - loss: 0.3093 - accuracy: 0.4500 - val_loss: 0.3462 - val_accuracy: 0.0200
Epoch 6/96
2/1 [======================================] - 0s 248ms/step - loss: 0.3088 - accuracy: 0.4600 - val_loss: 0.3462 - val_accuracy: 0.0200
Epoch 7/96
2/1 [======================================] - 0s 210ms/step - loss: 0.3083 - accuracy: 0.4500 - val_loss: 0.3462 - val_accuracy: 0.0200
Epoch 8/96
2/1 [======================================] - 0s 187ms/step - loss: 0.3078 - accuracy: 0.4700 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 9/96
2/1 [======================================] - 0s 185ms/step - loss: 0.3072 - accuracy: 0.4800 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 10/96
2/1 [======================================] - 0s 218ms/step - loss: 0.3079 - accuracy: 0.4700 - val_loss: 0.3464 - val_accuracy: 0.0200
Epoch 11/96
2/1 [======================================] - 0s 179ms/step - loss: 0.3073 - accuracy: 0.4700 - val_loss: 0.3464 - val_accuracy: 0.0200
Epoch 12/96
2/1 [======================================] - 0s 187ms/step - loss: 0.3071 - accuracy: 0.4900 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 13/96
2/1 [======================================] - 0s 188ms/step - loss: 0.3055 - accuracy: 0.4800 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 14/96
2/1 [======================================] - 0s 194ms/step - loss: 0.3049 - accuracy: 0.4700 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 15/96
2/1 [======================================] - 0s 211ms/step - loss: 0.3049 - accuracy: 0.4900 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 16/96
2/1 [======================================] - 0s 200ms/step - loss: 0.3043 - accuracy: 0.4900 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 17/96
2/1 [======================================] - 0s 188ms/step - loss: 0.3038 - accuracy: 0.4900 - val_loss: 0.3464 - val_accuracy: 0.0200
Epoch 18/96
2/1 [======================================] - 0s 211ms/step - loss: 0.3037 - accuracy: 0.4900 - val_loss: 0.3464 - val_accuracy: 0.0200
Epoch 19/96
2/1 [======================================] - 0s 244ms/step - loss: 0.3033 - accuracy: 0.4800 - val_loss: 0.3464 - val_accuracy: 0.0200
Epoch 20/96
2/1 [======================================] - 0s 201ms/step - loss: 0.3034 - accuracy: 0.4500 - val_loss: 0.3464 - val_accuracy: 0.0200
Epoch 21/96
2/1 [======================================] - 0s 195ms/step - loss: 0.3018 - accuracy: 0.4900 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 22/96
2/1 [======================================] - 0s 209ms/step - loss: 0.3008 - accuracy: 0.4800 - val_loss: 0.3463 - val_accuracy: 0.0200
Epoch 23/96
2/1 [======================================] - 0s 183ms/step - loss: 0.3006 - accuracy: 0.4900 - val_loss: 0.3462 - val_accuracy: 0.0300
Epoch 24/96
2/1 [======================================] - 0s 196ms/step - loss: 0.2998 - accuracy: 0.4900 - val_loss: 0.3463 - val_accuracy: 0.0300
Epoch 25/96
2/1 [======================================] - 0s 190ms/step - loss: 0.3002 - accuracy: 0.4800 - val_loss: 0.3463 - val_accuracy: 0.0300
Epoch 26/96
2/1 [======================================] - 0s 179ms/step - loss: 0.2998 - accuracy: 0.5100 - val_loss: 0.3464 - val_accuracy: 0.0200
Epoch 27/96
2/1 [======================================] - 0s 185ms/step - loss: 0.3030 - accuracy: 0.4700 - val_loss: 0.3464 - val_accuracy: 0.0300
Epoch 28/96
2/1 [======================================] - 0s 189ms/step - loss: 0.2998 - accuracy: 0.4800 - val_loss: 0.3463 - val_accuracy: 0.0300
Epoch 29/96
2/1 [======================================] - 0s 193ms/step - loss: 0.2994 - accuracy: 0.4800 - val_loss: 0.3462 - val_accuracy: 0.0300
Epoch 30/96
2/1 [======================================] - 0s 207ms/step - loss: 0.2980 - accuracy: 0.4800 - val_loss: 0.3461 - val_accuracy: 0.0300
Epoch 31/96
2/1 [======================================] - 0s 195ms/step - loss: 0.2971 - accuracy: 0.5000 - val_loss: 0.3461 - val_accuracy: 0.0300
Epoch 32/96
2/1 [======================================] - 0s 196ms/step - loss: 0.2965 - accuracy: 0.5100 - val_loss: 0.3462 - val_accuracy: 0.0300
Epoch 33/96
2/1 [======================================] - 0s 208ms/step - loss: 0.2961 - accuracy: 0.5000 - val_loss: 0.3462 - val_accuracy: 0.0300
Epoch 34/96
2/1 [======================================] - 1s 252ms/step - loss: 0.2966 - accuracy: 0.5000 - val_loss: 0.3462 - val_accuracy: 0.0300
Epoch 35/96
2/1 [======================================] - 0s 180ms/step - loss: 0.2959 - accuracy: 0.5100 - val_loss: 0.3462 - val_accuracy: 0.0300
Epoch 36/96
2/1 [======================================] - 0s 232ms/step - loss: 0.2958 - accuracy: 0.5100 - val_loss: 0.3461 - val_accuracy: 0.0300
Epoch 37/96
2/1 [======================================] - 0s 179ms/step - loss: 0.2953 - accuracy: 0.5000 - val_loss: 0.3460 - val_accuracy: 0.0500
Epoch 38/96
2/1 [======================================] - 0s 193ms/step - loss: 0.2950 - accuracy: 0.4800 - val_loss: 0.3460 - val_accuracy: 0.0500
Epoch 39/96
2/1 [======================================] - 0s 181ms/step - loss: 0.2946 - accuracy: 0.4900 - val_loss: 0.3460 - val_accuracy: 0.0500
Epoch 40/96
2/1 [======================================] - 0s 205ms/step - loss: 0.2936 - accuracy: 0.5100 - val_loss: 0.3460 - val_accuracy: 0.0500
Epoch 41/96
2/1 [======================================] - 0s 194ms/step - loss: 0.2935 - accuracy: 0.5000 - val_loss: 0.3461 - val_accuracy: 0.0500
Epoch 42/96
2/1 [======================================] - 0s 231ms/step - loss: 0.2944 - accuracy: 0.5100 - val_loss: 0.3461 - val_accuracy: 0.0500
Epoch 43/96
2/1 [======================================] - 0s 219ms/step - loss: 0.2928 - accuracy: 0.5100 - val_loss: 0.3461 - val_accuracy: 0.0500
Epoch 44/96
2/1 [======================================] - 0s 167ms/step - loss: 0.2930 - accuracy: 0.5100 - val_loss: 0.3459 - val_accuracy: 0.0500
Epoch 45/96
2/1 [======================================] - 0s 171ms/step - loss: 0.2921 - accuracy: 0.5100 - val_loss: 0.3459 - val_accuracy: 0.0500
Epoch 46/96
2/1 [======================================] - 0s 169ms/step - loss: 0.2922 - accuracy: 0.5200 - val_loss: 0.3458 - val_accuracy: 0.0500
Epoch 47/96
2/1 [======================================] - 0s 174ms/step - loss: 0.2909 - accuracy: 0.5200 - val_loss: 0.3458 - val_accuracy: 0.0500
Epoch 48/96
2/1 [======================================] - 1s 272ms/step - loss: 0.2902 - accuracy: 0.5200 - val_loss: 0.3458 - val_accuracy: 0.0500
Epoch 49/96
2/1 [======================================] - 1s 264ms/step - loss: 0.2905 - accuracy: 0.5200 - val_loss: 0.3458 - val_accuracy: 0.0500
Epoch 50/96
2/1 [======================================] - 1s 263ms/step - loss: 0.2905 - accuracy: 0.5000 - val_loss: 0.3459 - val_accuracy: 0.0500
Epoch 51/96
2/1 [======================================] - 0s 202ms/step - loss: 0.2905 - accuracy: 0.5000 - val_loss: 0.3459 - val_accuracy: 0.0500
Epoch 52/96
2/1 [======================================] - 0s 158ms/step - loss: 0.2903 - accuracy: 0.5100 - val_loss: 0.3458 - val_accuracy: 0.0600
Epoch 53/96
2/1 [======================================] - 0s 162ms/step - loss: 0.2897 - accuracy: 0.5100 - val_loss: 0.3457 - val_accuracy: 0.0700
Epoch 54/96
2/1 [======================================] - 0s 153ms/step - loss: 0.2883 - accuracy: 0.5000 - val_loss: 0.3456 - val_accuracy: 0.0700
Epoch 55/96
2/1 [======================================] - 0s 154ms/step - loss: 0.2877 - accuracy: 0.5200 - val_loss: 0.3456 - val_accuracy: 0.0700
Epoch 56/96
2/1 [======================================] - 0s 163ms/step - loss: 0.2880 - accuracy: 0.5000 - val_loss: 0.3456 - val_accuracy: 0.0800
Epoch 57/96
2/1 [======================================] - 0s 153ms/step - loss: 0.2876 - accuracy: 0.5300 - val_loss: 0.3457 - val_accuracy: 0.0800
Epoch 58/96
2/1 [======================================] - 0s 151ms/step - loss: 0.2870 - accuracy: 0.5300 - val_loss: 0.3457 - val_accuracy: 0.0800
Epoch 59/96
2/1 [======================================] - 0s 153ms/step - loss: 0.2871 - accuracy: 0.5000 - val_loss: 0.3456 - val_accuracy: 0.0800
Epoch 60/96
2/1 [======================================] - 0s 247ms/step - loss: 0.2857 - accuracy: 0.5300 - val_loss: 0.3455 - val_accuracy: 0.0800
Epoch 61/96
2/1 [======================================] - 1s 272ms/step - loss: 0.2876 - accuracy: 0.5100 - val_loss: 0.3454 - val_accuracy: 0.0800
Epoch 62/96
2/1 [======================================] - 1s 250ms/step - loss: 0.2861 - accuracy: 0.5300 - val_loss: 0.3453 - val_accuracy: 0.0800
Epoch 63/96
2/1 [======================================] - 0s 166ms/step - loss: 0.2844 - accuracy: 0.5200 - val_loss: 0.3453 - val_accuracy: 0.0700
Epoch 64/96
2/1 [======================================] - 0s 163ms/step - loss: 0.2841 - accuracy: 0.5200 - val_loss: 0.3453 - val_accuracy: 0.0700
Epoch 65/96
2/1 [======================================] - 0s 163ms/step - loss: 0.2848 - accuracy: 0.5300 - val_loss: 0.3453 - val_accuracy: 0.0700
Epoch 66/96
2/1 [======================================] - 0s 159ms/step - loss: 0.2833 - accuracy: 0.5400 - val_loss: 0.3454 - val_accuracy: 0.0700
Epoch 67/96
2/1 [======================================] - 0s 205ms/step - loss: 0.2843 - accuracy: 0.5100 - val_loss: 0.3453 - val_accuracy: 0.0800
Epoch 68/96
2/1 [======================================] - 0s 204ms/step - loss: 0.2843 - accuracy: 0.5300 - val_loss: 0.3452 - val_accuracy: 0.0700
Epoch 69/96
2/1 [======================================] - 0s 170ms/step - loss: 0.2833 - accuracy: 0.5300 - val_loss: 0.3451 - val_accuracy: 0.0800
Epoch 70/96
2/1 [======================================] - 0s 167ms/step - loss: 0.2823 - accuracy: 0.5400 - val_loss: 0.3451 - val_accuracy: 0.0800
Epoch 71/96
2/1 [======================================] - 0s 165ms/step - loss: 0.2824 - accuracy: 0.4900 - val_loss: 0.3451 - val_accuracy: 0.0800
Epoch 72/96
2/1 [======================================] - 0s 157ms/step - loss: 0.2829 - accuracy: 0.5000 - val_loss: 0.3451 - val_accuracy: 0.0900
Epoch 73/96
2/1 [======================================] - 0s 164ms/step - loss: 0.2813 - accuracy: 0.5300 - val_loss: 0.3451 - val_accuracy: 0.0900
Epoch 74/96
2/1 [======================================] - 0s 158ms/step - loss: 0.2816 - accuracy: 0.5100 - val_loss: 0.3451 - val_accuracy: 0.1000
Epoch 75/96
2/1 [======================================] - 0s 203ms/step - loss: 0.2809 - accuracy: 0.5500 - val_loss: 0.3451 - val_accuracy: 0.1100
Epoch 76/96
2/1 [======================================] - 0s 168ms/step - loss: 0.2803 - accuracy: 0.5400 - val_loss: 0.3450 - val_accuracy: 0.1200
Epoch 77/96
2/1 [======================================] - 0s 149ms/step - loss: 0.2797 - accuracy: 0.5200 - val_loss: 0.3450 - val_accuracy: 0.1200
Epoch 78/96
2/1 [======================================] - 0s 152ms/step - loss: 0.2805 - accuracy: 0.5200 - val_loss: 0.3449 - val_accuracy: 0.1200
Epoch 79/96
2/1 [======================================] - 0s 163ms/step - loss: 0.2813 - accuracy: 0.5300 - val_loss: 0.3448 - val_accuracy: 0.1200
Epoch 80/96
2/1 [======================================] - 0s 190ms/step - loss: 0.2787 - accuracy: 0.5200 - val_loss: 0.3448 - val_accuracy: 0.1200
Epoch 81/96
2/1 [======================================] - 0s 171ms/step - loss: 0.2790 - accuracy: 0.5500 - val_loss: 0.3449 - val_accuracy: 0.1200
Epoch 82/96
2/1 [======================================] - 0s 152ms/step - loss: 0.2798 - accuracy: 0.5200 - val_loss: 0.3449 - val_accuracy: 0.1200
Epoch 83/96
2/1 [======================================] - 0s 150ms/step - loss: 0.2779 - accuracy: 0.5200 - val_loss: 0.3448 - val_accuracy: 0.1200
Epoch 84/96
2/1 [======================================] - 0s 163ms/step - loss: 0.2772 - accuracy: 0.5400 - val_loss: 0.3449 - val_accuracy: 0.1200
Epoch 85/96
2/1 [======================================] - 0s 159ms/step - loss: 0.2780 - accuracy: 0.5400 - val_loss: 0.3448 - val_accuracy: 0.1100
Epoch 86/96
2/1 [======================================] - 0s 150ms/step - loss: 0.2773 - accuracy: 0.5200 - val_loss: 0.3447 - val_accuracy: 0.1100
Epoch 87/96
2/1 [======================================] - 0s 152ms/step - loss: 0.2760 - accuracy: 0.5300 - val_loss: 0.3447 - val_accuracy: 0.1100
Epoch 88/96
2/1 [======================================] - 0s 160ms/step - loss: 0.2751 - accuracy: 0.5300 - val_loss: 0.3447 - val_accuracy: 0.1100
Epoch 89/96
2/1 [======================================] - 0s 191ms/step - loss: 0.2755 - accuracy: 0.5700 - val_loss: 0.3447 - val_accuracy: 0.1100
Epoch 90/96
2/1 [======================================] - 0s 162ms/step - loss: 0.2769 - accuracy: 0.5400 - val_loss: 0.3447 - val_accuracy: 0.1100
Epoch 91/96
2/1 [======================================] - 0s 163ms/step - loss: 0.2758 - accuracy: 0.5200 - val_loss: 0.3447 - val_accuracy: 0.1100
Epoch 92/96
2/1 [======================================] - 0s 153ms/step - loss: 0.2751 - accuracy: 0.5300 - val_loss: 0.3446 - val_accuracy: 0.1100
Epoch 93/96
2/1 [======================================] - 0s 179ms/step - loss: 0.2741 - accuracy: 0.5400 - val_loss: 0.3446 - val_accuracy: 0.1100
Epoch 94/96
2/1 [======================================] - 0s 155ms/step - loss: 0.2744 - accuracy: 0.5000 - val_loss: 0.3445 - val_accuracy: 0.1100
Epoch 95/96
2/1 [======================================] - 0s 205ms/step - loss: 0.2739 - accuracy: 0.5300 - val_loss: 0.3445 - val_accuracy: 0.1100
Epoch 96/96
2/1 [======================================] - 0s 149ms/step - loss: 0.2726 - accuracy: 0.5400 - val_loss: 0.3445 - val_accuracy: 0.1100
print("[INFO] evaluating network...")
predictions = model.predict(x=testX, batch_size=config.BATCH_SIZE)
#print(classification_report(testy.argmax(axis=1), predictions.argmax(axis=1), target_names=config.CLASSES))  # requires: from sklearn.metrics import classification_report
[INFO] evaluating network...
N = np.arange(0, config.NUM_EPOCHS)
plt.style.use("ggplot")
plt.figure()
plt.plot(N, H.history["loss"], label="train_loss")
plt.plot(N, H.history["val_loss"], label="val_loss")
plt.plot(N, H.history["accuracy"], label="train_acc")
plt.plot(N, H.history["val_accuracy"], label="val_acc")
plt.title("Training Loss and Accuracy")
plt.xlabel("Epoch #")
plt.ylabel("Loss/Accuracy")
plt.legend(loc="lower left")
plt.savefig(config.TRAINING_PLOT_PATH)
N = np.arange(0, len(clr.history["lr"]))
plt.figure()
plt.plot(N, clr.history["lr"])
plt.title("Cyclical Learning Rate (CLR)")
plt.xlabel("Training Iterations")
plt.ylabel("Learning Rate")
plt.savefig(config.CLR_PLOT_PATH)

References

Adrian Rosebrock, "Is Rectified Adam actually better than Adam?", PyImageSearch, https://www.pyimagesearch.com/2019/10/07/is-rectified-adam-actually-better-than-adam/, accessed January 3, 2021.
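Leslie N. Smith, "Cyclical Learning Rates for Training Neural Networks", IEEE WACV 2017, arXiv:1506.01186.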