Plotting acc and loss curves with Keras: a worked example
Updated: June 15, 2020, 14:21:03  Author: ninesun11
This article walks through an example of plotting accuracy and loss curves in Keras. It should make a handy reference; I hope it helps. Let's take a look.
Without further ado, here is the code:
# Load Keras modules
from __future__ import print_function
import numpy as np
np.random.seed(1337)  # for reproducibility

import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils
import matplotlib.pyplot as plt
%matplotlib inline  # Jupyter notebook magic

# A LossHistory callback class that records loss and acc
class LossHistory(keras.callbacks.Callback):
    def on_train_begin(self, logs={}):
        self.losses = {'batch': [], 'epoch': []}
        self.accuracy = {'batch': [], 'epoch': []}
        self.val_loss = {'batch': [], 'epoch': []}
        self.val_acc = {'batch': [], 'epoch': []}

    def on_batch_end(self, batch, logs={}):
        self.losses['batch'].append(logs.get('loss'))
        self.accuracy['batch'].append(logs.get('acc'))
        self.val_loss['batch'].append(logs.get('val_loss'))
        self.val_acc['batch'].append(logs.get('val_acc'))

    def on_epoch_end(self, epoch, logs={}):
        self.losses['epoch'].append(logs.get('loss'))
        self.accuracy['epoch'].append(logs.get('acc'))
        self.val_loss['epoch'].append(logs.get('val_loss'))
        self.val_acc['epoch'].append(logs.get('val_acc'))

    def loss_plot(self, loss_type):
        iters = range(len(self.losses[loss_type]))
        plt.figure()
        # acc
        plt.plot(iters, self.accuracy[loss_type], 'r', label='train acc')
        # loss
        plt.plot(iters, self.losses[loss_type], 'g', label='train loss')
        if loss_type == 'epoch':
            # val_acc
            plt.plot(iters, self.val_acc[loss_type], 'b', label='val acc')
            # val_loss
            plt.plot(iters, self.val_loss[loss_type], 'k', label='val loss')
        plt.grid(True)
        plt.xlabel(loss_type)
        plt.ylabel('acc-loss')
        plt.legend(loc="upper right")
        plt.show()

# Hyperparameters
batch_size = 128
nb_classes = 10
nb_epoch = 20

# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)

# Build the model with Sequential()
model = Sequential()
model.add(Dense(512, input_shape=(784,)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))

# Print the model summary
model.summary()

# Training and evaluation
# Compile the model
model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])

# Create a LossHistory instance
history = LossHistory()

# Train (note: the callback has to be passed in here)
model.fit(X_train, Y_train,
          batch_size=batch_size, nb_epoch=nb_epoch,
          verbose=1,
          validation_data=(X_test, Y_test),
          callbacks=[history])

# Evaluate the model
score = model.evaluate(X_test, Y_test, verbose=0)
print('Test score:', score[0])
print('Test accuracy:', score[1])

# Plot the acc-loss curves
history.loss_plot('epoch')
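As an aside, model.fit() itself returns a History object whose .history dict already holds the per-epoch metrics, so if you only need epoch-level curves the custom callback is not strictly required. Below is a minimal sketch of that approach; it assumes the older 'acc'/'val_acc' key names (newer Keras/TensorFlow versions use 'accuracy'/'val_accuracy' instead):

# Minimal sketch: plot the curves straight from the History returned by fit().
# Assumes old-style metric keys 'acc'/'val_acc'; adjust for newer Keras versions.
import matplotlib.pyplot as plt

hist = model.fit(X_train, Y_train,
                 batch_size=batch_size, nb_epoch=nb_epoch,
                 verbose=1, validation_data=(X_test, Y_test))

epochs = range(len(hist.history['loss']))
plt.figure()
plt.plot(epochs, hist.history['acc'], 'r', label='train acc')
plt.plot(epochs, hist.history['loss'], 'g', label='train loss')
plt.plot(epochs, hist.history['val_acc'], 'b', label='val acc')
plt.plot(epochs, hist.history['val_loss'], 'k', label='val loss')
plt.grid(True)
plt.xlabel('epoch')
plt.ylabel('acc-loss')
plt.legend(loc='upper right')
plt.show()

The custom callback is still the way to go if you also want per-batch curves, since History only records epoch-level values.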
Bonus: custom validation-set evaluation in Keras (ROC, AUC)
Keras only ships with accuracy and loss as built-in performance metrics, so when you want to judge the validation set by AUC you have to write your own metric function:
import numpy as np
import tensorflow as tf
from sklearn.metrics import roc_auc_score
from keras import backend as K

# AUC for a binary classifier
def auc(y_true, y_pred):
    ptas = tf.stack([binary_PTA(y_true, y_pred, k) for k in np.linspace(0, 1, 1000)], axis=0)
    pfas = tf.stack([binary_PFA(y_true, y_pred, k) for k in np.linspace(0, 1, 1000)], axis=0)
    pfas = tf.concat([tf.ones((1,)), pfas], axis=0)
    binSizes = -(pfas[1:] - pfas[:-1])
    s = ptas * binSizes
    return K.sum(s, axis=0)

# ------------------------------------------------------------------------------------
# PFA, prob false alert for binary classifier
def binary_PFA(y_true, y_pred, threshold=K.variable(value=0.5)):
    y_pred = K.cast(y_pred >= threshold, 'float32')
    # N = total number of negative labels
    N = K.sum(1 - y_true)
    # FP = total number of false alerts, alerts from the negative class labels
    FP = K.sum(y_pred - y_pred * y_true)
    return FP / N

# -----------------------------------------------------------------------------------
# P_TA prob true alerts for binary classifier
def binary_PTA(y_true, y_pred, threshold=K.variable(value=0.5)):
    y_pred = K.cast(y_pred >= threshold, 'float32')
    # P = total number of positive labels
    P = K.sum(y_true)
    # TP = total number of correct alerts, alerts from the positive class labels
    TP = K.sum(y_pred * y_true)
    return TP / P

Then pass the function in through metrics when compiling the model. In the example below I use an RNN for classification:
from keras.models import Sequential
from keras.layers import Dense, Dropout, GRU
from keras import regularizers
import keras

model = Sequential()
model.add(keras.layers.core.Masking(mask_value=0., input_shape=(max_lenth, max_features)))  # Masking handles variable-length sequence input
model.add(GRU(units=n_hidden_units, activation='selu', kernel_initializer='orthogonal',
              recurrent_initializer='orthogonal', bias_initializer='zeros',
              kernel_regularizer=regularizers.l2(0.01), recurrent_regularizer=regularizers.l2(0.01),
              bias_regularizer=None, activity_regularizer=None,
              kernel_constraint=None, recurrent_constraint=None, bias_constraint=None,
              dropout=0.5, recurrent_dropout=0.0, implementation=1,
              return_sequences=False, return_state=False,
              go_backwards=False, stateful=False, unroll=False))
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))

model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=[auc])  # pass in the custom metric function
After training, make your predictions as usual...
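For example, a minimal prediction sketch could look like the following (x_test and y_test are assumed hold-out arrays shaped like the training data; the sklearn score gives an exact AUC to cross-check the approximate Keras metric):

# Minimal sketch: predict probabilities and verify the AUC with sklearn.
from sklearn.metrics import roc_auc_score

y_prob = model.predict(x_test, batch_size=128).ravel()  # sigmoid outputs in [0, 1]
print('sklearn AUC:', roc_auc_score(y_test, y_prob))    # exact AUC on the hold-out set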
Method 2:
from sklearn.metrics import roc_auc_score
import keras

class RocAucMetricCallback(keras.callbacks.Callback):
    def __init__(self, predict_batch_size=1024, include_on_batch=False):
        super(RocAucMetricCallback, self).__init__()
        self.predict_batch_size = predict_batch_size
        self.include_on_batch = include_on_batch

    def on_batch_begin(self, batch, logs={}):
        pass

    def on_batch_end(self, batch, logs={}):
        if self.include_on_batch:
            logs['roc_auc_val'] = float('-inf')
            if self.validation_data:
                logs['roc_auc_val'] = roc_auc_score(
                    self.validation_data[1],
                    self.model.predict(self.validation_data[0],
                                       batch_size=self.predict_batch_size))

    def on_train_begin(self, logs={}):
        if not ('roc_auc_val' in self.params['metrics']):
            self.params['metrics'].append('roc_auc_val')

    def on_train_end(self, logs={}):
        pass

    def on_epoch_begin(self, epoch, logs={}):
        pass

    def on_epoch_end(self, epoch, logs={}):
        logs['roc_auc_val'] = float('-inf')
        if self.validation_data:
            logs['roc_auc_val'] = roc_auc_score(
                self.validation_data[1],
                self.model.predict(self.validation_data[0],
                                   batch_size=self.predict_batch_size))


import numpy as np
import tensorflow as tf
from keras.models import Sequential
from keras.layers import Dense, Dropout, GRU
from keras import regularizers
import keras
from keras.callbacks import EarlyStopping
from sklearn.metrics import roc_auc_score
from keras import metrics

cb = [
    RocAucMetricCallback(),  # include it before EarlyStopping!
    EarlyStopping(monitor='roc_auc_val', patience=300, verbose=2, mode='max')
]

model = Sequential()
model.add(keras.layers.core.Masking(mask_value=0., input_shape=(max_lenth, max_features)))
# model.add(Embedding(input_dim=max_features+1, output_dim=64, mask_zero=True))
model.add(GRU(units=n_hidden_units, activation='selu', kernel_initializer='orthogonal',
              recurrent_initializer='orthogonal', bias_initializer='zeros',
              kernel_regularizer=regularizers.l2(0.01), recurrent_regularizer=regularizers.l2(0.01),
              bias_regularizer=None, activity_regularizer=None,
              kernel_constraint=None, recurrent_constraint=None, bias_constraint=None,
              dropout=0.5, recurrent_dropout=0.0, implementation=1,
              return_sequences=False, return_state=False,
              go_backwards=False, stateful=False, unroll=False))
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))

model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=[auc])  # any other evaluation metric can go here too

model.fit(x_train, y_train,
          batch_size=train_batch_size, epochs=training_iters,
          verbose=2, callbacks=cb, validation_split=0.2,
          shuffle=True, class_weight=None, sample_weight=None, initial_epoch=0)
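Because the callback writes 'roc_auc_val' into the epoch logs, the value should also end up in the History object returned by fit(), so, tying back to the first part of this article, the per-epoch validation AUC can be plotted the same way as acc and loss. A minimal sketch (variable names follow the snippet above):

# Minimal sketch: plot the per-epoch validation AUC recorded by the callback.
import matplotlib.pyplot as plt

hist = model.fit(x_train, y_train,
                 batch_size=train_batch_size, epochs=training_iters,
                 verbose=2, callbacks=cb, validation_split=0.2)

plt.figure()
plt.plot(hist.history['roc_auc_val'], 'b', label='val AUC')
plt.grid(True)
plt.xlabel('epoch')
plt.ylabel('roc_auc_val')
plt.legend(loc='lower right')
plt.show()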
Tested personally; it works!
That is everything in this example of plotting acc and loss curves with Keras. I hope it gives you a useful reference, and thank you for supporting 腳本之家.