# -*- encoding:utf-8 -*-
'''
@Author : dingjiawen
@Date : 2023/11/10 15:57
@Usage :
@Desc :
'''

import os
import shutil

import numpy as np
import pandas as pd


def folderGenerate(folder_name):
    # Create the folder (and any missing parent directories) if it does not exist yet.
    if not os.path.exists(folder_name):
        os.makedirs(folder_name)
        # os.mkdir(folder_name)


# Recursively delete a folder and everything inside it.
def folderDelete(folder_name):
    if os.path.exists(folder_name):
        shutil.rmtree(folder_name)


# Decide whether to save the model this time; history_loss stores all previous loss values.
def SaveBestModel(model, save_name, history_loss, loss_value, pattern: str = "min", epoch=0, is_all=False):
    # The checkpoint folder is derived by slicing trailing characters off save_name,
    # so the caller's file naming convention has to match these slice lengths.
    weight_folder = save_name[:-4]
    if is_all:
        # When every epoch is kept, tag the folder and file with the epoch index and loss value.
        weight_folder = weight_folder + '_epoch' + str(epoch) + "_" + str(loss_value)
        save_name = weight_folder + save_name[-7:]

    # If history_loss is empty this is the first checkpoint, so save unconditionally.
    if len(history_loss) == 0:
        folderGenerate(weight_folder)
        model.save_weights(save_name)
        return

    if pattern == "min":
        # Save only if the current loss is lower than every previous loss.
        if np.min(history_loss) > loss_value:
            # Delete the previous checkpoint and save this one instead.
            folderDelete(weight_folder)
            folderGenerate(weight_folder)
            model.save_weights(save_name)
            print("Saving the model for this epoch")
            return
    elif pattern == "max":
        # Save only if the current metric is higher than every previous value.
        if np.max(history_loss) < loss_value:
            # Delete the previous checkpoint and save this one instead.
            folderDelete(weight_folder)
            folderGenerate(weight_folder)
            model.save_weights(save_name)
            print("Saving the model for this epoch")
            return
    else:
        raise ValueError("pattern not implemented")
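

# Usage note: SaveBestModel compares loss_value against the *previous* losses only,
# so in a training loop it should be called before the new loss is appended, e.g.
# (a minimal sketch; model, save_name and loss_value are placeholders):
#
#     SaveBestModel(model, save_name, history_loss, loss_value, pattern="min")
#     history_loss.append(loss_value)
#
# The slice-based folder handling above also assumes that save_name follows a
# fixed file-naming convention with a short trailing file name/extension.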


# Decide whether to save the model this time; history_accuracy stores all previous accuracy values.
def SaveBestModelByAccuracy(model, save_name, history_accuracy, accuracy_value):
    # The checkpoint folder is save_name with its trailing file name/extension stripped off.
    weight_folder = save_name[:-7]

    # If history_accuracy is empty this is the first checkpoint, so save unconditionally.
    if len(history_accuracy) == 0:
        folderGenerate(weight_folder)
        model.save_weights(save_name)
        return

    # Save only if the current accuracy is higher than every previous value.
    if np.max(history_accuracy) < accuracy_value:
        # Delete the previous checkpoint and save this one instead.
        folderDelete(weight_folder)
        folderGenerate(weight_folder)
        model.save_weights(save_name)
        print("Saving the model for this epoch")
        return


# Decide whether to save the full model with model.save(); history_loss stores all previous loss values.
def SaveBestH5Model(model, save_name, history_loss, loss_value):
    # Make sure the parent folder of the checkpoint file exists.
    dirpath = os.path.dirname(save_name)
    folderGenerate(dirpath)
    # If history_loss is empty this is the first checkpoint, so save unconditionally.
    if len(history_loss) == 0:
        model.save(save_name)
        return

    # Save (overwriting the previous file) only if the current loss is lower than every previous loss.
    if np.min(history_loss) > loss_value:
        model.save(save_name, overwrite=True)
        print("Saving the model for this epoch")
        return


def IsStopTraining(history_loss, patience=5, pattern: str = "min"):
    # Not enough history yet to make a decision.
    if len(history_loss) <= patience:
        return False
    if pattern == "min":
        # Stop if the loss from (patience + 1) epochs ago is still lower than
        # every loss seen in the last `patience` epochs.
        if history_loss[-(patience + 1)] < min(history_loss[-patience:]):
            print(patience, "epochs without loss improvement, stopping training")
            return True
    elif pattern == "max":
        # Stop if the metric from (patience + 1) epochs ago is still higher than
        # every value seen in the last `patience` epochs.
        if history_loss[-(patience + 1)] > max(history_loss[-patience:]):
            print(patience, "epochs without accuracy improvement, stopping training")
            return True
    else:
        raise ValueError("pattern not implemented")

    return False


def Is_Reduce_learning_rate(history_loss, patience=3, pattern: str = "min"):
    # Not enough history yet to make a decision.
    if len(history_loss) <= patience:
        return False
    if pattern == "min":
        for i in range(patience):
            # If any of the last `patience` losses is below the reference value
            # from (patience + 1) epochs ago, there was an improvement.
            if history_loss[-(patience + 1)] > history_loss[-(i + 1)]:
                return False
    elif pattern == "max":
        for i in range(patience):
            # If any of the last `patience` values is above the reference value,
            # there was an improvement.
            if history_loss[-(patience + 1)] < history_loss[-(i + 1)]:
                return False
    else:
        raise ValueError("pattern not implemented")

    print(patience, "epochs without improvement, reducing the learning rate")
    return True
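

# Usage sketch: one way to wire these helpers into a manual tf.keras training loop.
# The names `model`, `train_one_epoch` and `reduce_learning_rate` are hypothetical
# placeholders supplied by the caller, and the checkpoint path is illustrative.
# SaveBestH5Model is used here because its path handling via os.path.dirname does
# not depend on the slice-based naming convention assumed by SaveBestModel.
def example_training_loop(model, train_one_epoch, reduce_learning_rate,
                          save_name="checkpoints/best.h5", max_epochs=100):
    history_loss = []
    for _ in range(max_epochs):
        loss_value = train_one_epoch()  # one pass over the training data, returns the epoch loss
        SaveBestH5Model(model, save_name, history_loss, loss_value)  # keep only the best model so far
        history_loss.append(loss_value)  # record the loss after the save check
        if Is_Reduce_learning_rate(history_loss, patience=3):
            reduce_learning_rate()  # caller-defined learning-rate schedule hook
        if IsStopTraining(history_loss, patience=5):  # early stopping
            break
    return history_loss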


if __name__ == '__main__':
    history_loss = [0.1, 0.2, 0.3, 0.25, 0.42, 0.12, 0.31]
    IsStopTraining(history_loss)
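    # Also exercise the learning-rate helper on the same dummy loss history.
    print("reduce learning rate:", Is_Reduce_learning_rate(history_loss))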