# DDS gearbox fault classification: load nine fault classes of vibration
# data, attach labels, and train a 1-D ResNet-style CNN with TensorFlow/Keras.
import tensorflow as tf
|
|
import tensorflow_datasets as tfds
|
|
import sys
|
|
import loadData
|
|
import labeled_and_piece
|
|
import numpy as np
|
|
|
|
# ---------------------------------------------------------------------------
# Data import and preprocessing.
#
# For each of the nine fault classes: load the raw signals with
# loadData.DataDeal2, attach the class label with
# labeled_and_piece.ConcatLabel, then concatenate all classes, shuffle,
# and split into train/test sets with labeled_and_piece.PieceAndBag.
#
# NOTE(review): labels run 1..9 here while the model's softmax head has
# 9 units (valid sparse labels are 0..8) — confirm that ConcatLabel or
# PieceAndBag shifts the labels, otherwise label 9 is out of range for
# sparse_categorical_crossentropy.
# ---------------------------------------------------------------------------

# Raw strings so the Windows backslashes are never treated as escape
# sequences (the literal values are identical to the originals).
_FAULT_DIRS = [
    r'E:\data\DDS_data\平行齿轮箱齿轮表面磨损故障恒速\DATA',   # class 1: gear surface wear
    r'E:\data\DDS_data\平行齿轮箱齿轮齿根裂纹故障恒速\DATA',   # class 2: gear root crack
    r'E:\data\DDS_data\平行齿轮箱齿轮断齿故障恒速\DATA',       # class 3: broken tooth
    r'E:\data\DDS_data\平行齿轮箱齿轮偏心故障恒速\DATA',       # class 4: gear eccentricity
    r'E:\data\DDS_data\平行齿轮箱齿轮缺齿故障恒速\DATA',       # class 5: missing tooth
    r'E:\data\DDS_data\平行齿轮箱轴承复合故障恒速\DATA',       # class 6: compound bearing fault
    r'E:\data\DDS_data\平行齿轮箱轴承滚动体故障恒速\DATA',     # class 7: rolling-element fault
    r'E:\data\DDS_data\平行齿轮箱轴承内圈故障恒速\DATA',       # class 8: inner-race fault
    r'E:\data\DDS_data\平行齿轮箱轴承外圈故障恒速\DATA',       # class 9: outer-race fault
]

_labeled_parts = []
for _label, _path in enumerate(_FAULT_DIRS, start=1):
    # The helpers are constructed and then called again with the same
    # arguments, mirroring the original call pattern exactly.
    _loader = loadData.DataDeal2(9, _path)
    _raw = np.array(_loader(9, _path))
    _concat = labeled_and_piece.ConcatLabel(_raw, _label, False)
    _labeled_parts.append(np.array(_concat(_raw, _label, False)))

# Stack all nine classes into one array and shuffle the samples.
data_all = np.array(tf.concat(_labeled_parts, axis=0))
data_train = tf.random.shuffle(data_all)

# Slice into fixed-length pieces and split into train/test sets.
data_new = labeled_and_piece.PieceAndBag(data_train, False)
(train_data, train_label), (test_data, test_label) = data_new(data_train, False)
|
|
# ---------------------------------------------------------------------------
# Convert the splits to arrays and fabricate a 3-channel signal: each 1-D
# sample is replicated along a new leading axis, then transposed so the
# channel axis comes last (Conv1D expects inputs of (batch, steps, channels)).
#
# Observed shapes before channel replication (from a previous run):
#   train_data:(2430, 204800)  train_label:(2430,)
#   test_data :(486, 204800)   test_label :(486,)
# ---------------------------------------------------------------------------
N_CHANNELS = 3  # the single measured channel is duplicated three times

train_data = np.array(train_data)
train_label = np.array(train_label)
test_data = np.array(test_data)
test_label = np.array(test_label)

# (N, L) -> (N_CHANNELS, N, L) -> (N, L, N_CHANNELS)
train_data = tf.transpose(
    tf.broadcast_to(train_data, [N_CHANNELS, train_data.shape[0], train_data.shape[1]]),
    [1, 2, 0])
test_data = tf.transpose(
    tf.broadcast_to(test_data, [N_CHANNELS, test_data.shape[0], test_data.shape[1]]),
    [1, 2, 0])

# NOTE(review): replicating the *labels* to shape (N, 3) looks suspicious —
# sparse_categorical_crossentropy normally expects shape (N,) or (N, 1).
# Confirm this is intentional before trusting the reported accuracy.
train_label = tf.transpose(
    tf.broadcast_to(train_label, [N_CHANNELS, train_label.shape[0]]), [1, 0])
test_label = tf.transpose(
    tf.broadcast_to(test_label, [N_CHANNELS, test_label.shape[0]]), [1, 0])

print(train_data.shape)
print(train_label.shape)
print(test_data.shape)
print(test_label.shape)
|
|
|
|
|
|
def identity_block(input_tensor, out_dim):
    """Bottleneck residual block: 1x1 -> 3x3 -> 1x1 convolutions plus a
    skip connection from ``input_tensor``.

    ``input_tensor`` must already have ``out_dim`` channels, because the
    skip connection adds it to the last conv's output without a projection.

    NOTE(review): unlike canonical ResNet, every conv here — including the
    last one before the add — carries its own ReLU; kept as-is to preserve
    behaviour.
    """
    bottleneck = out_dim // 4

    x = tf.keras.layers.Conv1D(filters=bottleneck, kernel_size=1,
                               padding='SAME', activation=tf.nn.relu)(input_tensor)
    x = tf.keras.layers.BatchNormalization()(x)

    x = tf.keras.layers.Conv1D(filters=bottleneck, kernel_size=3,
                               padding='SAME', activation=tf.nn.relu)(x)
    x = tf.keras.layers.BatchNormalization()(x)

    x = tf.keras.layers.Conv1D(filters=out_dim, kernel_size=1,
                               padding='SAME', activation=tf.nn.relu)(x)

    # Residual connection followed by a final ReLU.
    return tf.nn.relu(tf.keras.layers.Add()([input_tensor, x]))
|
|
|
|
|
|
def resnet_Model(input_shape=(204800, 3), num_classes=9, blocks_per_stage=1):
    """Build a 1-D ResNet-style classifier for fixed-length signals.

    The four original hand-copied stages (64/128/256/512 filters, each a
    conv -> BatchNorm -> identity block) are collapsed into one loop;
    defaults reproduce the original architecture exactly.

    Parameters
    ----------
    input_shape : tuple
        (steps, channels) of each input sample; the default matches the
        preprocessed data produced earlier in this script.
    num_classes : int
        Size of the softmax output layer.
    blocks_per_stage : int
        Number of identity blocks after each stage's entry convolution.

    Returns
    -------
    tf.keras.Model
        Maps (batch, steps, channels) -> (batch, num_classes) softmax scores.

    NOTE(review): the training labels in this script appear to run 1..9,
    but valid sparse labels for a ``num_classes=9`` softmax are 0..8 —
    confirm the labels are shifted before training.
    """
    inputs = tf.keras.Input(shape=list(input_shape))
    x = tf.keras.layers.Conv1D(filters=64, kernel_size=3, padding='SAME',
                               activation=tf.nn.relu)(inputs)

    # Four stages of doubling width: conv -> BN -> identity block(s).
    for output_dim in (64, 128, 256, 512):
        x = tf.keras.layers.Conv1D(filters=output_dim, kernel_size=3,
                                   padding='SAME', activation=tf.nn.relu)(x)
        x = tf.keras.layers.BatchNormalization()(x)
        for _ in range(blocks_per_stage):
            x = identity_block(x, output_dim)

    x = tf.keras.layers.Flatten()(x)
    x = tf.keras.layers.Dropout(0.217)(x)  # dropout rate kept from the original
    outputs = tf.keras.layers.Dense(num_classes, activation=tf.nn.softmax)(x)
    return tf.keras.Model(inputs=inputs, outputs=outputs)
|
|
|
|
|
|
if __name__ == '__main__':
    # Build, train, and evaluate the ResNet classifier on the DDS gearbox data.
    model = resnet_Model()
    model.compile(
        optimizer=tf.optimizers.Adam(1e-2),
        loss=tf.losses.sparse_categorical_crossentropy,
        metrics=['accuracy'],
    )
    model.summary()
    model.fit(train_data, train_label, epochs=10, batch_size=10)
    print('score:', model.evaluate(test_data, test_label))
|