self_example/TensorFlow_eaxmple/TensorFlow_constant_Variable/review.py

import tensorflow as tf
import numpy as np
from sklearn.datasets import load_iris
data = load_iris()
iris_target = data.target
iris_data = np.float32(data.data)
print('first:', iris_data.shape)
# One-hot encode the integer labels into 3 classes
iris_target = np.float32(tf.keras.utils.to_categorical(iris_target, num_classes=3))
# Wrap features and labels in tf.data pipelines, batched 50 samples at a time
iris_data = tf.data.Dataset.from_tensor_slices(iris_data).batch(50)
print('batched element spec:', iris_data.element_spec)  # a tf.data.Dataset has no .shape attribute
iris_target = tf.data.Dataset.from_tensor_slices(iris_target).batch(50)
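# Illustrative alternative (an assumption, not used in the loop below): the two pipelines
# could also be zipped into one dataset that yields (features, labels) pairs per batch;
# combined_ds is a hypothetical name introduced only for this sketch.
combined_ds = tf.data.Dataset.zip((iris_data, iris_target))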
model = tf.keras.models.Sequential()
# Add layers
model.add(tf.keras.layers.Dense(32, activation="relu"))
model.add(tf.keras.layers.Dense(64, activation="relu"))
model.add(tf.keras.layers.Dense(3, activation="softmax"))  # 3-way class probabilities
opt = tf.optimizers.Adam(1e-3)  # Adam optimizer with a 1e-3 learning rate
for epoch in range(1000):
    for _data, label in zip(iris_data, iris_target):
        with tf.GradientTape() as tape:
            # Standard pattern: tf.reduce_mean computes the loss, tape.gradient computes the gradients
            logits = model(_data)
            loss_value = tf.reduce_mean(
                tf.keras.losses.categorical_crossentropy(y_true=label, y_pred=logits))
        grads = tape.gradient(loss_value, model.trainable_variables)
        opt.apply_gradients(zip(grads, model.trainable_variables))
    print('Training loss is:', loss_value.numpy())
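
# --- Illustrative sketch (not part of the original script) ---
# A quick sanity check after training, assuming the arrays loaded above are still in scope:
# run the full Iris feature matrix through the trained model and compare the argmax of the
# softmax output to the raw integer labels.
predictions = model(np.float32(data.data))
predicted_classes = tf.argmax(predictions, axis=1).numpy()
print('Training-set accuracy:', np.mean(predicted_classes == data.target))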