#python #tensorflow #keras
Question:
I am trying to compare the fit function with GradientTape. While I get around 80% accuracy when using fit, I only get 27% with GradientTape. I have already checked the possible problems discussed here, where the additional tf.function decorator is used, but without any improvement. Here is my code:
import tensorflow as tf
import numpy as np

epochs = 50
batch_size = 128

# First we're going to fit to Task A and retain a copy of the parameters trained on Task A
#model.fit([X_amp_tr_A, X_phase_tr_A, weight_tr_A, temp_tr_A, humidity_tr_A, loc_tr_A], shelf_life_tr_A,
#          validation_data=([X_amp_test_A, X_phase_test_A, weight_test_A,
#                            temp_test_A, humidity_test_A, loc_test_A], np.array(shelf_life_test_A)))

# Snapshot the weights learned on Task A, keyed by variable index
theta_A = {n: p.value() for n, p in enumerate(model.trainable_variables)}
# Metrics for the custom training loop
accuracy = tf.keras.metrics.CategoricalAccuracy('accuracy')
metric = tf.keras.metrics.CategoricalCrossentropy('categorical_crossentropy')
loss = tf.keras.losses.CategoricalCrossentropy(from_logits=True)
optimizer = tf.keras.optimizers.SGD()

dataset = tf.data.Dataset.from_tensor_slices(({'amp': X_amp_tr_A, 'phase': X_phase_tr_A,
                                               'weight': weight_tr_A, 'temp': temp_tr_A,
                                               'moist': humidity_tr_A, 'zone': loc_tr_A},
                                              shelf_life_tr_A)).batch(batch_size)
@tf.function
def train_step(images, labels, metric):
    with tf.GradientTape() as tape:
        predictions = model(images, training=True)
        # Task loss plus the L2 penalty anchoring the weights to theta_A
        total_loss = loss(labels, predictions) + l2_penalty(model, theta_A)
    grads = tape.gradient(total_loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    metric.update_state(labels, predictions)
    accuracy.update_state(labels, predictions)
for epoch in range(epochs):
    #accuracy.reset_states()
    #metric.reset_states()
    for i, (images, labels) in enumerate(dataset):
        train_step(images, labels, metric)
        print("\rEpoch: {}, Batch: {}, Loss: {:.3f}, Accuracy: {:.3f}".format(
            epoch + 1, i + 1, metric.result().numpy(), accuracy.result().numpy()),
            flush=True, end='')
    print("")
Do you have any suggestions?