I am training my model as follows:
with tf.Graph().as_default():
    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        K.set_session(sess)
        tf.train.create_global_step()
        # with tf.device('/gpu:0:'):
        m = GAReader.Model(nlayers, data.vocab_size, data.num_chars, W_init,
                           nhidden, embed_dim, dropout, train_emb,
                           char_dim, use_feat, gating_fn, words).build_network()
        m.compile(optimizer=tf.train.AdamOptimizer(0.01),
                  loss=tf.keras.losses.categorical_crossentropy,
                  metrics=[tf.keras.metrics.categorical_accuracy])
        tensorboard = TensorBoardCustom(log_dir="logs", sess=sess)
        m.fit_generator(generator=batch_loader_train,
                        steps_per_epoch=len(batch_loader_train.batch_pool),
                        epochs=100,
                        callbacks=[tensorboard])
I have defined a custom callback that extends keras.callbacks.TensorBoard as:
class TensorBoardCustom(TensorBoard):
    def __init__(self, log_dir, sess, **kwargs):
        super(TensorBoardCustom, self).__init__(log_dir, **kwargs)
        self.sess = sess

    def on_batch_end(self, batch, logs={}):
        summary = tf.summary.merge_all()
        writer = tf.summary.FileWriter(self.log_dir)
        s = self.sess.run(summary)
        writer.add_summary(s, batch)
        writer.close()
        super(TensorBoardCustom, self).on_batch_end(batch, logs)
and I add a new summary as:
l_docin = tf.keras.layers.Input(shape=(None,))
with tf.name_scope('summaries'):
    table = tf.contrib.lookup.index_to_string_table_from_tensor(
        self.mapping_string, default_value="UNKNOWN")
    words = table.lookup(tf.cast(l_qin, tf.int64))
    text = tf.reduce_join(words, 1, separator=' ')
    tf.summary.text('text', text)
However, this does not work, and I get the following error:
InvalidArgumentError (see above for traceback): You must feed a value for placeholder tensor 'input_2' with dtype float and shape [?,?]
[[{{node input_2}} = Placeholder[dtype=DT_FLOAT, shape=[?,?], _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]
Can someone explain why this is happening and how to fix it? Is there a simpler or better way to add custom summaries?
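The error suggests that the merged summary depends on the model's Input placeholders (such as input_2), which are not fed when sess.run(summary) is called on its own inside the callback. For reference, one pattern that sidesteps this is to build a tf.Summary protocol buffer directly instead of evaluating graph tensors. The sketch below is only an illustration under the TF 1.x API; it writes the scalar values Keras passes in logs rather than the text summary above, and the ScalarSummarySketch name is made up for this example:

import tensorflow as tf
from tensorflow.keras.callbacks import Callback

class ScalarSummarySketch(Callback):
    """Minimal sketch: writes scalars from `logs` as tf.Summary protos,
    so no graph tensors are evaluated and no placeholders must be fed."""
    def __init__(self, log_dir):
        super(ScalarSummarySketch, self).__init__()
        self.writer = tf.summary.FileWriter(log_dir)  # TF 1.x writer

    def on_batch_end(self, batch, logs=None):
        logs = logs or {}
        for name, value in logs.items():
            summary = tf.Summary(
                value=[tf.Summary.Value(tag=name, simple_value=float(value))])
            self.writer.add_summary(summary, batch)
        self.writer.flush()

    def on_train_end(self, logs=None):
        self.writer.close()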
The TensorFlow callback TensorBoardWithTime defined below logs the cumulative training and evaluation batch times. It relies on the private attributes TensorBoard._train_writer and TensorBoard._val_writer. It works with TensorFlow 2.4.0rc2.
import tensorflow as tf


class TensorBoardWithTime(tf.keras.callbacks.TensorBoard):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.timer_test_batch = Timer()
        self.timer_train_batch = Timer()

    def on_epoch_end(self, epoch, logs=None):
        super().on_epoch_end(epoch, logs=logs)
        # Uses the private writers of tf.keras.callbacks.TensorBoard.
        with self._train_writer.as_default():
            tf.summary.scalar('time_batch_sum', self.timer_train_batch.pop_result(), step=epoch)
        with self._val_writer.as_default():
            tf.summary.scalar('time_batch_sum', self.timer_test_batch.pop_result(), step=epoch)

    def on_test_batch_begin(self, batch, logs=None):
        self.timer_test_batch.begin()
        super().on_test_batch_begin(batch, logs=logs)

    def on_test_batch_end(self, batch, logs=None):
        super().on_test_batch_end(batch, logs=logs)
        self.timer_test_batch.end()

    def on_train_batch_begin(self, batch, logs=None):
        self.timer_train_batch.begin()
        super().on_train_batch_begin(batch, logs=logs)

    def on_train_batch_end(self, batch, logs=None):
        super().on_train_batch_end(batch, logs=logs)
        self.timer_train_batch.end()


class Timer(tf.keras.metrics.Sum):
    def __init__(self):
        super().__init__()
        self.time_begin = None

    def pop_result(self):
        # Return the accumulated time and reset the metric for the next epoch.
        result = self.result()
        self.reset_states()
        return result

    def begin(self):
        self.time_begin = tf.timestamp()

    def end(self):
        self.update_state(tf.timestamp() - self.time_begin)
        self.time_begin = None
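A hypothetical usage sketch follows; model, train_ds and val_ds are stand-ins for your own compiled Keras model and datasets, not anything defined in the answer:

# Hypothetical usage: attach the callback to an ordinary fit() call.
callback = TensorBoardWithTime(log_dir='logs')
model.fit(train_ds, validation_data=val_ds, epochs=5, callbacks=[callback])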