I do not have browser access. I am using NVIDIA Modulus on a GPU server over remote access. I want to visualize all the loss vs. epoch plots, or save them to a nearby directory.
Hi,
in the outputs folder of a Modulus experiment you can find the event files that TensorBoard uses to display data.
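For example (a minimal sketch, assuming the default outputs layout; the exact path depends on your run configuration), you can list the event files like this:

import glob
# find all TensorBoard event files under the experiment's outputs directory (hypothetical path)
print(glob.glob('outputs/**/events.out.*', recursive=True))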
Here is my solution to the same problem:
import os
import math
import matplotlib
matplotlib.use('Agg')  # render figures without a display (headless server)
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
from tensorflow.python.framework import tensor_util
from matplotlib.pyplot import *  # pyplot functions (figure, plot, ...) are used unqualified below
# Function for getting the tags from a TensorBoard event file
# <filepath> path to an event file (files whose name contains "events.out.")
def get_tags(filepath):
    tags = []
    for event in tf.train.summary_iterator(filepath):
        for value in event.summary.value:
            if value.tag not in tags:
                tags.append(value.tag)
    return tags
# Function for getting the values of a loss
# <filepath> path to an event file
# <tag> name of a tag returned by get_tags()
# <steps>, <values> lists that the steps and the loss values are appended to
def get_values_tensorflow(filepath, tag, steps, values):
    for event in tf.train.summary_iterator(filepath):
        for value in event.summary.value:
            if value.tag == tag:
                # the scalar is stored as a tensor, so convert it back to a number
                t = tensor_util.MakeNdarray(value.tensor)
                steps.append(event.step)
                values.append(t.item(0))
# Function for creating the loss graphs
# <event_files_folder_path> path to the folder that contains the events.out files (in my case
#     there is more than one file with this prefix inside the folder)
# <experiment_name> name of the experiment (used in the figure titles)
# <saving_folder_path> where to save the figures
def create_loss_graphs(event_files_folder_path, experiment_name, saving_folder_path):
    event_files = [filename for filename in os.listdir(event_files_folder_path) if 'events.out.' in filename]
    event_files.sort()
    if not event_files:
        raise Exception("No event files in " + event_files_folder_path)
    # I have several event files, but I know that the tags are the same in all of them,
    # so I read the tags only from the first event file
    tags = get_tags(os.path.join(event_files_folder_path, event_files[0]))
    # remove this tag because it is not a loss
    if 'config/text_summary' in tags:
        tags.remove('config/text_summary')
    for tag in tags:
        steps = []
        values = []
        for filename in event_files:
            get_values_tensorflow(os.path.join(event_files_folder_path, filename), tag, steps, values)
        # de-duplicate the steps (keeping the last value recorded for each step) and sort them ascending
        pairs = sorted(dict(zip(steps, values)).items())
        steps = [step for step, _ in pairs]
        values = [value for _, value in pairs]
        log_values = [math.log10(value) for value in values]
        figure(figsize=(16, 9))
        # I prefer a logarithmic scale for the losses (not for the learning rate)
        if tag != 'Train/learning_rate':
            plot(steps, log_values)
            title_string = tag.replace('Train/', '') + ' of ' + experiment_name + ' (logarithmic scale)'
        else:
            plot(steps, values)
            title_string = tag.replace('Train/', '') + ' of ' + experiment_name
        grid()
        title(title_string)
        figure_name_file = tag.replace('Train/', '') + '.png'
        print('Saving ' + os.path.join(saving_folder_path, figure_name_file))
        savefig(os.path.join(saving_folder_path, figure_name_file), bbox_inches="tight")
        close()
Then you can use this code by calling create_loss_graphs(). The function creates a separate graph for every loss logged to TensorBoard. I prefer the logarithmic scale because with the raw values the scale of the results can make the plots hard to read.
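For example (a minimal sketch; the paths and the experiment name below are hypothetical, replace them with your own):

# hypothetical paths: point these at your Modulus outputs folder and at a folder for the figures
os.makedirs('outputs/my_experiment/loss_plots', exist_ok=True)  # make sure the saving folder exists
create_loss_graphs(
    event_files_folder_path='outputs/my_experiment',
    experiment_name='my_experiment',
    saving_folder_path='outputs/my_experiment/loss_plots',
)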
Note that I work with more than one event file, so the code can be simpler if you only have a single one.
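For the single-file case, a simpler version could look like this (a minimal sketch reusing get_tags() and get_values_tensorflow() from above; the path is hypothetical):

event_file = 'outputs/my_experiment/events.out.tfevents.0'  # hypothetical path to the single event file
for tag in get_tags(event_file):
    if tag == 'config/text_summary':
        continue  # not a loss
    steps, values = [], []
    get_values_tensorflow(event_file, tag, steps, values)
    figure(figsize=(16, 9))
    plot(steps, values)
    grid()
    title(tag.replace('Train/', ''))
    savefig(tag.replace('Train/', '') + '.png', bbox_inches='tight')
    close()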