Subhaditya Mukherjee (SubhadityaMukherjee)

@SubhadityaMukherjee
SubhadityaMukherjee / freelance.md
Last active December 8, 2023 11:31
Freelance Projects I have worked on

Freelance Projects/Blogs I have worked on

I am Subhaditya, it's nice to meet you!
Below is a list of projects I have worked on for various clients over the years.
I wanted to keep a record of them for both myself and future employers.
That being said, I am almost always open to more freelance work, so hit me up if you want to discuss a project :)
@SubhadityaMukherjee
SubhadityaMukherjee / tb_log_to_df.py
Created April 11, 2023 09:27
Process TensorBoard logs and convert them to a queryable DataFrame
"""
Run result aggregator
- Read all tensorbord logs and save as a pandas dataframe for analysis
"""
import os
import pandas as pd
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator
from tqdm import tqdm
from pathlib import Path
filtered_df = combined_df[(~pd.isnull(combined_df['converted_proxy'])) & (~pd.isnull(combined_df['original_images']))]
filtered_df.iloc[0].original_images
combined_df = process_runs(main_path=main_path)
combined_df = combined_df[(~pd.isnull(combined_df['experiment_name'])) & (~pd.isnull(combined_df['Loss/Val']))]
combined_df.head()
import pickle
with open("pickled_df.pkl", "wb+") as f:
pickle.dump(combined_df, f)
with open("pickled_df.pkl", "rb+") as f:
combined_df = pickle.load(f)
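Once the DataFrame is cached, it can be queried like any other pandas DataFrame. A minimal sketch, assuming the same pickled_df.pkl file and that each run logged the 'experiment_name' and 'Loss/Val' columns used above:

import pickle

import pandas as pd

# Reload the cached results (written by the snippet above)
with open("pickled_df.pkl", "rb") as f:
    combined_df = pickle.load(f)

# Hypothetical query: lowest validation loss per experiment,
# assuming the 'experiment_name' and 'Loss/Val' columns exist as above
best_runs = (
    combined_df
    .dropna(subset=["experiment_name", "Loss/Val"])
    .groupby("experiment_name")["Loss/Val"]
    .min()
    .sort_values()
)
print(best_runs.head())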
@SubhadityaMukherjee
SubhadityaMukherjee / tbprocessevents.py
Last active April 11, 2023 09:26
tbprocessevents
"""
Run result aggregator
- Read all TensorBoard logs and save them as a pandas DataFrame for analysis
"""
import os
from pathlib import Path

import pandas as pd
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator
from tqdm import tqdm


def get_event_files(main_path) -> list:
    """Return a list of event files under the given directory"""
    all_files = []
    for root, _, filenames in os.walk(main_path):
        for filename in filenames:
            if "events.out.tfevents" in filename:
                all_files.append(str(Path(root) / Path(filename)))
    return all_files


def process_event_acc(event_acc, save_ims=False) -> dict:
    """Process the EventAccumulator and return a dictionary of tag values"""
    all_tags = event_acc.Tags()  # Get all tags
    temp_dict = {}  # Store all values here
    for tag in all_tags.keys():  # Loop over all tags
        if tag == "scalars":
            # Process scalars
            for subtag in all_tags[tag]:
                try:
                    # Try to get the last value logged for this scalar
                    # (the gist preview cuts off here; taking the final
                    #  ScalarEvent's value is an assumed completion)
                    temp_dict[subtag] = event_acc.Scalars(subtag)[-1].value
                except Exception:
                    # Skip scalars that cannot be read
                    continue
    return temp_dict


def process_runs(main_path, save_ims=False) -> pd.DataFrame:
    """Process all runs and return a dataframe of all results"""
    all_files = get_event_files(main_path=main_path)
    all_dict = {}
    for files in tqdm(all_files, total=len(all_files)):
        try:
            # Process each file using the EventAccumulator and save to a dictionary
            event_acc = EventAccumulator(files)
            event_acc.Reload()
            temp_dict = process_event_acc(event_acc, save_ims=save_ims)
            # (assumed completion: collect per-run results keyed by event file path)
            all_dict[files] = temp_dict
        except Exception:
            # Skip event files that cannot be parsed
            continue
    return pd.DataFrame.from_dict(all_dict, orient="index")
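A usage sketch for the helpers above, assuming a hypothetical "logs/" directory that contains TensorBoard event files:

# Hypothetical usage; "logs/" is an assumed directory, not from the gist
main_path = "logs/"
combined_df = process_runs(main_path=main_path, save_ims=False)
print(combined_df.shape)
print(combined_df.columns)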
@SubhadityaMukherjee
SubhadityaMukherjee / tb_log_parser.py
Created March 16, 2023 10:51
parse_query_tb_logs
import base64
import os
from io import BytesIO
from pathlib import Path

import clipboard
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator
from tqdm import tqdm
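The preview stops at the imports. The clipboard/base64/BytesIO combination suggests helpers for exporting plots of the parsed logs; the sketch below is only an illustration under that assumption, and fig_to_clipboard plus the example figure are hypothetical, not from the gist:

def fig_to_clipboard(fig):
    # Hypothetical helper: render a matplotlib figure to PNG in memory,
    # base64-encode it, and copy the encoded string to the clipboard
    buf = BytesIO()
    fig.savefig(buf, format="png", bbox_inches="tight")
    encoded = base64.b64encode(buf.getvalue()).decode("utf-8")
    clipboard.copy(encoded)

fig, ax = plt.subplots()
ax.plot(np.arange(10), np.arange(10) ** 2)
fig_to_clipboard(fig)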