Skip to content

Instantly share code, notes, and snippets.

@Yu-AnChen
Last active December 15, 2021 00:32
Show Gist options
  • Save Yu-AnChen/5f3f5ea14442ef472c8cd1698752c9fe to your computer and use it in GitHub Desktop.
Save Yu-AnChen/5f3f5ea14442ef472c8cd1698752c9fe to your computer and use it in GitHub Desktop.
Export CyCIF Experiment Tracker plans in HTAN metadata format

Purpose

Export CyCIF Experiment Tracker plans in HTAN metadata format

Execution

  1. In google chrome, go to CyCIF Experiment Tracker and make sure you are logged in
  2. In any of your chrome tab/window, hit F12 to launch the developer tools
  3. In the console tab of the DevTools paste in the contents of the export-cycif-channel-metadata.js script
  4. Edit project_number
  5. Edit exp_number
  6. Edit exp_name
  7. Hit enter to run; it will prompt you to save a CSV file — your metadata sheet is now downloaded

Editing in Excel

  1. In Excel, go to File -> Open... and select the downloaded CSV file
  2. Select all cells, change Number Formats from "General" to "Text"
  3. Save the file as .xlsx
  4. Edit the .xlsx file as needed
// --- User configuration: edit these three values before running ---
var project_number = 111;
var exp_number = 1;
var exp_name = '168-tumor';
// Set true to export the cycles actually run instead of the planned ones.
var use_actual_cycles = false;

var href = `https://reagenttracker.hms.harvard.edu/api/v0/imaging_cycle/${project_number}/${exp_number}`;
var csv_name = `${exp_name}-${project_number}-${exp_number}`;

// Fetch the vocabularies and the experiment in parallel, assemble the flat
// metadata rows, log them for inspection, then trigger the CSV download.
// The .catch makes failures (not logged in, bad project/exp number, network
// error) visible in the console instead of an unhandled promise rejection.
Promise
  .all([parse_vocab(), get_exp(href)])
  .then((results) => assemble(...results, use_actual_cycles))
  .then((rows) => {
    console.info(rows);
    return rows;
  })
  .then((rows) => export_csv(table_columns, rows, csv_name))
  .catch((err) => console.error('CyCIF metadata export failed:', err));
// Column order of the exported HTAN metadata sheet; these strings are also
// the keys looked up on each assembled row by export_csv.
var table_columns = [
  // channel identity / ordering
  'Channel ID', 'Channel Name', 'Channel Passed QC',
  'Cycle Number', 'Sub Cycle Number',
  // antibody / reagent identity
  'Target Name', 'Antibody Name', 'Antibody Role', 'RRID identifier',
  'Fluorophore', 'Clone', 'Lot', 'Vendor', 'Catalog Number',
  // optical configuration
  'Excitation Wavelength', 'Emission Wavelength',
  'Excitation Bandwidth', 'Emission Bandwidth',
  // non-fluorophore label modalities (left blank by this exporter)
  'Metal Isotope Element', 'Metal Isotope Mass',
  'Oligo Barcode Upper Strand', 'Oligo Barcode Lower Strand',
  // staining conditions
  'Dilution', 'Concentration'
];
async function parse_vocab() {
  // Fetch the tracker's controlled-vocabulary tables and build id -> value
  // lookup maps: `label` maps to the plain vocab string, while the two filter
  // maps split the trailing "center/bandwidth" token (e.g. "... 470/24")
  // into a [center, bandwidth] pair.
  const response = await fetch('https://reagenttracker.hms.harvard.edu/api/v0/vocabs');
  const { vocabs } = await response.json();

  const to_lookup = (entries, transform) =>
    Object.fromEntries(entries.map((entry) => [entry.id, transform(entry.value)]));
  const split_filter = (value) => value.split(' ').pop().split('/');

  return {
    label: to_lookup(vocabs.molecular_label, (value) => value),
    ex_filter: to_lookup(vocabs.microscope_ex_filter, split_filter),
    em_filter: to_lookup(vocabs.microscope_em_filter, split_filter),
  };
}
async function get_exp(href) {
  // Retrieve one experiment's imaging-cycle plan from the tracker API
  // and return it as parsed JSON.
  const response = await fetch(href);
  return response.json();
}
function assign_cycle_channel_num(rows) {
  // Walk the flat row list in order and derive sequential numbering:
  // - cycle_number starts a new cycle (increments) whenever a row's ordinal
  //   is 0, i.e. the first channel of a cycle;
  // - channel_idx increments whenever the ordinal changes from the previous
  //   row, so rows sharing an ordinal (multiple antibodies on one channel)
  //   share a channel index.
  // Returns new row objects; 'Cycle Number' / 'Channel ID' are the
  // spreadsheet-facing copies of the derived counters.
  const numbered = [];
  let prev = {};
  for (const curr of rows) {
    // Default the counters on the sentinel first `prev` (an empty object).
    prev.cycle_number = prev.cycle_number || 0;
    prev.ordinal = prev.ordinal || 0;
    prev.channel_idx = prev.channel_idx || 0;

    const cycle_number = curr.ordinal === 0
      ? prev.cycle_number + 1
      : prev.cycle_number;
    const channel_idx =
      prev.channel_idx + (prev.ordinal === curr.ordinal ? 0 : 1);

    const next = {
      ...curr,
      cycle_number,
      channel_idx,
      'Cycle Number': cycle_number,
      'Channel ID': `Channel:0:${channel_idx}`,
      ordinal: curr.ordinal,
    };
    numbered.push(next);
    prev = next;
  }
  return numbered;
}
function assemble(vocabs, exp, use_actual_cycles=false) {
  // Join the experiment's cycles, channels, antibodies, batches and the
  // vocab lookup maps into one flat spreadsheet row per
  // (cycle, channel, antibody), then hand off to assign_cycle_channel_num
  // for cycle/channel numbering.
  const cycle_key = use_actual_cycles
    ? 'actual_imaging_cycles'
    : 'planned_imaging_cycles';

  // batch href -> batch record
  const batch_by_href = {};
  for (const batch of exp.batches) {
    batch_by_href[batch.href] = batch;
  }

  // One row per antibody per channel; channel fields win on key clashes.
  const out_rows = exp[cycle_key].flatMap((cycle) =>
    cycle.channels.flatMap((channel) =>
      channel.antibodies.map((ab) => ({ ...ab, ...channel }))
    )
  );

  // lincs_id -> antibody record
  const antibody_by_id = {};
  for (const ab of exp.antibodies) {
    antibody_by_id[ab.lincs_id] = ab;
  }

  for (const row of out_rows) {
    // Attach the full antibody record (may be undefined for bare channels).
    row.antibody = antibody_by_id[row.antibody_stain_lsp_id];

    // Attach batch info: first distinct catalog id / vendor across the
    // antibody's batches.
    const ab = row.antibody;
    row.batch = {
      catalog_id: ab
        ? [...new Set(ab.batches.map((href) => batch_by_href[href].provider_catalog_id))][0]
        : '',
      vendor: ab
        ? [...new Set(ab.batches.map((href) => batch_by_href[href].provider))][0]
        : '',
    };
  }

  // Populate the spreadsheet-facing columns on every row.
  for (const row of out_rows) {
    const antibody = row.antibody || {};
    row['Channel Passed QC'] = row.comments;
    row['Target Name'] = antibody.target_name;
    row['Antibody Name'] = antibody.name;
    // 'Edit' is emitted when a channel carries several antibodies —
    // presumably a cue for manual correction in Excel (TODO confirm).
    row['Antibody Role'] = row['Antibody Name']
      ? (row.antibodies.length === 1 ? 'Primary' : 'Edit')
      : '';
    row['RRID identifier'] = antibody.rrid;
    row['Fluorophore'] = vocabs.label[antibody.label];
    row['Clone'] = antibody.clone_name;
    row['Vendor'] = row.batch.vendor;
    row['Catalog Number'] = row.batch.catalog_id;
    row['Excitation Wavelength'] = vocabs.ex_filter[row.ex_filter]?.[0];
    row['Emission Wavelength'] = vocabs.em_filter[row.em_filter]?.[0];
    row['Excitation Bandwidth'] = vocabs.ex_filter[row.ex_filter]?.[1];
    row['Emission Bandwidth'] = vocabs.em_filter[row.em_filter]?.[1];
    row['Dilution'] = row.dilution_factor ? `1:${row.dilution_factor}` : '';
  }

  return assign_cycle_channel_num([...out_rows]);
}
function export_csv(table_columns, rows, csv_name) {
  // Serialize `rows` into a CSV (one column per entry in `table_columns`)
  // and trigger a browser download via a data: URI anchor click.
  // Returns the un-encoded data URI string.
  csv_name = csv_name || 'cycif-tracker';
  // Wrap each value as ="..." so Excel treats every cell as literal text.
  // `??` (not `||`) so legitimate falsy values such as 0 survive; only
  // missing/nullish cells become empty strings.
  const row_string = rows
    .map((row) => table_columns.map((column) => `="${row?.[column] ?? ''}"`).join(','))
    .join('\n');
  const payload = `${table_columns.join(',')}\n${row_string}`;
  // encodeURIComponent escapes '#', '&', '%', etc. A raw '#' would be read
  // as a URI fragment delimiter and truncate the download (the previous
  // encodeURI + replace(/#/g, '---') workaround corrupted the data instead).
  const uri = `data:text/csv;charset=utf-8,${encodeURIComponent(payload)}`;
  const link = document.createElement("a");
  link.setAttribute("href", uri);
  link.setAttribute("download", `${csv_name}.csv`);
  document.body.appendChild(link);
  link.click();
  return `data:text/csv;charset=utf-8,${payload}`;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment