
lgray / test_laurelin_nano1.log
Created September 26, 2019 13:54
log from fermicloud318 on /eos/uscms/store/group/lpccoffea/coffeabeans/102X/nano_2018/QCD_HT2000toInf_TuneCP5_13TeV-madgraphMLM-pythia8/NanoTuples-2018_RunIIAutumn18MiniAOD-102X_v15-v1/190321_064928/0000/nano_1.root
(py36) [lagray@fermicloud318 laurelin]$ python ../test_laurelin.py
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/spark-2.4.3-bin-hadoop2.7/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/opt/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Ivy Default Cache set to: /home/lagray/.ivy2/cache
The jars for the packages stored in: /home/lagray/.ivy2/jars
:: loading settings :: url = jar:file:/opt/spark-2.4.3-bin-hadoop2.7/jars/ivy-2.4.0.jar!/org/apache/ivy/core/settings/ivysettings.xml
edu.vanderbilt.accre#laurelin added as a dependency
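
test_laurelin.py itself does not appear in this log; as a rough sketch of how Laurelin is typically invoked from PySpark (the 'root' format name, the 'tree' option, and the package version below are assumptions, not taken from the log):

from pyspark.sql import SparkSession

spark = (SparkSession.builder
         .config('spark.jars.packages', 'edu.vanderbilt.accre:laurelin:1.0.0')  # version is a placeholder
         .getOrCreate())

df = (spark.read.format('root')        # Laurelin's data source name, assumed here
      .option('tree', 'Events')
      .load('/eos/uscms/store/group/lpccoffea/coffeabeans/102X/nano_2018/QCD_HT2000toInf_TuneCP5_13TeV-madgraphMLM-pythia8/NanoTuples-2018_RunIIAutumn18MiniAOD-102X_v15-v1/190321_064928/0000/nano_1.root'))
df.printSchema()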
import fast_curator
import fast_flow.v1 as fast_flow
import pprint
import copy
datasets = fast_curator.read.from_yaml('curator/file_list.yml')
pprint.pprint(datasets)
coffea_datasets = {}
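
The snippet stops right after coffea_datasets = {}; a plausible continuation (the .name, .tree, and .files attributes on the fast_curator dataset objects are assumptions, not shown here) would fill that dict in the fileset layout coffea expects:

for dataset in datasets:
    # attribute names are assumed; adjust to whatever fast_curator actually exposes
    coffea_datasets[dataset.name] = {
        'treename': getattr(dataset, 'tree', 'Events'),
        'files': list(dataset.files),
    }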
import pyarrow as pa
import pyarrow.parquet as pq
import random

def nanoaod2arrowtable(params):
    """
    takes as input a (list of) root file(s) of ~flat ntuples
    and converts them into a single arrow table
    """
    random.seed(None)
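
The body of nanoaod2arrowtable is cut off above; as a rough sketch of the conversion it describes, assuming a recent uproot is available (the helper name root_to_arrow and the 'Events' tree name are illustrative, not from the gist):

import uproot

def root_to_arrow(filenames, treename='Events'):
    # read each ~flat ntuple and concatenate everything into one Arrow table
    if isinstance(filenames, str):
        filenames = [filenames]
    tables = []
    for fname in filenames:
        branches = uproot.open(fname)[treename].arrays(library='np')  # dict of branch name -> numpy array
        tables.append(pa.table(branches))  # pa is pyarrow, imported above
    return pa.concat_tables(tables)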
# here's a dynamic reduction network that can categorize
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.drn = DynamicReductionNetwork(input_dim=3, hidden_dim=64,
                                           k=16,
                                           output_dim=2, aggr='add',
                                           norm=torch.tensor([1., 1./27., 1./27.]))

    def forward(self, data):
        # the body is truncated in the gist; presumably the DRN output is returned
        return self.drn(data)

import torch
import torch.nn.functional as F
import torch_geometric.transforms as T
import torch.nn as nn
from torch_geometric.nn import EdgeConv, DynamicEdgeConv
# let's try a basic implementation of really simple message passing
from torch_scatter import scatter_add

class NodeNetwork(nn.Module):
    def __init__(self, input_dim, output_dim, hidden_activation=nn.Tanh):
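
The NodeNetwork definition is cut off above; to make the "really simple message passing" idea concrete, here is a sketch (not from the gist) of a single scatter_add aggregation step, reusing the imports above:

class SimpleMessagePassing(nn.Module):
    def __init__(self, input_dim, output_dim):
        super(SimpleMessagePassing, self).__init__()
        # one linear layer that turns a (sender, receiver) feature pair into a message
        self.msg = nn.Linear(2 * input_dim, output_dim)

    def forward(self, x, edge_index):
        src, dst = edge_index                      # edge_index has shape [2, num_edges]
        m = self.msg(torch.cat([x[src], x[dst]], dim=-1))
        # sum the incoming messages onto each receiving node
        return scatter_add(m, dst, dim=0, dim_size=x.size(0))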
import os
import os.path as osp
import math
import numpy as np
import torch
import gc
import torch.nn as nn
import torch.nn.functional as F
import torch_geometric.transforms as T
Traceback (most recent call last):
  File "mnist_nn_conv.py", line 71, in <module>
    model = Net().to(device)
  File "mnist_nn_conv.py", line 45, in __init__
    self.conv1 = conv1.jittable(x=init_data.x, edge_index=init_data.edge_index, edge_attr=init_data.edge_attr)
  File "/Users/lagray/pytorch_work/pytorch_geometric/torch_geometric/nn/conv/message_passing.py", line 608, in jittable
    out = torch.jit.script(out)
  File "/anaconda3/envs/torch/lib/python3.7/site-packages/torch/jit/__init__.py", line 1261, in script
    return torch.jit._recursive.create_script_module(obj, torch.jit._recursive.infer_methods_to_compile)
  File "/anaconda3/envs/torch/lib/python3.7/site-packages/torch/jit/_recursive.py", line 305, in create_script_module
import sys
import os
import requests
import argparse
import json
from uuid import uuid1
import pprint
# '0' tells Node.js-based child processes to skip TLS certificate verification
os.environ['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'