@jmchilton
Created February 25, 2013 15:18
Modifying Galaxy's example_watch_folder.py for multi-file uploads. Assumes a directory_ready(path) helper exists that indicates whether a directory is ready to be processed (a sketch of such a helper follows the code).
# Modified polling loop from Galaxy's scripts/api/example_watch_folder.py.
# Relies on the variables set up earlier in that script (api_key, api_url,
# library_id, library_folder_id, workflow, in_folder, out_folder), the
# standard-library os, time and shutil modules, and the submit() helper
# from scripts/api/common.py.
while 1:
    # Watch in_folder; upload any directory that shows up there to the data
    # library, invoke the workflow on the resulting datasets, then move the
    # directory to out_folder.
    for fname in os.listdir(in_folder):
        fullpath = os.path.join(in_folder, fname)
        if os.path.isdir(fullpath) and directory_ready(fullpath):
            directory_files = [os.path.join(fullpath, subdir_fname) for subdir_fname in os.listdir(fullpath)]
            data = {}
            data['folder_id'] = library_folder_id
            data['file_type'] = 'auto'
            data['dbkey'] = ''
            data['upload_option'] = 'upload_paths_multifile'
            data['filesystem_paths'] = directory_files
            data['create_type'] = 'file'
            libset = submit(api_key, api_url + "libraries/%s/contents" % library_id, data, return_formatted=False)
            # TODO: Handle this better, but the datatype isn't always set for
            # the follow-up workflow execution without this pause.
            time.sleep(5)
            all_submitted = True
            for ds in libset:
                if 'id' in ds:
                    # Successful upload of the dataset; we have the ldda now.
                    # Run the workflow on it.
                    wf_data = {}
                    wf_data['workflow_id'] = workflow['id']
                    wf_data['history'] = "%s - %s" % (fname, workflow['name'])
                    wf_data['ds_map'] = {}
                    for step_id, ds_in in workflow['inputs'].iteritems():
                        wf_data['ds_map'][step_id] = {'src': 'ld', 'id': ds['id']}
                    res = submit(api_key, api_url + 'workflows', wf_data, return_formatted=False)
                    if res:
                        print res
                    else:
                        all_submitted = False
            if all_submitted:
                # Workflows were submitted for every uploaded dataset; safe to
                # move the processed directory out of the watch folder.
                shutil.move(fullpath, os.path.join(out_folder, fname))
    time.sleep(10)
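
The loop above assumes a directory_ready(path) helper. Below is a minimal sketch of one possible implementation; the helper name comes from the description above, while the settling-window heuristic and the QUIET_SECONDS value are assumptions, not part of the original script. The idea is to treat a directory as ready only once none of its files have been modified recently, so a directory still being copied into the watch folder is skipped.

import os
import time

QUIET_SECONDS = 30  # assumed settling window before a directory counts as ready

def directory_ready(path):
    # A directory is "ready" when it is non-empty and none of its entries
    # have been modified within the last QUIET_SECONDS seconds.
    entries = os.listdir(path)
    if not entries:
        return False
    now = time.time()
    for entry in entries:
        if now - os.path.getmtime(os.path.join(path, entry)) < QUIET_SECONDS:
            return False
    return True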