import json
import logging
import os
import uuid

import requests
import sevenbridges as sbg

log = logging.getLogger(__name__)

os.environ['SB_API_ENDPOINT'] = 'https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2'
# Generate a developer token at:
# https://platform.sb.biodatacatalyst.nhlbi.nih.gov/developer/token
# and make sure os.environ['SB_AUTH_TOKEN'] is set before running this script.
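# Optional fail-fast guard (a sketch; it assumes you'd rather get an early,
# explicit error here than a failed 'Login' check further down):
if not os.environ.get('SB_AUTH_TOKEN'):
    raise RuntimeError('SB_AUTH_TOKEN is not set; generate a token at the developer page above.')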


def get_sevenbridges_status():
    expected_api_response = {
        "rate_limit_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/rate_limit",
        "user_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/user",
        "users_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/users",
        "billing_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/billing",
        "projects_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/projects",
        "files_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/files",
        "tasks_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/tasks",
        "apps_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/apps",
        "action_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/action",
        "upload_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/upload",
        "storage_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/storage",
        "markers_url": "https://api.sb.biodatacatalyst.nhlbi.nih.gov/v2/genome/markers",
    }
    sb_status = {'Login': 'unknown',
                 # 'Execution': 'unknown',
                 'API': 'unknown',
                 # 'Data Uploaders': 'unknown',
                 'Files': 'unknown',
                 'Billing': 'unknown'}
    # Check that the raw API endpoint responds at all.
    try:
        response = requests.get(os.environ['SB_API_ENDPOINT'])
        response.raise_for_status()
        if response.json() != expected_api_response:
            log.warning(f'API seems to have returned an unexpected response (and may have been updated): '
                        f'{json.dumps(response.json(), indent=4)}')
        sb_status['API'] = True
        log.info('API use returned successfully.')
    except Exception as e:
        log.warning('API use failed.')
        log.warning(e)
        sb_status['API'] = False
    # Check that we can authenticate and list projects.
    try:
        api = sbg.Api(advance_access=True)
        api.projects.query().all()  # assert 'lonb/test' in [p.id for p in api.projects.query().all() if p.id == 'lonb/test']
        sb_status['Login'] = True
        log.info('Login returned successfully.')
    except Exception as e:
        log.warning('Login use failed.')
        log.warning(e)
        sb_status['Login'] = False
        # without a working login, none of the remaining checks can run
        return sb_status
    try:
        bg = api.billing_groups.query(limit=1)[0]
        log.info('Billing query returned successfully.')
        sb_status['Billing'] = True
    except Exception as e:
        log.warning('Billing query failed.')
        log.warning(e)
        sb_status['Billing'] = False
        # a billing group is needed to create the test project below
        return sb_status
    # create a local test file to round-trip through the platform
    test_file_name = 'test.txt'
    test_file_path = os.path.abspath(test_file_name)
    with open(test_file_path, 'w') as f:
        f.write('nothing to see here')

    # this file doesn't exist yet and will be created when we download test_file_name from the project after upload
    downloaded_test_file_path = os.path.abspath(f'downloaded_{test_file_name}')
    project = None
    try:
        # create a throwaway project, named with a uuid to avoid collisions
        project = api.projects.create(name=str(uuid.uuid4()), billing_group=bg.id)
        log.info(f'Creating project: {project}')
        try:
            files = api.files.query(project=project)
            log.info(f' - Fresh project should have 0 files: {files}')
            log.info(f' - Uploading: {test_file_path}')
            upload = api.files.upload(test_file_path, project, file_name=test_file_name)
            upload.wait()
            files = api.files.query(project=project)
            log.info(f' - Project should now have 1 file: {files}')
            my_file = [file for file in files if test_file_name == file.name][0]
            sb_file = api.files.get(my_file.id)
            sb_file.download(downloaded_test_file_path)
            assert os.path.exists(downloaded_test_file_path)
            log.info(f' - File downloaded successfully: {downloaded_test_file_path}')
            log.info('File upload/download to project returned successfully.')
            sb_status['Files'] = True
        except Exception as e:
            log.warning('File upload/download to project failed.')
            log.warning(e)
            sb_status['Files'] = False
            return sb_status
    finally:
        # clean up the local files and the throwaway project, even on failure
        log.info(f' - Removing {test_file_path} and {downloaded_test_file_path}')
        for f in [test_file_path, downloaded_test_file_path]:
            if os.path.exists(f):
                os.remove(f)
        if project is not None:  # guard: project creation itself may have failed
            log.info(f' - Deleting project: {project}')
            project.delete()
    return sb_status
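

# Hypothetical follow-on use (a sketch, not part of the original gist): treat the
# returned dict as a monitoring probe and exit non-zero if any subsystem failed.
#
#     import sys
#     status = get_sevenbridges_status()
#     sys.exit(0 if all(v is True for v in status.values()) else 1)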


def suppress_exotic_logging(local_logger):
    """
    Attempts to suppress the loggers of all packages other than our own by setting them to CRITICAL.

    For example: 'requests_oauthlib', 'google', 'boto', 'websocket', 'oauthlib', etc.

    This will only suppress loggers that have already been instantiated and can be seen in the environment.
    """
    never_suppress = ['__main__']
    top_level_loggers = list()
    for pkg_logger in list(logging.Logger.manager.loggerDict.keys()):
        if pkg_logger != local_logger.name:  # loggerDict keys are names, so compare against the logger's name
            # many sub-loggers may exist, like "boto.a", "boto.b", "boto.c"; we only want the top level: "boto"
            top_level_logger = pkg_logger.split('.')[0] if '.' in pkg_logger else pkg_logger
            if top_level_logger not in top_level_loggers + never_suppress:
                top_level_loggers.append(top_level_logger)
                logging.getLogger(top_level_logger).setLevel(logging.CRITICAL)
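

# The gist never configures logging, so the log.info() progress messages above
# would otherwise be dropped; a minimal setup (an assumed addition) to show them:
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')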
suppress_exotic_logging(log)
status = get_sevenbridges_status()
print(json.dumps(status, indent=4))
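
# Illustrative output when every check passes (an assumption; actual results
# depend on your token, billing group, and platform health):
#
# {
#     "Login": true,
#     "API": true,
#     "Files": true,
#     "Billing": true
# }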