Timeseries Insights API - Create Dataset
import requests
import json

# TODO - replace with your service account key file; assumes PROJECT_ID is already defined
KEY_FILE = '/<path-to-your-service-account-file>/file.json'  # replace with project-specific service account key
ts_endpoint = f'https://timeseriesinsights.googleapis.com/v1/projects/{PROJECT_ID}/datasets'
# Helper function to call Timeseries Insights API endpoints
def query_ts(method, endpoint, data, auth_token):
    data = json.dumps(data)  # serialize the request body as JSON
    headers = {'Content-type': 'application/json', "Authorization": f"Bearer {auth_token}"}
    if method == "GET":
        resp = requests.get(endpoint, headers=headers)
    if method == "POST":
        resp = requests.post(endpoint, data=data, headers=headers)
    if method == "DELETE":
        resp = requests.delete(endpoint, headers=headers)
    return resp.json()
# Authentication - activate the service account and fetch an access token
!gcloud auth activate-service-account --key-file {KEY_FILE}
token_array = !gcloud auth print-access-token
# Create dataset using the API
file_data = {
    "name": "sensor-data",  # TSI dataset name
    "ttl": "30000000s",  # time to live - used for streaming inserts; indicates which records to discard
    "dataNames": [
        "measure",
        "Humidity",
        "Light",
        "h2_raw",
        "temp",
    ],
    "dataSources": [
        {"uri": "gs://<your-bucket-name-here>/transformed.json"}  # transformed JSON data file in Cloud Storage
    ],
}
res = query_ts(method="POST", endpoint=ts_endpoint, data=file_data, auth_token=token_array[0])
res
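# Optional check (not part of the original gist): list the project's datasets to
# verify the new dataset was registered and to inspect its indexing status.
# This reuses the query_ts helper, ts_endpoint, and token_array defined above;
# the list call is assumed to be a GET on the same datasets endpoint.
datasets = query_ts(method="GET", endpoint=ts_endpoint, data=None, auth_token=token_array[0])
datasets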