#!/usr/bin/python
import json
import requests
import random
import sys
import traceback
import logging
import logging.handlers
import yuktix
import time
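
# Pull daily channel aggregates for each device serial from the Yuktix sensordb
# REST API and append them to one CSV report file per device.
# (yuktix is assumed here to be the vendor-supplied client helper module used by
# this example; it is not part of the standard library.)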
def format_csv_header(day, month, year, channels, data):
    header = "Date,"
    channels = yuktix.sort_data_structure(channels)
    for channel in channels:
        aggregates = data.get(channel)
        if aggregates is None:
            continue
        keys = yuktix.sort_data_structure(aggregates)
        for key in keys:
            header = header + channel + "(" + key + "),"
    header = header[:-1]
    header = header + "\r\n"
    return header
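
# Illustrative output layout, assuming yuktix.sort_data_structure returns keys
# in alphabetical order (with channelNames = ["T"] as configured below):
#   Date,T(max),T(mean),T(min)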
def format_csv(day, month, year, channels, data):
    column = "%d-%d-%d," % (day, month, year)
    if channels is None:
        return column
    channels = yuktix.sort_data_structure(channels)
    for channel in channels:
        aggregates = data.get(channel)
        keys = yuktix.sort_data_structure(aggregates)
        for key in keys:
            value = aggregates[key]
            value = yuktix.convert_unicode_channel_value(channel, value)
            column = column + value + ","
    column = column[:-1]
    column = column + "\r\n"
    return column
# main script
logger = logging.getLogger("main")
logger.setLevel(logging.INFO)
fh = logging.FileHandler("aws.log")
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger.addHandler(fh)

# user input
# _______________________________________
# only the public key is needed: copy it from the profile page of your Yuktix account
server = {
    "endpoint": "http://api1.yuktix.com:8080/sensordb/v1",
    "publicKey": "xyzxyzxyz",
    "signature": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
}
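# channel codes to export; here only "T" (presumably temperature)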
channelNames = ["T"]
# channel vs. aggregate functions
# supported: mean, max, min, sum, stddev etc.
queryMap = {
    "T": ["min", "max", "mean"],
    "P": ["mean"],
    "RH": ["min", "max", "mean"],
    "WS": ["mean", "max"],
    "Rain": ["sum"],
    "Lux": ["min", "max", "mean"]
}
service = yuktix.RestService()
serials = ["gkvk001","thejesh","wipro01"]
# start + end date
start_tt = yuktix.date_to_time_tuple(1,4,2016)
end_tt = yuktix.date_to_time_tuple(21,4,2016)
# No changes below this line
#________________________________________________________
ts_end = yuktix.date_to_unixts(end_tt.tm_mday,end_tt.tm_mon,end_tt.tm_year)
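
# One pass per device serial: walk day by day from the start date, write the
# header row on the first write, then append one aggregate row per day until
# the end date is reached.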
for serial in serials:
    current_tt = start_tt
    fname = serial + "_report_" + str(current_tt.tm_year) + ".csv"
    # no write buffering
    bufsize = 0
    f = open(fname, 'a+', bufsize)
    count = 0
    try:
        while count < 366:
            print "processing %s for %d-%d-%d" % (serial, current_tt.tm_mday, current_tt.tm_mon, current_tt.tm_year)
            data = service.get_device_channel_aggregates(server, serial, channelNames, queryMap, current_tt)
            # write the header row only once, while the file is still empty
            if not yuktix.is_non_zero_file(fname):
                line = format_csv_header(current_tt.tm_mday, current_tt.tm_mon, current_tt.tm_year, channelNames, data)
                f.write(line)
                f.flush()
            line = format_csv(current_tt.tm_mday, current_tt.tm_mon, current_tt.tm_year, channelNames, data)
            f.write(line)
            f.flush()
            # advance to the next calendar day and stop once past the end date
            current_tt = yuktix.get_next_date_tuple(current_tt.tm_mday, current_tt.tm_mon, current_tt.tm_year)
            ts_current = yuktix.date_to_unixts(current_tt.tm_mday, current_tt.tm_mon, current_tt.tm_year)
            if ts_current > ts_end:
                break
            # play nice!
            time.sleep(5)
            count += 1
    except yuktix.RestAPIError as ex:
        xlog = traceback.format_exc()
        logger.error(xlog)
        logger.error(ex)
    except Exception as e:
        xlog = traceback.format_exc()
        logger.error(xlog)
        logger.error(e)
    finally:
        # resource cleanup
        f.close()