This gist shows how to query the MailChimp API using Google Apps Script and push the results to Google BigQuery.
// This script is designed to run on a 1-hour trigger in Google Apps Script. It loads with "WRITE_TRUNCATE",
// which means the existing table data is overwritten with the newest information on every run. You can change
// the variables in campaignList if you want to adjust it for your needs.
function chimpyAPI30days() {
  // BigQuery destination (replace with your own project/dataset/table IDs).
  var projectId = "xxx";
  var datasetId = "xxx";
  var tableId = 'xxx';

  // Only pull campaigns sent in roughly the last 30 days.
  var sinceDate = new Date();
  sinceDate.setDate(sinceDate.getDate() - 29);
  sinceDate = Utilities.formatDate(sinceDate, 'UTC', 'yyyy-MM-dd');

  var data = [];
  var API_KEY = 'xxx';
  // The data center is the suffix of the API key (e.g. "us8").
  var dc = API_KEY.split('-').pop();
  var api = 'https://' + dc + '.api.mailchimp.com/3.0/';
  var campaignList = 'campaigns?count=100&since_send_time=' + sinceDate;
  var options = {"headers": {"authorization": 'apikey ' + API_KEY}};

  var apiCall = function(endpoint) {
    var response = UrlFetchApp.fetch(api + endpoint, options);
    return JSON.parse(response.getContentText());
  };
  var campaigns = apiCall(campaignList);
  var total = campaigns.total_items;
  var campaignData = campaigns.campaigns;

  for (var i = 0; i < campaignData.length; i++) {
    var c = campaignData[i];
    var cid = c.id;
    var send_time = c.send_time;
    if (send_time) {
      // Pull the report for each sent campaign.
      var reports = apiCall('reports/' + cid);
      var reportsSendTime = reports.send_time;
      if (reportsSendTime) {
        // One newline-delimited JSON row per campaign.
        var row = JSON.stringify({
          'id': cid,
          'campaign_title': c.settings.title,
          'subject_line': c.settings.subject_line,
          'emails_sent': reports.emails_sent,
          'abuse_reports': reports.abuse_reports,
          'unsubscribed': reports.unsubscribed,
          'send_time': reports.send_time,
          'bounces': [{
            'hard_bounces': reports.bounces.hard_bounces,
            'soft_bounces': reports.bounces.soft_bounces,
            'syntax_errors': reports.bounces.syntax_errors }],
          'forwards': [{
            'forwards_count': reports.forwards.forwards_count,
            'forwards_opens': reports.forwards.forwards_opens }],
          'opens': [{
            'opens_total': reports.opens.opens_total,
            'unique_opens': reports.opens.unique_opens,
            'open_rate': reports.opens.open_rate,
            'last_open': reports.opens.last_open }]
        });
        data.push(row);
      }
    }
  }
  // Upload the rows as a newline-delimited JSON load job.
  data = data.join("\n");
  var blobData = Utilities.newBlob(data, "application/octet-stream");

  var job = {
    configuration: {
      load: {
        destinationTable: {
          projectId: projectId,
          datasetId: datasetId,
          tableId: tableId
        },
        sourceFormat: "NEWLINE_DELIMITED_JSON",
        writeDisposition: "WRITE_TRUNCATE"
      }
    }
  };

  // Requires the BigQuery advanced service to be enabled for this script project.
  job = BigQuery.Jobs.insert(job, projectId, blobData);
}
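The script above assumes an hourly time-driven trigger. You can create that trigger from the Apps Script UI, or install it programmatically; the sketch below is one minimal way to do so (the helper name installHourlyTrigger is my own, and it does not check for existing triggers).

// Minimal sketch: installs an hourly time-driven trigger for chimpyAPI30days.
// Run it once from the Apps Script editor; if you re-run it, remove any
// duplicate triggers via the Triggers page, since no de-duplication is done here.
function installHourlyTrigger() {
  ScriptApp.newTrigger('chimpyAPI30days')
      .timeBased()
      .everyHours(1)
      .create();
}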