Skip to content

Instantly share code, notes, and snippets.

@jobscry
Last active October 29, 2020 21:43
Show Gist options
  • Save jobscry/e4dac923116d2ec5474d2cd7f5f37c2b to your computer and use it in GitHub Desktop.
Sentinel One API Pull Software Report into CSV
#! python3
import argparse
import csv
import time
import requests
# Base HTTP headers sent with every API request; the per-run
# "Authorization" header is merged in by result_generator().
HEADERS = {
"Accept": "application/json",
"User-Agent": "vz/s1_software_report_v1.0",
"Content-Type": "application/json",
}
# Agents list endpoint (SentinelOne management API v2.1).
S1_GET_AGENTS = "/web/api/v2.1/agents"
# Keys copied from each agent record into the CSV output.
AGENT_FIELDS = ["computerName", "id", "os", "domain"]
# Installed-applications endpoint for a given agent.
S1_GET_SOFTWARE = "/web/api/v2.1/agents/applications"
# Keys copied from each application record into the CSV output.
SOFTWARE_FIELDS = ["name", "version", "publisher", "installedDate", "size"]
# NOTE(review): FIELDS appears unused in this script -- confirm before removing.
FIELDS = ["computerName", "domain", "passphrase"]
# Page size for paginated API requests.
LIMIT = 200
# Seconds to sleep between page requests (simple rate limiting).
WAIT = 2
def result_generator(url, fields, api_key, params=None):
    """Yield items from a paginated SentinelOne API endpoint.

    Follows ``pagination.nextCursor`` until the API reports no further
    pages, sleeping WAIT seconds between page requests.

    Args:
        url: Full endpoint URL to GET.
        fields: Keys to extract from each returned item; keys missing
            from an item are skipped.
        api_key: SentinelOne API token, sent as an ``ApiToken`` header.
        params: Optional base query parameters for the request.

    Yields:
        dict mapping each present field name to its value as a string.
    """
    headers = {**HEADERS, "Authorization": "ApiToken " + api_key}
    next_cursor = None
    done, errored = False, False
    while not (done or errored):
        if next_cursor:
            # Re-issue the request with the pagination cursor while
            # preserving any caller-supplied query parameters.
            if params:
                params = {**params, "cursor": next_cursor}
            else:
                params = {"cursor": next_cursor}
        # timeout keeps the script from hanging forever on a dead server
        response = requests.get(url, headers=headers, params=params, timeout=60)
        if response.status_code != requests.codes.ok:
            errored = True
            print(f"error getting data: {response.status_code}")
            print(response.url)
            print(response.headers)
        else:
            data = response.json()
            if "pagination" in data:
                next_cursor = data["pagination"]["nextCursor"]
                if next_cursor is None:
                    done = True
            else:
                done = True
            if "data" in data:
                for item in data["data"]:
                    yield {k: str(item[k]) for k in fields if k in item}
                # brief pause between pages to respect API rate limits
                time.sleep(WAIT)
            else:
                errored = True
                print("error parsing data")
                # .get() guards against a KeyError when the error payload
                # carries no "errors" key (original indexed directly)
                print(data.get("errors"))
            # release the parsed page before fetching the next one
            del data
def main():
    """CLI entry point: dump installed software for every agent to CSV.

    Positional args: management-console base URL, API key, and a
    comma-separated list of site IDs; ``--output_file``/``-o`` names the
    CSV file (default ``output.csv``).
    """
    parser = argparse.ArgumentParser(
        description="Query SentinelOne API, return software list as CSV."
    )
    parser.add_argument("url")
    parser.add_argument("api_key", help="API Key")
    parser.add_argument("site_ids", help="SentinelOne site Id(s), separated by commas.")
    parser.add_argument(
        "--output_file",
        "-o",
        help="Output filename, default is output.csv",
        default="output.csv",
    )
    args = parser.parse_args()

    print("getting agents")
    # Materialize the full agent list up front so progress (i/total)
    # can be reported while fetching per-agent software.
    agents = list(
        result_generator(
            args.url + S1_GET_AGENTS,
            AGENT_FIELDS,
            args.api_key,
            {"siteIds": args.site_ids.split(","), "limit": LIMIT},
        )
    )
    agent_count = len(agents)
    print(f"found {agent_count} agents")

    fields = AGENT_FIELDS + SOFTWARE_FIELDS
    with open(args.output_file, "w", newline="", encoding="utf-8") as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fields)
        writer.writeheader()
        # enumerate replaces the original hand-maintained i counter
        for i, agent in enumerate(agents, start=1):
            print(
                f"getting software for system {i}/{agent_count} "
                f"({agent['computerName']}, {agent['id']})"
            )
            for line in result_generator(
                args.url + S1_GET_SOFTWARE,
                SOFTWARE_FIELDS,
                args.api_key,
                {"ids": [agent["id"]]},
            ):
                # each CSV row = agent identity columns + one software record
                writer.writerow({**agent, **line})
            print("done")


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment