Skip to content

Instantly share code, notes, and snippets.

What would you like to do?
grab infoblox networks via wapi
"""Grab Infoblox networks via WAPI and export them to networks.csv."""
import datetime
import requests
import csv
import json

url = 'https://infobloxhostname-or-ip/wapi/v2.0/' # API URL to query. Make sure API access is enabled on this host.
api_user = '' # API enabled credentials. Renamed from `id` to avoid shadowing the builtin.
pwd = ''

# NOTE(review): verify=False disables TLS certificate validation. That is only
# acceptable for an internal appliance with a self-signed certificate; prefer
# verify='/path/to/ca-bundle.pem' where possible.
response = requests.get(url + 'network', auth=(api_user, pwd), verify=False, timeout=30)
response.raise_for_status()  # fail loudly on auth/HTTP errors instead of parsing an error page
json_data = response.json()

data = []
header = 'Network,Site,Vlan,Description\n' # Update based on what you put in your infoblox networks' description text
for item in json_data:
    # ' - ' is the delimiter used in infoblox networks' description text.
    # .get() guards against networks that have no comment at all.
    description = item.get('comment', '').split(' - ')
    data.append([item['network'], ",".join(str(x) for x in description)])

# The original gist was missing the write loop entirely, so the CSV was never
# produced: write the header, then one row per network.
with open('networks.csv', 'w') as writefile:
    writefile.write(header)
    for item in data:
        writefile.write(",".join(item) + '\n')
# Schedule this via cron (the original gist truncated the script path out of
# both lines below — `python3 /home/user/` invoked a directory, not a script).
# EX: */5 * * * 1-5 /home/user/networks.py >> /home/user/cron.log 2>&1
python3 /home/user/networks.py # replace with path to the python script above
cp /home/user/networks.csv /opt/splunk/etc/apps/search/lookups/ # ensure path is correct to splunk home
# chown splunk:splunk /opt/splunk/etc/apps/search/lookups/networks.csv # Should execute as splunk user
# Splunk transforms.conf lookup definition for the networks.csv generated above.
# Comments are on their own lines because Splunk .conf files do not support
# inline (trailing) comments.
# NOTE(review): a stanza header such as [networks] must precede these settings
# in transforms.conf — it appears to have been lost from this paste; confirm.
batch_index_query = 0
case_sensitive_match = 1
# Value returned when no CIDR range matches the looked-up IP.
default_match = NONE
# The CSV must live in an app's lookups/ directory (see the cp command above).
filename = networks.csv
# Treat the "Network" column (first field of the CSV header) as CIDR ranges.
match_type = CIDR(Network)
max_matches = 1
min_matches = 1
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.