Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Save tarsil/20b580e0b22134b6ea00639a6c8a68c2 to your computer and use it in GitHub Desktop.
Save tarsil/20b580e0b22134b6ea00639a6c8a68c2 to your computer and use it in GitHub Desktop.
Using the Covalent APIs: given an NFT contract address and a chain id, fetch all token ids and build a JSON array of "Get External NFT Metadata" URLs, suitable for passing as the body of the batch-query endpoint.
import json
import time

import requests
import urllib3
def check_internet(url):
    """Probe *url* with a GET request and report whether it is reachable.

    Returns True when the response status is neither 500 nor 404,
    False on a 500/404 status or on any HTTP-level error.

    Bug fix: the original nested-if version implicitly returned None
    (not False) for a 500 response, because the `else` branch was only
    attached to the inner 404 check.  Statuses are now compared as
    integers instead of strings.
    """
    try:
        # NOTE(review): a fresh PoolManager per call is wasteful but
        # harmless for this script's probe-once-per-page usage.
        response = urllib3.PoolManager().request("GET", url)
    except urllib3.exceptions.HTTPError as err:
        print(err)
        return False
    print(response.status)
    return response.status not in (500, 404)
# --- Configuration -------------------------------------------------------
contract_address = "0x60f80121c31a0d46b5279700f9df786054aa5ee5"
chain_id = 1
API_KEY = "ckey_covalent_scraper"
page_number = 0
page_size = 200_000

# Endpoint that lists every token id minted by the contract.
get_token_ids_url = (
    "https://api.covalenthq.com/v1/" + str(chain_id)
    + "/tokens/" + contract_address + "/nft_token_ids/"
)

# --- Paged fetch loop ----------------------------------------------------
has_more = True
start = time.time()
while has_more:
    conn = check_internet("https://api.covalenthq.com/")
    if conn is True:
        try:
            start_request_time = time.time()
            payload = {
                "key": API_KEY,
                "page-size": page_size,
                "page-number": page_number,
                "block-signed-at-asc": True,
            }
            print("Getting page: " + str(page_number) + " from: " + get_token_ids_url + "....")
            raw_data = requests.get(url=get_token_ids_url, params=payload)
            data_json = raw_data.json()
            # One metadata URL per token id.  json.dumps with compact
            # separators reproduces the original hand-built '["u1","u2"]'
            # layout byte-for-byte, and also emits a valid "[]" for an
            # empty page (the manual '[' + ... [:-1] + ']' slicing
            # produced a bare "]" in that case).
            metadata_urls = [
                "https://api.covalenthq.com/v1/" + str(chain_id)
                + "/tokens/" + contract_address + "/nft_metadata/"
                + str(item["token_id"]) + "/"
                for item in data_json["data"]["items"]
            ]
            url_array = json.dumps(metadata_urls, separators=(",", ":"))
            print(url_array)
            file_name = contract_address + "_" + str(page_number) + "_" + str(page_size) + "_token_ids.json"
            # "with" guarantees the handle is closed even if the write fails.
            with open(file_name, "w") as f:
                f.write(url_array)
            print("Wrote: " + file_name)
            has_more = bool(data_json["data"]["pagination"]["has_more"])
        except Exception as exc:
            # Narrowed from a bare "except:" (which also trapped
            # KeyboardInterrupt).  The original handler printed
            # string_builder, which is unbound when the request itself
            # fails -- print the exception instead.  Step the page
            # counter back so the unconditional increment below retries
            # the same page after the pause.
            print("ERR: " + str(exc))
            page_number = page_number - 1
            time.sleep(20)
        print("Has more: " + str(has_more))
        page_number = page_number + 1
        end = time.time()
        elapsed_time = end - start
        request_time = end - start_request_time
        print("Request time: " + str(request_time))
        print("Elapsed time: " + str(elapsed_time))
    else:
        # No connectivity; wait before probing again.
        time.sleep(30)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment