Last active
July 17, 2024 15:19
-
-
Save JonathanGarro/750f905007b6485d073393531c403c3c to your computer and use it in GitHub Desktop.
Downloads all learning data from the GO API
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Given the paginated results, this script can take a few minutes to run. Be patient!
import requests
import csv
# Root endpoint of the IFRC GO API "ops-learning" collection; responses are paginated.
base_url = "https://goadmin.ifrc.org/api/v2/ops-learning/"
def fetch_data(url):
    """
    Fetch JSON data from the URL passed into it.

    Parameters:
        url (str): The URL to fetch data from.

    Returns:
        dict: The JSON response from the URL if the request is successful.
        None: If the request fails (non-200 status, timeout, or connection
            error), returns None.
    """
    try:
        # A timeout keeps a stalled connection from hanging the whole script;
        # catching RequestException makes network failures return None instead
        # of crashing, matching the documented contract.
        response = requests.get(url, timeout=30)
    except requests.exceptions.RequestException as exc:
        print(f"Failed to retrieve data. Request error: {exc}")
        return None
    if response.status_code == 200:
        return response.json()
    print(f"Failed to retrieve data. HTTP Status code: {response.status_code}")
    return None
def save_to_csv(data, filename="ops_learning_data.csv"):
    """
    Export the data to a CSV file.

    Parameters:
        data (list of dict): The data to be written to the CSV file.
        filename (str): The name of the CSV file. Defaults to "ops_learning_data.csv".

    Returns:
        None
    """
    if data:
        # Collect the union of keys across ALL rows (order-preserving) so a
        # record with an extra field doesn't make DictWriter raise ValueError;
        # rows missing a key get DictWriter's default empty-string restval.
        keys = list(dict.fromkeys(key for row in data for key in row))
        with open(filename, "w", newline='', encoding='utf-8') as output_file:
            dict_writer = csv.DictWriter(output_file, fieldnames=keys)
            dict_writer.writeheader()
            dict_writer.writerows(data)
        # Report the actual filename (the original f-string had no placeholder).
        print(f"Success! Data saved to: {filename}")
    else:
        print("There's no data to save.")
def process_learnings():
    """
    Fetch every page of learnings records from the base URL and save them to CSV.

    Follows the GO API's pagination by chasing each response's 'next' link
    until it is exhausted (or a request fails).

    Returns:
        None
    """
    collected = []
    next_url = base_url
    while next_url:
        page = fetch_data(next_url)
        if not page:
            # Request failed mid-way; keep whatever we gathered so far.
            break
        collected.extend(page['results'])
        next_url = page.get('next')
    if not collected:
        print("Whoops - couldn't get data.")
        return
    save_to_csv(collected)
# Run the export only when executed as a script (not when imported).
if __name__ == "__main__":
    process_learnings()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment