Skip to content

Instantly share code, notes, and snippets.

@thecraftman
Created June 20, 2023 13:27
Show Gist options
  • Save thecraftman/8cc15e861282cb4d45daaf12d4ef12ea to your computer and use it in GitHub Desktop.
Save thecraftman/8cc15e861282cb4d45daaf12d4ef12ea to your computer and use it in GitHub Desktop.
AWS Lambda function that forwards log files from an S3 bucket to Axiom.
import boto3
import requests
import os
import json
from urllib.parse import unquote_plus
# Module-level boto3 S3 client: created once at import time so a warm Lambda
# container reuses the same client (and its connection pool) across invocations.
s3_client = boto3.client('s3')
def lambda_handler(event, context):
    """Lambda entry point for S3 object-created events.

    Downloads the newly created object, splits it into log entries, and
    forwards those entries to Axiom.
    """
    # The triggering S3 record identifies the bucket and object.
    record = event['Records'][0]['s3']
    bucket_name = record['bucket']['name']
    # Object keys arrive URL-encoded in S3 event notifications.
    object_key = unquote_plus(record['object']['key'])

    # Fetch and decode the log file from S3.
    s3_object = s3_client.get_object(Bucket=bucket_name, Key=object_key)
    body_text = s3_object['Body'].read().decode('utf-8')

    # Split into individual entries and ship them to Axiom.
    send_to_axiom(process_logs(body_text))
def process_logs(file_content):
    """Split raw log-file text into individual, non-empty log lines.

    Uses str.splitlines() so both "\n" and "\r\n" line terminators are
    handled, and drops empty lines: the original split('\n') produced a
    trailing '' for newline-terminated files, which would then be shipped
    to Axiom as an empty log entry.

    :param file_content: full decoded text of the log file
    :return: list of log lines, empty list for empty input
    """
    return [line for line in file_content.splitlines() if line]
def send_to_axiom(logs):
    """POST each processed log entry to the Axiom ingest API.

    Reads configuration from the environment:
      AXIOM_API_KEY -- Axiom API token
      AXIOM_DATASET -- target Axiom dataset name

    Raises KeyError if either environment variable is missing.
    Failed sends are logged to stdout and do not abort the loop.
    """
    AXIOM_API_KEY = os.environ['AXIOM_API_KEY']  # API Key for Axiom
    AXIOM_DATASET = os.environ['AXIOM_DATASET']  # Dataset name for Axiom

    headers = {
        # BUG FIX: the original referenced the undefined name Axiom_API_KEY
        # (NameError) and used a non-auth "X-Axiom-Team" header. Axiom
        # authenticates ingest requests with a Bearer token.
        "Authorization": f"Bearer {AXIOM_API_KEY}",
        "Content-Type": "application/json"
    }

    for log in logs:
        # Entries are sent one per request, matching the original design;
        # for large files, batching all entries into a single array payload
        # would be cheaper. The ingest endpoint expects a JSON *array* of
        # event objects, so each entry is wrapped in a one-element list.
        response = requests.post(
            f"https://api.axiom.co/v1/datasets/{AXIOM_DATASET}/ingest",
            headers=headers,
            data=json.dumps([{"data": log}])
        )
        if response.status_code != 200:
            print(f"Failed to send log to Axiom. Status: {response.status_code}. Log: {log}")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment