Go to any directory and clone the repository with the sources:
cd ~
git clone https://github.com/certbot/certbot
def main(data, context, dataset='test_dataset_dev', table='beer'):
    """Google Cloud Function entry point (excerpt; the original body continues
    past this view).

    Args:
        data: Event payload dict; expected to carry at least a 'bucket' key
            (presumably a GCS object-finalize trigger — confirm against the
            deployment).
        context: Cloud Functions event metadata (provides event_id and
            event_type).
        dataset: BigQuery dataset name the load targets.
        table: BigQuery table name the load targets.
    """
    print('Event ID: {}'.format(context.event_id))
    print('Event type: {}'.format(context.event_type))
    print('Importing required modules.')
    # Imported lazily inside the function to keep module import (cold start)
    # cheap.
    from google.cloud import bigquery
    print('This is the data: {}'.format(data))
    # Name of the bucket that fired the event.
    input_bucket_name = data['bucket']
# NOTE(review): truncated excerpt of a Meltano/Singer SDK stream class; the
# schema definition continues past this view (parentheses are unbalanced) and
# the trailing " | |" tokens are table-extraction artifacts, so this fragment
# is not valid Python as-is — restore it from the original source before use.
# It declares an "Item" stream keyed on (name, timestamp) whose records carry
# a nested location object of numeric fields.
class ItemStream(FindMyStream): | |
name = "Item" | |
primary_keys = ["name", "timestamp"] | |
schema = th.PropertiesList( | |
th.Property("name", th.StringType), | |
th.Property("location", th.ObjectType( | |
th.Property("longitude", th.NumberType()), | |
th.Property("latitude", th.NumberType()), | |
th.Property("altitude", th.NumberType()), | |
th.Property("timeStamp", th.NumberType()), |
# Meltano plugin configuration for the custom tap (excerpt; the settings list
# continues past this view in the original meltano.yml).
extractors:
  - name: tap-findmy
    namespace: tap_findmy
    # Editable install straight from the local checkout of the tap.
    pip_url: -e /Dev/findmyairtag/tap-findmy
    capabilities:
      - state
      - catalog
      - discover
    settings:
      - name: item_name
-- NOTE(review): truncated excerpt of a dbt model (the CTE's select list, its
-- closing paren, and the final SELECT continue past this view); the trailing
-- " | |" tokens are table-extraction artifacts — restore from the original
-- source before use. Materializes as a table and starts a `raw` CTE renaming
-- source columns (timestamp -> ts).
{{ | |
config( | |
materialized='table' | |
) | |
}} | |
with raw as ( | |
select | |
name as name, | |
timestamp as ts, |
-- NOTE(review): truncated excerpt of a second dbt model (the select list and
-- FROM clause continue past this view); the trailing " | |" tokens are
-- table-extraction artifacts — restore from the original source before use.
-- Materializes as a table selecting coordinate columns.
{{ | |
config( | |
materialized='table' | |
) | |
}} | |
select | |
latitude, | |
longitude, |
# NOTE(review): truncated excerpt of a script that reads server-sent events
# from `url` and (per the kafka-python producer import) forwards "message"
# events to a Kafka topic; the try-block body continues past this view and
# the trailing " | |" tokens are table-extraction artifacts, so this fragment
# is not valid Python as-is — restore it from the original source before use.
import json | |
from kafka import KafkaProducer | |
from sseclient import SSEClient as EventSource | |
def produce_events_from_url(url: str, topic: str) -> None: | |
for event in EventSource(url): | |
if event.event == "message": | |
try: |
-- NOTE(review): truncated excerpt of a staging query that unpacks a JSON
-- `data` column into typed columns via `->>` plus casts; the select list and
-- the CTE's close continue past this view and the trailing " | |" tokens are
-- table-extraction artifacts — restore from the original source before use.
-- NOTE(review): `(data ->> 'length') :: jsonb` casts the extracted text back
-- to jsonb while sibling fields cast to scalars — plausibly a nested object,
-- but confirm against the upstream event schema.
WITH jsonified_source AS ( | |
SELECT | |
(data ->> 'title') :: string as title, | |
(data ->> '$schema') :: string as schema, | |
(data ->> 'type') :: string as type, | |
(data ->> 'bot') :: boolean as bot, | |
(data ->> 'comment') :: string as comment, | |
(data ->> 'id') :: integer as id, | |
(data ->> 'length') :: jsonb as length, | |
(data ->> 'log_action') :: string as log_action, |
-- Per-server change counts over a one-hour window, kept incrementally up to
-- date as a Materialize materialized view.
{{ config(materialized='materializedview') }}

select
    server_name,
    count(id)
from {{ ref('stg_event_changes') }}
-- Temporal filter: keep a row only while Materialize's logical clock (ms)
-- is within one hour (3600000 ms) after the row's timestamp * 1000.
-- NOTE(review): the *1000 scaling implies `timestamp` is in epoch seconds —
-- confirm against stg_event_changes.
where mz_logical_timestamp() >= timestamp * 1000
  and mz_logical_timestamp() < timestamp * 1000 + 3600000
group by server_name
-- `count` is the implicit output-column name of count(id).
order by count desc
def new_messages(interval="1m"):
    """Return True if changes_by_server_<interval> has any rows, else None.

    Returns None (not False) for the empty case; callers use the result as a
    plain truthy signal, so this asymmetry is kept for compatibility.

    Args:
        interval: Table-name suffix such as "1m". It is interpolated directly
            into the SQL string, so it must come from trusted code — never
            from user input (SQL injection risk otherwise).
    """
    # Relies on a module-level SQLAlchemy-style `engine` defined elsewhere in
    # the file.
    results = engine.execute(f"SELECT count(*) FROM changes_by_server_{interval}")
    # count(*) always yields exactly one row, so fetchone() is safe here.
    return None if results.fetchone()[0] == 0 else True
# NOTE(review): truncated excerpt of an async generator (presumably backing a
# server-sent-events endpoint — confirm against the caller); the cursor-using
# body continues past this view and the trailing " | |" tokens are
# table-extraction artifacts, so this fragment is not valid Python as-is —
# restore it from the original source before use. It polls new_messages() and
# opens a raw DBAPI cursor when there is fresh data.
async def event_generator(interval: str): | |
if new_messages(interval=interval): | |
print(f"New messages in {interval}") | |
connection = engine.raw_connection() | |
with connection.cursor() as cur: |