mattfysh / resource.ts
Last active May 19, 2023 05:47
Pulumi auto-hierarchy
import * as pulumi from '@pulumi/pulumi'

export abstract class ProjectResource extends pulumi.ComponentResource {
  protected abstract name: string

  protected addResource<RT, ResourceArgs>(
    name: string,
    Resource: new (
      name: string,
      args: ResourceArgs,
      opts: pulumi.CustomResourceOptions
    ) => RT,
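    // a plausible completion of the truncated preview, assuming the point of
    // the "auto-hierarchy" is to parent each child resource to this component:
    args: ResourceArgs
  ): RT {
    return new Resource(name, args, { parent: this })
  }
}
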
mattfysh / flow.py
Last active April 6, 2023 05:38
Roast My Flow
import hashlib
from datetime import datetime, timedelta

import pandas as pd
import requests
from deltalake.writer import write_deltalake
from prefect import flow, task
from prefect.blocks.system import JSON
from prefect.task_runners import SequentialTaskRunner
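
Only the imports survive in this preview. A minimal sketch of how they might wire together; the URL handling, caching scheme, and all names below are assumptions, not the gist's own code:

@task(cache_key_fn=lambda ctx, params: hashlib.md5(params['url'].encode()).hexdigest(),
      cache_expiration=timedelta(hours=1))
def fetch(url: str) -> pd.DataFrame:
    # hypothetical task: pull JSON from an API and flatten it to a DataFrame
    resp = requests.get(url)
    resp.raise_for_status()
    return pd.json_normalize(resp.json())

@flow(task_runner=SequentialTaskRunner())
def roast_my_flow(url: str, table_path: str):
    # append each run's pull into a Delta table
    df = fetch(url)
    write_deltalake(table_path, df, mode='append')
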
mattfysh / ci.yaml
Last active January 20, 2023 05:14
A GitHub workflow for NX, inspired by Remix Run's indie stack
name: 🤖 CI
on:
  push:
    branches:
      - main
  pull_request:
jobs:
  lint:
    name: 🏖️ Lint
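    # a plausible continuation of the truncated preview, in the spirit of the
    # remix-run indie stack workflow; action versions and the NX target below
    # are assumptions, not taken from the gist
    runs-on: ubuntu-latest
    steps:
      - name: ⬇️ Checkout repo
        uses: actions/checkout@v3
      - name: ⎔ Setup node
        uses: actions/setup-node@v3
        with:
          node-version: 18
      - name: 📥 Install deps
        run: npm ci
      - name: 🔬 Lint
        run: npx nx run-many --target=lint --all
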
mattfysh / index.js
Created December 17, 2022 04:19
Stitch
const { makeExecutableSchema } = require('@graphql-tools/schema')
const { stitchSchemas } = require('@graphql-tools/stitch')
const { RenameObjectFields, FilterRootFields, PruneSchema, wrapSchema } = require('@graphql-tools/wrap')
const { printSchema, parse, execute, print } = require('graphql')

/// SERVICE 1
const source = /* GraphQL */ `
  type Query {
    foo(key: String!): Foo

// const { makeExecutableSchema } = require('@graphql-tools/schema')
const { stitchSchemas } = require('@graphql-tools/stitch')
const { execute, parse, printSchema, print, buildSchema } = require('graphql')

const aSchema = buildSchema(/* GraphQL */ `
  type Thing {
    id: ID!
    foo: String
  }

  type Query {

const { visit, visitWithTypeInfo, TypeInfo, Kind, GraphQLObjectType, GraphQLID } = require('graphql')
const { MapperKind, mapSchema, visitData } = require('@graphql-tools/utils')

class Transform {
  constructor(groupTypes, typeIdFields) {
    this.groupTypes = groupTypes
    this.typeIdFields = typeIdFields
  }
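
The three previews above come from the same multi-file gist, and each cuts off early. For context, a minimal stitching sketch; the transform choices and wiring below are assumed, not taken from the gist:

const wrapped = wrapSchema({
  schema: aSchema,
  transforms: [
    // hypothetical transforms: prefix every object field, then prune unused types
    new RenameObjectFields((typeName, fieldName) => `a_${fieldName}`),
    new PruneSchema(),
  ],
})
const gateway = stitchSchemas({ subschemas: [{ schema: wrapped }] })
console.log(printSchema(gateway))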

  • Running on Flink v1.13.2
  • as an AWS Kinesis Data Analytics application

import zlib
from datetime import datetime

import dpkt

def get_tcpip_frame(buf):
    # standard pcap uses ethernet root layer
    eth = dpkt.ethernet.Ethernet(buf)
    if isinstance(eth.data, dpkt.ip6.IP6):
        # print('Unexpected: IPv6 protocol')
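        # a plausible continuation of the truncated preview, assuming the
        # intent is to skip IPv6 frames and return the TCP segment of IPv4 ones:
        return None
    ip = eth.data
    if not isinstance(ip.data, dpkt.tcp.TCP):
        return None
    return ip.data
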
mattfysh / noob.py
Last active September 8, 2022 22:05
Reading a DynamoDB Kinesis stream into a Databricks Delta Live Table
import dlt
from pyspark.sql.functions import from_json, col, expr, transform_values, transform, coalesce
from pyspark.sql.types import *

nf_cdc_schema = StructType([
    StructField('eventName', StringType()),
    StructField('dynamodb', StructType([
        StructField('ApproximateCreationDateTime', LongType()),
        StructField('NewImage', StructType([
            StructField('service', StructType([
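
The schema preview cuts off mid-nesting. For context, a minimal sketch of how a schema like this is typically applied in a Delta Live Tables pipeline; the table name, stream name, and Kinesis options below are assumptions, not the gist's:

@dlt.table(name='nf_cdc_raw')
def nf_cdc_raw():
    return (
        spark.readStream.format('kinesis')
        .option('streamName', 'ddb-cdc-stream')  # hypothetical stream name
        .option('initialPosition', 'trim_horizon')
        .load()
        # Kinesis delivers the payload as binary in the `data` column
        .withColumn('cdc', from_json(col('data').cast('string'), nf_cdc_schema))
    )
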
version: '3.8'
services:
  api:
    build:
      # ...snip...
  awscollector:
    image: public.ecr.aws/aws-observability/aws-otel-collector
    ports:
      - '4317:4317'
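      # a plausible continuation of the truncated preview, assuming the usual
      # ADOT collector setup: mount a collector config and point the api
      # service's OTLP exporter at localhost:4317; file names are assumptions
    volumes:
      - ./otel-config.yaml:/etc/otel-config.yaml
    command: ['--config=/etc/otel-config.yaml']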