@mattfysh
mattfysh / ci.yaml
Last active January 20, 2023 05:14
A GitHub workflow for Nx, inspired by Remix's Indie Stack
name: 🤖 CI
on:
  push:
    branches:
      - main
  pull_request:
jobs:
  lint:
    name: 🏖️ Lint
    # assumed completion: the preview truncates after the job name
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
      - run: npm ci
      - run: npx nx affected --target=lint
@mattfysh
mattfysh / index.js
Created December 17, 2022 04:19
Stitch
const { makeExecutableSchema } = require('@graphql-tools/schema')
const { stitchSchemas } = require('@graphql-tools/stitch')
const { RenameObjectFields, FilterRootFields, PruneSchema, wrapSchema } = require('@graphql-tools/wrap')
const { printSchema, parse, execute, print } = require('graphql')

/// SERVICE 1
const source = /* GraphQL */ `
  type Query {
    foo(key: String!): Foo
  }
  type Foo {
    key: String! # assumed shape; the preview truncates mid-document
  }
`

// const { makeExecutableSchema } = require('@graphql-tools/schema')
const { stitchSchemas } = require('@graphql-tools/stitch')
const { execute, parse, printSchema, print, buildSchema } = require('graphql')

const aSchema = buildSchema(/* GraphQL */ `
  type Thing {
    id: ID!
    foo: String
  }
  type Query {
    thing(id: ID!): Thing # assumed field; the preview truncates here
  }
`)

const { visit, visitWithTypeInfo, TypeInfo, Kind } = require('graphql')
const { MapperKind, mapSchema, visitData } = require('@graphql-tools/utils')
const { GraphQLObjectType, GraphQLID } = require('graphql')

// Custom @graphql-tools transform: stores which object types to group and
// the ID field to use for each (the class body is truncated in the preview).
class Transform {
  constructor(groupTypes, typeIdFields) {
    this.groupTypes = groupTypes
    this.typeIdFields = typeIdFields
  }
}
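
Taken together, these previews sketch schema stitching with transforms. For reference, a minimal self-contained stitch of two toy subschemas with a root-field rename; every type, field, and resolver name below is illustrative rather than taken from the gist:

import { makeExecutableSchema } from '@graphql-tools/schema'
import { stitchSchemas } from '@graphql-tools/stitch'
import { RenameRootFields } from '@graphql-tools/wrap'
import { graphql } from 'graphql'

// Two toy subschemas (illustrative names, not from the gist).
const users = makeExecutableSchema({
  typeDefs: /* GraphQL */ `type Query { userName: String }`,
  resolvers: { Query: { userName: () => 'Ada' } },
})
const posts = makeExecutableSchema({
  typeDefs: /* GraphQL */ `type Query { postTitle: String }`,
  resolvers: { Query: { postTitle: () => 'Hello' } },
})

// Stitch them into one gateway schema, prefixing the second service's
// root fields on the way in (the gist imports RenameObjectFields, the
// analogous transform for non-root object types).
const gateway = stitchSchemas({
  subschemas: [
    { schema: users },
    {
      schema: posts,
      transforms: [new RenameRootFields((_op, field) => `blog_${field}`)],
    },
  ],
})

graphql({ schema: gateway, source: '{ userName blog_postTitle }' }).then(res =>
  console.log(res.data) // { userName: 'Ada', blog_postTitle: 'Hello' }
)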
  • Running on Flink v1.13.2
  • as an AWS Kinesis Data Analytics application
@mattfysh
mattfysh / example.ts
Created October 8, 2022 01:44
XPath compatibility with parse5 and the htmlparser2 tree
import { parse } from 'parse5'
import { adapter } from 'parse5-htmlparser2-tree-adapter'
import xpath from 'xpath'
const html = '...'
const selector = '...'
const doc = parse(html, { treeAdapter: adapter })
const htmlEl = doc.childNodes.find(c => c.name === 'html')
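
The preview ends before the selection step; presumably it continues along these lines (a hedged sketch: xpath.select is the xpath package's entry point, and whether the htmlparser2-shaped nodes satisfy its DOM expectations is the compatibility question the gist is titled after):

// Hypothetical continuation, not from the gist: run the selector against
// the adapted tree, leaning on domhandler's DOM-compatibility getters
// (childNodes, parentNode, nodeType, ...).
const nodes = xpath.select(selector, htmlEl as unknown as Node)
console.log(nodes)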
import zlib
from datetime import datetime

import dpkt

def get_tcpip_frame(buf):
    # standard pcap uses ethernet root layer
    eth = dpkt.ethernet.Ethernet(buf)
    if isinstance(eth.data, dpkt.ip6.IP6):
        # print('Unexpected: IPv6 protocol')
        return None  # assumed handling; the preview truncates here
    return eth.data
@mattfysh
mattfysh / noob.py
Last active September 8, 2022 22:05
Reading a DynamoDB Kinesis stream into a Databricks Delta Live Table
import dlt
from pyspark.sql.functions import from_json, col, expr, transform_values, transform, coalesce
from pyspark.sql.types import *

nf_cdc_schema = StructType([
    StructField('eventName', StringType()),
    StructField('dynamodb', StructType([
        StructField('ApproximateCreationDateTime', LongType()),
        StructField('NewImage', StructType([
            StructField('service', StructType([
                # assumed closure: the preview truncates inside this struct
            ])),
        ])),
    ])),
])
version: '3.8'
services:
  api:
    build:
      # ...snip...
  awscollector:
    image: public.ecr.aws/aws-observability/aws-otel-collector
    ports:
      - '4317:4317'
const opentelemetry = require('@opentelemetry/sdk-node')
// const { getNodeAutoInstrumentations } = require('@opentelemetry/auto-instrumentations-node')
const { DnsInstrumentation } = require('@opentelemetry/instrumentation-dns')
const { AwsLambdaInstrumentation } = require('@opentelemetry/instrumentation-aws-lambda')
const { AWSXRayIdGenerator } = require('@opentelemetry/id-generator-aws-xray')
const { AWSXRayPropagator } = require('@opentelemetry/propagator-aws-xray')
// const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-http')
const { OTLPTraceExporter } = require('@opentelemetry/exporter-otlp-proto')
const CONSOLE_DEBUG = false
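
The preview stops at the debug flag. A hedged sketch of how these imports typically wire together (the config keys are assumptions about where the gist was heading, not verified from it; the exporter would target the aws-otel-collector published on port 4317 in the compose file above):

// Hypothetical wiring, not from the gist: X-Ray-compatible trace ids,
// X-Ray propagation, and an OTLP exporter pointed at the local collector.
const sdk = new opentelemetry.NodeSDK({
  idGenerator: new AWSXRayIdGenerator(),
  textMapPropagator: new AWSXRayPropagator(),
  traceExporter: new OTLPTraceExporter(),
  instrumentations: [new DnsInstrumentation(), new AwsLambdaInstrumentation()],
})
sdk.start()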