Skip to content

Instantly share code, notes, and snippets.

from kfp.v2 import dsl, compiler
import configparser
from google.cloud import aiplatform
@dsl.component(
base_image="python:3.9",
packages_to_install=[
"pandas==1.3.5",
"gcsfs==2023.1.0"
],
@PennyQ
PennyQ / airflow-callback-slack.py
Created August 24, 2022 08:35
Python code for an Airflow failure callback that posts alerts to Slack
import logging
from airflow.hooks.base import BaseHook
from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook
SLACK_CONN_ID = "pco_slack_alerts"
logger = logging.getLogger("airflow.task")
def send_fail_slack_alert(context):
apiVersion: apps/v1
kind: Deployment
metadata:
name: prbot
spec:
replicas: 1
selector:
matchLabels:
app: prbot
template:
import logging

# ----set up application insights connection----
# opencensus is an optional dependency; fail fast with an actionable message
# if it is missing. Catch only ImportError (a bare `except:` would also swallow
# KeyboardInterrupt/SystemExit) and chain the original cause for debugging.
try:
    from opencensus.ext.azure.log_exporter import AzureLogHandler
except ImportError as err:
    raise ImportError("To be able to use add_azure_log_handler opencensus must be installed.\
Specifically opencensus-ext-azure==1.0.8") from err

# Application Insights key used to route telemetry; placeholder to be replaced
# with a real key (ideally from configuration, not hard-coded).
instrumentation_key = "your_instrumentation_key"
from kafka import KafkaConsumer
from json import loads


def _decode_json_payload(raw_bytes):
    """Deserialize a raw Kafka message payload (UTF-8 encoded JSON)."""
    return loads(raw_bytes.decode('utf-8'))


# Consume JSON messages from the 'numtest' topic on a local broker.
# Offsets start at the earliest available message and are auto-committed
# under the 'my-group' consumer group.
consumer = KafkaConsumer(
    'numtest',
    bootstrap_servers=['localhost:9092'],
    auto_offset_reset='earliest',
    enable_auto_commit=True,
    group_id='my-group',
    value_deserializer=_decode_json_payload,
)
from time import sleep
from json import dumps
from kafka import KafkaProducer

# Producer that serializes each payload as UTF-8 JSON before sending.
producer = KafkaProducer(
    bootstrap_servers=['localhost:9092'],
    value_serializer=lambda x: dumps(x).encode('utf-8'),
)

# Publish ten test messages to the 'numtest' topic.
for e in range(10):
    data = {'number': e}
    producer.send('numtest', value=data)
    print("produces %d" % e)

# send() is asynchronous and only enqueues into an internal buffer; without a
# flush the script can exit before the messages are actually delivered.
producer.flush()
name: project_name
trigger:
branches:
include:
- master
- develop
# User defined parameters
variables:
entry_points:
score:
parameters:
run_id: string
workspace: string
command: "python model/score.py --run_id {run_id} --workspace {workspace}"
import pandas as pd
import argparse
# Parse the run id from the command-line arguments; the run id comes from each
# MLflow experiment run (it appears within the run's URL).
parser = argparse.ArgumentParser()
parser.add_argument('--run_id', dest='run_id')
args = parser.parse_args()
# generate predict results for each model
for i, model_key in enumerate(model_keys):
import mlflow
import pickle
from sklearn.linear_model import LogisticRegression, LinearRegression
def gen_result(model_key):
"""
Summary: trained model result
Input: