Piethein Strengholt (pietheinstrengholt)
# sep/16/2022 12:50:26 by RouterOS 7.5
# software id = 79VK-VRAH
#
# model = RB5009UPr+S+
# serial number = HCY08E7SYTZ
/interface bridge
add name=bridge-local
/interface vlan
add interface=ether1 name=vlan1.4 vlan-id=4
add interface=ether1 loop-protect=off name=vlan1.6 vlan-id=6
pietheinstrengholt / .env
Last active August 29, 2023 14:48
Azure OpenAI demo using azure-sdk-for-js
OPENAI_API_HOST=https://endpointname.openai.azure.com/
OPENAI_API_KEY=xxxxxxxxxxxxxxxxxxxxxx
AZURE_DEPLOYMENT_ID=text-davinci-003
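The demo itself is written against azure-sdk-for-js, as the description says. Purely as a hedged sketch of how these three settings are typically consumed, here is a Python equivalent using the openai package's Azure client; the api-version value and the prompt are assumptions, not part of the gist.

# Sketch only, not the gist's azure-sdk-for-js code: the same .env values via the openai Python client
import os
from openai import AzureOpenAI

client = AzureOpenAI(
    azure_endpoint=os.environ["OPENAI_API_HOST"],   # https://endpointname.openai.azure.com/
    api_key=os.environ["OPENAI_API_KEY"],
    api_version="2023-05-15",                       # assumed API version
)

# text-davinci-003 is a completions model, so call the completions endpoint
response = client.completions.create(
    model=os.environ["AZURE_DEPLOYMENT_ID"],        # the Azure deployment name
    prompt="Say hello from Azure OpenAI",
    max_tokens=50,
)
print(response.choices[0].text)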
#!/usr/bin/env python
# coding: utf-8
# In[1]:
# Set arguments
dfDataOriginalPath = "/processedzone/"
dfDataChangedPath = "/changedzone/"
cw_database = "AdventureWorks"
# sep/15/2022 08:47:33 by RouterOS 7.5
# software id = 79VK-VRAH
#
# model = RB5009UPr+S+
# serial number = HCY08E7SYTZ
/interface bridge
add name=bridge-local
/interface vlan
add interface=ether1 name=vlan1.4 vlan-id=4
add interface=ether1 loop-protect=off name=vlan1.6 vlan-id=6
# Set arguments
SourceSystemName = "AdventureWorks"
FlowName = "SalesLTAddress"
SourceStorageAccount = "synapsepiethein"
SourceContainer = "synapsedata"
SourcePath = "/landingzone/AdventureWorks/"
TargetStorageAccount = "synapsepiethein"
TargetContainer = "synapsedata"
TargetPath = "/processedzone/AdventureWorks"
SinkOperation = "merge"
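These arguments describe a landing-to-processed flow in ADLS Gen2. A minimal sketch of how they might be stitched into paths and used to load the changed data; the abfss URI layout, the parquet format, and the dataChanged name (tying into the merge-preparation cell below) are assumptions.

# Sketch only: build source/target ABFSS URIs from the arguments above (layout and format are assumed)
source_uri = f"abfss://{SourceContainer}@{SourceStorageAccount}.dfs.core.windows.net{SourcePath}{FlowName}"
target_uri = f"abfss://{TargetContainer}@{TargetStorageAccount}.dfs.core.windows.net{TargetPath}/{FlowName}"

dataChanged = spark.read.parquet(source_uri)   # newly landed rows for this flow
print(f"{dataChanged.count()} rows read from {source_uri}; sink operation: {SinkOperation}")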
# Prepare for merge: rename columns of the newly loaded data, prefixing 'src_'
from datetime import date
from pyspark.sql import functions as F

# Rename all columns in dataChanged, prepend src_, and add the SCD2 tracking columns
df_new = dataChanged.select([F.col(c).alias("src_" + c) for c in dataChanged.columns])
src_columnNames = df_new.schema.names
df_new2 = (df_new
    .withColumn("src_current", F.lit(True))
    .withColumn("src_effectiveDate", F.current_date())
    .withColumn("src_endDate", F.lit(date(9999, 12, 31))))
df_new2.printSchema()
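With the src_-prefixed columns and the SCD2 tracking columns in place, the "merge" sink operation would typically be a Delta Lake MERGE against the processed table. A hedged sketch of the first step, closing the currently active rows that have a changed incoming version; the Delta target path and the AddressID business key are assumptions.

# Sketch only: close out currently active target rows that match an incoming changed row.
# target_uri and the AddressID key are assumptions; appending the new 'src_' versions would follow.
from delta.tables import DeltaTable

target_uri = f"abfss://{TargetContainer}@{TargetStorageAccount}.dfs.core.windows.net{TargetPath}/{FlowName}"
target = DeltaTable.forPath(spark, target_uri)

(target.alias("tgt")
    .merge(df_new2.alias("src"), "tgt.AddressID = src.src_AddressID AND tgt.current = true")
    .whenMatchedUpdate(set={
        "current": "false",
        "endDate": "src.src_effectiveDate",   # end-date the old version
    })
    .execute())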
import hashlib
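The lone import hashlib suggests row-level hashing for change detection; a minimal sketch of how that might look, where the column list and the SHA-256 choice are assumptions.

# Sketch only: deterministic hash per row over the columns that drive change detection
import hashlib
from pyspark.sql import functions as F
from pyspark.sql.types import StringType

def row_hash(*values):
    joined = "|".join("" if v is None else str(v) for v in values)
    return hashlib.sha256(joined.encode("utf-8")).hexdigest()

row_hash_udf = F.udf(row_hash, StringType())

# AddressLine1, City and PostalCode stand in for the real change-tracking columns
dataChanged = dataChanged.withColumn("row_hash", row_hash_udf("AddressLine1", "City", "PostalCode"))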
var Kafka = require('node-rdkafka');

var producer = new Kafka.Producer({
  //'debug' : 'all',
  'metadata.broker.list': 'atlas-004133bc-3c87-4862-bf9d-b0ea6ae351f5.servicebus.windows.net:9093', //REPLACE
  'dr_cb': true, //delivery report callback
  'security.protocol': 'SASL_SSL',
  'sasl.mechanisms': 'PLAIN',
  'sasl.username': '$ConnectionString', //do not replace $ConnectionString
  'sasl.password': 'Endpoint=sb://atlas-004133bc-3c87-4862-bf9d-b0ea6ae351f5.servicebus.windows.net/;SharedAccessKeyName=AlternateSharedAccessKey;SharedAccessKey=WrIVbXQnYutxKXsvmfP+Wz4G4OLKHjDtuKH&6=' //REPLACE