Piethein Strengholt (pietheinstrengholt), GitHub gists
pietheinstrengholt / .env
Last active August 29, 2023 14:48
Azure OpenAI demo using azure-sdk-for-js
OPENAI_API_HOST=https://endpointname.openai.azure.com/
OPENAI_API_KEY=xxxxxxxxxxxxxxxxxxxxxx
AZURE_DEPLOYMENT_ID=text-davinci-003
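A minimal sketch of how this .env might be consumed with the @azure/openai package from azure-sdk-for-js; the dotenv loading and the demo() wrapper are assumptions, not part of the original gist:

require('dotenv').config(); // assumption: dotenv loads the .env above
const { OpenAIClient, AzureKeyCredential } = require('@azure/openai');

const client = new OpenAIClient(
  process.env.OPENAI_API_HOST,
  new AzureKeyCredential(process.env.OPENAI_API_KEY)
);

async function demo() {
  // completions are requested against the deployment named in AZURE_DEPLOYMENT_ID
  const result = await client.getCompletions(process.env.AZURE_DEPLOYMENT_ID, ['Say hello']);
  console.log(result.choices[0].text);
}

demo();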
# sep/16/2022 12:50:26 by RouterOS 7.5
# software id = 79VK-VRAH
#
# model = RB5009UPr+S+
# serial number = HCY08E7SYTZ
/interface bridge
add name=bridge-local
/interface vlan
add interface=ether1 name=vlan1.4 vlan-id=4
add interface=ether1 loop-protect=off name=vlan1.6 vlan-id=6
# Set arguments
SourceSystemName = "AdventureWorks"
FlowName = "SalesLTAddress"
SourceStorageAccount = "synapsepiethein"
SourceContainer = "synapsedata"
SourcePath = "/landingzone/AdventureWorks/"
TargetStorageAccount = "synapsepiethein"
TargetContainer = "synapsedata"
TargetPath = "/processedzone/AdventureWorks"
SinkOperation = "merge"
var Kafka = require('node-rdkafka');
var producer = new Kafka.Producer({
  //'debug' : 'all',
  'metadata.broker.list': 'atlas-004133bc-3c87-4862-bf9d-b0ea6ae351f5.servicebus.windows.net:9093', //REPLACE
  'dr_cb': true, //delivery report callback
  'security.protocol': 'SASL_SSL',
  'sasl.mechanisms': 'PLAIN',
  'sasl.username': '$ConnectionString', //do not replace $ConnectionString
  'sasl.password': 'Endpoint=sb://atlas-004133bc-3c87-4862-bf9d-b0ea6ae351f5.servicebus.windows.net/;SharedAccessKeyName=AlternateSharedAccessKey;SharedAccessKey=WrIVbXQnYutxKXsvmfP+Wz4G4OLKHjDtuKH&6=' //REPLACE with your Event Hubs connection string
});
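A minimal usage sketch for this producer, assuming a hypothetical topic name 'mytopic' (with Event Hubs, the topic corresponds to an event hub):

producer.connect();

producer.on('ready', function () {
  // produce(topic, partition, message, key, timestamp); a null partition lets librdkafka choose
  producer.produce('mytopic', null, Buffer.from('hello from node-rdkafka'), null, Date.now());
});

producer.on('delivery-report', function (err, report) {
  // fires because 'dr_cb' is set to true in the config above
  console.log('delivered:', report);
});

producer.setPollInterval(100); // poll regularly so delivery reports are emitted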
var Transform = require('stream').Transform;
var Kafka = require('node-rdkafka'); //See more info: https://github.com/Blizzard/node-rdkafka
var consumer = new Kafka.KafkaConsumer({
  //'debug' : 'all',
  'metadata.broker.list': 'atlas-004133bc-3c87-4862-bf9d-b0ea6ae351f5.servicebus.windows.net:9093', //REPLACE
  'group.id': 'nodejs-cg', //The default consumer group for EventHubs is $Default
  'socket.keepalive.enable': true,
  'enable.auto.commit': false,
  'security.protocol': 'SASL_SSL',
  'sasl.mechanisms': 'PLAIN', //completed here to mirror the producer config above
  'sasl.username': '$ConnectionString', //do not replace $ConnectionString
  'sasl.password': 'Endpoint=sb://atlas-004133bc-3c87-4862-bf9d-b0ea6ae351f5.servicebus.windows.net/;SharedAccessKeyName=AlternateSharedAccessKey;SharedAccessKey=WrIVbXQnYutxKXsvmfP+Wz4G4OLKHjDtuKH&6=' //REPLACE
});
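A minimal consume loop for this consumer, again assuming the hypothetical topic 'mytopic'; since 'enable.auto.commit' is false above, offsets are committed explicitly:

consumer.connect();

consumer.on('ready', function () {
  consumer.subscribe(['mytopic']);
  consumer.consume(); // flowing mode: emits one 'data' event per message
});

consumer.on('data', function (message) {
  console.log(message.value.toString());
  consumer.commitMessage(message); // manual commit, matching enable.auto.commit=false
});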
=== INSTALL SQUID ===
sudo apt install squid
sudo systemctl status squid
sudo cp /etc/squid/squid.conf{,.original}
sudo ufw allow 'Squid'
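From here, a typical next step is to edit /etc/squid/squid.conf to let your LAN use the proxy. A minimal sketch, assuming a 192.168.88.0/24 subnet and the default port; the allow rule must appear before the final 'http_access deny all' line:

http_port 3128
acl localnet src 192.168.88.0/24
http_access allow localnet

Then restart the service:

sudo systemctl restart squid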
#!/usr/bin/env python
# coding: utf-8
# In[1]:
# Set arguments
dfDataOriginalPath = "/processedzone/"
dfDataChangedPath = "/changedzone/"
cw_database = "AdventureWorks"
{
  "info": {
    "_postman_id": "956b4730-2042-49dd-9f39-b65c30c5b192",
    "name": "Purview Demo",
    "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
  },
  "item": [
    {
      "name": "Authenticate Purview",
      "request": {
# Set arguments
dfDataOriginalPath = "/processedzone/"
dfDataChangedPath = "/changedzone/"
cw_database = "AdventureWorks"
cw_table = "SalesLTAddress"
tenant_id = "xxxxx-xxxx-xxxx-xxxx-xxxxxxx"
client_id = "xxxxx-xxxx-xxxx-xxxx-xxxxxxx"
client_secret = "xxxxxxxxxxxxxxxx"
purview_account = "purview_account"