-- Create the point-in-time-restore test database.
CREATE DATABASE raki_pitr_test;
GO

-- Switch context; without this, table1 would be created in the current
-- database (e.g. master) instead of the one just created.
USE raki_pitr_test;
GO

-- Demo table with two seed rows.
CREATE TABLE table1 (ID int, value nvarchar(10));
GO

-- Explicit column lists so the inserts survive future schema changes.
INSERT INTO table1 (ID, value) VALUES (1, 'demo1');
INSERT INTO table1 (ID, value) VALUES (2, 'demo2');
GO
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Databricks notebook cell: read the Event Hub (i.e. topic) name from a widget.
# (Original scrape carried " | |" table residue on every line — removed.)

# Create the widget with a placeholder default so the notebook runs before a
# real topic name is supplied via the UI or a job parameter.
dbutils.widgets.text("topic", "your--topic--name")
topic = dbutils.widgets.get("topic")

# Required Spark SQL modules.
# NOTE(review): wildcard imports are kept because later notebook cells may
# rely on names they bring in; narrowing them here could break those cells.
from pyspark.sql import *
from pyspark.sql.functions import *
from pyspark.sql.types import *
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
### Azure Data Factory info
# Replace every placeholder string below with your environment's real values
# before running the deployment script.
$SubscriptionName  = "Azure Subscription Name"
$ResourceGroupName = "Resource Group Name containing Data Factory"
$DataFactoryName   = "Data Factory Name"

### Azure-SSIS Integration Runtime settings
$AzureSSISName        = "SSIS IR Name that will appear in Portal"
$AzureSSISDescription = "E.g. Testing SSIS IR Deployment through PowerShell"
$AzureSSISLocation    = "CanadaCentral"
$AzureSSISNodeSize    = "Standard_D8_v3"   # VM size of each IR node
$AzureSSISNodeNumber  = 4                  # node count of the IR cluster
$AzureSSISEdition     = "Standard"         # "Standard" or "Enterprise"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import requests
import json
from termcolor import colored


def aadtoken(client_id, client_secret, client_tenant):
    """Acquire an Azure AD OAuth2 access token for the Purview resource.

    Args:
        client_id: Service principal application (client) ID.
        client_secret: Service principal secret.
        client_tenant: Azure AD tenant ID the principal lives in.

    Returns:
        The bearer access token string for https://purview.azure.net.

    Raises:
        requests.HTTPError: if Azure AD rejects the request (bad credentials,
            unknown tenant, ...), instead of an opaque KeyError.
    """
    url = "https://login.microsoftonline.com/{}/oauth2/token".format(client_tenant)
    # Pass a dict so requests form-encodes it; the original interpolated the
    # raw secret into the body, which breaks when it contains &, =, %, etc.
    payload = {
        "grant_type": "client_credentials",
        "client_id": client_id,
        "client_secret": client_secret,
        "resource": "https://purview.azure.net",
    }
    response = requests.post(url, data=payload)
    response.raise_for_status()  # fail loudly on auth errors
    return response.json()["access_token"]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import requests
import json
from termcolor import colored


def aadtoken(client_id, client_secret, client_tenant):
    """Acquire an Azure AD OAuth2 access token for the Purview resource.

    Args:
        client_id: Service principal application (client) ID.
        client_secret: Service principal secret.
        client_tenant: Azure AD tenant ID the principal lives in.

    Returns:
        The bearer access token string for https://purview.azure.net.

    Raises:
        requests.HTTPError: if Azure AD rejects the request (bad credentials,
            unknown tenant, ...), instead of an opaque KeyError.
    """
    url = "https://login.microsoftonline.com/{}/oauth2/token".format(client_tenant)
    # Pass a dict so requests form-encodes it; the original interpolated the
    # raw secret into the body, which breaks when it contains &, =, %, etc.
    payload = {
        "grant_type": "client_credentials",
        "client_id": client_id,
        "client_secret": client_secret,
        "resource": "https://purview.azure.net",
    }
    response = requests.post(url, data=payload)
    response.raise_for_status()  # fail loudly on auth errors
    return response.json()["access_token"]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{
  "entities": [
    {
      "attributes": {
        "name": "aemigration.database.windows.net",
        "qualifiedName": "mssql://aemigration.database.windows.net"
      },
      "collectionId": "aia-purview-new",
      "status": "ACTIVE",
      "typeName": "azure_sql_server"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Netplan config for eth0: DHCPv4 with a static secondary address.
# (Nesting restored — the scraped original was flattened and carried
# " | |" table residue that made it invalid YAML.)
network:
  ethernets:
    eth0:
      dhcp4: true
      dhcp4-overrides:
        # Prefer this NIC's DHCP route over other interfaces
        route-metric: 100
      dhcp6: false
      # NOTE(review): 172.20.0.0/21 is the network address of that subnet,
      # not a host IP — confirm the intended static address.
      addresses: [172.20.0.0/21]
      nameservers:
        # presumably 168.63.129.16 is Azure's platform DNS — verify for
        # non-Azure deployments
        addresses: [172.20.0.4, 168.63.129.16]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Kafka Topic ➡ Delta tables: `scene` via Spark Streaming
// `SOURCE`: Connect to Kafka topic as a streaming DataFrame: `raw_DF`
import org.apache.spark.sql.types._
import org.apache.spark.sql.functions._
import org.apache.spark.sql._
// NOTE(review): the two explicit imports below are already covered by the
// wildcard imports above and could be dropped; kept to avoid changing scope.
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.functions.from_json

// Pull from Key Vault for non-sandbox
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Delete Elasticsearch `logstash-YYYY.MM.DD` daily indices that are
# $from..$to days older than $dataset_date.
URL="http://localhost:9200"
from=20
to=30

# NOTE(review): $dataset_date is never set in this snippet — it must come
# from the caller's environment (e.g. dataset_date=2024-01-31). If it is
# empty, `date -d` computes the offset relative to "now".
for i in $(seq "$from" "$to")
do
    # Index date for the day $i days before $dataset_date
    DATE=$(date -d "$dataset_date - $i days" +%Y.%m.%d)
    echo "Deleting day: $DATE"
    curl -XDELETE "$URL/logstash-$DATE" # Comment out to see what range is deleted
done
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# The Namespace and ServiceAccount name that is used for the config
namespace=arc
serviceAccount=arcOnboard
######################
# actual script starts
set -o errexit

# First secret bound to the service account (carries its token and CA cert);
# expansions are quoted so odd names cannot word-split the kubectl args.
secretName=$(kubectl --namespace "$namespace" get serviceAccount "$serviceAccount" -o jsonpath='{.secrets[0].name}')
# Base64-encoded cluster CA certificate extracted from that secret
ca=$(kubectl --namespace "$namespace" get "secret/$secretName" -o jsonpath='{.data.ca\.crt}')
OlderNewer