CREATE TABLE dbo.TEST
(
    ID INT NOT NULL,
    COL1 VARCHAR(255) NOT NULL,
    COL2 VARCHAR(255) NOT NULL,
    COL3 VARCHAR(255) NOT NULL,
    COL4 VARCHAR(255) NOT NULL,
    COL5 VARCHAR(255) NOT NULL,
    StartTime DATETIME,
    EndTime DATETIME
);
GO
USE DEMO
GO
-- Enable CDC on the DEMO database
EXEC sys.sp_cdc_enable_db
GO
-- Confirm that CDC is now enabled
SELECT [name], database_id, is_cdc_enabled
FROM sys.databases
GO
# Enable SQL Server Agent inside the container, then restart it so the change takes effect
docker exec -it sql1 /opt/mssql/bin/mssql-conf set sqlagent.enabled true
docker restart sql1
import org.apache.spark.sql.SparkSession
import net.snowflake.spark.snowflake.Utils.SNOWFLAKE_SOURCE_NAME
val sparkSession = SparkSession.builder.master("local[*]").appName("spark session example").getOrCreate()
// Snowflake connection options; replace the <...> placeholders with real values
val sfOptions = Map("sfUrl" -> "<host>", "sfAccount" -> "<account>", "sfUser" -> "ABEHSU", "sfPassword" -> "<pwd>", "sfDatabase" -> "SPARK_TEST")
// Read the Snowflake sample table through the Spark connector
val df = sparkSession.read.format(SNOWFLAKE_SOURCE_NAME).options(sfOptions).option("query", "SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCDS_SF10TCL.CUSTOMER_ADDRESS").load()
df.show
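For completeness, a minimal sketch of writing a DataFrame back to Snowflake with the same connector, assuming the connection options above and a hypothetical target table PUBLIC.CUSTOMER_ADDRESS_COPY inside SPARK_TEST:

import org.apache.spark.sql.SaveMode
// Write df back to Snowflake; "dbtable" names the (hypothetical) target table and
// SaveMode.Overwrite recreates it on each run
df.write.format(SNOWFLAKE_SOURCE_NAME).options(sfOptions).option("dbtable", "PUBLIC.CUSTOMER_ADDRESS_COPY").mode(SaveMode.Overwrite).save()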
import net.snowflake.spark.snowflake.Utils.SNOWFLAKE_SOURCE_NAME
import org.apache.spark.sql.SparkSession
val sparkSession = SparkSession.builder.master("local[*]").appName("spark session example").getOrCreate()
// Connect as the SPARK role, using CREATE_STAGE_ONLY.TEST as the working database/schema
val sfOptions = Map("sfUrl" -> "<host>", "sfAccount" -> "<account>", "sfUser" -> "SPARK", "sfPassword" -> "<pwd>", "sfDatabase" -> "CREATE_STAGE_ONLY", "sfRole" -> "SPARK", "sfSchema" -> "TEST")
val df = sparkSession.read.format(SNOWFLAKE_SOURCE_NAME).options(sfOptions).option("query", "SELECT * FROM SPARK_PERMISSION_TEST.PUBLIC.CUSTOMER").load()
df.show
-- Create a database and schema for the SPARK role, granting only USAGE plus CREATE STAGE
CREATE DATABASE CREATE_STAGE_ONLY;
CREATE SCHEMA "CREATE_STAGE_ONLY"."TEST";
GRANT USAGE ON DATABASE CREATE_STAGE_ONLY TO ROLE SPARK;
GRANT USAGE ON SCHEMA CREATE_STAGE_ONLY.TEST TO ROLE SPARK;
GRANT CREATE STAGE ON SCHEMA CREATE_STAGE_ONLY.TEST TO ROLE SPARK;
import net.snowflake.spark.snowflake.Utils.SNOWFLAKE_SOURCE_NAME
import org.apache.spark.sql.SparkSession
val sparkSession = SparkSession.builder.master("local[*]").appName("spark session example").getOrCreate()
// Snowflake connection options; replace the <...> placeholders with real values
val sfOptions = Map("sfUrl" -> "<hostname>", "sfAccount" -> "<account>", "sfUser" -> "<username>", "sfPassword" -> "<password>", "sfDatabase" -> "<database>", "sfSchema" -> "<schema>")
// Read the CUSTOMER table once the grants below are in place
val df = sparkSession.read.format(SNOWFLAKE_SOURCE_NAME).options(sfOptions).option("query", "SELECT * FROM SPARK_PERMISSION_TEST.PUBLIC.CUSTOMER").load()
df.show
-- Grant the SPARK role read access to the source table
GRANT USAGE ON DATABASE SPARK_PERMISSION_TEST TO ROLE SPARK;
GRANT USAGE ON SCHEMA SPARK_PERMISSION_TEST.PUBLIC TO ROLE SPARK;
GRANT SELECT ON TABLE SPARK_PERMISSION_TEST.PUBLIC.CUSTOMER TO ROLE SPARK;
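A hedged variation on the read above, not part of the original gists: with these grants in place, the same table can also be loaded through the connector's dbtable option instead of a free-form query, reusing sparkSession and sfOptions from the previous snippet.

// Equivalent read via "dbtable"; a fully qualified name bypasses the sfDatabase/sfSchema defaults
val dfTable = sparkSession.read.format(SNOWFLAKE_SOURCE_NAME).options(sfOptions).option("dbtable", "SPARK_PERMISSION_TEST.PUBLIC.CUSTOMER").load()
dfTable.show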