from pyspark.sql import SparkSession
from pyspark.sql.functions import from_unixtime, unix_timestamp
# Initialize Spark session
# Builds (or reuses) a SparkSession named "date_format_conversion".
# NOTE(review): the from_unixtime/unix_timestamp imports above suggest this
# cell once performed epoch<->string date conversions, but that code is not
# present in this chunk — confirm against the original notebook.
spark = SparkSession.builder.appName("date_format_conversion").getOrCreate()
# Example data
# NOTE(review): Bash/Azure-CLI snippet pasted into a Python file — iterates over
# five application IDs and runs `az ad sp show --id api://<id>` for each.
# This is not valid Python and appears to be paste residue; confirm whether it
# belongs in a separate shell script. (Brace-enclosed comma list relies on the
# shell expanding {a,b,...}; the literal quotes become part of each ID string.)
for i in {"04eb34f2-3778-46e7-a1f5-391291a2bc6c","6af03cc1-8262-4e81-9fc0-bd6131b23996","7eecf3f5-67f0-4792-a9c2-c9f60ff5a96c","c5a71e40-4636-41bb-8232-b8b00fe470fb","026520ce-c449-43e1-ac4f-215db4827af0"}; do az ad sp show --id api://${i} --verbose ; done
from pyspark.dbutils import DBUtils
import pyspark.sql.functions as F
import pyspark
import pandas as pd
from itertools import chain
from functools import reduce
# mapping Walgreens status to RX-Lighting status and substatus
from pyspark.dbutils import DBUtils
import pyspark.sql.functions as F
import pyspark
import pandas as pd
from itertools import chain
from functools import reduce
# List all files in container ds-tmartch-output
# in storage account prodfixdseus2tmartchsa01
# ABFSS URI for the acc-activation folder in the ds-tmartch-output container
# of storage account prodfixdseus2tmartchsa01.
path = "abfss://ds-tmartch-output@prodfixdseus2tmartchsa01.dfs.core.windows.net/acc-activation"
# NOTE(review): `dbutils` is the Databricks-injected global (not imported here);
# fs.ls returns a list of FileInfo entries for the folder. Only runnable inside
# a Databricks workspace with access to this storage account.
dbutils.fs.ls(path)
# The result looks like this:
import yaml
from pyspark.dbutils import DBUtils
import pyspark.sql.functions as F
import time
def get_dir_content(path):
# NOTE(review): IPython/notebook magic — valid only inside a notebook cell,
# a syntax error in a plain .py module. Installs PyYAML for the `import yaml`
# lines nearby. (Also note the `def get_dir_content(path):` header above has
# no body in this chunk — the function appears truncated by the paste.)
!pip install pyyaml
import yaml
from pyspark.dbutils import DBUtils
import pyspark.sql.functions as F
import time
# NewerOlder  -- blog-pagination text captured when this snippet was scraped; not part of the code