from pyspark.dbutils import DBUtils
# List every secret scope visible to this workspace; the notebook displays
# the returned list of SecretScope objects (values are never shown here).
dbutils.secrets.listScopes()
The output:
Out[16]: [SecretScope(name='mleng-secrets')]
# List the secret keys stored in the "mleng-secrets" scope.
# This returns metadata only (key names) — never the secret values themselves.
dbutils.secrets.list("mleng-secrets")
The output:
Out[11]: [SecretMetadata(key='ado-token'),
SecretMetadata(key='aks-token'),
SecretMetadata(key='ARTIFACT-PAT'),
SecretMetadata(key='azp-token'),
SecretMetadata(key='dns-api-key'),
SecretMetadata(key='dockercfgjson'),
SecretMetadata(key='dockerconfigjson'),
SecretMetadata(key='p-prodfix-ds-foo-svcp-client-id'),
SecretMetadata(key='p-prodfix-ds-foo-svcp-secret'),
SecretMetadata(key='p-prodfix-ds-servicebus-svcp-client-id'),
SecretMetadata(key='p-prodfix-ds-servicebus-svcp-secret'),
SecretMetadata(key='p-prodfix-ds-tmleng-svcp-client-id'),
SecretMetadata(key='p-prodfix-ds-tmleng-svcp-secret'),
SecretMetadata(key='p-prodfix-ds-tmleng-svcp-tenant-id'),
SecretMetadata(key='prodfix-devops-tdeploy'),
SecretMetadata(key='prodfix-ds-tmleng-eastus2-app-insights-01'),
SecretMetadata(key='prodfix-ds-tmleng-eastus2-app-insights-01-cxn'),
SecretMetadata(key='prodfix-ds-tmleng-eastus2-app-insights-01-key'),
SecretMetadata(key='prodfix-ds-tmleng-eastus2-databricks-01Token'),
SecretMetadata(key='prodfix-mleng-service-bus-connection-string'),
SecretMetadata(key='prodfix-mleng-service-bus-key')]
# Reset any widgets left over from a previous run, then declare this
# notebook's input widgets and capture their current values.
dbutils.widgets.removeAll()


def _text_widget(name, default):
    """Declare a text widget with a default and return its current value."""
    dbutils.widgets.text(name, default)
    return dbutils.widgets.get(name)


# Changed per environment.
env = _text_widget("env", "prodfix")
# Values are "p" for project and "t" for team; if you don't know, it's probably "p".
workspace_type = _text_widget("worktype", "t")
property_file_path = _text_widget("propertyFilePath", "/dtrans")
short_code = _text_widget("short_code", "dtrans")

# Derive the service-principal name, then the secret scope and the two
# secret-key names used to fetch its credentials.
sp_name = f"p-{env}-ds-{workspace_type}{short_code}-svcp"
secret_scope = f"{short_code}-secrets"
client_id_name = f"{sp_name}-client-id"
client_secret_name = f"{sp_name}-secret"
When we run dbutils.secrets.get(scope=secret_scope, key=client_id_name), it only shows [REDACTED].
This is by design: to protect credentials, Databricks redacts secret values, so they are never visible directly in notebook output.
In order to show the secret, there is a workaround.
# Workaround for notebook redaction: emitting the secret one character at a
# time (with spaces between) prevents the full value from matching the
# redaction filter. NOTE(review): this deliberately exposes a credential —
# use only for debugging and clear the cell output afterwards.
client_secrets = dbutils.secrets.get(scope=secret_scope, key=client_id_name)
print(" ".join(client_secrets), end=" ")
The output looks like this:
6 w e t c e t 4 - 2 e 5 r t 1 8 c 6 2 3 2
# Apply the same character-by-character trick to the client secret itself.
# NOTE(review): this exposes a credential in the cell output — debugging only.
secrets = dbutils.secrets.get(scope=secret_scope, key=client_secret_name)
print(" ".join(secrets), end=" ")