Databricks: mounting Azure Blob Storage using a secret

This snippet mounts the "raw" container of an Azure Blob Storage account in Databricks, with the account access key pulled from a secret scope, and then reads a sample file both through the mount and via the Spark configuration.
container = "raw"
storageAccount = "testarunacc"
# Name of the Spark/Hadoop config property that carries the storage account key
accountKey = "fs.azure.account.key.{}.blob.core.windows.net".format(storageAccount)
# Pull the actual account access key out of the Databricks secret scope
accessKey = dbutils.secrets.get(scope="arunscope", key="key1")
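# Optional sanity check, sketched as an aside: confirm the scope and key are
# visible to this workspace (dbutils.secrets.listScopes / dbutils.secrets.list;
# the secret values themselves are always redacted in notebook output)
for s in dbutils.secrets.listScopes():
    print(s.name)
for s in dbutils.secrets.list("arunscope"):
    print(s.key)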
# Mount the blob container onto DBFS so it is reachable at /mnt/<container>
inputSource = "wasbs://{}@{}.blob.core.windows.net".format(container, storageAccount)
mountPoint = "/mnt/" + container
extraConfig = {accountKey: accessKey}

print("Mounting: {}".format(mountPoint))
try:
    dbutils.fs.mount(
        source=inputSource,
        mount_point=mountPoint,
        extra_configs=extraConfig
    )
    print("=> Succeeded")
except Exception as e:
    if "Directory already mounted" in str(e):
        print("=> Directory {} already mounted".format(mountPoint))
    else:
        raise
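# An alternative sketch that avoids matching on the error text:
# dbutils.fs.mounts() lists every active mount, so the mount point can be
# checked up front (and detached with dbutils.fs.unmount if it needs refreshing)
if any(m.mountPoint == mountPoint for m in dbutils.fs.mounts()):
    print("=> {} is already mounted".format(mountPoint))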
# Register the account key with the Spark and Hadoop configurations as well,
# so the container can also be read directly via its wasbs:// URI
spark.conf.set(accountKey, accessKey)
spark._jsc.hadoopConfiguration().set(accountKey, accessKey)
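# With the key in the Hadoop configuration, the same data is also readable
# straight from the wasbs:// URI, without going through the mount. A minimal
# sketch (test.csv is the same sample file read below):
df_direct = spark.read.text("{}/test.csv".format(inputSource))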
# Read the sample file through the mount point (one row per line of text)
df = spark.read.text("/mnt/raw/test.csv")
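Note that spark.read.text returns one row per line in a single value column. If the file should actually be parsed as CSV, a minimal sketch using the standard DataFrameReader options (header and inferSchema are assumptions about this sample file):

df_csv = (spark.read
    .option("header", True)
    .option("inferSchema", True)
    .csv("/mnt/raw/test.csv"))
df_csv.printSchema()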