val containerName = "<Container Name>"
val storageAccountName = "<Storage Account Name>"
val sas = "<Generated SAS Key>"
// Spark configuration key that carries the SAS token for this container.
val config = "fs.azure.sas." + containerName + "." + storageAccountName + ".blob.core.windows.net"
// Mount the blob container to DBFS at /mnt/myfile using the SAS key.
dbutils.fs.mount(
  source = "wasbs://" + containerName + "@" + storageAccountName + ".blob.core.windows.net/",
  mountPoint = "/mnt/myfile",
  extraConfigs = Map(config -> sas))

// Read the CSV file from the mount point into a DataFrame.
val mydf = spark.read.option("header", "true").option("inferSchema", "true").csv("/mnt/myfile/employe_data.csv")
display(mydf)
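
To confirm the container is attached, or to detach it once the data has been read, Databricks utilities provide dbutils.fs.mounts and dbutils.fs.unmount. The short sketch below is an optional follow-up and assumes the same /mnt/myfile mount point used above.

// List current mounts to confirm /mnt/myfile is attached.
dbutils.fs.mounts().foreach(m => println(m.mountPoint + " -> " + m.source))

// Detach the mount when it is no longer needed.
dbutils.fs.unmount("/mnt/myfile")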