Access Fabric Lakehouse With Onelake SAS
Principal Program Manager, Microsoft Fabric CAT helping users and organizations build scalable, insightful, secure solutions. Blogs, opinions are my own and do not represent my employer.
Delegated access to Fabric OneLake using short-lived shared access signatures (SAS) was announced last year. It allows secure, short-term, delegated access to files and folders in OneLake. A OneLake SAS can provide temporary access to applications that don't support Microsoft Entra. These applications can then load data or serve as proxies between other customer applications or software development companies.[*] If you are building client-side applications, this is a great way to securely give read/write access to the data. You can learn more about it from here : OneLake shared access signatures (SAS) now available in public preview | Microsoft Fabric Blog | Microsoft Fabric
There are three steps to generate the SAS:
Enable it in the workspace settings:

Generate the User Delegation Key (UDK) : Read more about it here.
Build the SAS URL : This can be the tricky part because you have to parse and create XML. This documentation provides all the details.
Once you have the SAS URL, you just make a get request like any other API to access the data.
Below I share how to do this using service principal for demonstration purposes. For user facing client application, use the appropriate authentication method to generate the token. Also note that you should use Azure Key Vault for security, for my demo I am hard coding them to keep it simple. Read the best practices before implementing.
Data
I have a finance.csv file in a Fabric Lakehouse that I want to read using OneLake SAS.

Code
I created a service principal and gave it a granular access to the folder.
%pip install azure-identity --q
import base64
import hmac
import hashlib
import datetime as dt
import requests
from urllib.parse import quote
from xml.etree import ElementTree as ET
from azure.identity import ClientSecretCredential
from email.utils import formatdate
import io
import pandas as pd
import logging
# --- Demo configuration. In production, load these from Azure Key Vault
# --- instead of hard-coding them (see best-practices note above).
TENANT_ID = ""
CLIENT_ID = ""
CLIENT_SECRET = ""
REGION = "centralus" #IMPORTANT : use the regional endpoint, i.e. capacity region
WORKSPACE_ID = ""
ITEM_ID = "" #lakehouse id
PATH = "Files/raw_data/finance.csv" # relative path
# Service-principal credential; for user-facing apps substitute the
# appropriate azure-identity credential type.
cred = ClientSecretCredential(TENANT_ID, CLIENT_ID, CLIENT_SECRET)
# Bearer token for the OneLake (Azure Storage) blob endpoint — used when
# requesting the user delegation key.
STORAGE_BEARER = cred.get_token("https://storage.azure.com/.default").token
# Bearer token for the Fabric REST API. NOTE(review): not used elsewhere in
# this snippet — confirm whether it is actually needed.
FABRIC_BEARER = cred.get_token("https://api.fabric.microsoft.com/.default").token
def get_udk():
    """Request a OneLake user delegation key (UDK) valid for ~55 minutes.

    Calls the regional OneLake blob endpoint's Get User Delegation Key
    operation and parses the XML response.

    Returns:
        dict with keys SignedOid, SignedTid, SignedStart, SignedExpiry,
        SignedService, SignedVersion and Value (the base64 signing key).

    Raises:
        ValueError: if the OAuth token would expire before the UDK does.
        requests.HTTPError: if the UDK request fails.
    """
    # Acquire a fresh storage token and sign the request with THAT token,
    # not the module-level STORAGE_BEARER (which may be close to expiry —
    # the original checked the fresh token's expiry but sent the stale one).
    token_obj = cred.get_token("https://storage.azure.com/.default")
    token_expiry = dt.datetime.fromtimestamp(token_obj.expires_on, dt.timezone.utc)
    now = dt.datetime.now(dt.timezone.utc)
    # The UDK must not outlive the OAuth token used to request it.
    udk_lifetime = dt.timedelta(minutes=55)
    udk_expiry = now + udk_lifetime
    if token_expiry <= udk_expiry:
        raise ValueError(f"OAuth token expires before UDK. Token: {token_expiry}, UDK: {udk_expiry}")
    url = f"https://{REGION}-onelake.blob.fabric.microsoft.com/?restype=service&comp=userdelegationkey"
    # Backdate the start a couple of minutes to absorb clock skew.
    st = (now - dt.timedelta(minutes=2)).strftime("%Y-%m-%dT%H:%M:%SZ")
    se = udk_expiry.strftime("%Y-%m-%dT%H:%M:%SZ")
    body = f'<?xml version="1.0"?><KeyInfo><Start>{st}</Start><Expiry>{se}</Expiry></KeyInfo>'
    hdr = {
        "Authorization": f"Bearer {token_obj.token}",
        "x-ms-version": "2022-11-02",
        "x-ms-date": formatdate(usegmt=True),
        "Content-Type": "application/xml",
    }
    r = requests.post(url, data=body, headers=hdr, timeout=20)
    r.raise_for_status()
    root = ET.fromstring(r.text)
    logging.info(f"UDK generated successfully, expires at {se}")
    return {t: root.findtext(t) for t in [
        "SignedOid", "SignedTid", "SignedStart", "SignedExpiry",
        "SignedService", "SignedVersion", "Value",
    ]}
def build_file_sas_guid(udk: dict, workspace_id: str, item_id: str, path: str, perms="r"):
    """Build a user-delegation SAS URL for one file in a Fabric Lakehouse.

    Args:
        udk: dict returned by get_udk() (SignedOid/SignedTid/... + Value).
        workspace_id: Fabric workspace GUID.
        item_id: lakehouse (item) GUID.
        path: item-relative path, e.g. "Files/raw_data/finance.csv".
        perms: SAS permissions string, e.g. "r" (read) or "cw" (create+write).

    Returns:
        Full https URL with the SAS query string appended.
    """
    protocol = "https"
    resource = "b"  # sr=b: the SAS targets a single blob
    sas_version = udk["SignedVersion"]
    signing_key = base64.b64decode(udk["Value"])
    # Canonicalized resource for OneLake: /blob/onelake/<workspace>/<item>/<path>
    canonical = f"/blob/onelake/{workspace_id}/{item_id}/{path}"

    # Clamp the SAS validity window inside the UDK's signed window; start is
    # backdated 2 minutes for clock skew, lifetime capped at 50 minutes.
    fmt = "%Y-%m-%dT%H:%M:%SZ"
    utc_now = dt.datetime.now(dt.timezone.utc)
    key_start = dt.datetime.strptime(udk["SignedStart"], fmt).replace(tzinfo=dt.timezone.utc)
    key_expiry = dt.datetime.strptime(udk["SignedExpiry"], fmt).replace(tzinfo=dt.timezone.utc)
    start_at = max(utc_now - dt.timedelta(minutes=2), key_start)
    end_at = min(utc_now + dt.timedelta(minutes=50), key_expiry)
    start_s = start_at.replace(tzinfo=None).strftime(fmt)
    end_s = end_at.replace(tzinfo=None).strftime(fmt)

    # 24-field string-to-sign for a user delegation SAS (sv >= 2020-12-06):
    # blanks cover saoid/suoid/scid, sip, snapshot time, encryption scope and
    # the rscc/rscd/rsce/rscl/rsct response-header overrides.
    string_to_sign = "\n".join([
        perms,
        start_s,
        end_s,
        canonical,
        udk["SignedOid"],
        udk["SignedTid"],
        udk["SignedStart"],
        udk["SignedExpiry"],
        udk["SignedService"],
        udk["SignedVersion"],
        "", "", "",
        "",
        protocol,
        sas_version,
        resource,
        "",
        "",
        "", "", "", "", "",
    ])
    signature = base64.b64encode(
        hmac.new(signing_key, string_to_sign.encode(), hashlib.sha256).digest()
    ).decode()

    def enc(v):
        # SAS query values must be fully percent-encoded (including ':').
        return quote(v, safe="")

    qs = (
        f"sp={perms}&st={enc(start_s)}&se={enc(end_s)}"
        f"&skoid={enc(udk['SignedOid'])}&sktid={enc(udk['SignedTid'])}"
        f"&skt={enc(udk['SignedStart'])}&ske={enc(udk['SignedExpiry'])}"
        f"&sks={udk['SignedService']}&skv={udk['SignedVersion']}"
        f"&sv={sas_version}&sr={resource}&spr={protocol}&sig={enc(signature)}"
    )
    # Encode the path but keep '/' separators intact.
    path_url = quote(path, safe="/")
    sas_url = f"https://onelake.blob.fabric.microsoft.com/{workspace_id}/{item_id}/{path_url}?{qs}"
    logging.info(f"SAS URL generated successfully, expires at {end_s}")
    return sas_url
# Demo: generate a read SAS, then fetch the CSV over plain HTTPS.
try:
    udk = get_udk()
    sas_url = build_file_sas_guid(udk, WORKSPACE_ID, ITEM_ID, PATH, perms="r")
    print("SAS URL:", sas_url)
    r = requests.get(sas_url, timeout=30)
    print("GET:", r.status_code)
    if r.status_code == 200:
        df = pd.read_csv(io.BytesIO(r.content))
        # Print explicitly — a bare `df.head(2)` expression displays nothing
        # inside an if-block or a plain script.
        print(df.head(2))
    else:
        print(f"Error Status code: {r.status_code}")
        print(f"Response: {r.text}")
except Exception as e:
    # Broad catch is acceptable at the top of a demo script; show the details.
    print(f"Error : {e}")
    import traceback
    traceback.print_exc()
Example:
I was able to generate the SAS and read the CSV from Google Colab notebook :

Writing Files
For writing back to the Lakehouse, follow the same steps to generate the SAS and use the put method:
import requests
# Reuse the UDK with create+write permissions to upload a new file.
sas_url = build_file_sas_guid(udk, WORKSPACE_ID, ITEM_ID, "Files/raw_data/my_new_file.csv", perms="cw")
data_to_write = "column1,column2,column3\nvalue1,value2,value3"
headers = {
# Required by the Blob service when creating a blob with Put Blob.
'x-ms-blob-type': 'BlockBlob',
'Content-Type': 'text/csv'
}
response = requests.put(sas_url, data=data_to_write, headers=headers, timeout=30)
response.raise_for_status()
print("Upload status:", response.status_code)
## 201 if successful

Onelake SAS has a maximum life of one hour.