Sessions API
The Sessions API allows AI systems to interact with data through active access sessions, providing governed access with full tracking and evidence generation.
API endpoints
Stream Data
GET https://api.xase.ai/v1/sessions/{session_id}/stream
# Query Parameters
# batch_size (optional) - Number of records per batch (default: 32, max: 100)
# filter (optional) - JSON filter expression
# fields (optional) - Comma-separated list of fields to include
# sort (optional) - Field to sort by
# Response (200 OK)
{
  "batch": [
    {
      "record_id": "rec_a1b2c3",
      "patient_id": "p12345",
      "age": 57,
      "diagnosis": "diabetes",
      "symptoms": ["fatigue", "thirst", "blurred vision"]
    },
    // ... more records
  ],
  "batch_id": "batch_d4e5f6",
  "record_count": 32,
  "has_more": true,
  "next_cursor": "cursor_g7h8i9"
}
# To get next batch
GET https://api.xase.ai/v1/sessions/{session_id}/stream?cursor=cursor_g7h8i9
This endpoint streams data in batches from the dataset associated with the active session.
Each request returns a batch of records and a cursor for pagination.
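If you call the HTTP endpoint directly instead of using the SDK, the pagination loop can be written as in the sketch below. This is a minimal illustration only: the bearer-token Authorization header and the placeholder session ID are assumptions, and the per-record print stands in for your own batch handling.
import requests

# Minimal cursor-pagination sketch against the raw stream endpoint.
# The bearer-token header and the placeholder session ID are assumptions.
API_KEY = "sk_..."
SESSION_ID = "sess_7f6e5d4c"
URL = f"https://api.xase.ai/v1/sessions/{SESSION_ID}/stream"
headers = {"Authorization": f"Bearer {API_KEY}"}

cursor = None
while True:
    params = {"batch_size": 32}
    if cursor:
        params["cursor"] = cursor
    resp = requests.get(URL, headers=headers, params=params)
    resp.raise_for_status()
    page = resp.json()
    for record in page["batch"]:
        print(record["record_id"])  # replace with your own batch handling
    if not page["has_more"]:
        break
    cursor = page["next_cursor"]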
Get Specific Record
GET https://api.xase.ai/v1/sessions/{session_id}/records/{record_id}
# Response (200 OK)
{
  "record_id": "rec_a1b2c3",
  "patient_id": "p12345",
  "age": 57,
  "diagnosis": "diabetes",
  "symptoms": ["fatigue", "thirst", "blurred vision"],
  "treatments": ["insulin", "dietary changes"],
  "lab_results": {
    "glucose": 210,
    "a1c": 8.1
  }
}
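As a hedged sketch, a single record can be fetched over HTTP like this; on failure the response body follows the shared error envelope documented under Error responses below. The bearer-token header and placeholder session ID are assumptions.
import requests

# Fetch one record by ID (illustrative; bearer-token auth is an assumption).
API_KEY = "sk_..."
SESSION_ID = "sess_7f6e5d4c"
RECORD_ID = "rec_a1b2c3"

resp = requests.get(
    f"https://api.xase.ai/v1/sessions/{SESSION_ID}/records/{RECORD_ID}",
    headers={"Authorization": f"Bearer {API_KEY}"},
)
if resp.status_code == 200:
    record = resp.json()
    print(record["diagnosis"], record["lab_results"]["a1c"])
else:
    # Error bodies share the structure shown under Error responses below.
    print(resp.json()["error"]["type"])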
Apply Filter
POST https://api.xase.ai/v1/sessions/{session_id}/filter
# Request Body
{
  "filter": {
    "age": {"$gte": 50},
    "diagnosis": {"$in": ["diabetes", "hypertension"]},
    "$or": [
      {"lab_results.glucose": {"$gte": 200}},
      {"lab_results.bp_systolic": {"$gte": 140}}
    ]
  },
  "fields": ["patient_id", "age", "diagnosis", "lab_results"],
  "sort": {"age": -1},
  "limit": 100
}
# Response (200 OK)
{
  "records": [
    {
      "patient_id": "p12345",
      "age": 72,
      "diagnosis": "hypertension",
      "lab_results": {
        "bp_systolic": 155,
        "bp_diastolic": 95
      }
    },
    // ... more records
  ],
  "record_count": 87,
  "has_more": false
}
This endpoint applies MongoDB-like query filters to the session's data without downloading the entire dataset.
Filtering happens server-side to reduce bandwidth and processing requirements.
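Calling the filter endpoint directly might look like the sketch below, which posts the request body shown above and iterates over the matching records. The bearer-token header and session ID are illustrative assumptions.
import requests

# Server-side filtering sketch: only matching records are returned.
# The bearer-token header and the placeholder session ID are assumptions.
API_KEY = "sk_..."
SESSION_ID = "sess_7f6e5d4c"

payload = {
    "filter": {
        "age": {"$gte": 50},
        "diagnosis": {"$in": ["diabetes", "hypertension"]}
    },
    "fields": ["patient_id", "age", "diagnosis", "lab_results"],
    "sort": {"age": -1},
    "limit": 100
}
resp = requests.post(
    f"https://api.xase.ai/v1/sessions/{SESSION_ID}/filter",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json=payload,
)
resp.raise_for_status()
for record in resp.json()["records"]:
    print(record["patient_id"], record["diagnosis"])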
Apply Transformation
POST https://api.xase.ai/v1/sessions/{session_id}/transform
# Request Body
{
  "record_ids": ["rec_a1b2c3", "rec_d4e5f6"],
  "transformation": "anonymize",
  "parameters": {
    "fields": ["patient_id", "name", "address"],
    "method": "hash"
  },
  "metadata": {
    "purpose": "privacy protection",
    "operator": "data_scientist@company.com"
  }
}
# Response (200 OK)
{
  "transformed_records": [
    {
      "record_id": "rec_a1b2c3",
      "patient_id": "b3d8e9f2a7c1...", // Hashed
      "age": 57,
      "diagnosis": "diabetes"
      // Other fields preserved
    },
    // ... more records
  ],
  "transformation_id": "transform_h8i9j0",
  "evidence_url": "https://api.xase.ai/v1/evidence/transform_h8i9j0"
}
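The response links each transformation to an evidence record via evidence_url. The sketch below submits the anonymization request shown above and then retrieves that evidence; the bearer-token header and session ID are assumptions, and since the evidence payload shape is not documented in this section it is only inspected, not parsed.
import requests

# Submit an anonymization transform, then fetch the evidence it produced.
# Bearer-token auth and the placeholder session ID are assumptions.
API_KEY = "sk_..."
SESSION_ID = "sess_7f6e5d4c"
headers = {"Authorization": f"Bearer {API_KEY}"}

resp = requests.post(
    f"https://api.xase.ai/v1/sessions/{SESSION_ID}/transform",
    headers=headers,
    json={
        "record_ids": ["rec_a1b2c3", "rec_d4e5f6"],
        "transformation": "anonymize",
        "parameters": {"fields": ["patient_id", "name", "address"], "method": "hash"},
        "metadata": {"purpose": "privacy protection", "operator": "data_scientist@company.com"}
    },
)
resp.raise_for_status()
result = resp.json()

# The evidence payload shape is not documented here, so just check the status.
evidence = requests.get(result["evidence_url"], headers=headers)
print(result["transformation_id"], evidence.status_code)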
Record Operation
POST https://api.xase.ai/v1/sessions/{session_id}/operations
# Request Body
{
  "operation_type": "TRAIN",
  "records": ["rec_a1b2c3", "rec_d4e5f6"],
  "metadata": {
    "model_id": "diagnostic-model-v2",
    "training_step": 1452,
    "metrics": {
      "loss": 0.0342,
      "accuracy": 0.967
    }
  }
}
# Response (200 OK)
{
  "operation_id": "op_j0k1l2",
  "created_at": "2026-01-15T17:23:45Z",
  "status": "RECORDED",
  "evidence_id": "ev_m3n4o5"
}
This endpoint records operations performed on data, such as training, analysis, or validation.
Each operation is recorded with evidence for later verification and audit.
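A direct call to the operations endpoint might look like the sketch below; the bearer token and session ID are assumptions, and keeping the returned evidence_id alongside your own training logs is one possible pattern, not a requirement of the API.
import requests

# Record a TRAIN operation against two records and keep the evidence ID.
# Bearer-token auth and the placeholder session ID are assumptions.
API_KEY = "sk_..."
SESSION_ID = "sess_7f6e5d4c"

resp = requests.post(
    f"https://api.xase.ai/v1/sessions/{SESSION_ID}/operations",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json={
        "operation_type": "TRAIN",
        "records": ["rec_a1b2c3", "rec_d4e5f6"],
        "metadata": {"model_id": "diagnostic-model-v2", "training_step": 1452}
    },
)
resp.raise_for_status()
operation = resp.json()
print(operation["operation_id"], operation["status"], operation["evidence_id"])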
Error responses
Session Expired
{
  "error": {
    "type": "session_expired",
    "message": "This session has expired",
    "details": {
      "session_id": "sess_7f6e5d4c",
      "expired_at": "2026-02-14T14:30:00Z",
      "current_time": "2026-02-15T09:12:34Z"
    },
    "request_id": "req_p6q7r8s9"
  }
}
Access Revoked
{
  "error": {
    "type": "access_revoked",
    "message": "Access to this dataset has been revoked",
    "details": {
      "session_id": "sess_7f6e5d4c",
      "revoked_at": "2026-01-25T18:42:17Z",
      "reason": "Data holder has revoked access",
      "revoked_by": "data_admin@hospital.org"
    },
    "request_id": "req_t9u8v7w6"
  }
}
Rate Limit
{
  "error": {
    "type": "rate_limit_exceeded",
    "message": "Rate limit exceeded",
    "details": {
      "limit": 100,
      "period": "1m",
      "reset_at": "2026-01-15T17:30:00Z"
    },
    "request_id": "req_x5y4z3a2"
  }
}
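All error bodies share the envelope shown above, so clients can branch on error.type. The sketch below is one possible handling pattern for raw HTTP callers; the helper function and the back-off policy are assumptions, not part of the API contract.
import time
from datetime import datetime, timezone

import requests

# Illustrative handling of the shared error envelope. The helper name and
# back-off policy are assumptions, not part of the API contract.
def get_with_error_handling(url, headers, params=None):
    resp = requests.get(url, headers=headers, params=params)
    if resp.status_code == 200:
        return resp.json()

    error = resp.json()["error"]
    if error["type"] == "rate_limit_exceeded":
        # Wait until the advertised reset time, then signal the caller to retry.
        reset_at = datetime.fromisoformat(error["details"]["reset_at"].replace("Z", "+00:00"))
        time.sleep(max(0.0, (reset_at - datetime.now(timezone.utc)).total_seconds()))
        return None
    if error["type"] in ("session_expired", "access_revoked"):
        # The session is no longer usable; request a new access session.
        raise RuntimeError(f"{error['type']}: {error['message']} (request {error['request_id']})")
    raise RuntimeError(f"Unexpected error: {error}")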
SDK usage
Streaming Data
import xase
client = xase.Client(api_key="sk_...")
# Get access session
session = client.access(
    dataset="medical-records-2024",
    purpose="model-training",
    duration="30d"
)
# Stream data for training
for batch in session.stream(batch_size=32):
    # Train model on batch
    model.train(batch)
    # Record training operation with metrics
    session.record_operation(
        operation_type="TRAIN",
        records=batch.record_ids,
        metadata={
            "model_id": "diagnostic-model-v2",
            "metrics": {"loss": model.current_loss, "accuracy": model.current_accuracy}
        }
    )
Filtering and Transformations
# Apply filters
elderly_diabetics = session.filter(
    query={
        "age": {"$gte": 65},
        "diagnosis": "diabetes"
    },
    sort={"age": -1},
    limit=100
)
print(f"Found {len(elderly_diabetics)} records")

# Apply transformations
anonymized = session.transform(
    records=elderly_diabetics,
    transformation="anonymize",
    parameters={
        "fields": ["patient_id", "name", "address"],
        "method": "hash"
    }
)

# Get specific record
patient = session.get_record("rec_a1b2c3")
print(f"Patient age: {patient.age}, diagnosis: {patient.diagnosis}")
Working with Aggregations
# Perform aggregations server-side
results = session.aggregate([
    {"$match": {"age": {"$gte": 50}}},
    {"$group": {
        "_id": "$diagnosis",
        "count": {"$sum": 1},
        "avg_age": {"$avg": "$age"}
    }},
    {"$sort": {"count": -1}}
])
for result in results:
    print(f"Diagnosis: {result['_id']}")
    print(f"Count: {result['count']}")
    print(f"Average age: {result['avg_age']}")
# Close session when done
session.close(reason="Analysis complete")
