Integrate Lyceum into your workflows using the REST API. Start executions, stream output, and manage workloads programmatically.
Start a Python Execution
import requests

token = "YOUR_API_TOKEN"
base_url = "https://api.lyceum.technology/api/v2/external"

# Start execution
response = requests.post(
    f"{base_url}/execution/streaming/start",
    headers={
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json"
    },
    json={
        "code": "import numpy as np; print(np.random.rand(3))",
        "execution_type": "cpu"
    }
)
data = response.json()
print(f"Execution ID: {data['execution_id']}")
print(f"Streaming URL: {data['streaming_url']}")
Start a Docker Execution
response = requests.post(
    f"{base_url}/execution/image/start",
    headers={
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json"
    },
    json={
        "docker_image_ref": "python:3.11-slim",
        "docker_run_cmd": ["python", "-c", "print('Hello from Docker')"],
        "execution_type": "cpu",
        "timeout": 300
    }
)
data = response.json()
print(f"Execution ID: {data['execution_id']}")
Stream Output with Callbacks
Send real-time output to your own endpoint:
response = requests.post(
    f"{base_url}/execution/image/start",
    headers={
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json"
    },
    json={
        "docker_image_ref": "python:3.11",
        "docker_run_cmd": ["python", "train.py"],
        "execution_type": "gpu",
        "user_callback_url": "https://your-server.com/webhook/lyceum"
    }
)
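On the receiving side, a minimal callback handler could look like the sketch below (Flask is an arbitrary choice here, and the payload shape is an assumption; see the API Reference for the schema Lyceum actually POSTs to user_callback_url):

from flask import Flask, request

app = Flask(__name__)

# Sketch of a callback receiver. The payload fields are assumptions;
# check the API Reference for the actual schema.
@app.route("/webhook/lyceum", methods=["POST"])
def lyceum_callback():
    event = request.get_json(force=True)
    print("Callback received:", event)
    return "", 204  # acknowledge quickly; do heavy work elsewhere

if __name__ == "__main__":
    app.run(port=8000)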
Check Credits Before Execution
# Check balance
credits = requests.get(
    f"{base_url}/billing/credits",
    headers={"Authorization": f"Bearer {token}"}
).json()
print(f"Available credits: {credits['available_credits']}")

if credits['available_credits'] > 5:
    # Start execution
    response = requests.post(
        f"{base_url}/execution/streaming/start",
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json"
        },
        json={
            "code": "print('Running with sufficient credits')",
            "execution_type": "cpu"
        }
    )
else:
    print("Low credits — top up at dashboard.lyceum.technology")
Upload Files via API
# Upload a file to storage
with open('data.csv', 'rb') as f:
    response = requests.post(
        f"{base_url}/storage/upload",
        headers={"Authorization": f"Bearer {token}"},
        files={"file": f},
        data={"key": "datasets/data.csv"}
    )

print(f"Upload status: {response.status_code}")
JavaScript Example
const token = 'YOUR_API_TOKEN';
const baseUrl = 'https://api.lyceum.technology/api/v2/external';
const response = await fetch(`${baseUrl}/execution/image/start`, {
  method: 'POST',
  headers: {
    'Authorization': `Bearer ${token}`,
    'Content-Type': 'application/json'
  },
  body: JSON.stringify({
    docker_image_ref: 'node:20-alpine',
    docker_run_cmd: ['node', '-e', "console.log('Hello from Lyceum')"],
    execution_type: 'cpu'
  })
});
const { execution_id, streaming_url } = await response.json();
console.log(`Started: ${execution_id}`);
Use environment variables for your API token. Never hardcode credentials in scripts.
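For example, in Python (LYCEUM_API_TOKEN is an illustrative name, not an official one):

import os

# Read the token from the environment rather than hardcoding it.
token = os.environ["LYCEUM_API_TOKEN"]  # set via: export LYCEUM_API_TOKEN=...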
See the API Reference for complete endpoint documentation and the OpenAPI spec.

