Real-time progress updates via Server-Sent Events.
## Basic Streaming
```python
from inferencesh import inference, TaskStatus

client = inference(api_key="inf_your_key")

for update in client.run({
    "app": "infsh/flux",
    "input": {"prompt": "A sunset"}
}, stream=True):
    print(f"Status: {TaskStatus(update['status']).name}")

    if update.get("logs"):
        print(f"Logs: {update['logs']}")

    if update["status"] == TaskStatus.COMPLETED:
        print(f"Output: {update['output']}")
```

## Stream Existing Task
```python
# Start task without waiting
task = client.run(params, wait=False)

# Stream updates later
with client.stream_task(task["id"]) as stream:
    for update in stream:
        print(f"Status: {update['status']}")
        if update["status"] == TaskStatus.COMPLETED:
            break

# Access final result
print(f"Final: {stream.result}")
```

## Combined Callbacks (JavaScript)
```typescript
await client.run(params, {
  onUpdate: (update) => {
    console.log(`Status: ${update.status}`);
  },
  onPartialUpdate: (update, fields) => {
    if (fields.includes('logs')) {
      console.log('New logs:', update.logs);
    }
  }
});
```

## Async Streaming (Python)
```python
from inferencesh import async_inference

async def main():
    client = async_inference(api_key="inf_your_key")

    async for update in await client.run(params, stream=True):
        print(f"Status: {update['status']}")
```

## Reconnection Options
```python
# Tune SSE performance
client = inference(
    api_key="inf_your_key",
    sse_chunk_size=8192,
    sse_mode="iter_lines"
)

# Or via environment variables
# export INFERENCE_SSE_READ_BYTES=8192
# export INFERENCE_SSE_MODE=iter_lines
```

## Status Constants
```python
from inferencesh import TaskStatus

TaskStatus.RECEIVED    # 1  - Request received
TaskStatus.QUEUED      # 2  - Waiting in queue
TaskStatus.SCHEDULED   # 3  - Worker assigned
TaskStatus.PREPARING   # 4  - Preparing
TaskStatus.SERVING     # 5  - Serving
TaskStatus.SETTING_UP  # 6  - Setting up
TaskStatus.RUNNING     # 7  - Executing
TaskStatus.UPLOADING   # 8  - Uploading results
TaskStatus.COMPLETED   # 10 - Done
TaskStatus.FAILED      # 11 - Error
TaskStatus.CANCELLED   # 12 - Cancelled
```