Spaces:
Sleeping
Sleeping
File size: 3,468 Bytes
c30abb6 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 |
/**
* LeRobot Arena Inference Server TypeScript Client
*
* This client provides TypeScript access to the LeRobot Arena Inference Server
* for ACT (Action Chunking Transformer) model inference and session management.
*
* @example Basic Usage
* ```typescript
* import { LeRobotInferenceServerClient, CreateSessionRequest } from '@lerobot-arena/inference-server-client';
*
* const client = new LeRobotInferenceServerClient('http://localhost:8001');
*
* // Create and start a session
* const sessionRequest: CreateSessionRequest = {
* session_id: 'my-robot-01',
* policy_path: './checkpoints/act_so101_beyond',
* camera_names: ['front', 'wrist'],
* arena_server_url: 'http://localhost:8000'
* };
*
* const session = await client.createSession(sessionRequest);
* await client.startInference('my-robot-01');
*
* // Monitor session
* const status = await client.getSessionStatus('my-robot-01');
* console.log(`Session status: ${status.status}`);
* ```
*/
export * from './generated';
import type { CreateSessionRequest, CreateSessionResponse, SessionStatusResponse } from './generated';
/**
* LeRobot Arena Inference Server Client
*
* A convenience wrapper around the generated OpenAPI client that provides
* a simpler interface for common operations while maintaining full type safety.
*/
/**
 * Convenience wrapper around the generated OpenAPI client for the LeRobot
 * Arena Inference Server. Exposes session lifecycle (create / start / stop /
 * restart / delete), status polling, and a few debug endpoints.
 *
 * NOTE(review): this is an ambient declaration (.d.ts); the implementation
 * lives in the compiled JS. Comments document the declared contract only.
 */
export declare class LeRobotInferenceServerClient {
    /** Root URL of the inference server (e.g. 'http://localhost:8001').
     *  Implicitly `any` here because the declaration carries no annotation. */
    private baseUrl;
    /**
     * @param baseUrl - Root URL of the inference server; all requests are
     *                  issued relative to it.
     */
    constructor(baseUrl: string);
    /**
     * Check if the inference server is healthy and responding.
     * @returns `true` when the server answers its health check, `false` otherwise.
     */
    isHealthy(): Promise<boolean>;
    /**
     * Get detailed server health information.
     * @returns Server-defined health payload; shape is not declared here,
     *          so callers must narrow the `unknown` result themselves.
     */
    getHealth(): Promise<unknown>;
    /**
     * Create a new inference session.
     * @param request - Session parameters (session id, policy path, camera
     *                  names, arena server URL — see `CreateSessionRequest`).
     * @returns The server's session-creation response.
     */
    createSession(request: CreateSessionRequest): Promise<CreateSessionResponse>;
    /**
     * List all active sessions.
     * @returns One status entry per currently known session.
     */
    listSessions(): Promise<SessionStatusResponse[]>;
    /**
     * Get detailed status of a specific session.
     * @param sessionId - Identifier used when the session was created.
     */
    getSessionStatus(sessionId: string): Promise<SessionStatusResponse>;
    /**
     * Start inference for a session.
     * @param sessionId - Identifier of an existing session.
     */
    startInference(sessionId: string): Promise<void>;
    /**
     * Stop inference for a session.
     * @param sessionId - Identifier of an existing session.
     */
    stopInference(sessionId: string): Promise<void>;
    /**
     * Restart inference for a session.
     * @param sessionId - Identifier of an existing session.
     */
    restartInference(sessionId: string): Promise<void>;
    /**
     * Delete a session and clean up all resources.
     * @param sessionId - Identifier of the session to remove.
     */
    deleteSession(sessionId: string): Promise<void>;
    /**
     * Wait (poll) for a session to reach a specific status.
     * @param sessionId - Session to watch.
     * @param targetStatus - Status string to wait for; valid values are
     *                       server-defined and not declared here.
     * @param timeoutMs - Maximum time to wait in milliseconds. The default
     *                    when omitted is set by the implementation —
     *                    TODO(review): confirm against the compiled source.
     * @returns The session status once the target is reached.
     */
    waitForSessionStatus(sessionId: string, targetStatus: string, timeoutMs?: number): Promise<SessionStatusResponse>;
    /**
     * Convenience method to create a session and start inference in one call.
     * @param request - Same payload as {@link createSession}.
     * @returns Both the creation response and a follow-up status snapshot.
     */
    createAndStartSession(request: CreateSessionRequest): Promise<{
        session: CreateSessionResponse;
        status: SessionStatusResponse;
    }>;
    /**
     * Get system information for debugging.
     * @returns Server-defined payload; narrow the `unknown` before use.
     */
    getSystemInfo(): Promise<unknown>;
    /**
     * Reset a session's internal state (debug method).
     * @param sessionId - Session whose state is reset.
     */
    debugResetSession(sessionId: string): Promise<void>;
    /**
     * Get detailed information about a session's action queue.
     * @param sessionId - Session to inspect.
     * @returns Server-defined queue payload; narrow the `unknown` before use.
     */
    getSessionQueueInfo(sessionId: string): Promise<unknown>;
}
/**
 * Factory helper that constructs a {@link LeRobotInferenceServerClient}.
 *
 * @param baseUrl - Root URL of the inference server the client will talk to.
 * @returns A new client instance bound to `baseUrl`.
 */
export declare function createClient(baseUrl: string): LeRobotInferenceServerClient;
/**
 * Alias for {@link LeRobotInferenceServerClient}.
 * NOTE(review): presumably kept for backward compatibility with an older
 * package name — confirm before removing or deprecating.
 */
export declare const LeRobotAIServerClient: typeof LeRobotInferenceServerClient;
//# sourceMappingURL=index.d.ts.map