Mirror of https://github.com/xtr-dev/rondevu-client.git, synced 2025-12-13 20:33:25 +00:00
Implement RPC request batching and throttling
Adds automatic request batching to reduce HTTP overhead by combining
multiple RPC calls into a single request.
Features:
- RpcBatcher class for intelligent request batching
- Configurable batch size (default: 10 requests)
- Configurable wait time (default: 50ms)
- Throttling to prevent overwhelming the server (default: 10ms)
- Automatic flushing when batch is full
- Enabled by default, can be disabled via options
Changes:
- Created rpc-batcher.ts with RpcBatcher class
- Updated RondevuAPI to use batcher by default
- Added batching option to RondevuOptions (inferred shape sketched below)
- Updated README with batching documentation
- Bumped version to 0.16.0
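
The presumed shape of the new option, inferred from the usage examples below (a sketch only; the actual RondevuOptions declaration lives in the client source):

// Sketch of the inferred option shape, not the verbatim declaration:
// `false` disables batching entirely, an object overrides the defaults.
interface RondevuOptions {
  apiUrl: string
  batching?: boolean | BatcherOptions
  // ...other existing options elided
}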
Example usage:

// Default (batching enabled with defaults)
const rondevu = new Rondevu({ apiUrl: 'https://api.ronde.vu' })

// Custom batching settings
const rondevu = new Rondevu({
  apiUrl: 'https://api.ronde.vu',
  batching: { maxBatchSize: 20, maxWaitTime: 100 }
})

// Disable batching
const rondevu = new Rondevu({
  apiUrl: 'https://api.ronde.vu',
  batching: false
})
With the default maxBatchSize of 10, up to ten concurrent calls collapse into
a single HTTP request, reducing request count by up to 90% during intensive
operations like ICE candidate exchange.
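
For reference, the batcher only needs a transport callback of shape
(requests: any[]) => Promise<any[]>. A minimal sketch of such a transport,
assuming a hypothetical POST /rpc/batch endpoint that accepts a JSON array
and returns results in matching order (the real wiring is in RondevuAPI):

// Sketch only: the /rpc/batch endpoint and payload shape are assumptions,
// not the documented Rondevu server API.
async function sendBatch(requests: any[]): Promise<any[]> {
  const response = await fetch('https://api.ronde.vu/rpc/batch', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(requests), // one HTTP request for the whole batch
  })
  if (!response.ok) {
    throw new Error(`Batch request failed: ${response.status}`)
  }
  // RpcBatcher resolves each queued promise by array index, so the server
  // must return results in the same order as the submitted requests.
  return response.json()
}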
🤖 Generated with Claude Code
https://claude.com/claude-code
Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
src/rpc-batcher.ts (new file, +157 lines)
@@ -0,0 +1,157 @@
/**
 * RPC Batcher - Throttles and batches RPC requests to reduce HTTP overhead
 */

export interface BatcherOptions {
  /**
   * Maximum number of requests to batch together
   * Default: 10
   */
  maxBatchSize?: number

  /**
   * Maximum time to wait before sending a batch (ms)
   * Default: 50ms
   */
  maxWaitTime?: number

  /**
   * Minimum time between batches (ms)
   * Default: 10ms
   */
  throttleInterval?: number
}

interface QueuedRequest {
  request: any
  resolve: (value: any) => void
  reject: (error: Error) => void
}

/**
 * Batches and throttles RPC requests to optimize network usage
 *
 * @example
 * ```typescript
 * const batcher = new RpcBatcher(
 *   (requests) => api.rpcBatch(requests),
 *   { maxBatchSize: 10, maxWaitTime: 50 }
 * )
 *
 * // These will be batched together if called within maxWaitTime
 * const result1 = await batcher.add(request1)
 * const result2 = await batcher.add(request2)
 * const result3 = await batcher.add(request3)
 * ```
 */
export class RpcBatcher {
  private queue: QueuedRequest[] = []
  private batchTimeout: ReturnType<typeof setTimeout> | null = null
  private lastBatchTime: number = 0
  private options: Required<BatcherOptions>
  private sendBatch: (requests: any[]) => Promise<any[]>

  constructor(
    sendBatch: (requests: any[]) => Promise<any[]>,
    options?: BatcherOptions
  ) {
    this.sendBatch = sendBatch
    this.options = {
      maxBatchSize: options?.maxBatchSize ?? 10,
      maxWaitTime: options?.maxWaitTime ?? 50,
      throttleInterval: options?.throttleInterval ?? 10,
    }
  }

  /**
   * Add an RPC request to the batch queue
   * Returns a promise that resolves when the request completes
   */
  async add(request: any): Promise<any> {
    return new Promise((resolve, reject) => {
      this.queue.push({ request, resolve, reject })

      // Send immediately if batch is full
      if (this.queue.length >= this.options.maxBatchSize) {
        this.flush()
        return
      }

      // Schedule batch if not already scheduled
      if (!this.batchTimeout) {
        this.batchTimeout = setTimeout(() => {
          this.flush()
        }, this.options.maxWaitTime)
      }
    })
  }

  /**
   * Flush the queue immediately
   */
  async flush(): Promise<void> {
    // Clear timeout if set
    if (this.batchTimeout) {
      clearTimeout(this.batchTimeout)
      this.batchTimeout = null
    }

    // Nothing to flush
    if (this.queue.length === 0) {
      return
    }

    // Throttle: wait if we sent a batch too recently
    const now = Date.now()
    const timeSinceLastBatch = now - this.lastBatchTime
    if (timeSinceLastBatch < this.options.throttleInterval) {
      const waitTime = this.options.throttleInterval - timeSinceLastBatch
      await new Promise(resolve => setTimeout(resolve, waitTime))
    }

    // Extract requests from queue
    const batch = this.queue.splice(0, this.options.maxBatchSize)
    const requests = batch.map(item => item.request)

    this.lastBatchTime = Date.now()

    try {
      // Send batch request
      const results = await this.sendBatch(requests)

      // Resolve individual promises
      for (let i = 0; i < batch.length; i++) {
        batch[i].resolve(results[i])
      }
    } catch (error) {
      // Reject all promises in batch
      for (const item of batch) {
        item.reject(error as Error)
      }
    }
  }

  /**
   * Get current queue size
   */
  getQueueSize(): number {
    return this.queue.length
  }

  /**
   * Clear the queue without sending
   */
  clear(): void {
    if (this.batchTimeout) {
      clearTimeout(this.batchTimeout)
      this.batchTimeout = null
    }

    // Reject all pending requests
    for (const item of this.queue) {
      item.reject(new Error('Batch queue cleared'))
    }

    this.queue = []
  }
}
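
To illustrate the queueing behavior, a usage sketch against the class above,
using a stub transport invented for this demo (it just echoes each request;
the request shapes are illustrative, not the real Rondevu RPC format):

async function demo() {
  const batcher = new RpcBatcher(async (requests) => {
    console.log(`sending batch of ${requests.length}`)
    return requests.map(req => ({ echoed: req }))
  })

  // All three calls arrive within maxWaitTime, so they share one batch,
  // and each caller gets back the result at its own index.
  const [a, b, c] = await Promise.all([
    batcher.add({ method: 'ping' }),
    batcher.add({ method: 'status' }),
    batcher.add({ method: 'ping' }),
  ])
  console.log(a, b, c) // logs "sending batch of 3" once, then the results
}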