import PQueue from 'p-queue';
import { ClientEventEmitter } from './request';
import { RateLimitState } from './types';
/**
 * Runs `fn` with a `PQueue` whose concurrency is dynamically throttled
 * against the server's reported rate-limit state.
 *
 * The following considerations have been taken into account:
 *
 * - Qualys API docs indicate `X-Concurrency-Limit-Running` is "Number of
 *   API calls that are running right now (including the one identified in
 *   the current HTTP response header)."
 * - The response headers come back before the stream is completely
 *   processed.
 * - Until the stream has been completely processed, the request should be
 *   considered active.
 * - The queue needs to be throttled back as soon as we know we cannot
 *   make more requests, and throttled up as soon as we know more can be
 *   made.
 *
 * @param fn callback that receives the managed queue and enqueues work on it
 * @param options.events emitter used to observe request lifecycle events
 * @param options.rateLimitState most recent rate-limit state from the server
 * @returns promise that resolves once the queue has drained
 */
export declare function withConcurrency(fn: (queue: PQueue) => void, { events, rateLimitState, }: {
    events: ClientEventEmitter;
    rateLimitState: RateLimitState;
}): Promise<void>;
/**
 * Calculates the number of concurrent connections this process can maintain
 * with the server represented by the provided state, targeting 75% of available
 * concurrency (always leaves something open to other scripts).
 *
 * It is important to recognize that the current process is responsible for some
 * of the active use of the available concurrency limits of the server.
 * Therefore, this function must be careful to avoid pushing down the number of
 * maintained active connections inadvertently.
 *
 * @param active number of active connections from this process
 * @param limit concurrency limit of server provided in most recent response
 * @param running concurrency running of server provided in most recent response
 * @returns the target concurrency for this process
 */
export declare function calculateConcurrency(active: number, limit: number, running: number): number;
//# sourceMappingURL=concurrency.d.ts.map