///
/**
 * For Node.js streams only:
 *
 * Converts a stream into a stream of streams, where the next sub-stream is requested via
 * `take(sizeInBytes: number): NodeJS.ReadableStream`.
 *
 * This allows the source stream to be sequentially read (in serial) as a sequence of sub-streams,
 * for the purpose of issuing PutObject requests for a multipart upload, whereby each request
 * requires its own stream, but where that stream needs to be a slice of the source stream.
 */
export declare class NodeChunkedStream {
    /** The upstream readable that sub-streams are sliced from. */
    private readonly source;
    /** Internal staging buffer for data read from `source` but not yet handed out via `take`. */
    private buffer;
    /** Presumably the currently active sub-stream/consumer state — confirm against implementation. */
    private consumer;
    /** True once `source` has emitted all of its data. */
    private isSourceFullyConsumed;
    /** True once the caller has signalled (via `finishedConsuming`) that no more data is wanted. */
    private isFinishedConsuming;
    /** Presumably resolves the `runChunkPipeline` promise — confirm against implementation. */
    private resolver;
    /**
     * @param source - The Node.js readable stream to slice into sequential sub-streams.
     */
    constructor(source: NodeJS.ReadableStream);
    /**
     * Signals that the caller is done consuming, even if `source` still has unread data.
     *
     * If the source stream is larger than the total size the user is consuming (i.e. they're only
     * wanting to upload a subset of the stream) then the pipeline won't be resolved by the 'end'
     * event inside `runChunkPipeline`, so calling this method is necessary.
     */
    finishedConsuming(): void;
    /**
     * Starts processing the source stream.
     *
     * @returns A promise that resolves when the entire stream has finished processing, or rejects
     * when an error occurs. You must call `take` a sufficient number of times after calling this
     * method in order for this promise to resolve.
     */
    runChunkPipeline(): Promise<void>;
    /**
     * Requests the next sub-stream of `bytes` bytes from the source.
     *
     * Only call `take` after the previously returned stream has been fully consumed.
     *
     * @param bytes - The size, in bytes, of the next slice to read.
     * @returns A readable stream over the next `bytes` bytes of the source.
     */
    take(bytes: number): NodeJS.ReadableStream;
    private consumeFromBuffer;
    private splitBuffer;
}