/**
 * @deprecated Temporarily preserved for enterprise users, as some users are
 * running older OpenAI models that have a combined input/output window.
 *
 * We need to support configuring the maximum output limit at an instance
 * level. This will allow us to increase this limit while still supporting
 * models with a lower output limit.
 * See: https://github.com/sourcegraph/cody/issues/3648#issuecomment-2056954101
 */
export declare const ANSWER_TOKENS = 1000;
// NOTE(review): the constants below look like retrieval/context sizing limits
// (token budgets, result counts, per-file byte cap) — confirm exact semantics
// against the implementation that consumes them.
export declare const MAX_CURRENT_FILE_TOKENS = 1000;
export declare const SURROUNDING_LINES = 50;
export declare const NUM_CODE_RESULTS = 12;
export declare const NUM_TEXT_RESULTS = 3;
export declare const MAX_BYTES_PER_FILE = 4096;
//# sourceMappingURL=constants.d.ts.map