/**
 * @license
 * Copyright 2021 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */
import * as tflite from '@tensorflow/tfjs-tflite';

import { TaskModelLoader } from '../../task_model';
import { Runtime, Task, TFLiteCustomModelCommonLoadingOption } from '../common';
import { NLClassifierTFLite } from './tflite_common';

/** Alias for the full `@tensorflow/tfjs-tflite` module namespace. */
declare type TFLiteNS = typeof tflite;

/** Loading options. */
export interface NCCustomModelTFLiteLoadingOptions extends
    TFLiteCustomModelCommonLoadingOption, tflite.NLClassifierOptions {
}

/**
 * Inference options.
 *
 * TODO: placeholder for now.
 */
export interface NCCustomModelTFLiteInferenceOptions {
}

/** Loader for custom nl classification TFLite model. */
export declare class NLClassificationCustomModelTFLiteLoader extends
    TaskModelLoader {
  readonly metadata: {
    name: string;
    description: string;
    resourceUrls: {
      'TFLite task library': string;
    };
    runtime: Runtime;
    version: string;
    supportedTasks: Task[];
  };
  readonly packageUrls: string[][];
  readonly sourceModelGlobalNamespace = "tflite";
  // NOTE(review): the declaration previously read `: Promise;` — a bare
  // `Promise` with no type argument is a compile error ("Generic type
  // 'Promise<T>' requires 1 type argument(s)"). The loader transforms the
  // source-model global into the task model declared below.
  protected transformSourceModel(
      sourceModelGlobal: TFLiteNS,
      loadingOptions?: NCCustomModelTFLiteLoadingOptions):
      Promise<NCCustomModelTFLite>;
}

/**
 * A custom TFLite natural language classification model loaded from a model
 * url or an `ArrayBuffer` in memory.
 *
 * The underlying NL classifier is built on top of the NLClassifier in
 * [TFLite Task
 * Library](https://www.tensorflow.org/lite/inference_with_metadata/task_library/overview).
 * As a result, the custom model needs to meet the [metadata
 * requirements](https://www.tensorflow.org/lite/inference_with_metadata/task_library/nl_classifier#model_compatibility_requirements).
 *
 * Usage:
 *
 * ```js
 * // Load the model from a custom url with other options (optional).
 * const model = await tfTask.NLClassification.CustomModel.TFLite.load({
 *   model:
 *       'https://storage.googleapis.com/download.tensorflow.org/models/tflite/text_classification/text_classification_v2.tflite',
 * });
 *
 * // Run inference on text.
 * const result = await model.predict('This is a great movie!');
 * console.log(result.classes);
 *
 * // Clean up.
 * model.cleanUp();
 * ```
 *
 * Refer to `tfTask.NLClassifier` for the `predict` and `cleanUp` method.
 *
 * @docextratypes [
 *   {description: 'Options for `load`', symbol:
 *       'NCCustomModelTFLiteLoadingOptions'},
 *   {description: 'Options for `predict`', symbol:
 *       'NCCustomModelTFLiteInferenceOptions'}
 * ]
 *
 *
 * @doc {heading: 'NL Classification', subheading: 'Models'}
 */
export declare class NCCustomModelTFLite extends NLClassifierTFLite {
}

export declare const nlClassifierCustomModelTfliteLoader: NLClassificationCustomModelTFLiteLoader;
export {};