From 3f3d69abac5a71bdc57e80e4dd17e6872f38daad Mon Sep 17 00:00:00 2001
From: István Zoltán Szabó
Date: Wed, 9 Jul 2025 11:28:09 +0200
Subject: [PATCH] Adds input_type parameter to POST inference docs at the root level (#4846)

* Adds input_type parameter to POST inference docs at the root level.

* Update specification/inference/inference/InferenceRequest.ts

Co-authored-by: David Kyle

---------

Co-authored-by: David Kyle
(cherry picked from commit ab647f51141d10836db7eb5082c5303cc12d5fdf)
---
 .../inference/inference/InferenceRequest.ts | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/specification/inference/inference/InferenceRequest.ts b/specification/inference/inference/InferenceRequest.ts
index 32a39b548f..cb9e9487f3 100644
--- a/specification/inference/inference/InferenceRequest.ts
+++ b/specification/inference/inference/InferenceRequest.ts
@@ -82,6 +82,19 @@ export interface Request extends RequestBase {
    * > Inference endpoints for the `completion` task type currently only support a single string as input.
    */
   input: string | Array<string>
+  /**
+   * Specifies the input data type for the text embedding model. The `input_type` parameter only applies to inference endpoints with the `text_embedding` task type. Possible values include:
+   * * `SEARCH`
+   * * `INGEST`
+   * * `CLASSIFICATION`
+   * * `CLUSTERING`
+   * Not all services support all values; unsupported values trigger a validation exception.
+   * Accepted values depend on the configured inference service; refer to the relevant service-specific documentation for more information.
+   *
+   * > info
+   * > The `input_type` parameter specified at the root level of the request body takes precedence over the `input_type` parameter specified in `task_settings`.
+   */
+  input_type?: string
   /**
    * Task settings for the individual inference request.
    * These settings are specific to the task type you specified and override the task settings specified when initializing the service.
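
For illustration only, not part of the patch: a minimal sketch of a request that sends the new root-level input_type parameter, assuming Node 18+ (global fetch), a cluster reachable at https://localhost:9200, an API key in the ES_API_KEY environment variable, and a text_embedding inference endpoint named my-embedding-endpoint; all of these names are placeholder assumptions, not values from the patch.

    // Sketch: POST an inference request with input_type at the root of the body.
    // Endpoint name, host, and ES_API_KEY are placeholders for this example.
    const response = await fetch(
      'https://localhost:9200/_inference/text_embedding/my-embedding-endpoint',
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `ApiKey ${process.env.ES_API_KEY}`,
        },
        body: JSON.stringify({
          input: ['What is Elastic?'],
          // Per the added doc comment, this root-level input_type takes
          // precedence over any input_type set in task_settings.
          input_type: 'SEARCH',
        }),
      }
    )
    console.log(await response.json())

As the added doc comment notes, accepted input_type values depend on the configured inference service, so SEARCH is only one of the possible values listed in the patch.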