diff --git a/sentence_transformers/SentenceTransformer.py b/sentence_transformers/SentenceTransformer.py
index 4d0f4fb20..beb67d658 100644
--- a/sentence_transformers/SentenceTransformer.py
+++ b/sentence_transformers/SentenceTransformer.py
@@ -22,6 +22,7 @@
 import numpy as np
 import numpy.typing as npt
+from PIL import Image
 import torch
 import torch.multiprocessing as mp
 import transformers
@@ -678,7 +679,7 @@ def encode_document(
     @overload
     def encode(
         self,
-        sentences: str,
+        sentences: str | Image.Image,
         prompt_name: str | None = ...,
         prompt: str | None = ...,
         batch_size: int = ...,
@@ -700,7 +701,7 @@ def encode(
     @overload
     def encode(
         self,
-        sentences: str | list[str] | np.ndarray,
+        sentences: str | list[str] | np.ndarray | Image.Image | list[Image.Image],
         prompt_name: str | None = ...,
         prompt: str | None = ...,
         batch_size: int = ...,
@@ -722,7 +723,7 @@ def encode(
     @overload
     def encode(
         self,
-        sentences: str | list[str] | np.ndarray,
+        sentences: str | list[str] | np.ndarray | Image.Image | list[Image.Image],
         prompt_name: str | None = ...,
         prompt: str | None = ...,
         batch_size: int = ...,
@@ -743,7 +744,7 @@ def encode(
     @overload
     def encode(
         self,
-        sentences: list[str] | np.ndarray,
+        sentences: list[str] | np.ndarray | list[Image.Image],
         prompt_name: str | None = ...,
         prompt: str | None = ...,
         batch_size: int = ...,
@@ -764,7 +765,7 @@ def encode(
     @overload
     def encode(
         self,
-        sentences: list[str] | np.ndarray,
+        sentences: list[str] | np.ndarray | list[Image.Image],
         prompt_name: str | None = ...,
         prompt: str | None = ...,
         batch_size: int = ...,
@@ -785,7 +786,7 @@ def encode(
     @overload
     def encode(
         self,
-        sentences: str,
+        sentences: str | Image.Image,
         prompt_name: str | None = ...,
         prompt: str | None = ...,
         batch_size: int = ...,
@@ -806,7 +807,7 @@ def encode(
     @overload
     def encode(
         self,
-        sentences: str,
+        sentences: str | Image.Image,
         prompt_name: str | None = ...,
         prompt: str | None = ...,
         batch_size: int = ...,
@@ -826,7 +827,7 @@ def encode(
     @torch.inference_mode()
     def encode(
         self,
-        sentences: str | list[str] | np.ndarray,
+        sentences: str | list[str] | np.ndarray | Image.Image | list[Image.Image],
         prompt_name: str | None = None,
         prompt: str | None = None,
         batch_size: int = 32,
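
The widened overloads above advertise that `encode` accepts PIL images (single or in a list) in addition to strings. Below is a minimal usage sketch of what the new type hints cover; the checkpoint name `clip-ViT-B-32` and the file `cat.jpg` are placeholder assumptions, not values taken from this PR.

```python
# Sketch only: exercising the widened `encode` type hints with an image-capable model.
from PIL import Image

from sentence_transformers import SentenceTransformer

# Assumption: a CLIP-style checkpoint that can embed both text and images.
model = SentenceTransformer("clip-ViT-B-32")

# Single PIL image -> single embedding (matches the `str | Image.Image` overload).
image_embedding = model.encode(Image.open("cat.jpg"))

# List of PIL images -> batch of embeddings (matches the `list[Image.Image]` overload).
batch_embeddings = model.encode([Image.open("cat.jpg"), Image.open("cat.jpg")])

# Text input still works as before, so image and text embeddings can be compared.
text_embedding = model.encode("a photo of a cat")

print(image_embedding.shape, batch_embeddings.shape, text_embedding.shape)
```

This only changes the static type annotations; the runtime behavior of `encode` for image inputs is unchanged, so type checkers and IDEs stop flagging calls like the ones above.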