11"""
22title: Google GenAI (Vertex AI) Manifold Pipeline
3- author: Hiromasa Kakehashi
3+ author: Hiromasa Kakehashi & Olv Grolle
44date: 2024-09-19
55version: 1.0
66license: MIT
1414"""
1515
1616import os
17+ import base64
1718from typing import Iterator , List , Union
1819
1920import vertexai
@@ -40,7 +41,7 @@ class Valves(BaseModel):
4041
4142 def __init__ (self ):
4243 self .type = "manifold"
43- self .name = "vertexai : "
44+ self .name = "VertexAI : "
4445
4546 self .valves = self .Valves (
4647 ** {
@@ -50,13 +51,16 @@ def __init__(self):
5051 }
5152 )
# Model variants this manifold exposes to OpenWebUI.
# Each "id" must be a valid Vertex AI Gemini model name; "name" is the
# human-readable label shown in the model picker.
self.pipelines = [
    # Gemini 2.0 models
    {"id": "gemini-2.0-flash-lite", "name": "Gemini 2.0 Flash-Lite"},
    {"id": "gemini-2.0-flash", "name": "Gemini 2.0 Flash"},
    # Gemini 2.5 models
    {"id": "gemini-2.5-flash-lite", "name": "Gemini 2.5 Flash-Lite"},
    {"id": "gemini-2.5-flash", "name": "Gemini 2.5 Flash"},
    # NOTE: fixed trailing space in the display name ("Gemini 2.5 Pro ")
    # so UI labels render and compare consistently.
    {"id": "gemini-2.5-pro", "name": "Gemini 2.5 Pro"},
]
6064
6165 async def on_startup (self ) -> None :
6266 """This function is called when the server is started."""
@@ -83,11 +87,22 @@ def pipe(
8387 self , user_message : str , model_id : str , messages : List [dict ], body : dict
8488 ) -> Union [str , Iterator ]:
8589 try :
86- if not model_id .startswith ("gemini-" ):
90+ if not ( model_id .startswith ("gemini-" ) or model_id . startswith ( "gemma-" ) ):
8791 return f"Error: Invalid model name format: { model_id } "
8892
8993 print (f"Pipe function called for model: { model_id } " )
9094 print (f"Stream mode: { body .get ('stream' , False )} " )
95+ print (f"Received { len (messages )} messages from OpenWebUI" )
96+
97+ # Debug: Log message structure
98+ for i , msg in enumerate (messages ):
99+ print (f"Message { i } : role={ msg .get ('role' )} , content type={ type (msg .get ('content' ))} " )
100+ if isinstance (msg .get ('content' ), list ):
101+ for j , content_part in enumerate (msg ['content' ]):
102+ print (f" Part { j } : type={ content_part .get ('type' )} " )
103+ if content_part .get ('type' ) == 'image_url' :
104+ img_url = content_part .get ('image_url' , {}).get ('url' , '' )
105+ print (f" Image URL prefix: { img_url [:50 ]} ..." )
91106
92107 system_message = next (
93108 (msg ["content" ] for msg in messages if msg ["role" ] == "system" ), None
@@ -100,9 +115,21 @@ def pipe(
100115
101116 if body .get ("title" , False ): # If chat title generation is requested
102117 contents = [Content (role = "user" , parts = [Part .from_text (user_message )])]
118+ print ("Title generation mode - using simple text content" )
103119 else :
104120 contents = self .build_conversation_history (messages )
105121
122+ # Log what we're sending to Vertex AI
123+ print (f"Sending { len (contents )} messages to Vertex AI:" )
124+ for i , content in enumerate (contents ):
125+ print (f" Message { i } : role={ content .role } , parts={ len (content .parts )} " )
126+ for j , part in enumerate (content .parts ):
127+ if hasattr (part , '_raw_data' ) and part ._raw_data :
128+ print (f" Part { j } : Image data ({ len (part ._raw_data )} bytes)" )
129+ else :
130+ part_text = str (part )[:100 ] if str (part ) else "No text"
131+ print (f" Part { j } : Text - { part_text } ..." )
132+
106133 generation_config = GenerationConfig (
107134 temperature = body .get ("temperature" , 0.7 ),
108135 top_p = body .get ("top_p" , 0.9 ),
@@ -121,6 +148,7 @@ def pipe(
121148 else :
122149 safety_settings = body .get ("safety_settings" )
123150
151+ print ("Calling Vertex AI generate_content..." )
124152 response = model .generate_content (
125153 contents ,
126154 stream = body .get ("stream" , False ),
@@ -153,20 +181,52 @@ def build_conversation_history(self, messages: List[dict]) -> List[Content]:
153181 parts = []
154182
155183 if isinstance (message .get ("content" ), list ):
184+ print (f"Processing multi-part message with { len (message ['content' ])} parts" )
156185 for content in message ["content" ]:
186+ print (f"Processing content type: { content .get ('type' , 'unknown' )} " )
157187 if content ["type" ] == "text" :
158188 parts .append (Part .from_text (content ["text" ]))
189+ print (f"Added text part: { content ['text' ][:50 ]} ..." )
159190 elif content ["type" ] == "image_url" :
160191 image_url = content ["image_url" ]["url" ]
192+ print (f"Processing image URL (first 50 chars): { image_url [:50 ]} ..." )
161193 if image_url .startswith ("data:image" ):
162- image_data = image_url .split ("," )[1 ]
163- parts .append (Part .from_image (image_data ))
194+ try :
195+ # Split the data URL to get mime type and base64 data
196+ header , image_data = image_url .split (',' , 1 )
197+ mime_type = header .split (':' )[1 ].split (';' )[0 ]
198+ print (f"Detected image MIME type: { mime_type } " )
199+
200+ # Validate supported image formats
201+ supported_formats = ['image/jpeg' , 'image/jpg' , 'image/png' , 'image/gif' , 'image/webp' ]
202+ if mime_type not in supported_formats :
203+ print (f"ERROR: Unsupported image format: { mime_type } " )
204+ continue
205+
206+ # Decode the base64 image data
207+ decoded_image_data = base64 .b64decode (image_data )
208+ print (f"Successfully decoded image data: { len (decoded_image_data )} bytes" )
209+
210+ # Create the Part object with the image data
211+ image_part = Part .from_data (decoded_image_data , mime_type = mime_type )
212+ parts .append (image_part )
213+ print (f"Successfully added image part to conversation" )
214+ except Exception as e :
215+ print (f"ERROR processing image: { e } " )
216+ import traceback
217+ traceback .print_exc ()
218+ continue
164219 else :
220+ # Handle image URLs
221+ print (f"Processing external image URL: { image_url } " )
165222 parts .append (Part .from_uri (image_url ))
166223 else :
167224 parts = [Part .from_text (message ["content" ])]
225+ print (f"Added simple text message: { message ['content' ][:50 ]} ..." )
168226
169227 role = "user" if message ["role" ] == "user" else "model"
228+ print (f"Creating Content with role='{ role } ' and { len (parts )} parts" )
170229 contents .append (Content (role = role , parts = parts ))
171230
231+ print (f"Built conversation history with { len (contents )} messages" )
172232 return contents
0 commit comments