Added the ability to call the Ollama client seamlessly

This commit is contained in:
Ayman Hamed 2025-01-06 17:11:19 +02:00
parent 05b78e7ce1
commit 2f655da810
2 changed files with 36 additions and 16 deletions

View file

@ -42,6 +42,7 @@ dependencies = [
"pathvalidate",
"charset-normalizer",
"openai",
"ollama",
]
[project.urls]

View file

@ -13,6 +13,7 @@ import sys
import tempfile
import traceback
import zipfile
from http.client import responses
from xml.dom import minidom
from typing import Any, Dict, List, Optional, Union
from pathlib import Path
@ -1085,7 +1086,26 @@ class ImageConverter(MediaConverter):
content_type = "image/jpeg"
image_base64 = base64.b64encode(image_file.read()).decode("utf-8")
data_uri = f"data:{content_type};base64,{image_base64}"
# check if Ollama client
if str(type(client)) == "<class 'ollama._client.Client'>":
messages = [
{
"role": "user",
"content": prompt,
'images': [local_path]
}
]
response = client.chat(
model = model,
messages = messages,
)
return response.message.content
else:# use openai
messages = [
{
"role": "user",
@ -1100,7 +1120,6 @@ class ImageConverter(MediaConverter):
],
}
]
response = client.chat.completions.create(model=model, messages=messages)
return response.choices[0].message.content