1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
 .venv
 .env
 .DS_Store
+.memvid/
50 changes: 48 additions & 2 deletions memvid/llm_client.py
@@ -31,6 +31,12 @@
     ANTHROPIC_AVAILABLE = False
     print("Warning: Anthropic library not available. Anthropic provider will be disabled.")

+try:
+    from together import Together
+    TOGETHER_AVAILABLE = True
+except ImportError:
+    TOGETHER_AVAILABLE = False
+
 class LLMProvider(ABC):
     """Abstract base class for LLM providers"""

@@ -301,13 +307,50 @@ def _stream_response(self, response) -> Iterator[str]:
             elif chunk.type == "message_stop":
                 break

+class TogetherAIProvider(LLMProvider):
+    """TogetherAI provider implementation"""
+
+    def __init__(self, api_key: str, model: str = "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"):
+        self.client = Together(api_key=api_key)
+        self.model = model
+
+    def chat(self, messages: List[Dict[str, str]], stream: bool = False, **kwargs) -> Any:
+        """Send chat messages to TogetherAI"""
+        try:
+            response = self.client.chat.completions.create(
+                model=self.model,
+                messages=messages,
+                stream=stream,
+                **kwargs
+            )
+
+            if stream:
+                return self._stream_response(response)
+            else:
+                return response.choices[0].message.content
+
+        except Exception as e:
+            print(f"Together error: {e}")
+            return None
+
+    def chat_stream(self, messages: List[Dict[str, str]], **kwargs) -> Iterator[str]:
+        """Stream chat response from TogetherAI"""
+        return self.chat(messages, stream=True, **kwargs)
+
+    def _stream_response(self, response) -> Iterator[str]:
+        """Process streaming response from TogetherAI"""
+        for chunk in response:
+            if chunk.choices[0].delta.content is not None:
+                yield chunk.choices[0].delta.content
+
 class LLMClient:
     """Unified LLM client that supports multiple providers"""

     PROVIDERS = {
         'openai': OpenAIProvider,
         'google': GoogleProvider,
         'anthropic': AnthropicProvider,
+        'together': TogetherAIProvider
     }

     def __init__(self, provider: str = 'google', model: str = None, api_key: str = None):
@@ -320,7 +363,8 @@ def __init__(self, provider: str = 'google', model: str = None, api_key: str = None):
         availability_map = {
             'openai': OPENAI_AVAILABLE,
             'google': GOOGLE_AVAILABLE,
-            'anthropic': ANTHROPIC_AVAILABLE
+            'anthropic': ANTHROPIC_AVAILABLE,
+            'together': TOGETHER_AVAILABLE
         }

         if not availability_map[self.provider_name]:
@@ -350,6 +394,7 @@ def _get_api_key_from_env(self, provider: str) -> Optional[str]:
             'openai': ['OPENAI_API_KEY'],
             'google': ['GOOGLE_API_KEY'],
             'anthropic': ['ANTHROPIC_API_KEY'],
+            'together': ['TOGETHER_API_KEY']
         }

         for key in env_keys.get(provider.lower(), []):
@@ -392,7 +437,8 @@ def list_available_providers(cls) -> List[str]:
         availability_map = {
             'openai': OPENAI_AVAILABLE,
             'google': GOOGLE_AVAILABLE,
-            'anthropic': ANTHROPIC_AVAILABLE
+            'anthropic': ANTHROPIC_AVAILABLE,
+            'together': TOGETHER_AVAILABLE
         }
         return [provider for provider, available in availability_map.items() if available]

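The new provider slots into LLMClient's existing dispatch. A minimal usage sketch, assuming LLMClient forwards chat and chat_stream to the selected provider (the registration above suggests this, but LLMClient's chat methods are not shown in this diff); the prompt is illustrative:

from memvid.llm_client import LLMClient

# Requires TOGETHER_API_KEY in the environment (picked up by
# _get_api_key_from_env), or pass api_key=... explicitly.
client = LLMClient(
    provider='together',
    model='meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
)

messages = [{"role": "user", "content": "Summarize what memvid does."}]

# Blocking call: returns the full completion text, or None on a provider error.
print(client.chat(messages))

# Streaming call: yields text chunks as they arrive.
for token in client.chat_stream(messages):
    print(token, end="", flush=True)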
1 change: 1 addition & 0 deletions requirements.txt
@@ -19,3 +19,4 @@ ebooklib
 openai==1.82.0
 google-generativeai==0.8.5
 anthropic~=0.52.1
+together==1.5.11
1 change: 1 addition & 0 deletions setup.py
@@ -50,6 +50,7 @@
"openai>=1.0.0",
"google-generativeai>=0.8.0",
"anthropic>=0.52.0",
"together>=1.5.11"
],
"epub": [
"beautifulsoup4>=4.0.0",
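With the dependency declared in both requirements.txt and setup.py, a quick sanity check that the provider registered, using the list_available_providers classmethod extended above:

from memvid.llm_client import LLMClient

# 'together' appears only when the together package imports successfully,
# i.e. when the TOGETHER_AVAILABLE guard at the top of llm_client.py is True.
print(LLMClient.list_available_providers())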