Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions python_gpt_po/models/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ class ModelProvider(Enum):
"""Enum for supported model providers."""
OPENAI = "openai"
ANTHROPIC = "anthropic"
OPENAI_COMPATIBLE = "openai_compatible"
DEEPSEEK = "deepseek"
AZURE_OPENAI = "azure_openai"
OLLAMA = "ollama"
Expand Down
41 changes: 30 additions & 11 deletions python_gpt_po/models/provider_clients.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ def __init__(self):
self.openai_client = None
self.azure_openai_client = None
self.anthropic_client = None
self.deepseek_api_key = None
self.deepseek_base_url = None
self.openai_compatible_api_key = None
self.openai_compatible_base_url = None
self.ollama_base_url = None
self.ollama_timeout = None

Expand Down Expand Up @@ -107,17 +107,35 @@ def initialize_clients(self, args: Namespace) -> Dict[str, str]:
if antropic_key:
self.anthropic_client = Anthropic(api_key=antropic_key)

# DeepSeek
deepseek_key = self._get_setting(
args, 'deepseek_key', 'DEEPSEEK_API_KEY', 'deepseek', 'api_key', ''
# OpenAI-Compatible (accepts both --openai-compatible-* and --deepseek-* args)
openai_compatible_key = self._get_setting(
args, 'openai_compatible_key', 'OPENAI_COMPATIBLE_API_KEY',
'openai_compatible', 'api_key', ''
)
if deepseek_key:
self.deepseek_api_key = deepseek_key
if not openai_compatible_key:
# Backward compatibility: accept deepseek args
openai_compatible_key = self._get_setting(
args, 'deepseek_key', 'DEEPSEEK_API_KEY', 'deepseek', 'api_key', ''
)
if openai_compatible_key:
self.openai_compatible_api_key = openai_compatible_key

# Base URL - default to DeepSeek API if using deepseek provider
provider_name = args.provider if hasattr(args, 'provider') else None
default_base_url = 'https://api.deepseek.com/v1' if provider_name == 'deepseek' else None

self.deepseek_base_url = self._get_setting(
args, 'deepseek_base_url', 'DEEPSEEK_BASE_URL',
'deepseek', 'base_url', 'https://api.deepseek.com/v1'
openai_compatible_base_url = self._get_setting(
args, 'openai_compatible_base_url', 'OPENAI_COMPATIBLE_BASE_URL',
'openai_compatible', 'base_url', None
)
if not openai_compatible_base_url:
# Backward compatibility: accept deepseek args
openai_compatible_base_url = self._get_setting(
args, 'deepseek_base_url', 'DEEPSEEK_BASE_URL',
'deepseek', 'base_url', default_base_url
)
if openai_compatible_base_url:
self.openai_compatible_base_url = openai_compatible_base_url

# Ollama
self.ollama_base_url = self._get_setting(
Expand All @@ -132,7 +150,8 @@ def initialize_clients(self, args: Namespace) -> Dict[str, str]:
return {
ModelProvider.OPENAI.value: openai_key,
ModelProvider.ANTHROPIC.value: antropic_key,
ModelProvider.DEEPSEEK.value: deepseek_key,
ModelProvider.OPENAI_COMPATIBLE.value: openai_compatible_key,
ModelProvider.DEEPSEEK.value: openai_compatible_key,
ModelProvider.AZURE_OPENAI.value: azure_openai_key,
ModelProvider.OLLAMA.value: "local", # Ollama doesn't need API key
ModelProvider.CLAUDE_SDK.value: "local",
Expand Down
83 changes: 7 additions & 76 deletions python_gpt_po/services/providers/deepseek_provider.py
Original file line number Diff line number Diff line change
@@ -1,79 +1,10 @@
"""
DeepSeek provider implementation.
DeepSeek provider implementation (legacy alias).
This module is maintained for backward compatibility.
New code should use openai_compatible_provider instead.
"""
import logging
from typing import List
# Import the new provider and create an alias
from .openai_compatible_provider import OpenAICompatibleProvider

import requests

from ...models.provider_clients import ProviderClients
from .base import ModelProviderInterface


class DeepSeekProvider(ModelProviderInterface):
"""DeepSeek model provider implementation."""

def get_models(self, provider_clients: ProviderClients) -> List[str]:
"""Retrieve available models from DeepSeek."""
models = []

if not self.is_client_initialized(provider_clients):
logging.error("DeepSeek API key not set")
return models

try:
headers = {
"Authorization": f"Bearer {provider_clients.deepseek_api_key}",
"Content-Type": "application/json"
}
response = requests.get(
f"{provider_clients.deepseek_base_url}/models",
headers=headers,
timeout=15
)
response.raise_for_status()
models = [model["id"] for model in response.json().get("data", [])]
except Exception as e:
logging.error("Error fetching DeepSeek models: %s", str(e))
models = self.get_fallback_models()

return models

def get_default_model(self) -> str:
"""Get the default DeepSeek model."""
return "deepseek-chat"

def get_preferred_models(self, task: str = "translation") -> List[str]:
"""Get preferred DeepSeek models for a task."""
return ["deepseek-chat"]

def is_client_initialized(self, provider_clients: ProviderClients) -> bool:
"""Check if DeepSeek client is initialized."""
return provider_clients.deepseek_api_key is not None

def get_fallback_models(self) -> List[str]:
"""Get fallback models for DeepSeek."""
return ["deepseek-chat", "deepseek-coder"]

def translate(self, provider_clients: ProviderClients, model: str, content: str) -> str:
"""Get response from DeepSeek API."""
if not self.is_client_initialized(provider_clients):
raise ValueError("DeepSeek client not initialized")

headers = {
"Authorization": f"Bearer {provider_clients.deepseek_api_key}",
"Content-Type": "application/json"
}
payload = {
"model": model,
"messages": [{"role": "user", "content": content}],
"max_tokens": 4000
}
response = requests.post(
f"{provider_clients.deepseek_base_url}/chat/completions",
headers=headers,
json=payload,
timeout=30
)
response.raise_for_status()
return response.json()["choices"][0]["message"]["content"].strip()
# DeepSeekProvider is now an alias to OpenAICompatibleProvider
DeepSeekProvider = OpenAICompatibleProvider
89 changes: 89 additions & 0 deletions python_gpt_po/services/providers/openai_compatible_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
"""
OpenAI-compatible API provider implementation.
Supports any service that implements the OpenAI API format, including:
- DeepSeek
- LM Studio
- z.ai
- Groq
- Together.ai
- Fireworks
- And many others
"""
import logging
from typing import List

import requests

from ...models.provider_clients import ProviderClients
from .base import ModelProviderInterface


class OpenAICompatibleProvider(ModelProviderInterface):
    """Provider for any service exposing the OpenAI-style REST API.

    Works against DeepSeek, LM Studio, z.ai, Groq, Together.ai, Fireworks
    and similar endpoints. Both an API key and a base URL must be present
    on the ProviderClients instance before any request is made.
    """

    def get_models(self, provider_clients: ProviderClients) -> List[str]:
        """Retrieve available model ids from the endpoint's /models route.

        Returns an empty list when the client is not configured, and the
        static fallback list when the HTTP call fails for any reason.
        """
        if not self.is_client_initialized(provider_clients):
            logging.error("OpenAI-compatible API key not set")
            return []

        auth_headers = {
            "Authorization": f"Bearer {provider_clients.openai_compatible_api_key}",
            "Content-Type": "application/json",
        }
        try:
            resp = requests.get(
                f"{provider_clients.openai_compatible_base_url}/models",
                headers=auth_headers,
                timeout=15,
            )
            resp.raise_for_status()
            return [entry["id"] for entry in resp.json().get("data", [])]
        except Exception as exc:  # best-effort: degrade to the fallback list
            logging.error("Error fetching models: %s", str(exc))
            return self.get_fallback_models()

    def get_default_model(self) -> str:
        """Return the default model id."""
        return "gpt-3.5-turbo"

    def get_preferred_models(self, task: str = "translation") -> List[str]:
        """Return preferred model ids for *task* (task is currently unused)."""
        return ["gpt-4", "gpt-3.5-turbo"]

    def is_client_initialized(self, provider_clients: ProviderClients) -> bool:
        """Return True only when both the API key and base URL are set."""
        key_present = provider_clients.openai_compatible_api_key is not None
        url_present = provider_clients.openai_compatible_base_url is not None
        return key_present and url_present

    def get_fallback_models(self) -> List[str]:
        """Return the static model list used when discovery fails."""
        return ["gpt-3.5-turbo", "gpt-4"]

    def translate(self, provider_clients: ProviderClients, model: str, content: str) -> str:
        """Send *content* as a single user message and return the reply text.

        Raises:
            ValueError: if the client has no API key / base URL configured.
            requests.HTTPError: if the endpoint returns a non-2xx status.
        """
        if not self.is_client_initialized(provider_clients):
            raise ValueError("OpenAI-compatible client not initialized")

        resp = requests.post(
            f"{provider_clients.openai_compatible_base_url}/chat/completions",
            headers={
                "Authorization": f"Bearer {provider_clients.openai_compatible_api_key}",
                "Content-Type": "application/json",
            },
            json={
                "model": model,
                "messages": [{"role": "user", "content": content}],
                "max_tokens": 4000,
            },
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json()["choices"][0]["message"]["content"].strip()
5 changes: 3 additions & 2 deletions python_gpt_po/services/providers/provider_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,13 @@ def initialize_providers():
from .anthropic_provider import AnthropicProvider
from .azure_openai_provider import AzureOpenAIProvider
from .claude_sdk_provider import ClaudeSdkProvider
from .deepseek_provider import DeepSeekProvider
from .ollama_provider import OllamaProvider
from .openai_compatible_provider import OpenAICompatibleProvider
from .openai_provider import OpenAIProvider
ProviderRegistry.register(ModelProvider.OPENAI, OpenAIProvider)
ProviderRegistry.register(ModelProvider.ANTHROPIC, AnthropicProvider)
ProviderRegistry.register(ModelProvider.DEEPSEEK, DeepSeekProvider)
ProviderRegistry.register(ModelProvider.OPENAI_COMPATIBLE, OpenAICompatibleProvider)
ProviderRegistry.register(ModelProvider.DEEPSEEK, OpenAICompatibleProvider) # Alias
ProviderRegistry.register(ModelProvider.AZURE_OPENAI, AzureOpenAIProvider)
ProviderRegistry.register(ModelProvider.OLLAMA, OllamaProvider)
ProviderRegistry.register(ModelProvider.CLAUDE_SDK, ClaudeSdkProvider)
Expand Down
70 changes: 67 additions & 3 deletions python_gpt_po/tests/providers/test_deepseek_provider.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from argparse import Namespace
from unittest.mock import MagicMock, patch

import pytest

from python_gpt_po.models.provider_clients import ProviderClients
from python_gpt_po.services.providers.deepseek_provider import DeepSeekProvider
from python_gpt_po.services.providers.openai_compatible_provider import OpenAICompatibleProvider

DEEPSEEK_TRANSLATION_RESPONSE = {
"choices": [
Expand All @@ -20,12 +22,12 @@
def mock_provider_clients() -> ProviderClients:
"""Mock provider clients for testing."""
clients = ProviderClients()
clients.deepseek_api_key = "sk-deepseek-mock-key"
clients.deepseek_base_url = "https://api.deepseek.com/v1"
clients.openai_compatible_api_key = "sk-deepseek-mock-key"
clients.openai_compatible_base_url = "https://api.deepseek.com/v1"
return clients


@patch('python_gpt_po.services.providers.deepseek_provider.requests.post')
@patch('python_gpt_po.services.providers.openai_compatible_provider.requests.post')
def test_translate(mock_post: MagicMock, mock_provider_clients: ProviderClients) -> None:
"""Test translation with DeepSeek."""
# Setup mock response
Expand All @@ -42,3 +44,65 @@ def test_translate(mock_post: MagicMock, mock_provider_clients: ProviderClients)

print(type(translations))
assert translations == '```json\n["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]\n```'


def test_deepseek_is_alias_to_openai_compatible() -> None:
    """DeepSeekProvider must be the very same class object as OpenAICompatibleProvider."""
    # Identity (not equality or issubclass) guarantees zero behavioral drift
    # between the legacy name and the new provider.
    assert DeepSeekProvider is OpenAICompatibleProvider


def test_backward_compatibility_deepseek_args() -> None:
    """Legacy --deepseek-* arguments must still populate the new fields."""
    legacy_args = Namespace(
        provider='deepseek',
        deepseek_key='sk-test-key',
        deepseek_base_url=None,
        openai_compatible_key=None,
        openai_compatible_base_url=None,
        folder=None,
    )

    provider_clients = ProviderClients()
    provider_clients.initialize_clients(legacy_args)

    # The legacy key is mapped onto the openai_compatible slot, and the
    # deepseek provider falls back to DeepSeek's public endpoint.
    assert provider_clients.openai_compatible_api_key == 'sk-test-key'
    assert provider_clients.openai_compatible_base_url == 'https://api.deepseek.com/v1'


def test_new_openai_compatible_args() -> None:
    """New --openai-compatible-* arguments must configure the client directly."""
    cli_args = Namespace(
        provider='openai_compatible',
        deepseek_key=None,
        deepseek_base_url=None,
        openai_compatible_key='sk-test-key',
        openai_compatible_base_url='http://localhost:1234/v1',
        folder=None,
    )

    provider_clients = ProviderClients()
    provider_clients.initialize_clients(cli_args)

    assert provider_clients.openai_compatible_api_key == 'sk-test-key'
    assert provider_clients.openai_compatible_base_url == 'http://localhost:1234/v1'


def test_deepseek_args_priority_over_openai_compatible() -> None:
    """openai_compatible args must win when both arg families are supplied.

    NOTE(review): the function name reads as the opposite of what is
    asserted here (the NEW openai_compatible args take priority over the
    legacy deepseek ones) — consider renaming to
    ``test_openai_compatible_args_priority_over_deepseek`` in a follow-up.
    """
    mixed_args = Namespace(
        provider='openai_compatible',
        deepseek_key='sk-old-key',
        deepseek_base_url='https://old.api.com/v1',
        openai_compatible_key='sk-new-key',
        openai_compatible_base_url='http://new.api.com/v1',
        folder=None,
    )

    provider_clients = ProviderClients()
    provider_clients.initialize_clients(mixed_args)

    # The new-style values must shadow the legacy ones entirely.
    assert provider_clients.openai_compatible_api_key == 'sk-new-key'
    assert provider_clients.openai_compatible_base_url == 'http://new.api.com/v1'
4 changes: 2 additions & 2 deletions python_gpt_po/tests/test_multi_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,8 @@ def mock_provider_clients() -> ProviderClients:
clients.openai_client = MagicMock()
clients.anthropic_client = MagicMock()
clients.anthropic_client.api_key = "sk-ant-mock-key"
clients.deepseek_api_key = "sk-deepseek-mock-key"
clients.deepseek_base_url = "https://api.deepseek.com/v1"
clients.openai_compatible_api_key = "sk-deepseek-mock-key"
clients.openai_compatible_base_url = "https://api.deepseek.com/v1"
clients.azure_openai_client = MagicMock()
clients.azure_openai_client.api_key = "sk-aoi-mock-key"
return clients
Expand Down
Loading