from neollm.llm.abstract_llm import AbstractLLM
from neollm.types import ClientSettings
from .platform import Platform

SUPPORTED_CLAUDE_MODELS = [
    "claude-3-opus",
    "claude-3-sonnet",
    "claude-3-haiku",
    "claude-3-opus@20240229",
    "claude-3-sonnet@20240229",
    "claude-3-haiku@20240307",
]

SUPPORTED_GEMINI_MODELS = [
    "gemini-1.5-pro-preview-0409",
    "gemini-1.0-pro",
    "gemini-1.0-pro-vision",
]


def get_llm(model_name: str, platform: str, client_settings: ClientSettings) -> AbstractLLM:
    """Return the LLM client for the given model name and platform."""
    platform = Platform(platform)
    # Import lazily so only the selected platform's dependencies are loaded.
    if platform == Platform.AZURE:
        from neollm.llm.gpt.azure_llm import get_azure_llm

        return get_azure_llm(model_name, client_settings)
    if platform == Platform.OPENAI:
        from neollm.llm.gpt.openai_llm import get_openai_llm

        return get_openai_llm(model_name, client_settings)
    if platform == Platform.ANTHROPIC:
        from neollm.llm.claude.anthropic_llm import get_anthoropic_llm

        return get_anthoropic_llm(model_name, client_settings)
    if platform == Platform.GCP:
        if model_name in SUPPORTED_CLAUDE_MODELS:
            from neollm.llm.claude.gcp_llm import get_gcp_llm as get_gcp_llm_for_claude

            return get_gcp_llm_for_claude(model_name, client_settings)
        elif model_name in SUPPORTED_GEMINI_MODELS:
            from neollm.llm.gemini.gcp_llm import get_gcp_llm as get_gcp_llm_for_gemini

            return get_gcp_llm_for_gemini(model_name, client_settings)
        else:
            raise ValueError(f"{model_name} is not supported in GCP.")
    raise ValueError(f"{platform} is not a supported platform.")
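

# Usage sketch (illustrative only; not part of the original module). The platform
# string "gcp" and the empty client_settings mapping are assumptions; the exact
# values accepted by Platform(...) and ClientSettings depend on their definitions.
if __name__ == "__main__":
    llm = get_llm(
        model_name="claude-3-haiku@20240307",  # one of SUPPORTED_CLAUDE_MODELS above
        platform="gcp",  # assumed string value corresponding to Platform.GCP
        client_settings={},  # assumed to accept an empty mapping
    )
    print(type(llm))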