"""LLM provider implementations for LegisQA"""
import streamlit as st
from langchain_openai import ChatOpenAI
from langchain_anthropic import ChatAnthropic
from langchain_together import ChatTogether
from langchain_google_genai import ChatGoogleGenerativeAI
from legisqa_local.config.settings import get_secret
def get_llm(gen_config: dict):
    """Construct a chat-model client for the provider selected in *gen_config*.

    Expected keys: ``provider`` (one of "OpenAI", "Anthropic", "Together",
    "Google"), ``model_name``, ``temperature``, and ``max_output_tokens``.
    The API key for the chosen provider is resolved via ``get_secret``.

    Raises:
        ValueError: if ``gen_config["provider"]`` is not a known provider.
    """
    provider = gen_config["provider"]

    if provider == "OpenAI":
        return ChatOpenAI(
            model=gen_config["model_name"],
            temperature=gen_config["temperature"],
            api_key=get_secret("openai_api_key"),
            max_tokens=gen_config["max_output_tokens"],
        )
    if provider == "Anthropic":
        # Anthropic's client uses model_name / max_tokens_to_sample kwargs.
        return ChatAnthropic(
            model_name=gen_config["model_name"],
            temperature=gen_config["temperature"],
            api_key=get_secret("anthropic_api_key"),
            max_tokens_to_sample=gen_config["max_output_tokens"],
        )
    if provider == "Together":
        return ChatTogether(
            model=gen_config["model_name"],
            temperature=gen_config["temperature"],
            max_tokens=gen_config["max_output_tokens"],
            api_key=get_secret("together_api_key"),
        )
    if provider == "Google":
        # Google's client names the token cap max_output_tokens.
        return ChatGoogleGenerativeAI(
            model=gen_config["model_name"],
            temperature=gen_config["temperature"],
            api_key=get_secret("google_api_key"),
            max_output_tokens=gen_config["max_output_tokens"],
        )

    raise ValueError(f"Unknown provider: {gen_config['provider']}")