# LangChain

Using LangChain (Python example):

from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from os import getenv
from dotenv import load_dotenv

# Load variables from a local .env file so getenv() can see them.
load_dotenv()

# Chain-of-thought style prompt: nudges the model to reason step by step
# before answering. {question} is filled in by the chain at run time.
template = """Question: {question}
Answer: Let's think step by step."""

prompt = PromptTemplate(template=template, input_variables=["question"])

llm = ChatOpenAI(
    # Read the key from the environment instead of hard-coding a literal;
    # load_dotenv() above is what makes .env values available here.
    openai_api_key=getenv("API_KEY"),
    openai_api_base="https://llm.siraya.pro/v1",
    model_name="<model_name>",  # replace with a real model identifier
    model_kwargs={},
)

# LLMChain wires the prompt template to the chat model.
llm_chain = LLMChain(prompt=prompt, llm=llm)

question = "What NFL team won the Super Bowl in the year Justin Bieber was born?"

# .run() formats the prompt with the question and returns the model's text.
print(llm_chain.run(question))
// JavaScript (LangChain.js) equivalent of the Python example above.
// BUG FIX: '${API_KEY}' inside single quotes is the literal string
// "${API_KEY}" — JS only interpolates inside backtick template literals.
const chat = new ChatOpenAI(
  {
    modelName: '<model_name>', // replace with a real model identifier
    temperature: 0.8,
    streaming: true,
    openAIApiKey: `${API_KEY}`, // or simply: API_KEY
  },
  {
    // Second argument carries the OpenAI-client configuration,
    // here pointing at a custom OpenAI-compatible endpoint.
    basePath: 'https://llm.siraya.pro/v1',
    baseOptions: {},
  },
);