# Minimal LangChain Expression Language (LCEL) example: prompt an Ollama-served
# local LLM and print its answer. Requires a running Ollama server with the
# referenced model pulled (`ollama pull llama3.2:1b-instruct-q4_K_M`).
from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama.llms import OllamaLLM

# Chain-of-thought style prompt; {question} is filled in at invoke time.
# NOTE(review): the blank line between question and answer matches the
# canonical langchain-ollama quickstart template — confirm if a different
# prompt layout was intended.
template = """Question: {question}

Answer: Let's think step by step."""


def main() -> None:
    """Build a prompt | model chain and print the model's answer.

    Side effects: sends one generation request to the local Ollama server
    and writes the raw completion string to stdout.
    """
    prompt = ChatPromptTemplate.from_template(template)
    # Small quantized llama3.2 model served by a local Ollama instance.
    model = OllamaLLM(model="llama3.2:1b-instruct-q4_K_M")
    # LCEL pipe: the formatted prompt feeds straight into the model.
    chain = prompt | model
    res = chain.invoke({"question": "What is LangChain?"})
    print(res)


if __name__ == "__main__":
    main()