import os

from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_huggingface import HuggingFaceEndpoint


def create_chatbot_chain():
    """
    Create a modular chatbot chain using LangChain Runnables.
    """
    # LLM backed by the Hugging Face Inference API.
    llm = HuggingFaceEndpoint(
        repo_id="mistralai/Mistral-7B-Instruct-v0.2",
        task="text-generation",
        max_new_tokens=512,
        temperature=0.7,
        huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
    )

    # Prompt with a fixed system persona and a single user-input slot.
    prompt = ChatPromptTemplate.from_messages([
        ("system", "You are a helpful AI assistant. You can answer questions, tell jokes, and have friendly conversations."),
        ("human", "{input}"),
    ])

    # Compose the pipeline with LCEL: map the raw input string into the
    # prompt's {input} variable, run the model, parse the generation to a
    # string, and wrap the result in an {"output": ...} dict.
    chain = (
        {"input": RunnablePassthrough()}
        | prompt
        | llm
        | StrOutputParser()
        | RunnableLambda(lambda x: {"output": x})
    )

    return chain
|
|
|
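# Illustrative usage sketch (an assumption, not part of the original listing):
# it presumes HUGGINGFACEHUB_API_TOKEN is set in the environment. The chain is
# invoked with a plain string, which RunnablePassthrough feeds into {input}.
if __name__ == "__main__":
    chatbot = create_chatbot_chain()
    result = chatbot.invoke("Tell me a joke about programmers.")
    print(result["output"])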