# Space: nicole-ait — "init w/ hf models" (commit bea420f, 1.11 kB)
import gradio as gr
from langchain import PromptTemplate, LLMChain
from langchain.llms import HuggingFaceHub
# Prompt template with a single {question} slot; the trailing "Let's think
# step by step." nudges the model into chain-of-thought style answers.
template = """Question: {question}
Answer: Let's think step by step."""
def run(
    question: str = None,
    repo_id: str = None,
    temperature: float = 0.5,
    max_length: int = 64,
) -> str:
    """Answer *question* with a HuggingFace Hub model via a LangChain chain.

    Called by the Gradio Interface; parameters arrive as the *values* of the
    UI components (strings/floats/ints), not the components themselves —
    hence the plain value-type annotations.

    Args:
        question: The user's question, substituted into the prompt template.
        repo_id: HF Hub model id (e.g. "google/flan-t5-xxl").
        temperature: Sampling temperature forwarded to the model.
        max_length: Max generation length forwarded to the model.

    Returns:
        The model's generated answer text.
    """
    prompt = PromptTemplate(template=template, input_variables=["question"])
    llm = HuggingFaceHub(
        repo_id=repo_id,
        model_kwargs={"temperature": temperature, "max_length": max_length}
    )
    llm_chain = LLMChain(prompt=prompt, llm=llm)
    result = llm_chain.run(question)
    print(result)  # echo to server log for debugging alongside the UI output
    return result
# UI components, in the same order as run()'s parameters.
inputs = [
    gr.Textbox(label="Question"),
    gr.Dropdown(["google/flan-t5-xxl", "google/flan-t5-base"],
                value="google/flan-t5-xxl", label="Model", allow_custom_value=True),
    gr.Slider(0.0, 1.0, value=0.5, step=0.05, label="Temperature"),
    gr.Slider(10, 1000, value=64, label="Max Length"),
]

title = "Langchain w/ HF Models"

gr.Interface(
    fn=run,
    inputs=inputs,
    # run() returns free-form generated text, so use a text output component;
    # 'label' would render a gr.Label, which is meant for classification
    # labels/confidence dicts, not paragraphs of LLM output.
    outputs='text',
    title=title,
).launch()