Update app.py
app.py CHANGED
@@ -17,12 +17,10 @@ import easyocr
 app = FastAPI()
 
 # Load AI Model for Question Answering on Documents (Mistral-7B)
-model_name = "
-print(f"π Loading
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForCausalLM.from_pretrained(model_name)
+model_name = "TinyLlama/TinyLlama-1.1B-chat"
+print(f"π Loading models")
 
-doc_qa_pipeline = pipeline("text-generation", model=model
+doc_qa_pipeline = pipeline("text-generation", model=model)
 
 # Load Image Captioning Model (nlpconnect/vit-gpt2-image-captioning)
 image_captioning_pipeline = pipeline("image-to-text", model="nlpconnect/vit-gpt2-image-captioning")
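
The commit swaps the Mistral-7B setup (explicit AutoTokenizer / AutoModelForCausalLM loading) for a TinyLlama checkpoint passed to a text-generation pipeline. Note that the added line still reads model=model while the model variable is removed in the same commit, so it would presumably fail at startup; below is a minimal sketch of the resulting section of app.py, assuming the intent was model=model_name so that pipeline() loads the checkpoint and its tokenizer itself. The model id and the print message are taken from the commit; everything else is illustrative.

from fastapi import FastAPI
from transformers import pipeline

app = FastAPI()

# Load AI Model for Question Answering on Documents
# (model id taken from the commit; assumed to be the intended checkpoint)
model_name = "TinyLlama/TinyLlama-1.1B-chat"
print(f"π Loading models")

# Passing the model id string lets pipeline() download the checkpoint and set up
# its tokenizer, replacing the removed AutoTokenizer/AutoModelForCausalLM calls.
doc_qa_pipeline = pipeline("text-generation", model=model_name)

# Load Image Captioning Model (nlpconnect/vit-gpt2-image-captioning)
image_captioning_pipeline = pipeline("image-to-text", model="nlpconnect/vit-gpt2-image-captioning")

Handing pipeline() a model identifier means downloading, caching, and tokenizer setup all happen on the Space's first startup, which appears to be the point of dropping the explicit loading calls in this commit.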