Update app.py
app.py
CHANGED
@@ -5,15 +5,11 @@ import json
 # Define tokenizer and model before using them
 tokenizer = AutoTokenizer.from_pretrained("t5-base")
 model = AutoModelWithLMHead.from_pretrained("t5-base", return_dict=True)
-
-
-
-
-
-
-
-
-
-summary = tokenizer.batch_decode(outputs, skip_special_tokens=True)
-
-st.text(summary[0])
+texte = st.text_area("Texte à résumer", height=200)
+bouton_ok = st.button("Résumé")
+if bouton_ok:
+    inputs = tokenizer.encode("summarize: " + texte, return_tensors='pt', max_length=512, truncation=True)
+    outputs = model.generate(inputs, max_length=150, min_length=80, length_penalty=5, num_beams=2)
+    summary = tokenizer.decode(outputs[0])
+    st.text("Résumé :")
+    st.text(summary)
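
For reference, a minimal sketch of what the whole app.py could look like after this commit. Only "import json" is visible in the hunk context, so the streamlit and transformers imports above line 5 are assumed here, not shown in the diff. The removed lines decoded an outputs tensor with no visible definition; the added block now produces it via model.generate before decoding.

# Sketch only: assumes streamlit and transformers are imported above the hunk
import json

import streamlit as st
from transformers import AutoTokenizer, AutoModelWithLMHead

# Define tokenizer and model before using them
tokenizer = AutoTokenizer.from_pretrained("t5-base")
model = AutoModelWithLMHead.from_pretrained("t5-base", return_dict=True)

texte = st.text_area("Texte à résumer", height=200)  # text box for the input to summarize
bouton_ok = st.button("Résumé")                      # "summarize" button
if bouton_ok:
    # T5 expects a task prefix; long inputs are truncated to 512 tokens
    inputs = tokenizer.encode("summarize: " + texte, return_tensors='pt', max_length=512, truncation=True)
    outputs = model.generate(inputs, max_length=150, min_length=80, length_penalty=5, num_beams=2)
    # Note: decode() without skip_special_tokens=True keeps <pad>/</s> in the text
    summary = tokenizer.decode(outputs[0])
    st.text("Résumé :")
    st.text(summary)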