TiberiuCristianLeon committed on
Commit
0d91427
·
verified ·
1 Parent(s): 6667a14

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -5,7 +5,6 @@ import torch
5
  import os, gc
6
  import httpx
7
 
8
-
9
  device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
10
  # Language options and mappings
11
  favourite_langs = {"Romanian": "ro", "German": "de", "English": "en", "-----": "-----"}
@@ -146,6 +145,7 @@ class Translators:
146
  prompt = f"Translate the following segment into {self.tl}, without additional explanation.\n\n{self.input_text}."
147
  tokenizer = AutoTokenizer.from_pretrained(self.model_name)
148
  model = AutoModelForCausalLM.from_pretrained(self.model_name, device_map="auto")
 
149
  messages = [{"role": "user", "content": prompt}]
150
  tokenized_chat = tokenizer.apply_chat_template(
151
  messages,
 
5
  import os, gc
6
  import httpx
7
 
 
8
  device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
9
  # Language options and mappings
10
  favourite_langs = {"Romanian": "ro", "German": "de", "English": "en", "-----": "-----"}
 
145
  prompt = f"Translate the following segment into {self.tl}, without additional explanation.\n\n{self.input_text}."
146
  tokenizer = AutoTokenizer.from_pretrained(self.model_name)
147
  model = AutoModelForCausalLM.from_pretrained(self.model_name, device_map="auto")
148
+ model.tie_weights() # fp8
149
  messages = [{"role": "user", "content": prompt}]
150
  tokenized_chat = tokenizer.apply_chat_template(
151
  messages,