nugentc committed on
Commit
16098ad
·
1 Parent(s): 94f8937
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -21,7 +21,7 @@ def chat(message, history):
21
  print("new_user_input_ids", new_user_input_ids)
22
  print("sizr bot_input_ids", bot_input_ids.size())
23
  print("size new_user_input_id", new_user_input_ids.size())
24
- bot_input_ids = torch.cat([bot_input_ids, new_user_input_ids], dim=1) if bot_input_ids is not None else new_user_input_ids
25
  # generated a response while limiting the total chat history to 1000 tokens,
26
  chat_history_ids = model.generate(bot_input_ids, max_length=5000, pad_token_id=tokenizer.eos_token_id)
27
  # pretty print last ouput tokens from bot
 
21
  print("new_user_input_ids", new_user_input_ids)
22
  print("sizr bot_input_ids", bot_input_ids.size())
23
  print("size new_user_input_id", new_user_input_ids.size())
24
+ bot_input_ids = torch.cat([bot_input_ids, new_user_input_ids], dim=0) if bot_input_ids is not None else new_user_input_ids
25
  # generated a response while limiting the total chat history to 1000 tokens,
26
  chat_history_ids = model.generate(bot_input_ids, max_length=5000, pad_token_id=tokenizer.eos_token_id)
27
  # pretty print last ouput tokens from bot