artificialguybr committed
Commit acc1fb0
1 Parent(s): ac31486

Update app.py

Files changed (1): app.py +6 -2
app.py CHANGED
@@ -43,8 +43,11 @@ def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetit
     # Apply the chat template
     gen_input = tokenizer.apply_chat_template(messages, return_tensors="pt", add_generation_prompt=True)
 
-    # Extract input_ids
-    input_ids = gen_input['input_ids']
+    # Debug: Print the shape of gen_input
+    print("Shape of gen_input:", gen_input.shape if hasattr(gen_input, 'shape') else type(gen_input))
+
+    # Extract input_ids based on the type of gen_input
+    input_ids = gen_input['input_ids'] if isinstance(gen_input, dict) else gen_input
 
     # Generate the output
     output = model.generate(input_ids=input_ids, temperature=temperature, do_sample=True, top_p=top_p, top_k=top_k, max_length=max_tokens)
@@ -58,6 +61,7 @@ def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetit
     return history, history, ""
 
 
+
 start_message = ""
 
 CSS ="""