跑demo 报错

#21
by tcy6 - opened

跑的就是官方给的demo
# Official MiniCPM-V-2 demo script.
# NOTE(review): in the forum paste, the hardware-selection notes below lost
# their leading '#', which alone makes this script a SyntaxError. They are
# restored as comments here.
# NOTE(review): the IndexError in apply_rotary_pos_emb shown in the traceback
# is typically a transformers-version incompatibility with the model's
# trust_remote_code implementation (the `seen_tokens` deprecation warning
# confirms a newer transformers); pin the transformers version the model card
# recommends rather than changing this script — TODO confirm against the
# openbmb/MiniCPM-V-2 model card.
import torch
from PIL import Image
from transformers import AutoModel, AutoTokenizer

model = AutoModel.from_pretrained('openbmb/MiniCPM-V-2', trust_remote_code=True, torch_dtype=torch.bfloat16)

# For Nvidia GPUs that support BF16 (like A100, H100, RTX3090):
model = model.to(device='cuda', dtype=torch.bfloat16)

# For Nvidia GPUs that do NOT support BF16 (like V100, T4, RTX2080):
#model = model.to(device='cuda', dtype=torch.float16)

# For Mac with MPS (Apple silicon or AMD GPUs) —
# run with: PYTORCH_ENABLE_MPS_FALLBACK=1 python test.py
#model = model.to(device='mps', dtype=torch.float16)

tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-V-2', trust_remote_code=True)
model.eval()

image = Image.open('/home/jeeves/image.png').convert('RGB')
question = 'What is in the image?'
msgs = [{'role': 'user', 'content': question}]

# model.chat is provided by the model's remote code; it returns the response
# text, an updated conversation context, and a third value ignored here.
res, context, _ = model.chat(
    image=image,
    msgs=msgs,
    context=None,
    tokenizer=tokenizer,
    sampling=True,
    temperature=0.7
)
print(res)

一跑就报错
(tcy6) jeeves@notebook-4992-rhapsody:/mnt/data/user/tc_agi/xubokai$ python /home/jeeves/tcy6/test_test.py
Loading checkpoint shards: 100%|█████████████████████████████████████████████████████████████████████| 2/2 [00:00<00:00, 7.19it/s]
The seen_tokens attribute is deprecated and will be removed in v4.41. Use the cache_position model input instead.
Traceback (most recent call last):
File "/home/jeeves/tcy6/test_test.py", line 22, in <module>
res, context, _ = model.chat(
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpmv.py", line 375, in chat
res, vision_hidden_states = self.generate(
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpmv.py", line 308, in generate
result = self._decode(model_inputs["inputs_embeds"], tokenizer, **kwargs)
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpmv.py", line 212, in _decode
output = self.llm.generate(
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/home/jeeves/.local/lib/python3.10/site-packages/transformers/generation/utils.py", line 1989, in generate
result = self._sample(
File "/home/jeeves/.local/lib/python3.10/site-packages/transformers/generation/utils.py", line 2932, in _sample
outputs = self(**model_inputs, return_dict=True)
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpm.py", line 1387, in forward
outputs = self.model(
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpm.py", line 1262, in forward
layer_outputs = decoder_layer(
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpm.py", line 973, in forward
hidden_states, self_attn_weights, present_key_value = self.self_attn(
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1532, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/home/jeeves/.conda/envs/tcy6/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1541, in _call_impl
return forward_call(*args, **kwargs)
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpm.py", line 869, in forward
query_states, key_states = apply_rotary_pos_emb(
File "/home/jeeves/.cache/huggingface/modules/transformers_modules/openbmb/MiniCPM-V-2/95a8b2d39bde471ffecdee5ef95940b166fffdfe/modeling_minicpm.py", line 284, in apply_rotary_pos_emb
cos = cos[position_ids].unsqueeze(unsqueeze_dim) # [bs, 1, seq_len, dim]
IndexError: index is out of bounds for dimension with size 0

tcy6 changed discussion status to closed

Sign up or log in to comment