Commit 508767d
temp
committed Oct 15, 2024
1 parent 34d3c91 · commit 508767d

1 file changed, 21 insertions(+), 0 deletions(-)

miniCPM-V-2_6.py (+21)
@@ -0,0 +1,21 @@
from optimum.intel.openvino import OVModelForVisualCausalLM
from transformers import AutoProcessor, AutoTokenizer
from PIL import Image
import requests

model_id = "openbmb/MiniCPM-V-2_6"

# The processor and tokenizer come from the original Hugging Face model repo.
processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)

# Build a single-turn chat prompt; MiniCPM-V expects the "(<image>./</image>)" placeholder in the user message.
prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "(<image>./</image>)\nWhat is unusual in this image?"}],
    tokenize=False,
    add_generation_prompt=True,
)

# image = Image.open(requests.get("https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11", stream=True).raw).convert('RGB')
image = Image.open("/home/vzlobin/r/g/g.png").convert('RGB')  # local test image

# Load the converted OpenVINO model from the local "MiniCPM-V-2_6" export directory.
model = OVModelForVisualCausalLM.from_pretrained("MiniCPM-V-2_6", trust_remote_code=True)

inputs = processor([prompt], [image], return_tensors="pt")

result = model.generate(**inputs, max_new_tokens=200)

# Decode only the newly generated tokens, skipping the prompt portion of each sequence.
print(processor.tokenizer.batch_decode(result[:, inputs["input_ids"].shape[1]:]))
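
The script loads the model from a local "MiniCPM-V-2_6" directory, which implies the Hugging Face checkpoint was converted to OpenVINO IR beforehand. A minimal sketch of that one-time export step, assuming OVModelForVisualCausalLM supports the same export=True / save_pretrained flow as the other OVModel* classes in optimum-intel:

from optimum.intel.openvino import OVModelForVisualCausalLM

# One-time conversion of the original checkpoint to OpenVINO IR.
# Assumption: export=True triggers the conversion here, as it does for other OVModel* classes.
ov_model = OVModelForVisualCausalLM.from_pretrained(
    "openbmb/MiniCPM-V-2_6", export=True, trust_remote_code=True
)
# Save the converted model into the local directory the script above loads from.
ov_model.save_pretrained("MiniCPM-V-2_6")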
