Model download
```python
from modelscope import snapshot_download

# Download the GPTQ-Int3 quantized Qwen2.5-VL-7B-Instruct checkpoint from ModelScope;
# snapshot_download returns the local directory the files were saved to.
model_dir = snapshot_download('ChineseAlpacaGroup/Qwen2.5-VL-7B-Instruct-GPTQ-Int3')
```
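By default ModelScope downloads into its own cache directory. If you want the files in a predictable location (such as the path hard-coded in the configuration step below), `snapshot_download` also accepts a `cache_dir` argument. A minimal sketch, with an assumed storage location; the exact sub-path layout under `cache_dir` depends on the ModelScope version, so print `model_dir` and reuse that value:

```python
from modelscope import snapshot_download

# Sketch: pin the download location with cache_dir and print the resulting path
# so it can be reused in the configuration step below.
model_dir = snapshot_download(
    'ChineseAlpacaGroup/Qwen2.5-VL-7B-Instruct-GPTQ-Int3',
    cache_dir='/modelscope/hub',  # assumption: adjust to your own storage location
)
print(model_dir)  # actual layout under cache_dir varies by ModelScope version
```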
Import the required packages
```python
import os
import numpy as np
import pandas as pd
from tqdm import tqdm
from datetime import datetime, timedelta
from functools import wraps

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation.utils import GenerationConfig
```
Model configuration
```python
# Local path of the downloaded checkpoint; the model_dir returned by
# snapshot_download in the download step can be used here instead.
model_path = "/modelscope/hub/models/ChineseAlpacaGroup/Qwen2.5-VL-7B-Instruct-GPTQ-Int3"

tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    device_map="cuda",
    trust_remote_code=True,
    use_safetensors=True,
).eval()
```
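Note that `AutoModelForCausalLM` + `AutoTokenizer` is the text-only loading path. Qwen2.5-VL checkpoints are vision-language models and, in recent transformers releases, come with a dedicated model class and a processor that handles images as well as text. The following is a sketch of that loading path rather than a replacement verified against this particular GPTQ-Int3 repackage; it assumes a transformers version with Qwen2.5-VL support and a GPTQ backend installed.

```python
# Sketch: load a Qwen2.5-VL checkpoint with its vision-language class and processor.
# Assumes a recent transformers release that ships Qwen2.5-VL support.
from transformers import Qwen2_5_VLForConditionalGeneration, AutoProcessor

model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    model_path,           # same local path as above
    torch_dtype="auto",
    device_map="cuda",
).eval()
processor = AutoProcessor.from_pretrained(model_path)  # tokenizer + image preprocessor
```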
Model invocation
```python
# Build a multimodal query: one image (by URL) followed by a text prompt.
query = tokenizer.from_list_format([
    {'image': 'https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-VL/assets/demo.jpeg'},
    {'text': '这是什么'},  # "What is this?"
])
# history=None starts a new conversation; chat() returns the reply and the updated history.
response, history = model.chat(tokenizer, query=query, history=None)
print(response)
```
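`from_list_format` and `model.chat` are convenience methods exposed by the remote code of the original Qwen-VL repositories; Qwen2.5-VL checkpoints are normally driven through the processor's chat template and `generate` instead. Below is a sketch of that flow, assuming `model` and `processor` were loaded as in the sketch after the configuration step. Fetching the image with `requests`/`PIL` is an illustrative choice; the official examples use the optional `qwen-vl-utils` helper for vision inputs.

```python
# Sketch: chat-template based inference for Qwen2.5-VL, reusing the `model` and
# `processor` objects from the loading sketch above.
import requests
from PIL import Image

url = 'https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen-VL/assets/demo.jpeg'
image = Image.open(requests.get(url, stream=True).raw)

messages = [
    {"role": "user", "content": [
        {"type": "image"},
        {"type": "text", "text": "这是什么"},  # "What is this?"
    ]},
]

# Render the conversation into the model's prompt format, then tokenize text and image together.
prompt = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
inputs = processor(text=[prompt], images=[image], return_tensors="pt").to(model.device)

generated_ids = model.generate(**inputs, max_new_tokens=128)
# Drop the prompt tokens so only the newly generated answer is decoded.
trimmed = [out[len(inp):] for inp, out in zip(inputs.input_ids, generated_ids)]
print(processor.batch_decode(trimmed, skip_special_tokens=True)[0])
```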