
Commit b239521

disable phi3 ut

Signed-off-by: Kaihui-intel <kaihui.tang@intel.com>
1 parent b25f8f5

1 file changed, +15 −15 lines changed:

test/3x/torch/quantization/weight_only/test_transformers.py
@@ -226,7 +226,7 @@ def test_vlm(self):
             bits=4,
             group_size=128,
             is_vlm=True,
-            dataset="liuhaotian/llava_conv_58k",
+            dataset="NeelNanda/pile-10k",
             iters=2,
             n_samples=5,
             seq_len=512,
@@ -248,17 +248,17 @@ def test_vlm(self):
         loaded_model = Qwen2VLForConditionalGeneration.from_pretrained("transformers_vlm_tmp")
         assert isinstance(loaded_model.model.layers[0].self_attn.k_proj, WeightOnlyQuantizedLinear), "loaing model failed."
 
-        # phi-3-vision-128k-instruct
-        woq_config = AutoRoundConfig(
-            bits=4,
-            group_size=128,
-            is_vlm=True,
-            dataset="NeelNanda/pile-10k",
-            iters=2,
-            n_samples=5,
-            seq_len=64,
-            batch_size=1,
-        )
-        model_name = "microsoft/Phi-3-vision-128k-instruct"
-        woq_model = AutoModelForCausalLM.from_pretrained(model_name, quantization_config=woq_config, trust_remote_code=True, attn_implementation='eager')
-        assert isinstance(woq_model.model.layers[0].self_attn.o_proj, WeightOnlyQuantizedLinear), "quantizaion failed."
+        # phi-3-vision-128k-instruct, disable as CI consumes too much time
+        # woq_config = AutoRoundConfig(
+        #     bits=4,
+        #     group_size=128,
+        #     is_vlm=True,
+        #     dataset="liuhaotian/llava_conv_58k",
+        #     iters=2,
+        #     n_samples=5,
+        #     seq_len=64,
+        #     batch_size=1,
+        # )
+        # model_name = "microsoft/Phi-3-vision-128k-instruct"
+        # woq_model = AutoModelForCausalLM.from_pretrained(model_name, quantization_config=woq_config, trust_remote_code=True, attn_implementation='eager')
+        # assert isinstance(woq_model.model.layers[0].self_attn.o_proj, WeightOnlyQuantizedLinear), "quantizaion failed."
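
For reference, the disabled Phi-3-vision check can still be exercised locally outside CI. Below is a minimal standalone sketch of that path, mirroring the commented-out code above; the import line is an assumption (the test file's import block is not part of this diff), and the layer check compares the class name instead of importing WeightOnlyQuantizedLinear, so no module path has to be guessed.

# Standalone sketch of the disabled Phi-3-vision check (run locally, not in CI).
# NOTE: the import below is an assumption; this diff does not show the test
# file's imports, so adjust it to match the actual test module.
from neural_compressor.transformers import AutoModelForCausalLM, AutoRoundConfig

woq_config = AutoRoundConfig(
    bits=4,
    group_size=128,
    is_vlm=True,
    dataset="liuhaotian/llava_conv_58k",
    iters=2,
    n_samples=5,
    seq_len=64,
    batch_size=1,
)
model_name = "microsoft/Phi-3-vision-128k-instruct"
woq_model = AutoModelForCausalLM.from_pretrained(
    model_name,
    quantization_config=woq_config,
    trust_remote_code=True,
    attn_implementation="eager",
)
# Verify the attention output projection was replaced by a weight-only quantized
# linear layer; checking the class name avoids importing WeightOnlyQuantizedLinear
# from a guessed location.
assert type(woq_model.model.layers[0].self_attn.o_proj).__name__ == "WeightOnlyQuantizedLinear", "quantization failed."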
