Skip to content

Commit 1778e50

Browse files
authored
[WWB]: Some fixes (openvinotoolkit#983)
1 parent efaf0c8 commit 1778e50

File tree

4 files changed

+5
-2
lines changed

4 files changed

+5
-2
lines changed

.github/workflows/causal_lm_cpp.yml

+1
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ on:
77
branches:
88
- master
99
- 'releases/**'
10+
1011
permissions: read-all # Required by https://github.com/ossf/scorecard/blob/e23b8ad91fd6a64a0a971ca4fc0a4d1650725615/docs/checks.md#token-permissions
1112
concurrency:
1213
group: ${{ github.workflow }}-${{ github.ref }}

.github/workflows/llm_bench-python.yml

+1
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ jobs:
4444
run: |
4545
# stop the build if there are Python syntax errors or undefined names
4646
python -m flake8 ${{ env.LLM_BENCH_PYPATH }} --config=${{ env.LLM_BENCH_PYPATH }}/setup.cfg
47+
python -m flake8 ${{ env.WWB_PATH }} --config=${{ env.LLM_BENCH_PYPATH }}/setup.cfg
4748
- name: Create code style diff for samples
4849
if: failure()
4950
run: |

tools/who_what_benchmark/whowhatbench/wwb.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ class GenAIModelWrapper:
3737

3838
def __init__(self, model, model_dir):
3939
self.model = model
40-
self.config = AutoConfig.from_pretrained(model_dir)
40+
self.config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)
4141

4242
def __getattr__(self, attr):
4343
if attr in self.__dict__:
@@ -199,7 +199,7 @@ def parse_args():
199199
type=str,
200200
choices=["text", "text-to-image"],
201201
default="text",
202-
help="Indicated the model type, e.g. 'text' - for LLMs, 't2im' - for text-to-image pipelines.",
202+
help="Indicated the model type, e.g. 'text' - for causal text generation, 'text-to-image' - for image generation.",
203203
)
204204
parser.add_argument(
205205
"--data-encoder",
@@ -335,6 +335,7 @@ def diff_strings(a: str, b: str, *, use_loguru_colors: bool = False) -> str:
335335
def genai_gen_answer(model, tokenizer, question, max_new_tokens, skip_question):
336336
config = openvino_genai.GenerationConfig()
337337
config.max_new_tokens = max_new_tokens
338+
config.do_sample = False
338339
out = model.generate(question, config)
339340
return out
340341

0 commit comments

Comments (0)