Skip to content

Commit d21e27c

Browse files
ilya-lavrenovpavel-esirWovchenat-jankowskinikita-savelyevv
authored
GHA: Use 2025.0.0 RC1 (#1578)
Also, ported the following PRs: - #1588 - #1586 - #1582 - #1573 - #1585 - #1584 --------- Signed-off-by: Tomasz Jankowski <tomasz1.jankowski@intel.com> Co-authored-by: Pavel Esir <pavel.esir@gmail.com> Co-authored-by: Vladimir Zlobin <vladimir.zlobin@intel.com> Co-authored-by: Tomasz Jankowski <tomasz1.jankowski@intel.com> Co-authored-by: Nikita Savelyev <nikita.savelyev@intel.com> Co-authored-by: guozhong wang <guozhong.wang@intel.com>
1 parent eed81fe commit d21e27c

23 files changed

+78
-58
lines changed

.github/workflows/causal_lm_cpp.yml

+7-5
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,11 @@ concurrency:
1616
cancel-in-progress: true
1717

1818
env:
19-
l_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2025.0.0-17800-91ae987c516/l_openvino_toolkit_ubuntu20_2025.0.0.dev20250109_x86_64.tgz
20-
l_u22_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2025.0.0-17800-91ae987c516/l_openvino_toolkit_ubuntu22_2025.0.0.dev20250109_x86_64.tgz
21-
m_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2025.0.0-17800-91ae987c516/m_openvino_toolkit_macos_12_6_2025.0.0.dev20250109_x86_64.tgz
22-
w_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2025.0.0-17800-91ae987c516/w_openvino_toolkit_windows_2025.0.0.dev20250109_x86_64.zip
19+
l_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/l_openvino_toolkit_ubuntu20_2025.0.0.dev20250116_x86_64.tgz
20+
l_u22_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/l_openvino_toolkit_ubuntu22_2025.0.0.dev20250116_x86_64.tgz
21+
m_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/m_openvino_toolkit_macos_12_6_2025.0.0.dev20250116_x86_64.tgz
22+
w_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/w_openvino_toolkit_windows_2025.0.0.dev20250116_x86_64.zip
23+
2324
jobs:
2425
cpp-multinomial-greedy_causal_lm-ubuntu:
2526
runs-on: ubuntu-20.04-8-cores
@@ -463,6 +464,7 @@ jobs:
463464
env:
464465
PYTHONPATH: "./build/:$PYTHONPATH"
465466
LD_LIBRARY_PATH: "./build/openvino_genai/:$LD_LIBRARY_PATH"
467+
466468
cpp-prompt_lookup_decoding_lm-ubuntu:
467469
runs-on: ubuntu-20.04-16-cores
468470
defaults:
@@ -520,6 +522,7 @@ jobs:
520522
env:
521523
PYTHONPATH: "./build/:$PYTHONPATH"
522524
LD_LIBRARY_PATH: "./build/openvino_genai/:$LD_LIBRARY_PATH"
525+
523526
cpp-Phi-1_5:
524527
runs-on: ubuntu-20.04-16-cores
525528
defaults:
@@ -697,7 +700,6 @@ jobs:
697700
diff pred2.txt ref.txt
698701
echo "Chat sample python" passed
699702
700-
701703
visual_language_chat_sample-ubuntu-minicpm_v2_6:
702704
runs-on: ubuntu-22.04-16-cores
703705
steps:

.github/workflows/genai-tools.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -39,12 +39,12 @@ jobs:
3939
- ${{ github.workspace }}:${{ github.workspace }}
4040

4141
steps:
42-
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@master
42+
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@releases/2025/0
4343
id: openvino_download
4444
with:
4545
platform: ubuntu22
4646
commit_packages_to_provide: wheels
47-
revision: ed470e7e40129d6b2bf728bc9527316937a69ef7
47+
revision: latest_available_commit
4848

4949
llm_bench:
5050
name: 'LLM bench tests'

.github/workflows/job_vlm_sample_llava.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ on:
1111
type: string
1212

1313
env:
14-
l_u22_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2025.0.0-17800-91ae987c516/l_openvino_toolkit_ubuntu22_2025.0.0.dev20250109_x86_64.tgz
14+
l_u22_ov_link: https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/l_openvino_toolkit_ubuntu22_2025.0.0.dev20250116_x86_64.tgz
1515

1616
jobs:
1717
visual_language_chat_sample-ubuntu-llava:

.github/workflows/lcm_dreamshaper_cpp.yml

+3-3
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@ concurrency:
1818

1919
env:
2020
PYTHON_VERSION: '3.9'
21-
LINUX_OV_ARCHIVE_URL: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2025.0.0-17800-91ae987c516/l_openvino_toolkit_ubuntu22_2025.0.0.dev20250109_x86_64.tgz
22-
WINDOWS_OV_ARCHIVE_URL: https://storage.openvinotoolkit.org/repositories/openvino/packages/nightly/2025.0.0-17800-91ae987c516/w_openvino_toolkit_windows_2025.0.0.dev20250109_x86_64.zip
21+
LINUX_OV_ARCHIVE_URL: https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/l_openvino_toolkit_ubuntu22_2025.0.0.dev20250116_x86_64.tgz
22+
WINDOWS_OV_ARCHIVE_URL: https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/w_openvino_toolkit_windows_2025.0.0.dev20250116_x86_64.zip
2323
OV_INSTALL_DIR: ${{ github.workspace }}/ov
2424

2525
jobs:
@@ -93,7 +93,7 @@ jobs:
9393
${{ env.build_dir }}/samples/cpp/image_generation/inpainting ./models/lcm_dreamshaper_v7 "cyberpunk cityscape like Tokyo New York with tall buildings at dusk golden hour cinematic lighting" ./image.png ./mask_image.png
9494
9595
lcm_dreamshaper_v7_cpp-windows:
96-
runs-on: windows-2019
96+
runs-on: windows-2022
9797
defaults:
9898
run:
9999
shell: pwsh

.github/workflows/linux.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -47,12 +47,12 @@ jobs:
4747
- ${{ github.workspace }}:${{ github.workspace }}
4848

4949
steps:
50-
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@master
50+
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@releases/2025/0
5151
id: openvino_download
5252
with:
5353
platform: ubuntu22
5454
commit_packages_to_provide: wheels
55-
revision: ed470e7e40129d6b2bf728bc9527316937a69ef7
55+
revision: latest_available_commit
5656

5757
- name: Clone docker tag from OpenVINO repo
5858
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

.github/workflows/mac.yml

+10-10
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ concurrency:
1717

1818
env:
1919
PYTHON_VERSION: '3.10'
20-
OV_BRANCH: 'master'
21-
OV_TARBALL: ''
20+
OV_BRANCH: 'releases/2025/0'
21+
OV_TARBALL: 'https://storage.openvinotoolkit.org/repositories/openvino/packages/pre-release/2025.0.0rc1/m_openvino_toolkit_macos_12_6_2025.0.0.dev20250116_x86_64.tgz'
2222

2323
jobs:
2424
openvino_download:
@@ -224,17 +224,17 @@ jobs:
224224
- name: Test bindings
225225
run: |
226226
source ${OV_INSTALL_DIR}/setupvars.sh
227-
python -m pip install ./thirdparty/openvino_tokenizers/[transformers] -r ./tests/python_tests/requirements.txt --find-links ${OV_INSTALL_DIR}/wheels
227+
python -m pip install ./thirdparty/openvino_tokenizers/[transformers] -r ./tests/python_tests/requirements.txt --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
228228
python -m pytest -v ./tests/python_tests/test_tokenizer.py::test_set_chat_template
229229
env:
230230
PYTHONPATH: "./build/:$PYTHONPATH"
231231

232232
- name: Test bindings (wheel)
233233
run: |
234234
source ${OV_INSTALL_DIR}/setupvars.sh
235-
python -m pip install . --verbose --find-links ${OV_INSTALL_DIR}/wheels
235+
python -m pip install . --verbose --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
236236
python -c "from openvino_genai import LLMPipeline"
237-
python -m pip install ./tools/who_what_benchmark --find-links ${OV_INSTALL_DIR}/wheels
237+
python -m pip install ./tools/who_what_benchmark --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
238238
python -m pytest -v ./tests/python_tests/ --ignore ./tests/python_tests/test_whisper_pipeline.py --ignore ./tests/python_tests/test_vlm_pipeline.py -k "not test_set_chat_template"
239239
240240
genai_python_lib_whisper:
@@ -289,17 +289,17 @@ jobs:
289289
- name: Test bindings
290290
run: |
291291
source ${OV_INSTALL_DIR}/setupvars.sh
292-
python -m pip install ./thirdparty/openvino_tokenizers/[transformers] -r ./tests/python_tests/requirements.txt --find-links ${OV_INSTALL_DIR}/wheels
292+
python -m pip install ./thirdparty/openvino_tokenizers/[transformers] -r ./tests/python_tests/requirements.txt --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
293293
python -m pytest -v ./tests/python_tests/test_whisper_pipeline.py -k test_smoke
294294
env:
295295
PYTHONPATH: "./build/:$PYTHONPATH"
296296

297297
- name: Test bindings (wheel)
298298
run: |
299299
source ${OV_INSTALL_DIR}/setupvars.sh
300-
python -m pip install . --verbose --find-links ${OV_INSTALL_DIR}/wheels
300+
python -m pip install . --verbose --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
301301
python -c "from openvino_genai import LLMPipeline"
302-
python -m pip install ./tools/who_what_benchmark --find-links ${OV_INSTALL_DIR}/wheels
302+
python -m pip install ./tools/who_what_benchmark --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
303303
python -m pytest -v ./tests/python_tests/test_whisper_pipeline.py -k "not test_smoke"
304304
305305
genai_package:
@@ -355,8 +355,8 @@ jobs:
355355
- name: Build and Install dependencies
356356
run: |
357357
source ${OV_INSTALL_DIR}/setupvars.sh
358-
python -m pip install ./thirdparty/openvino_tokenizers/[transformers] --find-links ${OV_INSTALL_DIR}/wheels
359-
python -m pip install -r ./samples/requirements.txt --find-links ${OV_INSTALL_DIR}/wheels
358+
python -m pip install ./thirdparty/openvino_tokenizers/[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
359+
python -m pip install -r ./samples/requirements.txt --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
360360
optimum-cli export openvino --trust-remote-code --model TinyLlama/TinyLlama-1.1B-Chat-v1.0 TinyLlama-1.1B-Chat-v1.0
361361
optimum-cli export openvino --trust-remote-code --model openai/whisper-tiny whisper-tiny
362362

.github/workflows/stable_diffusion_1_5_cpp.yml

+5-5
Original file line numberDiff line numberDiff line change
@@ -40,12 +40,12 @@ jobs:
4040
- ${{ github.workspace }}:${{ github.workspace }}
4141

4242
steps:
43-
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@master
43+
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@releases/2025/0
4444
id: openvino_download
4545
with:
4646
platform: ubuntu22
4747
commit_packages_to_provide: wheels
48-
revision: ed470e7e40129d6b2bf728bc9527316937a69ef7
48+
revision: 2025.0.0rc1
4949

5050
openvino_download_windows:
5151
name: Download OpenVINO for Windows
@@ -66,12 +66,12 @@ jobs:
6666
- ${{ github.workspace }}:${{ github.workspace }}
6767

6868
steps:
69-
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@master
69+
- uses: openvinotoolkit/openvino/.github/actions/openvino_provider@releases/2025/0
7070
id: openvino_download
7171
with:
7272
platform: windows
7373
commit_packages_to_provide: wheels
74-
revision: ed470e7e40129d6b2bf728bc9527316937a69ef7
74+
revision: 2025.0.0rc1
7575

7676
stable_diffusion_1_5_cpp-linux:
7777
runs-on: ubuntu-22.04-8-cores
@@ -153,7 +153,7 @@ jobs:
153153

154154
stable_diffusion_1_5_cpp-windows:
155155
needs: [ openvino_download_windows ]
156-
runs-on: windows-2019
156+
runs-on: windows-2022
157157
defaults:
158158
run:
159159
shell: pwsh

.github/workflows/windows.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ concurrency:
1717

1818
env:
1919
PYTHON_VERSION: '3.11'
20-
OV_BRANCH: 'ed470e7e40129d6b2bf728bc9527316937a69ef7'
20+
OV_BRANCH: 'releases/2025/0'
2121
OV_TARBALL: ''
2222

2323
jobs:

samples/cpp/image_generation/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ Users can change the sample code and play with the following generation paramete
2828

2929
The `--upgrade-strategy eager` option is needed to ensure `optimum-intel` is upgraded to the latest version.
3030

31-
It's not required to install [../../export-requirements.txt](../../export requirements.txt) for deployment if the model has already been exported.
31+
It's not required to install [../../export-requirements.txt](../../export-requirements.txt) for deployment if the model has already been exported.
3232

3333
```sh
3434
pip install --upgrade-strategy eager -r ../../requirements.txt

samples/cpp/visual_language_chat/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ There are two sample files:
1212

1313
The `--upgrade-strategy eager` option is needed to ensure `optimum-intel` is upgraded to the latest version.
1414

15-
It's not required to install [../../export-requirements.txt](../../export requirements.txt) for deployment if the model has already been exported.
15+
It's not required to install [../../export-requirements.txt](../../export-requirements.txt) for deployment if the model has already been exported.
1616

1717
```sh
1818
pip install --upgrade-strategy eager -r ../../requirements.txt

samples/cpp/whisper_speech_recognition/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ This example showcases inference of speech recognition Whisper Models. The appli
66

77
The `--upgrade-strategy eager` option is needed to ensure `optimum-intel` is upgraded to the latest version.
88

9-
It's not required to install [../../export-requirements.txt](../../export requirements.txt) for deployment if the model has already been exported.
9+
It's not required to install [../../export-requirements.txt](../../export-requirements.txt) for deployment if the model has already been exported.
1010

1111
```sh
1212
pip install --upgrade-strategy eager -r ../../requirements.txt

src/cpp/src/llm_pipeline_static.cpp

+2-1
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ namespace {
3434
namespace opp = ov::pass::pattern;
3535
class TransposeValueTensors : public ov::pass::MatcherPass {
3636
public:
37+
OPENVINO_MATCHER_PASS_RTTI("TransposeValueTensors");
3738
struct Context {
3839
std::vector<std::shared_ptr<ov::opset13::Parameter>> new_params;
3940
std::vector<std::shared_ptr<ov::opset13::Parameter>> old_params;
@@ -95,7 +96,7 @@ class TransposeValueTensors : public ov::pass::MatcherPass {
9596

9697
class ScaledDotProductAttentionDecomposition : public ov::pass::MatcherPass {
9798
public:
98-
OPENVINO_RTTI("ScaledDotProductAttentionDecomposition", "0");
99+
OPENVINO_MATCHER_PASS_RTTI("ScaledDotProductAttentionDecomposition");
99100
ScaledDotProductAttentionDecomposition() {
100101
auto pattern_node = ov::pass::pattern::wrap_type<ov::op::v13::ScaledDotProductAttention>();
101102

src/cpp/src/lora_adapter.cpp

+3-3
Original file line numberDiff line numberDiff line change
@@ -473,7 +473,7 @@ struct LoRAWeightStateGetter {
473473
class LoRATransformBase : public ov::pass::MatcherPass {
474474
public:
475475

476-
OPENVINO_RTTI("LoRATransformBase");
476+
OPENVINO_MATCHER_PASS_RTTI("LoRATransformBase");
477477

478478
LoRATransformBase(const LoRAWeightByNodeGetter& lora_weight_getter) {
479479
register_matcher(
@@ -693,7 +693,7 @@ class LoRAFuseTransform : public LoRATransformBase {
693693

694694
public:
695695

696-
OPENVINO_RTTI("LoRAFuseTransform");
696+
OPENVINO_RTTI("LoRAFuseTransform", "genai", LoRATransformBase);
697697

698698
LoRAFuseTransform(const LoRAWeightByNodeGetter& lora_weight_getter, const std::string& device_for_fusion = "CPU") :
699699
LoRATransformBase(lora_weight_getter),
@@ -763,7 +763,7 @@ class LoRAFuseTransform : public LoRATransformBase {
763763
class LoRASeparateTransform : public LoRATransformBase {
764764
public:
765765

766-
OPENVINO_RTTI("LoRASeparateTransform");
766+
OPENVINO_RTTI("LoRASeparateTransform", "genai", LoRATransformBase);
767767

768768
LoRASeparateTransform(const LoRAWeightByNodeGetter& lora_getter) : LoRATransformBase(lora_getter) {}
769769

src/cpp/src/make_tokenizer_stateful.hpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ namespace genai {
3434
**/
3535
class MakeCombineSegmentsSatateful : public ov::pass::ModelPass {
3636
public:
37-
OPENVINO_RTTI("MakeCombineSegmentsSatateful", "0");
37+
OPENVINO_MODEL_PASS_RTTI("MakeCombineSegmentsSatateful");
3838
bool run_on_model(const std::shared_ptr<ov::Model>& model) override;
3939
};
4040

@@ -70,7 +70,7 @@ class MakeCombineSegmentsSatateful : public ov::pass::ModelPass {
7070
**/
7171
class MakeVocabDecoderSatateful : public ov::pass::ModelPass {
7272
public:
73-
OPENVINO_RTTI("MakeVocabDecoderSatateful", "0");
73+
OPENVINO_MODEL_PASS_RTTI("MakeVocabDecoderSatateful");
7474
bool run_on_model(const std::shared_ptr<ov::Model>& model) override;
7575
};
7676

src/cpp/src/whisper_pipeline_static.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -347,7 +347,7 @@ void add_attention_mask_input(std::shared_ptr<ov::Model> model) {
347347
using namespace ov::op;
348348
class AttentionMaskInput : public ov::pass::MatcherPass {
349349
public:
350-
OPENVINO_RTTI("AttentionMaskInput");
350+
OPENVINO_MATCHER_PASS_RTTI("AttentionMaskInput");
351351

352352
AttentionMaskInput(std::shared_ptr<ov::Model> model) {
353353
auto range = wrap_type<v4::Range>();

src/docs/BUILD.md

+3-1
Original file line numberDiff line numberDiff line change
@@ -191,9 +191,11 @@ The path to the openvino install directory is referred as <INSTALL_DIR> througho
191191
```
192192
4. Build the wheel in the `dist` directory:
193193
```sh
194-
python -m pip wheel . -w dist/ --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release
194+
python -m pip wheel . -w dist/ --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/pre-release --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
195195
```
196196

197+
> **NOTE**: You'd need to build ABI compatible OpenVINO and OpenVINO Tokenizers for Ubuntu instead of downloading them from PyPI. See [OpenVINO™ GenAI Dependencies](../README.md#openvino-genai-dependencies) for the explanation.
198+
197199
### Install OpenVINO GenAI From Source
198200
199201
1. Clone OpenVINO GenAI repository and init submodules:

tests/python_tests/requirements.txt

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
--extra-index-url https://download.pytorch.org/whl/cpu
22
diffusers==0.32.2
3-
optimum-intel @ git+https://github.com/eaidova/optimum-intel@ea/stateful_seq2seq
3+
optimum-intel @ git+https://github.com/huggingface/optimum-intel@main
44
numpy<2.0.0; platform_system == "Darwin" and platform_machine == "x86_64"
55
onnx==1.17.0
66
pytest

tests/python_tests/test_continuous_batching.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ def test_cb_streamer_vs_return_vs_stateful(prompt):
117117
@pytest.mark.parametrize("model_descr", get_chat_models_list())
118118
@pytest.mark.precommit
119119
def test_chat_scenario_vs_stateful(model_descr, generation_config_kwargs: Dict):
120-
model_id, models_path, hf_tokenizer, opt_model, ov_pipe = read_model((model_descr[0], model_descr[1] / '_test_chat'))
120+
model_id, models_path, hf_tokenizer, opt_model, ov_pipe = read_model((model_descr[0], model_descr[1]))
121121
cb_pipe = get_continuous_batching(models_path)
122122

123123
ov_pipe.start_chat()

tests/python_tests/test_llm_pipeline.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ def test_chat_scenario(model_descr, generation_config_kwargs: Dict):
129129
chat_history_hf = []
130130
chat_history_ov = []
131131

132-
model_id, path, tokenizer, opt_model, ov_pipe = read_model((model_descr[0], model_descr[1] / '_test_chat'))
132+
model_id, path, tokenizer, opt_model, ov_pipe = read_model((model_descr[0], model_descr[1]))
133133

134134
ov_generation_config = GenerationConfig(**generation_config_kwargs)
135135
hf_generation_config = convert_to_hf(opt_model.generation_config, ov_generation_config)

tests/python_tests/test_sampling.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -65,12 +65,15 @@ def test_stop_strings(tmp_path, generation_config):
6565
'What is OpenVINO?',
6666
'table is made of',
6767
'The Sun is yellow because',
68-
'你好! 你好嗎?'
68+
'你好! 你好嗎?',
6969
'I have an interview about product speccing with the company Weekend Health. Give me an example of a question they might ask with regards about a new feature'
7070
])
7171
@pytest.mark.parametrize("use_cb", [True, False])
7272
def test_greedy(tmp_path, generation_config, prompt, use_cb):
7373
model_id : str = "katuni4ka/tiny-random-phi3"
74+
if sys.platform.startswith('win') and prompt.startswith('你'):
75+
pytest.skip("For unknown reason this prompt fails on Win")
76+
7477
run_llm_pipeline_with_ref(model_id=model_id,
7578
prompts=[prompt],
7679
generation_config=generation_config,

0 commit comments

Comments
 (0)