fix sdxl refiner export (#2133) #965
name: Offline usage / Python - Test

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.9']
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies for pytorch export
        run: |
          pip install .[tests,exporters,onnxruntime]

      - name: Test with pytest
        run: |
          HF_HOME=/tmp/ huggingface-cli download hf-internal-testing/tiny-random-gpt2
          HF_HOME=/tmp/ HF_HUB_OFFLINE=1 optimum-cli export onnx --model hf-internal-testing/tiny-random-gpt2 gpt2_onnx --task text-generation

          huggingface-cli download hf-internal-testing/tiny-random-gpt2
          HF_HUB_OFFLINE=1 optimum-cli export onnx --model hf-internal-testing/tiny-random-gpt2 gpt2_onnx --task text-generation

          pytest tests/onnxruntime/test_modeling.py -k "test_load_model_from_hub and not from_hub_onnx" -s -vvvvv
          HF_HUB_OFFLINE=1 pytest tests/onnxruntime/test_modeling.py -k "test_load_model_from_hub and not from_hub_onnx" -s -vvvvv
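
The gpt2_onnx directory produced by the export step can be loaded back without network access through optimum's ONNX Runtime integration. Below is a minimal sketch, assuming the export command above has already been run in the current working directory and that the optimum[onnxruntime] extra is installed; the prompt text is only an illustration.

    # Minimal sketch (assumption: `optimum-cli export onnx ... gpt2_onnx` was run
    # locally, so the directory contains the ONNX weights plus tokenizer files).
    from transformers import AutoTokenizer
    from optimum.onnxruntime import ORTModelForCausalLM

    tokenizer = AutoTokenizer.from_pretrained("gpt2_onnx")
    model = ORTModelForCausalLM.from_pretrained("gpt2_onnx")

    # Loading from a local path also works with HF_HUB_OFFLINE=1, since nothing
    # needs to be fetched from the Hub.
    inputs = tokenizer("Hello, offline world", return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=5)
    print(tokenizer.batch_decode(outputs, skip_special_tokens=True))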