llama.cpp/.github/workflows/test.yaml
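
# Run the test suite on Linux, Windows, and macOS across Python 3.8-3.11.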
name: Tests

on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main

jobs:
  build-linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: "true"
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python3 -m pip install --upgrade pip
          python3 -m pip install .[all] -v
      - name: Test with pytest
        run: |
          python3 -m pytest
  build-windows:
    runs-on: windows-latest
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: "true"
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python3 -m pip install --upgrade pip
          python3 -m pip install .[all] -v
      - name: Test with pytest
        run: |
          python3 -m pytest
  build-macos:
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: "true"
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python3 -m pip install --upgrade pip
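          # Build with AVX/AVX2/AVX-512, FMA, F16C, Accelerate, and Metal disabled,
          # presumably so the build works on generic hosted macOS runners without those features.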
          CMAKE_ARGS="-DLLAMA_AVX=OFF -DLLAMA_AVX2=OFF -DLLAMA_AVX512=OFF -DLLAMA_AVX512_VBMI=OFF -DLLAMA_AVX512_VNNI=OFF -DLLAMA_FMA=OFF -DLLAMA_F16C=OFF -DLLAMA_ACCELERATE=OFF -DLLAMA_METAL=OFF" python3 -m pip install .[all] --verbose --config-settings=cmake.verbose=true --config-settings=logging.level=INFO
      - name: Test with pytest
        run: |
          python3 -m pytest