Commit
Delete generate tests
Robin Picard authored and rlouf committed Feb 27, 2025
1 parent e66e5ef commit e7f7b63
Showing 3 changed files with 1 addition and 479 deletions.
1 change: 1 addition & 0 deletions pyproject.toml
@@ -70,6 +70,7 @@ test = [
"jax",
"ollama",
"dottxt",
"sentencepiece",
]
test-gpu=["outlines[test]", "vllm; sys_platform == 'linux'"]
serve = [
41 changes: 0 additions & 41 deletions tests/generate/conftest.py
@@ -1,47 +1,6 @@
from importlib import reload

import pytest
import torch


def is_metal_available():
try:
import mlx.core as mx
import mlx_lm # noqa: F401

assert mx.metal.is_available()
except (ImportError, AssertionError):
return False
return True


def pytest_collection_modifyitems(config, items):
"""
If mlxlm and Metal aren't available, skip mlxlm tests
If CUDA isn't available, skip vllm and transformers_vision
"""
if not torch.cuda.is_available():
skip_marker = pytest.mark.skip(
reason="Skipping test because CUDA is not available"
)
for item in items:
if "model_fixture" in item.fixturenames:
model_param = item.callspec.params.get("model_fixture", None)
if (
model_param.startswith("model_transformers_vision")
or model_param.startswith("model_vllm")
):
item.add_marker(skip_marker)

if not is_metal_available():
skip_marker = pytest.mark.skip(
reason="Skipping test because mlx-lm or Metal are not available"
)
for item in items:
if "model_fixture" in item.fixturenames:
model_param = item.callspec.params.get("model_fixture", None)
if model_param.startswith("model_mlxlm"):
item.add_marker(skip_marker)


@pytest.fixture
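For context on the hook removed above: it inspected item.callspec.params at collection time to see which model fixture parameter a test was collected with, and added a skip marker when the required backend (CUDA for vllm/transformers_vision, Metal plus mlx-lm for mlxlm) was unavailable. Below is a minimal, hypothetical sketch of the parametrization pattern that hook assumed; the fixture and test names are illustrative, not taken from this repository.

import pytest


# Hypothetical parametrized fixture standing in for the suite's model fixtures;
# the parameter values are what the deleted hook read via item.callspec.params.
@pytest.fixture(params=["model_transformers_vision", "model_vllm", "model_mlxlm"])
def model_fixture(request):
    return request.param


def test_generate(model_fixture):
    # During collection, item.fixturenames contains "model_fixture" and
    # item.callspec.params["model_fixture"] holds the parameter value, which
    # the hook matched with startswith() to decide whether to add a skip marker.
    assert isinstance(model_fixture, str)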
(diff for the third changed file was not rendered)
