
Commit bfd6f49

Replace unittest skipTest from transformers with pytest.skip (#4297)
1 parent 712f6a9 commit bfd6f49

13 files changed: +82 -39 lines
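For context, the pattern this commit applies across the test suite: skips that previously went through unittest's skipTest (the mechanism used by the transformers testing helpers) now go through pytest.skip, which does not require a unittest.TestCase. The snippet below is a minimal illustrative sketch of the two styles, not code taken from this commit.

# Illustrative sketch only (not code from this commit): the two skip mechanisms.
import unittest

import pytest
import torch


class UnittestStyle(unittest.TestCase):
    def test_needs_accelerator(self):
        if not torch.cuda.is_available():
            self.skipTest("no accelerator available")  # unittest API: a method on TestCase
        self.assertEqual(torch.ones(1, device="cuda").item(), 1.0)


def test_needs_accelerator_pytest_style():
    if not torch.cuda.is_available():
        pytest.skip("no accelerator available")  # pytest API: works in plain test functions too
    assert torch.ones(1, device="cuda").item() == 1.0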

tests/slow/test_dpo_slow.py

Lines changed: 2 additions & 2 deletions
@@ -19,12 +19,12 @@
 from accelerate.utils.memory import release_memory
 from datasets import load_dataset
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
-from transformers.testing_utils import backend_empty_cache, require_torch_accelerator, torch_device
+from transformers.testing_utils import backend_empty_cache, torch_device
 from transformers.utils import is_peft_available

 from trl import DPOConfig, DPOTrainer

-from ..testing_utils import TrlTestCase, require_bitsandbytes, require_peft
+from ..testing_utils import TrlTestCase, require_bitsandbytes, require_peft, require_torch_accelerator
 from .testing_constants import DPO_LOSS_TYPES, DPO_PRECOMPUTE_LOGITS, GRADIENT_CHECKPOINTING_KWARGS, MODELS_TO_TEST
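The same reshuffle repeats in the files below: require_* decorators that used to come from transformers.testing_utils (whose skips rely on unittest's skipTest) are now imported from TRL's own tests/testing_utils.py. One plausible way such a helper can be expressed directly on top of pytest is sketched here; this is an assumption for illustration, not the actual implementation in tests/testing_utils.py.

# Hypothetical sketch of a pytest-based require_* marker; the real helper in
# tests/testing_utils.py may differ.
import pytest
import torch

# Skip the decorated test unless torch can see a CUDA accelerator.
require_torch_accelerator = pytest.mark.skipif(
    not torch.cuda.is_available(),
    reason="test requires a torch accelerator (CUDA device)",
)


@require_torch_accelerator
def test_tensor_on_accelerator():
    assert torch.zeros(1, device="cuda").sum().item() == 0.0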

tests/slow/test_grpo_slow.py

Lines changed: 10 additions & 8 deletions
@@ -30,19 +30,21 @@
     AutoTokenizer,
     BitsAndBytesConfig,
 )
-from transformers.testing_utils import (
-    backend_empty_cache,
-    require_flash_attn,
-    require_liger_kernel,
-    require_torch_accelerator,
-    torch_device,
-)
+from transformers.testing_utils import backend_empty_cache, torch_device
 from transformers.utils import is_peft_available

 from trl import GRPOConfig, GRPOTrainer
 from trl.trainer.utils import get_kbit_device_map

-from ..testing_utils import TrlTestCase, require_bitsandbytes, require_peft, require_vllm
+from ..testing_utils import (
+    TrlTestCase,
+    require_bitsandbytes,
+    require_flash_attn,
+    require_liger_kernel,
+    require_peft,
+    require_torch_accelerator,
+    require_vllm,
+)
 from .testing_constants import MODELS_TO_TEST

tests/slow/test_sft_slow.py

Lines changed: 9 additions & 8 deletions
@@ -19,18 +19,19 @@
 from accelerate.utils.memory import release_memory
 from datasets import load_dataset
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
-from transformers.testing_utils import (
-    backend_empty_cache,
-    require_liger_kernel,
-    require_torch_accelerator,
-    require_torch_multi_accelerator,
-    torch_device,
-)
+from transformers.testing_utils import backend_empty_cache, torch_device
 from transformers.utils import is_peft_available

 from trl import SFTConfig, SFTTrainer

-from ..testing_utils import TrlTestCase, require_bitsandbytes, require_peft
+from ..testing_utils import (
+    TrlTestCase,
+    require_bitsandbytes,
+    require_liger_kernel,
+    require_peft,
+    require_torch_accelerator,
+    require_torch_multi_accelerator,
+)
 from .testing_constants import DEVICE_MAP_OPTIONS, GRADIENT_CHECKPOINTING_KWARGS, MODELS_TO_TEST, PACKING_OPTIONS

tests/test_activation_offloading.py

Lines changed: 2 additions & 2 deletions
@@ -16,12 +16,12 @@
 import torch
 from torch import nn
 from transformers import AutoModelForCausalLM
-from transformers.testing_utils import require_torch_accelerator, torch_device
+from transformers.testing_utils import torch_device
 from transformers.utils import is_peft_available

 from trl.models.activation_offloading import NoOpManager, OffloadActivations

-from .testing_utils import TrlTestCase, require_peft
+from .testing_utils import TrlTestCase, require_peft, require_torch_accelerator


 if is_peft_available():

tests/test_callbacks.py

Lines changed: 1 addition & 2 deletions
@@ -18,7 +18,6 @@

 from datasets import load_dataset
 from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig, Trainer, TrainingArguments
-from transformers.testing_utils import require_wandb
 from transformers.trainer_utils import get_last_checkpoint
 from transformers.utils import is_peft_available

@@ -33,7 +32,7 @@
 )
 from trl.mergekit_utils import MergeConfig

-from .testing_utils import TrlTestCase, require_comet, require_mergekit, require_peft
+from .testing_utils import TrlTestCase, require_comet, require_mergekit, require_peft, require_wandb


 if is_peft_available():

tests/test_dpo_trainer.py

Lines changed: 2 additions & 4 deletions
@@ -29,16 +29,14 @@
     PreTrainedTokenizerBase,
     is_vision_available,
 )
-from transformers.testing_utils import (
-    get_device_properties,
-    require_liger_kernel,
-)
+from transformers.testing_utils import get_device_properties

 from trl import DPOConfig, DPOTrainer, FDivergenceType

 from .testing_utils import (
     TrlTestCase,
     require_bitsandbytes,
+    require_liger_kernel,
     require_no_wandb,
     require_peft,
     require_torch_gpu_if_bnb_not_multi_backend_enabled,

tests/test_gkd_trainer.py

Lines changed: 1 addition & 2 deletions
@@ -19,12 +19,11 @@
 import torch.nn.functional as F
 from datasets import load_dataset
 from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
-from transformers.testing_utils import require_liger_kernel

 from trl import GKDConfig, GKDTrainer
 from trl.trainer.utils import SIMPLE_CHAT_TEMPLATE

-from .testing_utils import TrlTestCase
+from .testing_utils import TrlTestCase, require_liger_kernel


 class TestGKDTrainerGenerateOnPolicy(TrlTestCase):

tests/test_grpo_trainer.py

Lines changed: 1 addition & 2 deletions
@@ -23,7 +23,6 @@
     AutoModelForSequenceClassification,
     AutoTokenizer,
 )
-from transformers.testing_utils import require_liger_kernel
 from transformers.utils import is_peft_available

 from trl import GRPOConfig, GRPOTrainer
@@ -34,7 +33,7 @@
 )
 from trl.experimental.gspo_token import GRPOTrainer as GSPOTokenTrainer

-from .testing_utils import TrlTestCase, require_peft, require_vision, require_vllm
+from .testing_utils import TrlTestCase, require_liger_kernel, require_peft, require_vision, require_vllm


 if is_peft_available():

tests/test_kto_trainer.py

Lines changed: 1 addition & 2 deletions
@@ -17,12 +17,11 @@
 import torch
 from datasets import load_dataset
 from transformers import AutoModelForCausalLM, AutoModelForSeq2SeqLM, AutoTokenizer
-from transformers.testing_utils import require_liger_kernel

 from trl import KTOConfig, KTOTrainer
 from trl.trainer.kto_trainer import _get_kl_dataset, _process_tokens, _tokenize

-from .testing_utils import TrlTestCase, require_no_wandb, require_peft
+from .testing_utils import TrlTestCase, require_liger_kernel, require_no_wandb, require_peft


 class TestKTOTrainer(TrlTestCase):

tests/test_online_dpo_trainer.py

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,6 @@
 from datasets import Dataset, features, load_dataset
 from packaging.version import Version
 from transformers import AutoModelForCausalLM, AutoModelForSequenceClassification, AutoTokenizer
-from transformers.testing_utils import require_torch_accelerator
 from transformers.utils import is_peft_available, is_vision_available

 from trl import OnlineDPOConfig, OnlineDPOTrainer
@@ -27,6 +26,7 @@
     TrlTestCase,
     require_llm_blender,
     require_peft,
+    require_torch_accelerator,
     require_vision,
     require_vllm,
 )
