Skip to content

Commit 93a056f

Browse files
Fix imports from model_config (#443)
1 parent 0135c2e commit 93a056f

File tree

2 files changed: +3 additions, −3 deletions

docs/source/using-the-python-api.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ After that, simply run the pipeline and save the results.
 ```python
 import lighteval
 from lighteval.logging.evaluation_tracker import EvaluationTracker
-from lighteval.models.model_config import VLLMModelConfig
+from lighteval.models.vllm.vllm_model import VLLMModelConfig
 from lighteval.pipeline import ParallelismManager, Pipeline, PipelineParameters
 from lighteval.utils.utils import EnvConfig
 from lighteval.utils.imports import is_accelerate_available

src/lighteval/main_endpoint.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def openai(
     Evaluate OPENAI models.
     """
     from lighteval.logging.evaluation_tracker import EvaluationTracker
-    from lighteval.models.model_config import OpenAIModelConfig
+    from lighteval.models.endpoints.openai_model import OpenAIModelConfig
     from lighteval.pipeline import EnvConfig, ParallelismManager, Pipeline, PipelineParameters

     env_config = EnvConfig(token=TOKEN, cache_dir=cache_dir)
@@ -317,7 +317,7 @@ def tgi(
     import yaml

     from lighteval.logging.evaluation_tracker import EvaluationTracker
-    from lighteval.models.model_config import TGIModelConfig
+    from lighteval.models.endpoints.tgi_model import TGIModelConfig
     from lighteval.pipeline import EnvConfig, ParallelismManager, Pipeline, PipelineParameters

     env_config = EnvConfig(token=TOKEN, cache_dir=cache_dir)

0 commit comments

Comments (0)