Skip to content

Commit

Permalink
move vllm to optional-dependencies
Browse files Browse the repository at this point in the history
  • Loading branch information
matheper committed Aug 23, 2024
1 parent 1d6949a commit 9d72763
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 4 deletions.
4 changes: 3 additions & 1 deletion mttl/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@
except ImportError:
from mttl.logging import logger

logger.info("Flash Attention not available")
logger.info(
    'Flash Attention not available. You can install it with `pip install -e ".[flash_attn]"`.'
)

import torch

Expand Down
4 changes: 3 additions & 1 deletion mttl/vllm_engines/engines.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
from vllm.model_executor.parallel_utils.parallel_state import destroy_model_parallel
except ImportError:
LLM = object
logger.warning("VLLM is not installed. Please install it to use LLMEngine.")
logger.warning(
'VLLM is not installed. Please install it with `pip install -e ".[vllm]"` to use LLMEngine.'
)


def save_merged_model(model, model_path, hf_path="/tmp/merged"):
Expand Down
6 changes: 5 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,14 @@ dev = [
"isort",
]

flash-att = [
flash-attn = [
"flash-attn>=2.6.0",
]

vllm = [
"vllm",
]

[project.urls]
"Homepage" = "https://github.com/microsoft/mttl"
"Bug Tracker" = "https://github.com/microsoft/mttl/issues"
Expand Down
1 change: 0 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ matplotlib
openai
ray
nevergrad
vllm
evaluate
seaborn
azure-storage-blob
Expand Down

0 comments on commit 9d72763

Please sign in to comment.