Commit

[Frontend] Disallow passing model as both argument and option (vllm…
DarkLight1337 authored Aug 12, 2024
1 parent efa865c commit 690e7e7
Showing 2 changed files with 9 additions and 2 deletions.
4 changes: 2 additions & 2 deletions vllm/engine/arg_utils.py

@@ -32,7 +32,7 @@ def nullable_str(val: str):
 @dataclass
 class EngineArgs:
     """Arguments for vLLM engine."""
-    model: str
+    model: str = 'facebook/opt-125m'
     served_model_name: Optional[Union[List[str]]] = None
     tokenizer: Optional[str] = None
     skip_tokenizer_init: bool = False
@@ -133,7 +133,7 @@ def add_cli_args(parser: FlexibleArgumentParser) -> FlexibleArgumentParser:
         parser.add_argument(
             '--model',
             type=str,
-            default='facebook/opt-125m',
+            default=EngineArgs.model,
             help='Name or path of the huggingface model to use.')
         parser.add_argument(
             '--tokenizer',
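
For reference, a minimal, self-contained sketch of the pattern introduced above (not vLLM code; the class name merely mirrors the real one): the dataclass field default becomes the single source of truth, and the argparse default is derived from it, so the two cannot drift apart.

import argparse
from dataclasses import dataclass


@dataclass
class EngineArgs:
    # Toy stand-in for vllm.engine.arg_utils.EngineArgs.
    model: str = 'facebook/opt-125m'


parser = argparse.ArgumentParser()
# Reuse the dataclass default so the CLI and the dataclass stay in sync.
parser.add_argument('--model', type=str, default=EngineArgs.model)

print(parser.parse_args([]).model)                        # facebook/opt-125m
print(parser.parse_args(['--model', 'my-model']).model)   # my-model
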
7 changes: 7 additions & 0 deletions vllm/scripts.py

@@ -9,6 +9,7 @@
 from openai import OpenAI
 from openai.types.chat import ChatCompletionMessageParam
 
+from vllm.engine.arg_utils import EngineArgs
 from vllm.entrypoints.openai.api_server import run_server
 from vllm.entrypoints.openai.cli_args import make_arg_parser
 from vllm.utils import FlexibleArgumentParser
@@ -24,6 +25,12 @@ def signal_handler(sig, frame):
 
 
 def serve(args: argparse.Namespace) -> None:
+    # The default value of `--model`
+    if args.model != EngineArgs.model:
+        raise ValueError(
+            "With `vllm serve`, you should provide the model as a "
+            "positional argument instead of via the `--model` option.")
+
     # EngineArgs expects the model name to be passed as --model.
     args.model = args.model_tag
 
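
For illustration, a minimal sketch (not vLLM code) of how this guard behaves: any `--model` value that differs from the `EngineArgs` default is treated as an explicit use of the option and rejected, while the positional model tag is copied into `args.model` for downstream code.

import argparse

DEFAULT_MODEL = 'facebook/opt-125m'  # stand-in for EngineArgs.model


def serve(args: argparse.Namespace) -> None:
    # Reject explicit --model; only the positional model tag is allowed.
    if args.model != DEFAULT_MODEL:
        raise ValueError(
            "With `vllm serve`, you should provide the model as a "
            "positional argument instead of via the `--model` option.")
    # Downstream code still reads args.model, so copy the positional value.
    args.model = args.model_tag


parser = argparse.ArgumentParser()
parser.add_argument('model_tag')
parser.add_argument('--model', type=str, default=DEFAULT_MODEL)

serve(parser.parse_args(['facebook/opt-125m']))  # accepted
try:
    serve(parser.parse_args(['facebook/opt-125m', '--model', 'other-model']))
except ValueError as exc:
    print(exc)  # explains that the positional argument should be used

Note that the check compares against the default value, so passing `--model facebook/opt-125m` (the default itself) is indistinguishable from omitting the option and is not rejected.
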
