Skip to content

Commit

Permalink
http_server: compatibility fixes for vllm>0.6.1.post1
Browse files Browse the repository at this point in the history
  • Loading branch information
dtrifiro committed Sep 17, 2024
1 parent 20db20b commit 43f3678
Showing 1 changed file with 13 additions and 4 deletions.
17 changes: 13 additions & 4 deletions src/vllm_tgis_adapter/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,15 @@
from typing import TYPE_CHECKING

from vllm.entrypoints.launcher import serve_http
from vllm.entrypoints.openai.api_server import (
init_app,
)
from vllm.entrypoints.openai.api_server import build_app
from vllm.logger import init_logger

try:
from vllm.entrypoints.openai.api_server import init_app
except ImportError: # vllm > 0.6.1.post2
from vllm.entrypoints.openai.api_server import init_app_state


if TYPE_CHECKING:
import argparse

Expand All @@ -27,7 +31,12 @@ async def run_http_server(
# modified copy of vllm.entrypoints.openai.api_server.run_server that
# allows passing of the engine

app = await init_app(engine, args) # type: ignore[arg-type]
try:
app = await init_app(engine, args) # type: ignore[arg-type]
except NameError: # vllm > 0.6.1.post2
app = build_app(args)
model_config = await engine.get_model_config()
app = await init_app_state(engine, model_config, app.state, args)

serve_kwargs = {
"host": args.host,
Expand Down

0 comments on commit 43f3678

Please sign in to comment.