 
 import torch
 from packaging.version import Version, parse
-from setuptools import Extension, find_packages, setup
+from setuptools import Extension, setup
 from setuptools.command.build_ext import build_ext
 from setuptools_scm import get_version
 from torch.utils.cpp_extension import CUDA_HOME, ROCM_HOME
@@ -499,9 +499,7 @@ def get_gaudi_sw_version():
 
 
 def get_vllm_version() -> str:
-    version = get_version(
-        write_to="vllm/_version.py",  # TODO: move this to pyproject.toml
-    )
+    version = get_version()
     sep = "+" if "+" not in version else "."  # dev versions might contain +
 
     if _no_device():
@@ -549,16 +547,6 @@ def get_vllm_version() -> str:
     return version
 
 
-def read_readme() -> str:
-    """Read the README file if present."""
-    p = get_path("README.md")
-    if os.path.isfile(p):
-        with open(get_path("README.md"), encoding="utf-8") as f:
-            return f.read()
-    else:
-        return ""
-
-
 def get_requirements() -> List[str]:
     """Get Python package dependencies from requirements.txt."""
 
@@ -649,36 +637,10 @@ def _read_requirements(filename: str) -> List[str]:
 }
 
 setup(
-    name="vllm",
+    # static metadata should rather go in pyproject.toml
     version=get_vllm_version(),
-    author="vLLM Team",
-    license="Apache 2.0",
-    description=("A high-throughput and memory-efficient inference and "
-                 "serving engine for LLMs"),
-    long_description=read_readme(),
-    long_description_content_type="text/markdown",
-    url="https://github.com/vllm-project/vllm",
-    project_urls={
-        "Homepage": "https://github.com/vllm-project/vllm",
-        "Documentation": "https://vllm.readthedocs.io/en/latest/",
-    },
-    classifiers=[
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "License :: OSI Approved :: Apache Software License",
-        "Intended Audience :: Developers",
-        "Intended Audience :: Information Technology",
-        "Intended Audience :: Science/Research",
-        "Topic :: Scientific/Engineering :: Artificial Intelligence",
-        "Topic :: Scientific/Engineering :: Information Analysis",
-    ],
-    packages=find_packages(exclude=("benchmarks", "csrc", "docs", "examples",
-                                    "tests*")),
-    python_requires=">=3.9",
-    install_requires=get_requirements(),
     ext_modules=ext_modules,
+    install_requires=get_requirements(),
     extras_require={
         "tensorizer": ["tensorizer>=2.9.0"],
         "runai": ["runai-model-streamer", "runai-model-streamer-s3", "boto3"],
@@ -687,9 +649,4 @@ def _read_requirements(filename: str) -> List[str]:
     },
     cmdclass=cmdclass,
     package_data=package_data,
-    entry_points={
-        "console_scripts": [
-            "vllm=vllm.entrypoints.cli.main:main",
-        ],
-    },
 )
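
The static metadata stripped from setup() above is meant to live in pyproject.toml instead, per the new comment. Below is a rough, hypothetical sketch of the corresponding PEP 621 [project] table, reconstructed only from the removed arguments; the real vLLM pyproject.toml may organize this differently, and version/dependencies remain dynamic because they are still computed in setup.py.

[project]
name = "vllm"
authors = [{ name = "vLLM Team" }]
license = { text = "Apache 2.0" }
description = "A high-throughput and memory-efficient inference and serving engine for LLMs"
readme = "README.md"  # replaces the removed read_readme() helper and long_description
requires-python = ">=3.9"
classifiers = [
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "License :: OSI Approved :: Apache Software License",
    "Intended Audience :: Developers",
    "Intended Audience :: Information Technology",
    "Intended Audience :: Science/Research",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Topic :: Scientific/Engineering :: Information Analysis",
]
# still produced at build time by get_vllm_version() / get_requirements()
dynamic = ["version", "dependencies"]

[project.urls]
Homepage = "https://github.com/vllm-project/vllm"
Documentation = "https://vllm.readthedocs.io/en/latest/"

[project.scripts]
vllm = "vllm.entrypoints.cli.main:main"

[tool.setuptools.packages.find]
exclude = ["benchmarks", "csrc", "docs", "examples", "tests*"]

[tool.setuptools_scm]
# replaces the write_to= argument removed from get_version()
write_to = "vllm/_version.py"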