import os
import re
from setuptools import find_packages, setup


def get_version() -> str:
    """Read the package version out of ``src/llamafactory/extras/env.py``.

    Returns:
        The string assigned to ``VERSION`` in that module.

    Raises:
        ValueError: if the file does not contain exactly one
            ``VERSION = "..."`` assignment (the tuple unpacking below fails).
    """
    env_path = os.path.join("src", "llamafactory", "extras", "env.py")
    with open(env_path, "r", encoding="utf-8") as env_file:
        source = env_file.read()
    # Match `VERSION = "x.y.z"` with arbitrary non-word chars around '='.
    matches = re.findall(r"VERSION\W*=\W*\"([^\"]+)\"", source)
    (version,) = matches  # exactly one assignment expected
    return version


def get_requires() -> list:
    """Parse ``requirements.txt`` into a list of requirement specifiers.

    Comment lines (starting with ``#``, even when indented) and blank lines
    are skipped, so the result is safe to pass directly to
    ``setuptools.setup(install_requires=...)``.

    Returns:
        Stripped, non-empty requirement lines in file order.
    """
    with open("requirements.txt", "r", encoding="utf-8") as f:
        file_content = f.read()
    stripped = (line.strip() for line in file_content.splitlines())
    # Filter AFTER stripping: the old code tested `startswith("#")` on the raw
    # line (missing indented comments) and let interior blank lines through
    # as empty-string requirements.
    return [line for line in stripped if line and not line.startswith("#")]


# Optional dependency groups exposed via `pip install llamafactory[<extra>]`.
# Note: key insertion order is preserved in the built metadata, so keep it stable.
extra_require = {
    # Backend / accelerator stacks
    "torch": ["torch>=1.13.1"],
    "torch-npu": ["torch==2.1.0", "torch-npu==2.1.0.post3", "decorator"],
    # Evaluation metrics (Chinese-aware tokenization + ROUGE)
    "metrics": ["nltk", "jieba", "rouge-chinese"],
    # Distributed training
    "deepspeed": ["deepspeed>=0.10.0,<=0.14.0"],
    # Quantization backends
    "bitsandbytes": ["bitsandbytes>=0.39.0"],
    # Fast inference engine
    "vllm": ["vllm>=0.4.3"],
    # Memory-efficient optimizers
    "galore": ["galore-torch"],
    "badam": ["badam"],
    # Quantized-model loaders
    "gptq": ["optimum>=1.16.0", "auto-gptq>=0.5.0"],
    "awq": ["autoawq"],
    "aqlm": ["aqlm[gpu]>=1.1.0"],
    # Model-family specific runtime requirement
    "qwen": ["transformers_stream_generator"],
    # Alternative model hub
    "modelscope": ["modelscope"],
    # Linting / testing toolchain for contributors
    "dev": ["ruff", "pytest"],
}


def main():
    """Package entry point: configure and run ``setuptools.setup``.

    Metadata is assembled from the source tree (``get_version``),
    ``requirements.txt`` (``get_requires``), and the module-level
    ``extra_require`` mapping.
    """
    # Read the long description with a context manager so the file handle is
    # closed deterministically (the old inline `open(...).read()` leaked it
    # to the garbage collector).
    with open("README.md", "r", encoding="utf-8") as readme_file:
        long_description = readme_file.read()

    setup(
        name="llamafactory",
        version=get_version(),
        author="hiyouga",
        # Implicit-concatenation split keeps the address out of naive scrapers.
        author_email="hiyouga" "@" "buaa.edu.cn",
        description="Easy-to-use LLM fine-tuning framework",
        long_description=long_description,
        long_description_content_type="text/markdown",
        keywords=["LLaMA", "BLOOM", "Falcon", "LLM", "ChatGPT", "transformer", "pytorch", "deep learning"],
        license="Apache 2.0 License",
        url="https://github.com/hiyouga/LLaMA-Factory",
        package_dir={"": "src"},
        packages=find_packages("src"),
        python_requires=">=3.8.0",
        install_requires=get_requires(),
        extras_require=extra_require,
        # Console script: `llamafactory-cli` dispatches into llamafactory.cli:main.
        entry_points={"console_scripts": ["llamafactory-cli = llamafactory.cli:main"]},
        classifiers=[
            "Development Status :: 4 - Beta",
            "Intended Audience :: Developers",
            "Intended Audience :: Education",
            "Intended Audience :: Science/Research",
            "License :: OSI Approved :: Apache Software License",
            "Operating System :: OS Independent",
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.8",
            "Programming Language :: Python :: 3.9",
            "Programming Language :: Python :: 3.10",
            "Programming Language :: Python :: 3.11",
            "Topic :: Scientific/Engineering :: Artificial Intelligence",
        ],
    )


# Allow direct invocation (`python setup.py ...`) as well as import by build tools.
if __name__ == "__main__":
    main()