-
Notifications
You must be signed in to change notification settings - Fork 54
Closed as not planned
Description
Fix the Compatibility of peft, numpy and pyarrow
In the README.md, the suggested installation method is:
pip install torch==2.2.2 torchvision==0.17.2 xformers --index-url https://download.pytorch.org/whl/cu118
cd dexbotic
pip install -e .
# Install FlashAttention
pip install ninja packaging
However, this can install the wrong versions of some packages, so dexbotic does not run correctly. Thus, I rewrote the pyproject.toml as follows:
# Standard PEP 517/518 build backend: plain setuptools.
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
# Resolve torch/torchvision from the CUDA 11.8 wheel index declared in
# [[tool.uv.index]] (name = "pytorch-cu118") further down in this file.
# The marker restricts this to Linux/Windows; other platforms fall back
# to the default index.
[tool.uv.sources]
torch = [
{ index = "pytorch-cu118", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
]
torchvision = [
{ index = "pytorch-cu118", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
]
# Core project metadata (PEP 621).
[project]
name = "dexbotic"
version = "0.0.1"
description = "Training and Serving Large Language Action Model in Dexmal"
authors = [
    { name = "Yucheng Zhao", email = "zyc@dexmal.com" },
]
readme = "README.md"
# Any CPython 3.10 patch release works. An exact micro pin ("==3.10.19")
# rejects every other 3.10.x interpreter and would break installs on
# machines with, e.g., 3.10.14 — so constrain only to the 3.10 series.
requires-python = "==3.10.*"
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: Apache Software License",
]
dependencies = [
    # Core PyTorch (must be first) — versions matched to the cu118 wheel index.
    "torch==2.2.2",
    "torchvision==0.17.2",
    "xformers",
    "ninja",
    "packaging",
    "pyarrow==20.0.0",
    # "flash-attn",
    # Ref: https://til.simonwillison.net/python/installing-flash-attention
    # Core ML/AI dependencies
    "transformers==4.51.0",
    "accelerate",
    "peft==0.13.2",
    "bitsandbytes",
    "deepspeed==0.14.1",
    "einops",
    "einops-exts",
    "tokenizers",
    "sentencepiece",
    "tiktoken",
    "timm",
    # Data processing and datasets
    "datasets",
    # numpy pinned below 2.0 for compatibility with the pinned peft/pyarrow.
    "numpy==1.26.4",
    "scikit-learn",
    "decord",
    "av",
    "albumentations",
    "diffusers",
    "botocore==1.35.66",
    "boto3==1.35.18",
    "megfile",
    "easydict",
    # Web framework and API
    "fastapi",
    "uvicorn",
    "flask",
    "httpx",
    "requests",
    "pydantic==2.10.6",
    # UI and visualization
    "gradio",
    "gradio_client",
    "markdown2",
    "tabulate",
    "tqdm",
    "wandb",
    # Utilities
    "pyramid==1.5",
    "numpydantic==1.6.7",
    "protobuf",
    "pypandoc",
    "shortuuid",
    "openpyxl",
    "debugpy",
    "loguru",
    # Code formatting and linting
    "autopep8>=2.0.0",
    "pycodestyle>=2.10.0",
    "black>=23.0.0",
    "flake8>=6.0.0",
    "isort>=5.12.0",
    # Type checking
    "mypy>=1.0.0",
    "types-requests>=2.28.0",
    # Testing
    "pytest>=7.0.0",
    "pytest-cov>=4.0.0",
    # Pre-commit hooks
    "pre-commit>=3.0.0",
]
# Optional extras: install with `pip install -e ".[attention]"`.
# flash_attn is kept out of the core dependencies because it usually
# needs `--no-build-isolation` or a prebuilt wheel to install.
[project.optional-dependencies]
attention = [
"flash_attn",
"xformers"
]
# NOTE(review): a bare `packages` list registers only the top-level
# "dexbotic" package, not its subpackages — confirm this is intended,
# or use [tool.setuptools.packages.find] for automatic discovery.
[tool.setuptools]
packages = ["dexbotic"]
# autopep8 formatter configuration.
# NOTE(review): keys mix snake_case (max_line_length) and kebab-case
# (in-place) — verify both spellings are accepted by autopep8's TOML reader.
[tool.autopep8]
max_line_length = 88
aggressive = 1  # level-1 aggressive fixes (beyond whitespace-only changes)
experimental = false
recursive = true
in-place = false  # report only; do not rewrite files by default
jobs = 0  # 0 = use all available CPUs
pep8_passes = 2
ignore = ["E226", "E302", "E41"]
select = ["E", "W", "F"]
verbose = 0
diff = false
exclude = ".git,__pycache__,build,dist,*.egg-info,wandb,test_data"
# Extra wheel index referenced by name from [tool.uv.sources] above
# (PyTorch CUDA 11.8 builds).
[[tool.uv.index]]
name = "pytorch-cu118"
url = "https://download.pytorch.org/whl/cu118"
In this way, users can use uv to install it quickly:
uv lock
uv export --format requirements-txt > requirements.txt
uv pip install --system -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu118 --index-strategy unsafe-best-match --only-binary=matplotlib
Everything installs and runs correctly now!
A fast way to install flash-attn
In the README.md, flash-attn must be built from source:
pip install flash-attn --no-build-isolation
A faster way to install it from a prebuilt wheel is to follow this guide: https://til.simonwillison.net/python/installing-flash-attention
Metadata
Metadata
Assignees
Labels
No labels