torch==2.4.0
torchvision
deepspeed==0.16.9
transformers==4.44.2
tokenizers
attrdict
diffusers==0.31.0
sentencepiece==0.1.99
shortuuid
accelerate
peft
bitsandbytes
requests
httpx==0.23.3
uvicorn
fastapi
einops==0.6.1
einops-exts==0.0.4
timm==0.9.12
tensorboardX
tensorboard
tiktoken
loguru
pydantic==2.11.5
pydantic_core==2.33.2
markdown2[all]
numpy
scikit-learn==1.2.2
einx==0.3.0
Pillow==9.0.1
tenacity
sqlitedict
evaluate
sacrebleu
hf_transfer
scikit-image
torch_fidelity
imagesize
# flash attention
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
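# Note: the prebuilt flash_attn 2.6.3 wheel above targets Python 3.10 (cp310), CUDA 12.3,
# torch 2.4, and the non-cxx11 ABI on linux_x86_64, matching the torch==2.4.0 pin; a
# different environment would need a matching wheel from the same release or a source build.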