Commit 6a27da1
1 Parent(s): 01fe26e

better fix

requirements.txt CHANGED (+1 -2)
@@ -31,8 +31,7 @@ torchdata==0.10.1
 torchao==0.9.0
 
 # for torch 2.6, we must use torchcodec 0.2
--
-torchcodec==0.2.1
+torchcodec==0.2.1 --index-url=https://download.pytorch.org/whl/cu128
 flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 
 # something broke in Transformers > 4.55.4
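For reference, a one-off command-line equivalent of the changed line (a sketch, assuming pip on the same cp310/linux setup the flash-attn wheel targets; the version pin and index URL are copied from the diff):

    # install the pinned torchcodec from the PyTorch cu128 wheel index
    pip install torchcodec==0.2.1 --index-url https://download.pytorch.org/whl/cu128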
