[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"

[project]
name = "unsloth"
dynamic = ["version"]
description = "2-5X faster LLM finetuning"
readme = "README.md"
requires-python = ">=3.9"
license = {file = "LICENSE"}
keywords = ["ai", "llm",]
authors = [
{email = "[email protected]"},
{name = "Unsloth AI team"},
]
maintainers = [
{name = "Daniel Han", email = "[email protected]"},
{name = "Michael Han", email = "[email protected]"},
]
classifiers = [
"Programming Language :: Python",
]

[tool.setuptools.dynamic]
version = {attr = "unsloth.models._utils.__version__"}
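# Note: the dynamic version above is resolved at build time from the __version__
# attribute of the unsloth.models._utils module (i.e. unsloth/models/_utils.py),
# so a release presumably only needs to bump that one string.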

[tool.setuptools]
include-package-data = false

[tool.setuptools.packages.find]
exclude = ["images*"]

[project.optional-dependencies]
huggingface = [
"tyro",
"transformers>=4.38.2",
"datasets>=2.16.0",
"sentencepiece>=0.2.0",
"tqdm",
"psutil",
"wheel>=0.42.0",
"numpy",
"accelerate>=0.26.1",
"trl>=0.7.9",
"peft>=0.7.1",
"protobuf<4.0.0",
]
cu118only = [
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.22.post7%2Bcu118-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.22.post7%2Bcu118-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.22.post7%2Bcu118-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu121only = [
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.22.post7-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.22.post7-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.22.post7-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu118onlytorch211 = [
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23%2Bcu118-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23%2Bcu118-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23%2Bcu118-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu121onlytorch211 = [
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu118onlytorch212 = [
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23.post1%2Bcu118-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23.post1%2Bcu118-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23.post1%2Bcu118-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu121onlytorch212 = [
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23.post1-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23.post1-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23.post1-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu118onlytorch220 = [
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.24%2Bcu118-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.24%2Bcu118-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.24%2Bcu118-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu121onlytorch220 = [
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.24-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.24-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.24-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu118onlytorch230 = [
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.26.post1%2Bcu118-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.26.post1%2Bcu118-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.26.post1%2Bcu118-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
cu121onlytorch230 = [
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.26.post1-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.26.post1-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.26.post1-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
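# Each cu{118,121}only[torchXYZ] extra above pins a single prebuilt xformers wheel
# per Python version, using direct URLs plus python_version environment markers.
# The group name encodes the CUDA toolkit (cu118 / cu121) and, where suffixed, the
# torch minor release it matches (e.g. the 0.0.24 wheels back the *torch220 groups);
# the unsuffixed cu118only / cu121only groups appear to target torch 2.1.0.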
cu118 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118only]",
]
cu121 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121only]",
]
cu118-torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118onlytorch211]",
]
cu121-torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch211]",
]
cu118-torch212 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118onlytorch212]",
]
cu121-torch212 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch212]",
]
cu118-torch220 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118onlytorch220]",
]
cu121-torch220 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch220]",
]
cu118-torch230 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118onlytorch230]",
]
cu121-torch230 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch230]",
]
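# The cu{118,121}[-torchXYZ] extras above are the convenience bundles: each one
# combines the Hugging Face training stack, bitsandbytes, and the matching
# pinned-xformers group defined earlier.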
kaggle = [
"unsloth[huggingface]",
]
kaggle-new = [
"unsloth[huggingface]",
"bitsandbytes",
]
conda = [
"unsloth[huggingface]",
]
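# kaggle / kaggle-new / conda declare only the Hugging Face stack (plus
# bitsandbytes for kaggle-new), presumably because those environments already
# ship a suitable torch + CUDA build.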
colab-torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch211]",
]
colab-ampere-torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch211]",
"packaging",
"ninja",
"flash-attn",
]
colab-torch220 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch220]",
]
colab-ampere-torch220 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch220]",
"packaging",
"ninja",
"flash-attn",
]
colab-new = [
"tyro",
"transformers>=4.38.2",
"datasets>=2.16.0",
"sentencepiece",
"tqdm",
"psutil",
"wheel>=0.42.0",
"numpy",
"protobuf<4.0.0",
]
colab-no-deps = [
"accelerate>=0.26.1",
"trl>=0.7.9",
"peft>=0.7.1",
"xformers",
"bitsandbytes",
"protobuf<4.0.0",
]
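# colab-new keeps the core tooling without the torch-dependent packages, while
# colab-no-deps lists accelerate/trl/peft/xformers/bitsandbytes, presumably
# intended for a separate `pip install --no-deps` step in Colab notebooks.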
colab = [
"unsloth[cu121]",
]
colab-ampere = [
"unsloth[colab-ampere-torch220]",
"packaging",
"ninja",
"flash-attn",
]
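# The *-ampere* extras (colab-ampere-* above, cu118/cu121-ampere-* below) add
# flash-attn on top of the matching stack; flash-attn is built from source,
# hence the extra packaging and ninja requirements, and targets Ampere or
# newer GPUs.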
cu118-ampere = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118only]",
"packaging",
"ninja",
"flash-attn",
]
cu121-ampere = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121only]",
"packaging",
"ninja",
"flash-attn",
]
cu118-ampere-torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118onlytorch211]",
"packaging",
"ninja",
"flash-attn",
]
cu121-ampere-torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch211]",
"packaging",
"ninja",
"flash-attn",
]
cu118-ampere-torch220 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118onlytorch220]",
"packaging",
"ninja",
"flash-attn",
]
cu121-ampere-torch220 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch220]",
"packaging",
"ninja",
"flash-attn",
]
cu118-ampere-torch230 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu118onlytorch230]",
"packaging",
"ninja",
"flash-attn",
]
cu121-ampere-torch230 = [
"unsloth[huggingface]",
"bitsandbytes",
"unsloth[cu121onlytorch230]",
"packaging",
"ninja",
"flash-attn",
]
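# One way to install a given combination (hypothetical example; pick the extra
# that matches your CUDA toolkit and torch version):
#   pip install "unsloth[cu121-ampere-torch220] @ git+https://github.com/unslothai/unsloth.git"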

[project.urls]
homepage = "http://www.unsloth.ai"
documentation = "https://github.com/unslothai/unsloth"
repository = "https://github.com/unslothai/unsloth"