Commit be1328a · 1 Parent(s): d43222c
app.py
CHANGED
@@ -5,14 +5,16 @@ import torch
 # deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct
 # model_id = "deepseek-ai/deepseek-coder-1.3b-instruct"
 # model_id = "deepseek-ai/deepseek-coder-6.7b-instruct"
-model_id = "deepseek-ai/
+model_id = "deepseek-ai/deepseek-coder-33b-instruct"
+# model_id = "deepseek-ai/DeepSeek-Coder-V2-Instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_id)  # Or your own!
 model = AutoModelForCausalLM.from_pretrained(model_id,
     # device_map=None,
     # torch_dtype=torch.float32,
     device_map="auto",
     torch_dtype=torch.float16,
-    trust_remote_code=True
+    trust_remote_code=True
+)
 # model.to("cpu")

 def generate_code(prompt, style="Clean & Pythonic"):
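For reference, here is a minimal runnable sketch of the model-loading path as it stands after this commit. It assumes only the lines visible in the hunk above; the body of generate_code is not part of the diff, so the chat-template call, max_new_tokens, and the prompt wording below are illustrative assumptions, not the Space's actual implementation.

# Sketch of the post-commit loading path. The 33B checkpoint in float16 needs
# substantial GPU memory; device_map="auto" will offload to CPU/disk if short.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "deepseek-ai/deepseek-coder-33b-instruct"
# model_id = "deepseek-ai/DeepSeek-Coder-V2-Instruct"

tokenizer = AutoTokenizer.from_pretrained(model_id)  # Or your own!
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",          # spread layers across available devices
    torch_dtype=torch.float16,  # half precision to roughly halve memory use
    trust_remote_code=True,     # needed by repos that ship custom modeling code
)

def generate_code(prompt, style="Clean & Pythonic"):
    # Hypothetical body: the diff only shows the signature.
    messages = [{"role": "user", "content": f"Write {style} code for this task:\n{prompt}"}]
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    output_ids = model.generate(input_ids, max_new_tokens=256, do_sample=False)
    # Decode only the newly generated tokens, skipping the echoed prompt.
    return tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)

The added trust_remote_code=True and the closing parenthesis are the substance of the commit: without the parenthesis the from_pretrained call never closed, and trust_remote_code only matters for checkpoints whose repositories include custom modeling code.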