Update app.py
app.py CHANGED
@@ -27,17 +27,19 @@ subprocess.run([
 model_id = "OpenVINO/Qwen2.5-7B-Instruct-int4-ov"
 model_path = "ov"
 
-config = ov_genai.GenerationConfig()
-config.max_new_tokens = 4096
-config.top_p = 0.9;
-config.top_k = 30;
 
-hf_hub.snapshot_download(model_id, local_dir=model_path)
 '''
 '''
+hf_hub.snapshot_download(model_id, local_dir=model_path)
+'''
 model_path = "ov"
 pipe = ov_genai.LLMPipeline(model_path, "CPU")
-
+
+config = ov_genai.GenerationConfig()
+config.max_new_tokens = 4096
+config.top_p = 0.9;
+config.top_k = 30;
+
 
 pipe = OVModelForCausalLM.from_pretrained(
     model_id,
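For context, a minimal sketch of the flow this revision appears to set up: download the pre-converted INT4 OpenVINO model, build the openvino_genai pipeline on CPU, then configure sampling after the pipeline exists. The prompt string and the final generate() call are illustrative additions, not part of the diff.

# Minimal sketch of the post-change flow; the prompt text and the generate()
# call below are illustrative and do not appear in the diff itself.
import huggingface_hub as hf_hub
import openvino_genai as ov_genai

model_id = "OpenVINO/Qwen2.5-7B-Instruct-int4-ov"
model_path = "ov"

# Fetch the pre-converted INT4 OpenVINO weights into the local "ov" directory.
hf_hub.snapshot_download(model_id, local_dir=model_path)

# Build the pipeline from the downloaded IR files and target the CPU device.
pipe = ov_genai.LLMPipeline(model_path, "CPU")

# Sampling parameters, now set after the pipeline is created (as in the new revision).
config = ov_genai.GenerationConfig()
config.max_new_tokens = 4096
config.top_p = 0.9
config.top_k = 30

# Illustrative call showing how the config would be passed to the pipeline.
print(pipe.generate("What is OpenVINO?", config))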