dror201031 committed on
Commit 29ab382 (verified)
1 Parent(s): cabac3d

Update app.py

Files changed (1):
  app.py  +49 −3
app.py CHANGED
@@ -1,7 +1,53 @@
+import os
 import gradio as gr
+from huggingface_hub import login
+from transformers import pipeline
 
-def hello(name):
-    return f"Hello {name}!"
-
-demo = gr.Interface(fn=hello, inputs="text", outputs="text")
+# Log in to Hugging Face with a token from the secrets settings
+token = os.environ.get("HF_TOKEN")
+if token:
+    login(token)
+    print("Logged in to Hugging Face successfully!")
+else:
+    print("Warning: no HF_TOKEN key was found!")
+
+# Load the model
+model_name = "google/gemma-3n-E2B-it-litert-preview"
+print(f"Loading model {model_name}...")
+
+try:
+    generator = pipeline(
+        "text-generation",
+        model=model_name,
+        device_map="auto",
+        torch_dtype="auto",
+        model_kwargs={"quantization_config": {"load_in_4bit": True}}
+    )
+    print("Model loaded successfully!")
+except Exception as e:
+    print(f"Error loading the model: {str(e)}")
+    generator = None
+
+def ask_model(prompt):
+    if generator is None:
+        return "The model did not load properly. Check the logs."
+
+    try:
+        outputs = generator(
+            [{"role": "user", "content": prompt}],
+            max_new_tokens=200,
+            return_full_text=False
+        )
+        return outputs[0]["generated_text"]
+    except Exception as e:
+        return f"Error getting a response: {str(e)}"
+
+demo = gr.Interface(
+    fn=ask_model,
+    inputs=gr.Textbox(lines=3, placeholder="Ask a question..."),
+    outputs=gr.Textbox(label="Answer"),
+    title="Genie - Gemma 3n Chatbot",
+    description="A chatbot based on Google's Gemma 3n model"
+)
+
 demo.launch()
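
For reference, the 4-bit setting the commit passes as a plain dict in model_kwargs can also be written with transformers' explicit BitsAndBytesConfig object. This is a minimal sketch, not part of the commit; it assumes the bitsandbytes package is installed alongside transformers and reuses the same model name as above.

    # Sketch: same 4-bit load with an explicit BitsAndBytesConfig (assumes bitsandbytes is installed)
    from transformers import BitsAndBytesConfig, pipeline

    bnb_config = BitsAndBytesConfig(load_in_4bit=True)

    generator = pipeline(
        "text-generation",
        model="google/gemma-3n-E2B-it-litert-preview",
        device_map="auto",
        torch_dtype="auto",
        model_kwargs={"quantization_config": bnb_config},
    )

    # Chat-style call, mirroring ask_model() in the committed file
    outputs = generator(
        [{"role": "user", "content": "Hello!"}],
        max_new_tokens=200,
        return_full_text=False,
    )
    print(outputs[0]["generated_text"])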