AbstractPhil committed
Commit 26e7d80
1 Parent(s): e7eb866

removed oauth

Files changed (1)
  1. app.py +44 -44
app.py CHANGED
@@ -29,31 +29,31 @@ LOAD_4BIT = os.getenv("LOAD_4BIT", "0") == "1"
 # Optional: HF auth for private/private repos (Spaces Secrets friendly)
 HF_TOKEN: Optional[str] = None
 
-def _hf_login() -> None:
-    """Login to HF Hub using common env secret names.
-    Works on Spaces with a single secret set. No CUDA touched here.
-    """
-    global HF_TOKEN
-    HF_TOKEN = (
-        os.getenv("HF_TOKEN")
-        or os.getenv("HUGGING_FACE_HUB_TOKEN")
-        or os.getenv("HUGGINGFACEHUB_API_TOKEN")
-    )
-    if HF_TOKEN:
-        try:
-            from huggingface_hub import login, whoami
-            login(token=HF_TOKEN, add_to_git_credential=True)
-            try:
-                who = whoami(token=HF_TOKEN)
-                print(f"[hf] logged in as: {who.get('name') or who.get('email') or who.get('id')}")
-            except Exception:
-                pass
-        except Exception as e:
-            print(f"[hf] login failed: {e}")
-    else:
-        print("[hf] no token found; accessing only public repos")
-
-_hf_login()
+#def _hf_login() -> None:
+#    """Login to HF Hub using common env secret names.
+#    Works on Spaces with a single secret set. No CUDA touched here.
+#    """
+#    global HF_TOKEN
+#    HF_TOKEN = (
+#        os.getenv("HF_TOKEN")
+#        or os.getenv("HUGGING_FACE_HUB_TOKEN")
+#        or os.getenv("HUGGINGFACEHUB_API_TOKEN")
+#    )
+#    if HF_TOKEN:
+#        try:
+#            from huggingface_hub import login, whoami
+#            login(token=HF_TOKEN, add_to_git_credential=True)
+#            try:
+#                who = whoami(token=HF_TOKEN)
+#                print(f"[hf] logged in as: {who.get('name') or who.get('email') or who.get('id')}")
+#            except Exception:
+#                pass
+#        except Exception as e:
+#            print(f"[hf] login failed: {e}")
+#    else:
+#        print("[hf] no token found; accessing only public repos")
+#
+#_hf_login()
 
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
 # Is HF OAuth configured for this Space? (set automatically when README has `hf_oauth: true`)
@@ -152,7 +152,7 @@ class RoseGuidedLogits(torch.nn.Module):
     def forward(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
         return scores + self.alpha * self.bias_vec.to(scores.device)
 
-@spaces.GPU()
+@spaces.GPU
 def zerogpu_generate(full_prompt: str,
                      gen_kwargs: Dict[str, Any],
                      rose_map: Optional[Dict[str, float]],
@@ -310,24 +310,24 @@ def generate_stream(message: Any, history: List[Any], system_prompt: str,
 # Helper: login status banner (HF OAuth)
 # -----------------------
 
-def _login_status(profile: gr.OAuthProfile | None) -> str:
-    """Show whether the visitor is logged in to Hugging Face.
-    This affects ZeroGPU quotas (logged-in users get their own token/quota).
-    Requires the Space to have `hf_oauth: true` in README metadata.
-    """
-    # If OAuth isn't configured on the Space, inform clearly
-    if not os.getenv("OAUTH_CLIENT_ID"):
-        return (
-            "ℹ️ OAuth is not configured on this Space. Add `hf_oauth: true` to README metadata "
-            "so users can sign in and ZeroGPU can use their account quota."
-        )
-    if profile is None:
-        return (
-            "🔒 Not signed in to Hugging Face — ZeroGPU will count as anonymous (lower quota). "
-            "Click **Sign in with HF** above."
-        )
-    name = getattr(profile, "name", None) or getattr(profile, "preferred_username", None) or getattr(profile, "id", "user")
-    return f"🔓 Signed in as **{name}** — ZeroGPU will use your account quota."
+#def _login_status(profile: gr.OAuthProfile | None) -> str:
+#    """Show whether the visitor is logged in to Hugging Face.
+#    This affects ZeroGPU quotas (logged-in users get their own token/quota).
+#    Requires the Space to have `hf_oauth: true` in README metadata.
+#    """
+#    # If OAuth isn't configured on the Space, inform clearly
+#    if not os.getenv("OAUTH_CLIENT_ID"):
+#        return (
+#            "ℹ️ OAuth is not configured on this Space. Add `hf_oauth: true` to README metadata "
+#            "so users can sign in and ZeroGPU can use their account quota."
+#        )
+#    if profile is None:
+#        return (
+#            "🔒 Not signed in to Hugging Face — ZeroGPU will count as anonymous (lower quota). "
+#            "Click **Sign in with HF** above."
+#        )
+#    name = getattr(profile, "name", None) or getattr(profile, "preferred_username", None) or getattr(profile, "id", "user")
+#    return f"🔓 Signed in as **{name}** — ZeroGPU will use your account quota."
 
 # -----------------------
 # UI
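
Note on the decorator change: the diff switches `zerogpu_generate` from `@spaces.GPU()` to the bare `@spaces.GPU`. The ZeroGPU `spaces` package accepts both the bare form and the called form (the latter is typically used to pass options such as `duration`). A minimal sketch of the two spellings; the function names, bodies, and the duration value below are illustrative and not taken from app.py:

import spaces

@spaces.GPU  # bare form, as used in this commit; default GPU allocation
def run_short(prompt: str) -> str:
    # hypothetical body: would run the model on the ZeroGPU-allocated device
    return prompt.upper()

@spaces.GPU(duration=120)  # called form with an explicit duration in seconds (illustrative value)
def run_long(prompt: str) -> str:
    return prompt.lower()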