Spaces: Running on Zero
2025-08-01 10:04
app.py CHANGED
@@ -2,6 +2,7 @@ import torch
 from torch import nn
 import torch.nn.functional as F
 import torchvision.transforms.functional as TF
+from torch.amp import autocast
 
 from torch import Tensor
 import spaces
@@ -28,19 +29,19 @@ pretrained_models = [
     "ZIP-B @ ShanghaiTech B @ MAE", "ZIP-B @ ShanghaiTech B @ NAE",
     "ZIP-B @ UCF-QNRF @ MAE", "ZIP-B @ UCF-QNRF @ NAE",
     "ZIP-B @ NWPU-Crowd @ MAE", "ZIP-B @ NWPU-Crowd @ NAE",
-    "
+    "────────────────────────────",
     "ZIP-S @ ShanghaiTech A @ MAE", "ZIP-S @ ShanghaiTech A @ NAE",
     "ZIP-S @ ShanghaiTech B @ MAE", "ZIP-S @ ShanghaiTech B @ NAE",
     "ZIP-S @ UCF-QNRF @ MAE", "ZIP-S @ UCF-QNRF @ NAE",
-    "
+    "────────────────────────────",
     "ZIP-T @ ShanghaiTech A @ MAE", "ZIP-T @ ShanghaiTech A @ NAE",
     "ZIP-T @ ShanghaiTech B @ MAE", "ZIP-T @ ShanghaiTech B @ NAE",
     "ZIP-T @ UCF-QNRF @ MAE", "ZIP-T @ UCF-QNRF @ NAE",
-    "
+    "────────────────────────────",
     "ZIP-N @ ShanghaiTech A @ MAE", "ZIP-N @ ShanghaiTech A @ NAE",
     "ZIP-N @ ShanghaiTech B @ MAE", "ZIP-N @ ShanghaiTech B @ NAE",
     "ZIP-N @ UCF-QNRF @ MAE", "ZIP-N @ UCF-QNRF @ NAE",
-    "
+    "────────────────────────────",
     "ZIP-P @ ShanghaiTech A @ MAE", "ZIP-P @ ShanghaiTech A @ NAE",
     "ZIP-P @ ShanghaiTech B @ MAE", "ZIP-P @ ShanghaiTech B @ NAE",
     "ZIP-P @ UCF-QNRF @ MAE", "ZIP-P @ UCF-QNRF @ NAE",
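These divider strings are ordinary entries in pretrained_models: the pre-existing CSS rule option[value*="──────"] (visible as context in the CSS hunk further down) renders them as centered separators, and the selection handler in the last hunk returns a warning when one is picked, so they group the model variants visually without being valid choices.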
@@ -238,7 +239,7 @@ def _sliding_window_predict(
     model.eval()
     pi_maps, lambda_maps = [], []
     for i in range(0, len(windows), max_num_windows):
-        with torch.no_grad():
+        with torch.no_grad(), autocast(device_type="cuda" if torch.cuda.is_available() else "cpu"):
            image_feats = model.backbone(windows[i: min(i + max_num_windows, len(windows))])
            pi_image_feats, lambda_image_feats = model.pi_head(image_feats), model.lambda_head(image_feats)
            pi_image_feats = F.normalize(pi_image_feats.permute(0, 2, 3, 1), p=2, dim=-1)  # shape (B, H, W, C)
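Standalone, the no_grad-plus-autocast pattern this hunk introduces looks like the sketch below; DummyBackbone and the tensor shapes are illustrative stand-ins, not code from app.py. Under autocast, eligible ops run in reduced precision (float16 on CUDA, bfloat16 on CPU by default), which usually lowers inference memory and latency at a small numerical cost.

import torch
from torch import nn
from torch.amp import autocast

class DummyBackbone(nn.Module):
    # Illustrative stand-in for model.backbone; one conv keeps it runnable.
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 8, kernel_size=3, padding=1)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.conv(x)

device_type = "cuda" if torch.cuda.is_available() else "cpu"
backbone = DummyBackbone().eval().to(device_type)
windows = torch.randn(4, 3, 224, 224).to(device_type)  # a batch of sliding windows

# no_grad() drops autograd bookkeeping; autocast() picks a reduced per-op dtype.
with torch.no_grad(), autocast(device_type=device_type):
    feats = backbone(windows)

print(feats.shape, feats.dtype)  # e.g. torch.Size([4, 8, 224, 224]), bfloat16 on CPU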
@@ -535,6 +536,62 @@ option[value*="──────"] {
     text-align: center;
 }
 
+/* Dropdown scrollbar styles */
+.gr-dropdown select {
+    max-height: 200px;
+    overflow-y: auto;
+}
+
+/* Dropdown options container styles */
+.gr-dropdown .choices__list {
+    max-height: 200px;
+    overflow-y: auto;
+}
+
+.gr-dropdown .choices__list--dropdown {
+    max-height: 200px;
+    overflow-y: auto;
+    border: 1px solid #e5e7eb;
+    border-radius: 6px;
+    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+
+/* Make sure the dropdown container is not clipped */
+.gr-dropdown {
+    position: relative;
+    z-index: 1000;
+}
+
+/* Custom scrollbar styles - WebKit browsers */
+.gr-dropdown select::-webkit-scrollbar,
+.gr-dropdown .choices__list::-webkit-scrollbar {
+    width: 8px;
+}
+
+.gr-dropdown select::-webkit-scrollbar-track,
+.gr-dropdown .choices__list::-webkit-scrollbar-track {
+    background: #f1f1f1;
+    border-radius: 4px;
+}
+
+.gr-dropdown select::-webkit-scrollbar-thumb,
+.gr-dropdown .choices__list::-webkit-scrollbar-thumb {
+    background: #c1c1c1;
+    border-radius: 4px;
+}
+
+.gr-dropdown select::-webkit-scrollbar-thumb:hover,
+.gr-dropdown .choices__list::-webkit-scrollbar-thumb:hover {
+    background: #a1a1a1;
+}
+
+/* Firefox scrollbar styles */
+.gr-dropdown select,
+.gr-dropdown .choices__list {
+    scrollbar-width: thin;
+    scrollbar-color: #c1c1c1 #f1f1f1;
+}
+
 /* Basic component styles */
 .gr-group {
     background: white;
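One caveat worth hedging: .gr-dropdown and the choices__list classes target Gradio's internal DOM, which is not a stable public API, so selectors like these can silently stop matching after a Gradio upgrade and are worth re-checking when the dependency is bumped. The max-height: 200px plus overflow-y: auto pair is what actually makes the long model list scrollable; the remaining scrollbar rules are purely cosmetic.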
@@ -597,7 +654,10 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(), title="ZIP Crowd Counting") as d
     choices=pretrained_models,
     value="ZIP-B @ NWPU-Crowd @ MAE",
     label="🎛️ Select Model & Dataset",
-    info="Choose model variant, dataset, and evaluation metric"
+    info="Choose model variant, dataset, and evaluation metric",
+    allow_custom_value=False,
+    filterable=True,
+    max_choices=None
 )
 
 with gr.Column(scale=1):
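In isolation, the updated component amounts to the following minimal sketch; the two-item choices list and the model_selector name are hypothetical stand-ins. allow_custom_value=False rejects free-typed values, filterable=True enables type-to-filter, and max_choices only takes effect on multiselect dropdowns, so passing None here is effectively a no-op.

import gradio as gr

with gr.Blocks() as demo:
    model_selector = gr.Dropdown(
        choices=["ZIP-B @ NWPU-Crowd @ MAE", "ZIP-B @ NWPU-Crowd @ NAE"],  # subset for brevity
        value="ZIP-B @ NWPU-Crowd @ MAE",
        label="🎛️ Select Model & Dataset",
        info="Choose model variant, dataset, and evaluation metric",
        allow_custom_value=False,  # only listed choices are accepted
        filterable=True,           # typing filters the visible choices
        max_choices=None,          # only relevant when multiselect=True
    )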
@@ -687,9 +747,9 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(), title="ZIP Crowd Counting") as d
     if "──────" in variant_dataset_metric:
         return "⚠️ Please select a valid model configuration"
     result = update_model_if_needed(variant_dataset_metric)
-    if "Model
+    if "Model configuration set:" in result:
         return f"✅ {result}"
-    elif "Model
+    elif "Model configuration:" in result:
         return f"🔄 {result}"
     else:
         return f"❌ {result}"
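For context, a handler like this is presumably attached to the dropdown's change event; the sketch below stubs update_model_if_needed and invents the component names, so treat it as an illustration of the wiring rather than the app's actual layout.

import gradio as gr

def update_model_if_needed(choice: str) -> str:
    # Stub: the real app.py function loads or reuses weights and reports it.
    return f"Model configuration set: {choice}"

def on_model_change(variant_dataset_metric: str) -> str:
    if "──────" in variant_dataset_metric:
        return "⚠️ Please select a valid model configuration"
    result = update_model_if_needed(variant_dataset_metric)
    if "Model configuration set:" in result:
        return f"✅ {result}"
    elif "Model configuration:" in result:
        return f"🔄 {result}"
    return f"❌ {result}"

with gr.Blocks() as demo:
    selector = gr.Dropdown(choices=["ZIP-B @ NWPU-Crowd @ MAE"], label="Model")
    status = gr.Markdown()
    selector.change(fn=on_model_change, inputs=selector, outputs=status)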