kylanoconnor committed
Commit 5eb92ed · 1 Parent(s): 2b58889

Fix Gradio 5.x API endpoints - replace TabbedInterface with Blocks

Files changed (1)
  1. app.py +72 -44
app.py CHANGED
@@ -81,51 +81,79 @@ Mean Longitude: {mean_lon:.6f} ± {std_lon:.6f}
     except Exception as e:
         return f"Error during prediction: {str(e)}"
 
-# Create the Gradio interface for simple prediction
-simple_interface = gr.Interface(
-    fn=predict_geolocation,
-    inputs=gr.Image(type="pil", label="Upload an image"),
-    outputs=gr.Textbox(label="Predicted Location", lines=4),
-    title="🗺️ PLONK: Global Visual Geolocation",
-    description="""
-    Upload an image and PLONK will predict where it was taken!
-
-    This uses the PLONK_YFCC model trained on the YFCC100M dataset.
-    The model predicts latitude and longitude coordinates based on visual content.
-
-    **Note**: This is running on CPU, so processing may take 300-500ms per image.
-    """,
-    examples=[
-        ["demo/examples/condor.jpg"],
-        ["demo/examples/Kilimanjaro.jpg"],
-        ["demo/examples/pigeon.png"]
-    ] if any(Path("demo/examples").glob("*")) else None
-)
-
-# Create advanced interface with sampling options
-advanced_interface = gr.Interface(
-    fn=predict_geolocation_with_samples,
-    inputs=[
-        gr.Image(type="pil", label="Upload an image"),
-        gr.Slider(1, 256, value=64, step=1, label="Number of samples"),
-        gr.Slider(0.0, 5.0, value=0.0, step=0.1, label="Guidance scale (CFG)")
-    ],
-    outputs=gr.Textbox(label="Detailed Results", lines=10),
-    title="🗺️ PLONK: Advanced Geolocation with Uncertainty",
-    description="""
-    Advanced interface showing prediction uncertainty through multiple samples.
-
-    - **Number of samples**: More samples = better uncertainty estimation (but slower)
-    - **Guidance scale**: Higher values = more confident predictions (try 2.0 for best single guess)
-    """,
-)
-
-# Create tabbed interface
-demo = gr.TabbedInterface(
-    [simple_interface, advanced_interface],
-    ["Simple Prediction", "Advanced Analysis"],
-    title="PLONK: Around the World in 80 Timesteps"
-)
+# Create the Gradio app using Blocks for proper API support
+with gr.Blocks(title="PLONK: Around the World in 80 Timesteps") as demo:
+    gr.Markdown("# 🗺️ PLONK: Around the World in 80 Timesteps")
+    gr.Markdown("A generative approach to global visual geolocation. Upload an image and PLONK will predict where it was taken!")
+
+    with gr.Tabs():
+        with gr.TabItem("Simple Prediction"):
+            gr.Markdown("""
+            ### 🗺️ PLONK: Global Visual Geolocation
+
+            Upload an image and PLONK will predict where it was taken!
+
+            This uses the PLONK_YFCC model trained on the YFCC100M dataset.
+            The model predicts latitude and longitude coordinates based on visual content.
+
+            **Note**: This is running on CPU, so processing may take 300-500ms per image.
+            """)
+
+            with gr.Row():
+                with gr.Column():
+                    image_input = gr.Image(type="pil", label="Upload an image")
+                    predict_btn = gr.Button("Predict Location", variant="primary")
+
+                with gr.Column():
+                    output_text = gr.Textbox(label="Predicted Location", lines=4)
+
+            # Add examples if they exist
+            if any(Path("demo/examples").glob("*")):
+                gr.Examples(
+                    examples=[
+                        ["demo/examples/condor.jpg"],
+                        ["demo/examples/Kilimanjaro.jpg"],
+                        ["demo/examples/pigeon.png"]
+                    ],
+                    inputs=image_input,
+                    outputs=output_text,
+                    fn=predict_geolocation,
+                    cache_examples=False
+                )
+
+            predict_btn.click(
+                fn=predict_geolocation,
+                inputs=image_input,
+                outputs=output_text,
+                api_name="predict"  # This creates the /api/predict endpoint
+            )
+
+        with gr.TabItem("Advanced Analysis"):
+            gr.Markdown("""
+            ### 🗺️ PLONK: Advanced Geolocation with Uncertainty
+
+            Advanced interface showing prediction uncertainty through multiple samples.
+
+            - **Number of samples**: More samples = better uncertainty estimation (but slower)
+            - **Guidance scale**: Higher values = more confident predictions (try 2.0 for best single guess)
+            """)
+
+            with gr.Row():
+                with gr.Column():
+                    adv_image_input = gr.Image(type="pil", label="Upload an image")
+                    samples_slider = gr.Slider(1, 256, value=64, step=1, label="Number of samples")
+                    cfg_slider = gr.Slider(0.0, 5.0, value=0.0, step=0.1, label="Guidance scale (CFG)")
+                    advanced_btn = gr.Button("Analyze with Uncertainty", variant="primary")
+
+                with gr.Column():
+                    advanced_output = gr.Textbox(label="Detailed Results", lines=10)
+
+            advanced_btn.click(
+                fn=predict_geolocation_with_samples,
+                inputs=[adv_image_input, samples_slider, cfg_slider],
+                outputs=advanced_output,
+                api_name="predict_advanced"  # This creates the /api/predict_advanced endpoint
+            )
 
 if __name__ == "__main__":
     demo.launch()
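
Because the click handlers are now registered with explicit api_name values, the rebuilt app exposes two named endpoints ("predict" and "predict_advanced") that can be called programmatically. A minimal sketch using gradio_client, assuming a hypothetical Space id kylanoconnor/plonk and a local copy of one of the example images:

# Sketch only: the Space id below is a placeholder, not taken from this commit.
from gradio_client import Client, handle_file

client = Client("kylanoconnor/plonk")  # hypothetical Space id

# Simple Prediction tab -> api_name="predict"
location = client.predict(
    handle_file("demo/examples/condor.jpg"),
    api_name="/predict",
)
print(location)

# Advanced Analysis tab -> api_name="predict_advanced"
detailed = client.predict(
    handle_file("demo/examples/condor.jpg"),
    64,   # number of samples
    2.0,  # guidance scale (CFG)
    api_name="/predict_advanced",
)
print(detailed)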