# ─────────────────────────────────────────────────────────────────────────────
# app.py ─ Weather Umbrella Advisor (Streamlit + Claude 3 + OpenWeatherMap)
# ─────────────────────────────────────────────────────────────────────────────
import os
import json
import requests
import boto3
import streamlit as st
from dotenv import load_dotenv

# ─────────────────────────────────────────────────────────────────────────────
# 1) Load environment variables (for local .env / HF Secrets)
# ─────────────────────────────────────────────────────────────────────────────
load_dotenv()

OPENWEATHERMAP_API_KEY = os.getenv("OPENWEATHERMAP_API_KEY")
AWS_REGION            = os.getenv("AWS_REGION", "us-east-1")
AWS_ACCESS_KEY_ID     = os.getenv("AWS_ACCESS_KEY_ID")       # may be None
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")   # may be None

# ─────────────────────────────────────────────────────────────────────────────
# 2) Helper to mask credentials (so we can print a hint in the UI)
# ─────────────────────────────────────────────────────────────────────────────
def _mask(val: str) -> str:
    """
    Returns a masked version of `val`, showing first 4 + last 4 chars
    e.g. AKIA1234abcd...WXYZ5678
    """
    if not val:
        return "None"
    if len(val) <= 8:
        return val
    return val[:4] + "..." + val[-4:]

# ─────────────────────────────────────────────────────────────────────────────
# 3) Detect if keys are reversed: we expect ACCESS_KEY_ID to start with AKIA/ASIA
#    If not, but SECRET_ACCESS_KEY starts with AKIA/ASIA, swap them.
# ─────────────────────────────────────────────────────────────────────────────
def _looks_like_access_key(key: str) -> bool:
    """
    AWS access key IDs typically start with 'AKIA' or 'ASIA' and are 20 characters long.
    """
    return bool(key) and (key.startswith("AKIA") or key.startswith("ASIA")) and len(key) == 20

# If ACCESS_KEY_ID doesn’t look like an AKIA/ASIA but SECRET_ACCESS_KEY does, swap:
if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
    if not _looks_like_access_key(AWS_ACCESS_KEY_ID) and _looks_like_access_key(AWS_SECRET_ACCESS_KEY):
        AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = AWS_SECRET_ACCESS_KEY, AWS_ACCESS_KEY_ID

# ─────────────────────────────────────────────────────────────────────────────
# 4) Initialize boto3 Session / Bedrock client
#    – If both AWS_ACCESS_KEY_ID & AWS_SECRET_ACCESS_KEY exist, pass them explicitly
#    – Otherwise, fall back to default credential chain (IAM role, container credentials, etc.)
# ─────────────────────────────────────────────────────────────────────────────
if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
    session = boto3.Session(
        aws_access_key_id     = AWS_ACCESS_KEY_ID,
        aws_secret_access_key = AWS_SECRET_ACCESS_KEY,
        region_name           = AWS_REGION,
    )
else:
    session = boto3.Session(region_name=AWS_REGION)

bedrock = session.client("bedrock-runtime")

# Quick sanity check: this only forces the client object to exist and catches
# obviously broken configuration (e.g. a bad region); invalid credentials only
# surface later, on the first Bedrock API call.
try:
    _ = bedrock.meta.region_name  # force client construction; does not validate credentials
except Exception as e:
    st.error(f"⚠️ Credential problem: {e}")
    st.stop()
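# Optional, stricter check (a sketch, not part of the original flow): a call to
# sts:GetCallerIdentity actually exercises the key pair, so bad credentials fail
# here rather than on the first Bedrock request. Uncomment to enable; assumes
# the credentials are allowed to call STS.
# try:
#     session.client("sts").get_caller_identity()
# except Exception as e:
#     st.error(f"⚠️ AWS credentials rejected: {e}")
#     st.stop()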

# ─────────────────────────────────────────────────────────────────────────────
# 5) Streamlit Page Configuration & Header
# ─────────────────────────────────────────────────────────────────────────────
st.set_page_config(page_title="🌀️ Umbrella Advisor", page_icon="☔", layout="centered")

st.markdown(
    """
    <div style="text-align:center">
      <h1 style="color:#3c79f5;">☔ Weather Umbrella Advisor</h1>
      <p style="font-size:18px">
        Ask if you need an umbrella tomorrow – powered by <b>Claude 3 Sonnet (Bedrock)</b> + <b>OpenWeatherMap</b>.
      </p>
    </div>
    """,
    unsafe_allow_html=True,
)

# ─────────────────────────────────────────────────────────────────────────────
# 6) Show masked credentials for debugging (so you can see if the swap logic worked)
# ─────────────────────────────────────────────────────────────────────────────
st.markdown(
    f"""
    **Debug** (masked credentials):  
    • AWS_ACCESS_KEY_ID = `{_mask(AWS_ACCESS_KEY_ID)}`  
    • AWS_SECRET_ACCESS_KEY = `{_mask(AWS_SECRET_ACCESS_KEY)}`
    """,
    unsafe_allow_html=True,
)

# ─────────────────────────────────────────────────────────────────────────────
# 7) Conversation state
# ─────────────────────────────────────────────────────────────────────────────
if "messages" not in st.session_state:
    st.session_state.messages = []

for m in st.session_state.messages:
    with st.chat_message(m["role"]):
        st.markdown(m["content"])

# ─────────────────────────────────────────────────────────────────────────────
# 8) Helper: get_weather(city) β†’ calls OpenWeatherMap and returns JSON
# ─────────────────────────────────────────────────────────────────────────────
def get_weather(city: str):
    """
    Fetches a 24‐hour forecast (8 x 3‐hour intervals) for `city`.
    Returns either:
      { "location": "CityName", "forecast": [ ... ] }
    or
      { "error": "Error message" }
    """
    city = city.strip()
    if not city:
        return {"error": "Please provide a valid city name."}

    try:
        # 1) Get lat/lon
        geo_url = (
            f"http://api.openweathermap.org/geo/1.0/direct"
            f"?q={city}&limit=1&appid={OPENWEATHERMAP_API_KEY}"
        )
        geo_resp = requests.get(geo_url, timeout=10).json()
        if not geo_resp:
            return {"error": f"City '{city}' not found."}
        lat, lon = geo_resp[0]["lat"], geo_resp[0]["lon"]

        # 2) Get 5‐day / 3hr forecast
        weather_url = (
            f"http://api.openweathermap.org/data/2.5/forecast"
            f"?lat={lat}&lon={lon}"
            f"&appid={OPENWEATHERMAP_API_KEY}&units=metric"
        )
        weather_data = requests.get(weather_url, timeout=10).json()
        if "list" not in weather_data:
            return {"error": f"Unable to fetch forecast for '{city}'."}

        forecast = []
        for f in weather_data["list"][:8]:  # Next 24 hours ≈ 8 slots of 3 hours each
            forecast.append({
                "time": f["dt_txt"],
                "description": f["weather"][0]["description"].capitalize(),
                "rain_probability": round(f.get("pop", 0) * 100, 1),
                "temp": f["main"]["temp"],
                "humidity": f["main"]["humidity"]
            })

        return {"location": city.title(), "forecast": forecast}

    except Exception as ex:
        return {"error": str(ex)}
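# Illustrative shape of a successful result (hypothetical values; the keys match
# the dict built above):
# get_weather("London") ->
# {
#   "location": "London",
#   "forecast": [
#     {"time": "2024-06-01 09:00:00", "description": "Light rain",
#      "rain_probability": 60.0, "temp": 14.2, "humidity": 81},
#     ...  (7 more 3-hour slots)
#   ]
# }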

# ─────────────────────────────────────────────────────────────────────────────
# 9) ReAct System Prompt & Helper to ask Claude (Bedrock)
# ─────────────────────────────────────────────────────────────────────────────
SYSTEM_PROMPT = """
You are a helpful umbrella advisor using the ReAct (Reasoning + Acting) methodology.

Steps:
1. Think about the user’s question.
2. Act by calling get_weather(location) if needed.
3. Observe the weather result.
4. Reason and respond.

When you need weather data, respond _exactly_ in this JSON format (no extra text):
{
  "thought": "…",
  "action": "get_weather",
  "action_input": {"location": "CityName"}
}

If no location is provided, ask the user to specify one.

Once you have the forecast, give a final, friendly answer such as:
"You do not need an umbrella tomorrow in London because it will be sunny with 0% chance of rain."
"""

def ask_claude(user_input: str, history: str = "") -> str:
    """
    1. Send the initial ReAct prompt to Claude, including user_input + history.
    2. Parse Claude’s JSON: if action == "get_weather", call get_weather(…).
    3. Feed the weather data back into Claude for final reasoning.
    4. Return Claude’s final text reply.
    """
    # Step 1: Initial ReAct call
    body1 = {
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": 1000,
        "temperature": 0.7,
        "top_p": 0.9,
        "messages": [
            {"role": "user", "content": f"{SYSTEM_PROMPT}\n\nHistory:\n{history}\n\nUser: {user_input}"}
        ]
    }
    resp1 = bedrock.invoke_model(
        modelId="anthropic.claude-3-sonnet-20240229-v1:0",
        contentType="application/json",
        accept="application/json",
        body=json.dumps(body1)
    )
    text1 = json.loads(resp1["body"].read())["content"][0]["text"].strip()

    # Step 2: Try parsing as JSON
    try:
        parsed = json.loads(text1)
        if parsed.get("action") == "get_weather":
            city = parsed["action_input"].get("location", "").strip()
            if not city:
                return "🌍 I need a city name. Could you please tell me which city you mean?"

            wx = get_weather(city)
            if "error" in wx:
                return wx["error"]

            # Step 3: Ask Claude to reason over the weather data
            weather_json = json.dumps(wx, indent=2)
            prompt2 = (
                f"Here is the forecast for {wx['location']}:\n\n"
                f"{weather_json}\n\n"
                "Based on this data, answer whether the user should carry an umbrella tomorrow "
                "in a friendly, conversational way (YES/NO + reasoning)."
            )
            body2 = {
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": 500,
                "temperature": 0.7,
                "messages": [{"role": "user", "content": prompt2}]
            }
            resp2 = bedrock.invoke_model(
                modelId="anthropic.claude-3-sonnet-20240229-v1:0",
                contentType="application/json",
                accept="application/json",
                body=json.dumps(body2)
            )
            return json.loads(resp2["body"].read())["content"][0]["text"].strip()

    except json.JSONDecodeError:
        # If it wasn’t valid JSON, just return whatever Claude replied
        pass

    return text1

# ─────────────────────────────────────────────────────────────────────────────
# 10) Build conversation history helper
# ─────────────────────────────────────────────────────────────────────────────
def _build_history(n: int = 4) -> str:
    """
    Returns the last n messages formatted as:
      User: ...
      Assistant: ...
    so that Claude sees recent turns.
    """
    hist = st.session_state.messages[-n:]
    return "\n".join(f"{m['role'].capitalize()}: {m['content']}" for m in hist)
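# Example of the string this helper produces (hypothetical turns):
#   User: Will it rain in Delhi tomorrow?
#   Assistant: Yes, take an umbrella: there is a 70% chance of rain.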

# ─────────────────────────────────────────────────────────────────────────────
# 11) Main Chat Input / Display Loop
# ─────────────────────────────────────────────────────────────────────────────
if user_query := st.chat_input("Ask: Do I need an umbrella tomorrow?"):
    # 1) Append user message locally
    st.session_state.messages.append({"role": "user", "content": user_query})
    with st.chat_message("user"):
        st.markdown(user_query)

    # 2) Get assistant reply
    with st.chat_message("assistant"):
        with st.spinner("🤔 Thinking…"):
            history = _build_history()
            assistant_reply = ask_claude(user_query, history)
            st.markdown(assistant_reply)

    # 3) Append assistant reply to state
    st.session_state.messages.append({"role": "assistant", "content": assistant_reply})

# ─────────────────────────────────────────────────────────────────────────────
# 12) Sidebar (Branding / Help)
# ─────────────────────────────────────────────────────────────────────────────
with st.sidebar:
    st.image("https://img.icons8.com/clouds/100/umbrella.png", width=100)
    st.markdown("## ☀️ About")
    st.markdown(
        """
        **Weather Umbrella Advisor**  
        - Uses **OpenWeatherMap** for real‐time forecast  
        - Uses **Claude 3 Sonnet (AWS Bedrock)** to reason via ReAct  
        - Provides clear YES/NO umbrella advice with reasoning

        **Try these:**
        - "Should I bring an umbrella tomorrow?"
        - "Will it rain in Delhi tomorrow?"
        - "Do I need an umbrella in Tokyo?"
        """
    )
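
# ─────────────────────────────────────────────────────────────────────────────
# Running locally (sketch, not part of the original file): the app expects
# OPENWEATHERMAP_API_KEY plus AWS credentials/region in a .env file or the
# environment (see section 1), after which it can be started with:
#   streamlit run app.py
# ─────────────────────────────────────────────────────────────────────────────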