File size: 18,692 Bytes
e60c00e
1167b29
 
27579c4
525d347
1167b29
bd5de4f
1167b29
67360ee
 
1167b29
 
 
c7006a6
 
1167b29
bdb2b8a
1167b29
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7006a6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1167b29
 
c7006a6
 
 
 
 
 
 
 
 
 
 
 
 
1167b29
 
 
c7006a6
 
1167b29
 
 
 
c7006a6
 
 
 
 
 
1167b29
 
 
27579c4
 
1167b29
 
27579c4
 
 
 
 
 
1167b29
 
 
bdb2b8a
 
 
 
 
 
 
 
 
 
1167b29
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7006a6
1167b29
 
c7006a6
 
1167b29
 
c528af3
 
 
 
 
 
 
 
c7006a6
 
 
 
 
9546831
c7006a6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bdb2b8a
 
 
 
1167b29
27579c4
20f1559
 
 
 
 
 
 
 
 
 
 
 
1167b29
 
 
 
 
 
 
 
e967d89
1167b29
 
20f1559
e967d89
1167b29
 
 
 
27579c4
20f1559
bdb2b8a
 
27579c4
bdb2b8a
 
20f1559
 
 
 
 
 
 
 
 
 
 
 
 
27579c4
c7006a6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2d850d3
c7006a6
 
 
2d850d3
c7006a6
2d850d3
 
c7006a6
 
 
 
 
 
 
27579c4
1167b29
 
 
c528af3
 
1167b29
 
 
c7006a6
 
27579c4
1167b29
 
 
27579c4
c7006a6
 
1167b29
c7006a6
1167b29
 
 
 
 
 
 
 
 
c7006a6
1167b29
c528af3
 
 
 
 
c7006a6
 
2d850d3
 
c7006a6
 
2d850d3
c7006a6
 
 
 
 
 
 
 
 
 
 
 
 
1167b29
c7006a6
c528af3
 
2d850d3
c528af3
 
 
 
 
2d850d3
c528af3
 
c7006a6
c528af3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7006a6
c528af3
 
 
1167b29
 
 
 
 
 
 
 
 
 
27579c4
1167b29
 
3a85524
94cf5e8
 
2826661
94cf5e8
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
import os
import base64
import logging
import threading
import pandas as pd
from io import BytesIO, StringIO
from docx import Document
from PyPDF2 import PdfReader
import dash
import dash_bootstrap_components as dbc
from dash import html, dcc, Input, Output, State, dash_table, callback_context

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("microhealth-pws")

# Anthropic client setup.  An empty ANTHROPIC_API_KEY still constructs a
# client; requests will fail at call time rather than at import time.
ANTHROPIC_KEY = os.environ.get("ANTHROPIC_API_KEY", "")
import anthropic
anthropic_client = anthropic.Anthropic(api_key=ANTHROPIC_KEY)
CLAUDE3_SONNET_MODEL = "claude-3-7-sonnet-20250219"
# Model limits.  NOTE(review): defined but not referenced anywhere in this
# chunk — confirm they are used elsewhere before removing.
CLAUDE3_MAX_CONTEXT_TOKENS = 200_000
CLAUDE3_MAX_OUTPUT_TOKENS = 64_000

# Prompt templates keyed by proposal-phase name.  Each value is sent verbatim
# as the leading instruction of the Claude prompt (see generate_content), so
# the text below — including its typos ("evaulation", "a only") — is runtime
# data and is deliberately left untouched here.
document_types = {
    "Shred": "Ignore all other instructions and generate only requirements spreadsheet of the Project Work Statement (PWS) identified by action words like shall, will, perform etc. by pws section, requirement.  Do not write as if you're responding to the proposal.  Its a spreadsheet to distill the requirements, not microhealth's approach",
    "Pink": "Create a highly detailed Pink Team document based on the PWS outline.  Your goal is to be compliant and compelling.  Focus on describing the approach and how it will be done, the steps, workflow, people, processes and technology based on well known industry standards to accomplish the task.  Be sure to demonstrate innovation.",
    "Pink Review": "Ignore all other instructions and generate and evaluate compliance of the Pink Team document against the requirements and output only a spreadsheet of non compliant findings by pws number, the goal of that pws section, what made it non compliant and your recommendations for recovery. you must also take into account section L&M of the document which is the evaluation criteria to be sure we address them.",
    "Red": "Produce a  highly detailed Red Team document based on the Pink Review by pws sections. Your goal is to be compliant and compelling by recovering all the findings in Pink Review. Focus on describing the approach and how it will be done, the steps, workflow, people, processes and technology to accomplish the task. Be sure to refer to research that validates the approach and cite sources with measurable outcomes",
    "Red Review": "Ignore all other instructions and generate and evaluate compliance of the Red Team document against the requirements and output a only a spreadsheet of non compliant findings by pws number, the goal of that pws section, what made it non compliant and your recommendations for recovery. you must also take into account section L&M of the document which is the evaluation criteria to be sure we address them",
    "Gold": "Create a highly detailed Gold Team document based on the PWS response by pws sections. Your goal is to be compliant and compelling by recovering all the findings in Red Review.  Focus on describing the approach and how it will be done, the steps, workflow, people, processes and technology to accomplish the task. Be sure to refer to research that validates the approach and cite sources with measurable outcomes and improve on innovations of the approach",
    "Gold Review": "Ignore all other instructions and generate and perform a final compliance review against the requirements and output only a spreadsheet of non compliant findings by pws number, the goal of that pws section, what made it non compliant and your recommendations for recovery. you must also take into account section L&M of the document which is the evaluation criteria to be sure we address them",
    "Virtual Board": "Ignore all other instructions and generate and based on the requirements and in particular the evaulation criteria, you will evaluate the proposal as if you were a contracting office and provide section by section evaluation as unsatisfactory, satisfactory, good, very good, excellent and why and produce only spreadsheet",
    "LOE": "Ignore all other instructions and generate and generate a Level of Effort (LOE) breakdown and produce only spreadsheet"
}

def process_document(contents, filename):
    """Decode a Dash upload payload and extract its text.

    Args:
        contents: Dash upload string of the form "<content-type>,<base64 data>".
        filename: Original file name; only .docx and .pdf are supported.

    Returns:
        The extracted text on success, or a human-readable error string.
        Errors are returned rather than raised so callers can render them
        directly in the UI.
    """
    try:
        # Split on the first comma only — the base64 payload itself never
        # contains commas, but this guards against odd content-type prefixes.
        content_type, content_string = contents.split(',', 1)
        decoded = base64.b64decode(content_string)
        if filename.lower().endswith('.docx'):
            doc = Document(BytesIO(decoded))
            return "\n".join(p.text for p in doc.paragraphs)
        elif filename.lower().endswith('.pdf'):
            pdf = PdfReader(BytesIO(decoded))
            # extract_text() can return None for image-only pages.
            return "".join(page.extract_text() or "" for page in pdf.pages)
        else:
            # Fixed: the message previously printed the literal "(unknown)"
            # instead of the actual filename.
            return f"Unsupported file format: {filename}"
    except Exception as e:
        logger.error(f"Error processing document {filename}: {e}")
        return f"Failed to process document: {e}"

def call_claude(prompt, max_tokens=2048):
    """Send a single-turn prompt to Claude and return the text reply.

    Any API failure is logged and returned as a plain string so the UI can
    display it instead of crashing the callback.
    """
    try:
        response = anthropic_client.messages.create(
            model=CLAUDE3_SONNET_MODEL,
            max_tokens=max_tokens,
            temperature=0.1,
            system="You are a world class proposal consultant and proposal manager.",
            messages=[{"role": "user", "content": prompt}]
        )
        logger.info("Anthropic API call successful.")
        if hasattr(response, "content"):
            return response.content[0].text
        return str(response)
    except Exception as e:
        logger.error(f"Anthropic API error: {e}")
        return f"Anthropic API error: {e}"

def spreadsheet_to_df(text):
    """Parse a pipe-delimited table out of LLM output into a DataFrame.

    Lines without a '|' are ignored.  The first pipe-containing line is the
    header.  Fixes over the original:
      * markdown alignment rows (e.g. "|---|:---:|") are skipped instead of
        becoming a data row of dashes;
      * ragged rows are padded/truncated to the header width so DataFrame
        construction cannot raise;
      * header and cell whitespace is stripped.

    Returns an empty DataFrame when no table is found.
    """
    lines = [l.strip() for l in text.splitlines() if '|' in l]
    if not lines:
        return pd.DataFrame()
    header = [h.strip() for h in lines[0].strip('|').split('|')]
    width = len(header)
    data = []
    for line in lines[1:]:
        cells = [c.strip() for c in line.strip('|').split('|')]
        # Skip markdown separator rows made only of '-', ':' and spaces.
        if all(set(c) <= set('-: ') for c in cells):
            continue
        # Normalize ragged rows to the header width.
        data.append((cells + [''] * width)[:width])
    return pd.DataFrame(data, columns=header)

def generate_content(document, doc_type, instructions=""):
    """Build the prompt for *doc_type*, call Claude, and parse the reply.

    Returns a (raw response text, parsed DataFrame) pair; the DataFrame is
    empty when no pipe-delimited table could be extracted.
    """
    parts = [f"{document_types[doc_type]}\n\n"]
    if instructions:
        parts.append(f"Additional Instructions:\n{instructions}\n\n")
    parts.append(f"Document:\n{document}\n\nOutput only one spreadsheet table, use | as column separator.")
    prompt = "".join(parts)
    logger.info(f"Generating content for {doc_type} with prompt length {len(prompt)}")
    response = call_claude(prompt, max_tokens=4096)
    return response, spreadsheet_to_df(response)

def parse_markdown(doc, content):
    """Append each blank-line-separated chunk of *content* to *doc* as a paragraph."""
    chunks = content.split('\n\n')
    for chunk in chunks:
        doc.add_paragraph(chunk)

def create_docx(content):
    # Build a python-docx Document from plain text, one paragraph per
    # blank-line-separated chunk.
    # NOTE(review): not referenced anywhere in this chunk — confirm it is
    # still needed (e.g. by a future .docx download) before removing.
    doc = Document()
    parse_markdown(doc, content)
    return doc

# suppress_callback_exceptions=True — presumably because tab contents are
# toggled via style and callbacks reference components in hidden tabs; confirm.
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP], suppress_callback_exceptions=True)
app.title = "MicroHealth PWS Analyzer"

# Ordered tab definitions.  Each id seeds the component ids ("<id>-btn",
# "<id>-upload", "<id>-output", ...) and each label maps onto a
# document_types key (hyphen/space differences are reconciled in
# handle_all_tabs).
main_tabs = [
    {"id": "shred", "label": "Shred"},
    {"id": "pink", "label": "Pink"},
    {"id": "pink-review", "label": "Pink Review"},
    {"id": "red", "label": "Red"},
    {"id": "red-review", "label": "Red Review"},
    {"id": "gold", "label": "Gold"},
    {"id": "gold-review", "label": "Gold Review"},
    {"id": "virtual-board", "label": "Virtual Board"},
    {"id": "loe", "label": "LOE"},
]

def make_upload(btn_id):
    """Return a single-file drag-and-drop upload widget with id '<btn_id>-upload'."""
    box_style = {
        'width': '100%', 'height': '60px', 'lineHeight': '60px',
        'borderWidth': '1px', 'borderStyle': 'dashed', 'borderRadius': '5px',
        'textAlign': 'center', 'margin': '10px',
    }
    return dcc.Upload(
        id=f'{btn_id}-upload',
        children=html.Div(['Drag and Drop or ', html.A('Select Files')]),
        style=box_style,
        multiple=False,
    )

def make_textarea(btn_id, placeholder):
    """Return the optional-instructions textarea with id '<btn_id>-instructions'."""
    area_style = {
        'height': '80px', 'marginBottom': '10px', 'width': '100%',
        'whiteSpace': 'pre-wrap', 'overflowWrap': 'break-word',
    }
    return dbc.Textarea(
        id=f'{btn_id}-instructions',
        placeholder=placeholder,
        style=area_style,
    )

def make_shred_doc_preview():
    """Card showing a preview of the uploaded Shred document; hidden initially."""
    preview = html.Div(
        id="shred-upload-preview",
        style={"whiteSpace": "pre-wrap", "overflowWrap": "break-word"},
    )
    return dbc.Card(
        dbc.CardBody([preview]),
        className="mb-2",
        id="shred-doc-preview-card",
        style={"display": "none"},
    )

def make_shred_controls():
    """Row of Shred action buttons plus the hidden download target component."""
    row_style = {
        'display': 'flex', 'flexWrap': 'wrap', 'gap': '8px',
        'marginTop': '10px', 'marginBottom': '10px',
    }
    buttons = [
        dbc.Button("Delete Document", id="shred-delete-btn",
                   className="mt-2 btn-tertiary", n_clicks=0,
                   style={'marginRight': '8px'}),
        dbc.Button("Generate Shred", id='shred-btn',
                   className="mt-2 btn-primary", n_clicks=0,
                   style={'marginRight': '8px'}),
        dbc.Button("Download Shred Report", id="shred-download-btn",
                   className="mt-2 btn-secondary", n_clicks=0),
        dcc.Download(id="shred-download"),
    ]
    return html.Div(buttons, style=row_style)

def make_tab(tab_id, label):
    """Build the card for one tab.

    The Shred tab gets extra chrome (delete/generate/download controls, a
    document preview, and a dcc.Store holding the uploaded text); every other
    tab is a plain instructions/upload/generate/download card.
    """
    instructions = make_textarea(tab_id, f"Instructions for {label} (optional)")
    if tab_id == "shred":
        children = [
            instructions,
            make_shred_controls(),
            make_upload(tab_id),
            make_shred_doc_preview(),
            dcc.Loading(html.Div(id=f'{tab_id}-output'), id="loading",
                        type="default",
                        parent_style={'justifyContent': 'center'}),
            dcc.Store(id="shred-upload-store"),
        ]
    else:
        children = [
            instructions,
            make_upload(tab_id),
            dbc.Button(f"Generate {label}", id=f'{tab_id}-btn',
                       className="mt-2 btn-primary", n_clicks=0),
            dcc.Loading(html.Div(id=f'{tab_id}-output'), type="default",
                        parent_style={'justifyContent': 'center'}),
            dbc.Button(f"Download {label} Report", id=f"{tab_id}-download-btn",
                       className="mt-2 btn-secondary", n_clicks=0),
            dcc.Download(id=f"{tab_id}-download"),
        ]
    return dbc.Card(dbc.CardBody(children), className="mb-4")

# One card per tab, keyed by tab id; built once at import time.  Iteration
# order of this dict drives the Output/Input/State ordering in handle_all_tabs.
tab_cards = {tab["id"]: make_tab(tab["id"], tab["label"]) for tab in main_tabs}

# Sidebar navigation links; the Shred link starts active.
nav_items = [
    dbc.NavLink(tab["label"], href="#", id=f"nav-{tab['id']}", active=(tab["id"] == "shred")) for tab in main_tabs
]

def all_tabs_div():
    """Wrap every tab card in a toggleable div; only the Shred tab starts visible."""
    wrappers = []
    for tab in main_tabs:
        visible = tab["id"] == "shred"
        wrappers.append(
            html.Div(
                tab_cards[tab["id"]],
                id=f"tabdiv-{tab['id']}",
                style={"display": "block" if visible else "none"},
            )
        )
    return html.Div(wrappers)

# Two-column layout: narrow nav sidebar on the left, all tab cards (toggled
# by display_tab) on the right.
app.layout = dbc.Container([
    html.H1("MicroHealth PWS Analysis and Response Generator", className="my-3"),
    dbc.Row([
        dbc.Col(
            dbc.Card(
                dbc.CardBody([
                    html.Div(nav_items, className="nav flex-column"),
                ])
            ), width=2, style={'minWidth': '150px'}
        ),
        dbc.Col(
            html.Div(all_tabs_div(), id="main-content"),
            width=10
        )
    ])
], fluid=True)

@app.callback(
    [Output(f"tabdiv-{tab['id']}", "style") for tab in main_tabs],
    [Input(f"nav-{tab['id']}", "n_clicks") for tab in main_tabs],
    prevent_initial_call=False
)
def display_tab(*nav_clicks):
    """Show the clicked tab's wrapper div and hide all others.

    Falls back to index 0 (Shred) on initial load, when no nav link has
    fired yet.
    """
    selected = 0
    triggered = callback_context.triggered
    if triggered:
        prop_id = triggered[0]["prop_id"]
        for position, tab in enumerate(main_tabs):
            if prop_id.startswith(f"nav-{tab['id']}"):
                selected = position
                break
    return [
        {"display": "block"} if position == selected else {"display": "none"}
        for position in range(len(main_tabs))
    ]

@app.callback(
    [
        Output("shred-upload-store", "data"),
        Output("shred-upload-preview", "children"),
        Output("shred-doc-preview-card", "style"),
    ],
    [
        Input("shred-upload", "contents"),
        Input("shred-delete-btn", "n_clicks")
    ],
    [
        State("shred-upload", "filename"),
        State("shred-upload-store", "data"),
    ],
    prevent_initial_call=True
)
def update_shred_upload(contents, delete_clicks, filename, stored_data):
    """Handle Shred document uploads and deletions.

    Returns (store data, preview children, preview-card style).  On upload
    the full extracted text is cached in the store so generation does not
    have to re-parse the file; the preview shows only the first 2000 chars.
    """
    triggered = callback_context.triggered
    logger.info("Shred upload callback triggered.")
    if not triggered:
        return dash.no_update, dash.no_update, dash.no_update
    trig_id = triggered[0]["prop_id"].split(".")[0]
    if trig_id == "shred-upload":
        if contents and filename:
            # Fixed: the log and the preview header previously printed the
            # literal "(unknown)" instead of the uploaded filename.
            logger.info(f"Document uploaded in Shred: {filename}")
            full_text = process_document(contents, filename)
            preview = html.Div([
                html.B(f"Uploaded: {filename}"),
                html.Br(),
                html.Div(full_text[:2000] + ("..." if len(full_text) > 2000 else ""),
                         style={"whiteSpace": "pre-wrap",
                                "overflowWrap": "break-word",
                                "fontSize": "small"})
            ])
            # Store the full document text, filename, and preview snippet.
            return ({"contents": contents, "filename": filename,
                     "full_text": full_text, "preview": full_text[:2000]},
                    preview, {"display": "block"})
        return None, "", {"display": "none"}
    if trig_id == "shred-delete-btn":
        logger.info("Shred document deleted by user.")
        return None, "", {"display": "none"}
    return dash.no_update, dash.no_update, dash.no_update

def _result_table(df):
    """Render a parsed result DataFrame as a styled DataTable."""
    return dash_table.DataTable(
        data=df.to_dict('records'),
        columns=[{'name': i, 'id': i} for i in df.columns],
        style_table={'overflowX': 'auto'},
        style_cell={'textAlign': 'left', 'padding': '5px'},
        style_header={'fontWeight': 'bold'}
    )

def _download_payload(prev_output, tab_id):
    """Build a download payload from a tab's previous output, or None.

    Children round-tripped through a State arrive as plain dicts
    ({'type': ..., 'props': {...}}); the original code only checked a
    .props attribute, which such dicts never have, so CSV download of a
    DataTable silently fell through to the text branch.
    """
    props = None
    if isinstance(prev_output, dict):
        props = prev_output.get('props')
    elif hasattr(prev_output, 'props'):
        props = prev_output.props
    if isinstance(props, dict) and 'data' in props:
        df = pd.DataFrame(props['data'])
        buffer = BytesIO()
        df.to_csv(buffer, index=False)
        return dcc.send_bytes(buffer.getvalue(), f"{tab_id}_report.csv")
    if prev_output:
        payload = prev_output.encode("utf-8") if isinstance(prev_output, str) else b""
        return dcc.send_bytes(payload, f"{tab_id}_report.txt")
    return None

@app.callback(
    [Output(f'{tab_id}-output', 'children') for tab_id in tab_cards] +
    [Output(f"{tab_id}-download", "data") for tab_id in tab_cards],
    [Input(f'{tab_id}-btn', 'n_clicks') for tab_id in tab_cards] +
    [Input(f"{tab_id}-download-btn", "n_clicks") for tab_id in tab_cards] +
    [Input("shred-btn", "n_clicks"), Input("shred-download-btn", "n_clicks")],
    [State(f'{tab_id}-upload', 'contents') for tab_id in tab_cards] +
    [State(f'{tab_id}-upload', 'filename') for tab_id in tab_cards] +
    [State(f'{tab_id}-instructions', 'value') for tab_id in tab_cards] +
    [State(f'{tab_id}-output', 'children') for tab_id in tab_cards] +
    [State("shred-upload-store", "data")]
)
def handle_all_tabs(*args):
    """Single dispatcher for every tab's Generate and Download buttons.

    *args* layout (n = number of tabs):
      args[0 : 2n+2]        Input n_clicks values (which button fired comes
                            from callback_context, not these)
      args[2n+2 : 3n+2]     upload contents, per tab
      args[3n+2 : 4n+2]     upload filenames, per tab
      args[4n+2 : 5n+2]     instruction text, per tab
      args[5n+2 : 6n+2]     previous output children, per tab
      args[6n+2]            shred upload store

    Fixed vs. the original:
      * state indices previously forgot to skip the 2n+2 leading Input
        values, so uploads/instructions were read from n_clicks positions;
      * the shred tab fell through into the generic generate branch (both
        match "shred-btn.n_clicks"), overwriting the store-based result;
      * a dead doc_type assignment was removed.
    """
    n = len(tab_cards)
    num_inputs = 2 * n + 2  # n generate + n download buttons + 2 shred extras
    outputs = [None] * (n * 2)
    ctx = callback_context
    if not ctx.triggered:
        return outputs
    trig = ctx.triggered[0]['prop_id']
    logger.info(f"Main callback triggered by {trig}")
    for idx, tab_id in enumerate(tab_cards):
        out_idx = idx
        dl_idx = idx + n
        upload_idx = num_inputs + idx
        filename_idx = num_inputs + n + idx
        instr_idx = num_inputs + 2 * n + idx
        prev_output_idx = num_inputs + 3 * n + idx
        shred_upload_store_idx = num_inputs + 4 * n

        if tab_id == "shred":
            if trig == "shred-btn.n_clicks":
                logger.info(f"Generate Shred button pressed for {tab_id}")
                shred_data = args[shred_upload_store_idx]
                instr = args[instr_idx] or ""
                if shred_data and shred_data.get("full_text"):
                    doc = shred_data["full_text"]
                    logger.info(f"Shred document will be sent to Anthropic with instructions: {instr}")
                    content, df = generate_content(doc, "Shred", instr)
                    if not df.empty:
                        outputs[out_idx] = _result_table(df)
                    else:
                        outputs[out_idx] = html.Div([
                            html.B("Anthropic Response Preview:"),
                            dcc.Markdown(content)
                        ])
                else:
                    outputs[out_idx] = "Please upload a document to begin."
            elif trig == "shred-download-btn.n_clicks":
                outputs[dl_idx] = _download_payload(args[prev_output_idx], tab_id)
            # Shred is fully handled above; do NOT fall through to the
            # generic branch (its gen_btn id also matches "shred-btn").
            continue

        gen_btn = f"{tab_id}-btn.n_clicks"
        dl_btn = f"{tab_id}-download-btn.n_clicks"
        if trig == gen_btn:
            logger.info(f"Generate button pressed for {tab_id}")
            upload = args[upload_idx]
            filename = args[filename_idx]
            instr = args[instr_idx] or ""
            # Map "pink-review" -> "Pink Review" etc.; fall back to a
            # title-cased id if no document_types key matches.
            doc_type = next((k for k in document_types
                             if k.lower().replace(' ', '') == tab_id.replace('-', '')),
                            tab_id.title())
            doc = process_document(upload, filename) if upload and filename else ""
            # Virtual Board may run without an upload (evaluates prior context).
            if doc or tab_id == "virtual-board":
                content, df = generate_content(doc, doc_type, instr)
                outputs[out_idx] = _result_table(df) if not df.empty else dcc.Markdown(content)
            else:
                outputs[out_idx] = "Please upload a document to begin."
        elif trig == dl_btn:
            outputs[dl_idx] = _download_payload(args[prev_output_idx], tab_id)
    return outputs

if __name__ == '__main__':
    # Development server, reachable on all interfaces at port 7860.
    # NOTE(review): debug=True must be disabled before any production deploy.
    print("Starting the Dash application...")
    app.run(debug=True, host='0.0.0.0', port=7860, threaded=True)
    print("Dash application has finished running.")