Update modules/database/database.py
Browse files- modules/database/database.py +25 -18
modules/database/database.py
CHANGED
|
@@ -256,34 +256,33 @@ def store_morphosyntax_result(username, text, repeated_words, arc_diagrams, pos_
|
|
################################################################################################################
def store_semantic_result(username, text, analysis_result):
    """Persist one semantic-analysis result for *username* in MongoDB.

    Parameters
    ----------
    username : str
        Owner of the analysis entry.
    text : str
        The analyzed source text; stored verbatim in the document.
    analysis_result : dict
        Expected keys: 'relations_graph' (a matplotlib figure — anything with
        a ``savefig`` method) and 'key_concepts' (iterable of
        (concept, frequency) pairs).  TODO confirm exact producer contract.

    Returns
    -------
    bool
        True when the document was inserted, False on any failure.
    """
    if analysis_collection is None:
        # Previously this returned silently; log so a missing DB connection
        # is visible, consistent with get_student_data's handling.
        logger.error("La conexión a MongoDB no está inicializada")
        return False
    try:
        # Render the relations graph into a base64-encoded PNG so it can be
        # stored as a plain string field in the document.
        buf = BytesIO()
        analysis_result['relations_graph'].savefig(buf, format='png')
        buf.seek(0)
        img_str = base64.b64encode(buf.getvalue()).decode('utf-8')

        # Coerce frequencies to plain float so the values are
        # BSON-serializable even if the analyzer yields numpy scalars.
        key_concepts = [(concept, float(frequency)) for concept, frequency in analysis_result['key_concepts']]

        analysis_document = {
            'username': username,
            'timestamp': datetime.utcnow(),
            'text': text,
            'key_concepts': key_concepts,
            'graph': img_str,
            'analysis_type': 'semantic'
        }

        result = analysis_collection.insert_one(analysis_document)

        logger.info(f"Longitud de la imagen guardada: {len(img_str)}")
        return True
    except Exception as e:
        # Was a silent swallow (bare return False); log the failure so
        # insert/encoding errors are diagnosable, then keep the best-effort
        # False return that callers rely on.
        logger.error(f"Error al guardar el análisis semántico para el usuario {username}: {str(e)}")
        return False

###############################################################################################################
|
|
@@ -320,6 +319,10 @@ def store_discourse_analysis_result(username, text1, text2, analysis_result):
|
|
| 320 |
img_str_combined = base64.b64encode(buf_combined.getvalue()).decode('utf-8')
|
| 321 |
plt.close(fig)
|
| 322 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 323 |
# Crear el documento para guardar
|
| 324 |
analysis_document = {
|
| 325 |
'username': username,
|
|
@@ -329,6 +332,8 @@ def store_discourse_analysis_result(username, text1, text2, analysis_result):
|
|
| 329 |
'graph1': img_str1,
|
| 330 |
'graph2': img_str2,
|
| 331 |
'combined_graph': img_str_combined,
|
|
|
|
|
|
|
| 332 |
'analysis_type': 'discourse'
|
| 333 |
}
|
| 334 |
|
|
@@ -366,7 +371,6 @@ def get_student_data(username):
|
|
| 366 |
if analysis_collection is None or chat_collection is None:
|
| 367 |
logger.error("La conexión a MongoDB no está inicializada")
|
| 368 |
return None
|
| 369 |
-
|
| 370 |
formatted_data = {
|
| 371 |
"username": username,
|
| 372 |
"entries": [],
|
|
@@ -376,7 +380,6 @@ def get_student_data(username):
|
|
| 376 |
"discourse_analyses": [],
|
| 377 |
"chat_history": []
|
| 378 |
}
|
| 379 |
-
|
| 380 |
try:
|
| 381 |
logger.info(f"Buscando datos de análisis para el usuario: {username}")
|
| 382 |
cursor = analysis_collection.find({"username": username})
|
|
@@ -384,12 +387,12 @@ def get_student_data(username):
|
|
| 384 |
for entry in cursor:
|
| 385 |
formatted_entry = {
|
| 386 |
"timestamp": entry.get("timestamp", datetime.utcnow()),
|
| 387 |
-
"text": entry.get("text", ""),
|
| 388 |
"analysis_type": entry.get("analysis_type", "morphosyntax")
|
| 389 |
}
|
| 390 |
|
| 391 |
if formatted_entry["analysis_type"] == "morphosyntax":
|
| 392 |
formatted_entry.update({
|
|
|
|
| 393 |
"word_count": entry.get("word_count", {}),
|
| 394 |
"arc_diagrams": entry.get("arc_diagrams", [])
|
| 395 |
})
|
|
@@ -397,13 +400,18 @@ def get_student_data(username):
|
|
| 397 |
formatted_data["word_count"][category] = formatted_data["word_count"].get(category, 0) + count
|
| 398 |
|
| 399 |
elif formatted_entry["analysis_type"] == "semantic":
|
| 400 |
-
formatted_entry
|
|
|
|
|
|
|
|
|
|
| 401 |
formatted_data["semantic_analyses"].append(formatted_entry)
|
| 402 |
|
| 403 |
elif formatted_entry["analysis_type"] == "discourse":
|
| 404 |
formatted_entry.update({
|
| 405 |
"text1": entry.get("text1", ""),
|
| 406 |
"text2": entry.get("text2", ""),
|
|
|
|
|
|
|
| 407 |
"graph1": entry.get("graph1", ""),
|
| 408 |
"graph2": entry.get("graph2", ""),
|
| 409 |
"combined_graph": entry.get("combined_graph", "")
|
|
@@ -435,6 +443,5 @@ def get_student_data(username):
|
|
| 435 |
|
| 436 |
except Exception as e:
|
| 437 |
logger.error(f"Error al obtener historial de chat del estudiante {username}: {str(e)}")
|
| 438 |
-
|
| 439 |
logger.info(f"Datos formateados para {username}: {formatted_data}")
|
| 440 |
return formatted_data
|
|
|
|
################################################################################################################
def store_semantic_result(username, text, analysis_result):
    """Persist one semantic-analysis result for *username* in MongoDB.

    Parameters
    ----------
    username : str
        Owner of the analysis entry.
    text : str
        The analyzed source text.  NOTE(review): this version does not store
        it in the document — kept in the signature for caller compatibility.
    analysis_result : dict
        Expected keys: 'key_concepts' (iterable of (concept, frequency)
        pairs) and 'relations_graph' (a matplotlib figure — anything with a
        ``savefig`` method).  TODO confirm exact producer contract.

    Returns
    -------
    bool
        True when the document was inserted, False on any failure.
    """
    if analysis_collection is None:
        # Consistency fix: the rest of this module reports through `logger`,
        # not print().  Message text preserved.
        logger.error("La conexión a MongoDB no está inicializada")
        return False
    try:
        # Coerce frequencies to plain float so the values are
        # BSON-serializable even if the analyzer yields numpy scalars.
        key_concepts = [(concept, float(frequency)) for concept, frequency in analysis_result['key_concepts']]

        # Render the relations graph into a base64-encoded PNG so it can be
        # stored as a plain string field in the document.
        buf = BytesIO()
        analysis_result['relations_graph'].savefig(buf, format='png')
        buf.seek(0)
        img_str = base64.b64encode(buf.getvalue()).decode('utf-8')

        analysis_document = {
            'username': username,
            'timestamp': datetime.utcnow(),
            'key_concepts': key_concepts,
            'graph': img_str,
            'analysis_type': 'semantic'
        }

        result = analysis_collection.insert_one(analysis_document)
        logger.info(f"Análisis semántico guardado con ID: {result.inserted_id} para el usuario: {username}")
        return True
    except Exception as e:
        # Broad catch is deliberate (best-effort persistence), but report it
        # through the module logger instead of print().
        logger.error(f"Error al guardar el análisis semántico para el usuario {username}: {str(e)}")
        logger.error(f"Tipo de excepción: {type(e).__name__}")
        logger.error(f"Detalles de la excepción: {e.args}")
        return False

###############################################################################################################
|
|
|
|
| 319 |
img_str_combined = base64.b64encode(buf_combined.getvalue()).decode('utf-8')
|
| 320 |
plt.close(fig)
|
| 321 |
|
| 322 |
+
# Convertir los conceptos clave a listas de tuplas
|
| 323 |
+
key_concepts1 = [(concept, float(frequency)) for concept, frequency in analysis_result['key_concepts1']]
|
| 324 |
+
key_concepts2 = [(concept, float(frequency)) for concept, frequency in analysis_result['key_concepts2']]
|
| 325 |
+
|
| 326 |
# Crear el documento para guardar
|
| 327 |
analysis_document = {
|
| 328 |
'username': username,
|
|
|
|
| 332 |
'graph1': img_str1,
|
| 333 |
'graph2': img_str2,
|
| 334 |
'combined_graph': img_str_combined,
|
| 335 |
+
'key_concepts1': key_concepts1,
|
| 336 |
+
'key_concepts2': key_concepts2,
|
| 337 |
'analysis_type': 'discourse'
|
| 338 |
}
|
| 339 |
|
|
|
|
| 371 |
if analysis_collection is None or chat_collection is None:
|
| 372 |
logger.error("La conexión a MongoDB no está inicializada")
|
| 373 |
return None
|
|
|
|
| 374 |
formatted_data = {
|
| 375 |
"username": username,
|
| 376 |
"entries": [],
|
|
|
|
| 380 |
"discourse_analyses": [],
|
| 381 |
"chat_history": []
|
| 382 |
}
|
|
|
|
| 383 |
try:
|
| 384 |
logger.info(f"Buscando datos de análisis para el usuario: {username}")
|
| 385 |
cursor = analysis_collection.find({"username": username})
|
|
|
|
| 387 |
for entry in cursor:
|
| 388 |
formatted_entry = {
|
| 389 |
"timestamp": entry.get("timestamp", datetime.utcnow()),
|
|
|
|
| 390 |
"analysis_type": entry.get("analysis_type", "morphosyntax")
|
| 391 |
}
|
| 392 |
|
| 393 |
if formatted_entry["analysis_type"] == "morphosyntax":
|
| 394 |
formatted_entry.update({
|
| 395 |
+
"text": entry.get("text", ""),
|
| 396 |
"word_count": entry.get("word_count", {}),
|
| 397 |
"arc_diagrams": entry.get("arc_diagrams", [])
|
| 398 |
})
|
|
|
|
| 400 |
formatted_data["word_count"][category] = formatted_data["word_count"].get(category, 0) + count
|
| 401 |
|
| 402 |
elif formatted_entry["analysis_type"] == "semantic":
|
| 403 |
+
formatted_entry.update({
|
| 404 |
+
"key_concepts": entry.get("key_concepts", []),
|
| 405 |
+
"graph": entry.get("graph", "")
|
| 406 |
+
})
|
| 407 |
formatted_data["semantic_analyses"].append(formatted_entry)
|
| 408 |
|
| 409 |
elif formatted_entry["analysis_type"] == "discourse":
|
| 410 |
formatted_entry.update({
|
| 411 |
"text1": entry.get("text1", ""),
|
| 412 |
"text2": entry.get("text2", ""),
|
| 413 |
+
"key_concepts1": entry.get("key_concepts1", []),
|
| 414 |
+
"key_concepts2": entry.get("key_concepts2", []),
|
| 415 |
"graph1": entry.get("graph1", ""),
|
| 416 |
"graph2": entry.get("graph2", ""),
|
| 417 |
"combined_graph": entry.get("combined_graph", "")
|
|
|
|
| 443 |
|
| 444 |
except Exception as e:
|
| 445 |
logger.error(f"Error al obtener historial de chat del estudiante {username}: {str(e)}")
|
|
|
|
| 446 |
logger.info(f"Datos formateados para {username}: {formatted_data}")
|
| 447 |
return formatted_data
|