# NOTE: removed Hugging Face file-viewer residue ("Upload folder using
# huggingface_hub", raw/history/blame links, size) that was not part of the code.
from entity import Docs, Cluster, Preprocess, SummaryInput
from fastapi import FastAPI
import time
import hashlib
import json
from fastapi.middleware.cors import CORSMiddleware
from iclibs.ic_rabbit import ICRabbitMQ
from get_config import config_params
# FastAPI application object for the mnews topic-clustering service.
app = FastAPI()
# Allow cross-origin requests from any host so browser front-ends on other
# domains can call the API.
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# normally rejected by the CORS spec (credentials require an explicit origin) —
# confirm this combination actually works for the intended clients.
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
def get_hash_id(item: "Docs") -> str:
    """Build a deterministic fingerprint for a clustering request.

    The fingerprint is the SHA-224 digest of every document URL followed by
    each tuning parameter that influences the clustering result, so identical
    requests always map to the same id (used as a cache/dedup key).

    Args:
        item: request payload; ``item.response["docs"]`` must be a list of
            dicts each carrying a ``"url"`` key.

    Returns:
        Hex-encoded SHA-224 digest string.
    """
    # Collect the parts and join once instead of repeated `+=`, which is
    # quadratic in the worst case. Order matches the original concatenation
    # so existing cached hashes stay valid.
    parts = [doc["url"] for doc in item.response["docs"]]
    parts.extend(
        str(value)
        for value in (
            item.top_cluster,
            item.top_sentence,
            item.topn_summary,
            item.top_doc,
            item.threshold,
        )
    )
    # sorted_field participates only when non-blank, mirroring the optional
    # sort semantics of the API.
    if item.sorted_field.strip():
        parts.append(str(item.sorted_field))
    return hashlib.sha224("".join(parts).encode("utf-8")).hexdigest()
# Best-effort warm-up of an in-memory dict from a JSON file on disk.
# Any failure (missing file, malformed JSON, permission error, ...) is printed
# and we fall back to an empty dict instead of refusing to start.
# NOTE(review): data_dict is not referenced anywhere else in this file —
# confirm it is still needed (presumably a response cache keyed by hash_str).
try:
    with open("log_mnews/log.txt") as f:
        data_dict = json.load(f)
except Exception as ve:
    print(ve)
    data_dict = {}
def init_rabbit_queue(usr, passw, host, vir_host, queue_name, durable, max_priority, exchange=""):
    """Open a RabbitMQ connection and declare *queue_name* on it.

    Returns a ``(channel, connection, queue_name)`` triple: the channel to
    publish on, the live connection (so the caller can dispose of it), and
    the queue name echoed back for convenience.
    """
    conn = ICRabbitMQ(host, vir_host, usr, passw)
    conn.init_connection()
    chan = conn.init_queue(
        queue_name,
        exchange=exchange,
        durable=durable,
        max_priority=max_priority,
    )
    return chan, conn, queue_name
@app.post("/mnews/topic_clustering")
async def topic_clustering_v2(item: Docs):
    """Accept a topic-clustering request and enqueue it for async processing.

    The request is fingerprinted (``get_hash_id``) and the full payload is
    published to the mnews clustering queue; workers consume it out of band,
    so the endpoint returns immediately with a success acknowledgement.
    """
    print("command id: ", item.command_id)
    docs = item.response["docs"]
    meta = item.response.get('meta', {})
    print("start ")
    print("len doc: ", len(docs))
    st_time = time.time()
    hash_str = get_hash_id(item)

    # Everything a worker needs, including the fingerprint used to key
    # cached/deduplicated results.
    data_push = {
        "threshold": item.threshold,
        "top_cluster": item.top_cluster,
        "top_sentence": item.top_sentence,
        "topn_summary": item.topn_summary,
        "hash_str": hash_str,
        "st_time": st_time,
        "command_id": item.command_id,
        "docs": docs,
        "meta": meta,
    }

    params = config_params['queue_topic_clustering_mnews']
    channel, connection, queue = init_rabbit_queue(
        params["usr_name"],
        str(params["password"]),
        params["host"],
        params["virtual_host"],
        params["queue_name"],
        True,   # durable queue
        10,     # max priority
    )
    # NOTE(review): a new connection is opened per request and never closed —
    # confirm ICRabbitMQ exposes a close/teardown API and whether pooling is
    # intended here.
    # delivery_mode=2 marks the message persistent.
    ICRabbitMQ.publish_message(
        channel, queue, data_push, priority=1, delivery_mode=2, exchange=''
    )
    return {"message": "success"}