Add scrap patent
app.py
CHANGED
@@ -4,11 +4,17 @@ from mcp.server.fastmcp import FastMCP
 
 server = FastMCP(name="streamable-http-mcp-server-test", json_response=False, stateless_http=False)
 
-async def make_request(url: str, data: Dict[str, Any]):
+async def make_request(url: str, method: Literal["GET", "POST"], data: Dict[str, Any] = {}):
     headers = {"Accept": "application/json"}
     async with httpx.AsyncClient(verify=False) as client:
         try:
-            response = await client.post(url, headers=headers, json=data)
+            if method == "GET":
+                response = await client.get(url, headers=headers)
+            elif method == "POST":
+                response = await client.post(url, headers=headers, json=data)
+            else:
+                print("Method not allowed !")
+                return None
             response.raise_for_status()
             return response.json()
         except:
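For context, a minimal sketch (not part of the commit) of how the reworked helper is driven once both branches exist. It assumes app.py is importable as a module named app; the document and patent IDs below are placeholders.

# Sketch only (not part of the commit): exercising the reworked helper directly.
# Assumes app.py is importable as `app`; the IDs below are placeholders.
import asyncio

from app import make_request

async def demo():
    # POST branch: `data` is serialized as the JSON body, as the tools below do.
    found = await make_request(
        "https://organizedprogrammers-docfinder.hf.space/find/single",
        "POST",
        {"doc_id": "TS 23.501"},  # placeholder document ID
    )
    # GET branch: only the Accept header is sent and `data` is ignored.
    patent = await make_request(
        "https://organizedprogrammers-serpent.hf.space/scrap/scrap_patent/US9876543B2",  # placeholder ID
        "GET",
    )
    # The tool functions below treat a falsy return value as a failed request.
    print(found, patent)

asyncio.run(demo())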
@@ -46,7 +52,7 @@ async def get_document_url(doc_id: str) -> str:
     Returns the URL (also scope + version if doc is a specification [not all specifications have a version or scope])
     Arguments: doc_id -> string
     """
-    response = await make_request('https://organizedprogrammers-docfinder.hf.space/find/single', {"doc_id": doc_id})
+    response = await make_request('https://organizedprogrammers-docfinder.hf.space/find/single', "POST", {"doc_id": doc_id})
     if not response:
         return "Unable to find document/specification"
     version = response.get('version', 'unavailable')
@@ -64,7 +70,7 @@ async def search_specifications_with_keywords(keywords: str, threshold: int = 60
     - source -> string (either '3GPP', 'ETSI' or 'all', by default, set to 'all')
     - spec_type -> string (either 'TS' or 'TR' or None, by default, set to None)
     """
-    response = await make_request('https://organizedprogrammers-docfinder.hf.space/search/bm25', {"keywords": keywords, "threshold": threshold, "source": source, "spec_type": spec_type})
+    response = await make_request('https://organizedprogrammers-docfinder.hf.space/search/bm25', "POST", {"keywords": keywords, "threshold": threshold, "source": source, "spec_type": spec_type})
     if not response:
         return "Unable to search specifications | No specifications has been found"
     results = response["results"]
@@ -78,7 +84,7 @@ async def get_spec_text(spec_id: str) -> str:
     Returns a dictionary k:v where k is the section (1., 2.2.1, ...) and v, the content of k, or a string if failed
     Args: spec_id -> string
     """
-    response = await make_request('https://organizedprogrammers-specsplitter.hf.space/extract_text/structured', {"spec_id": spec_id})
+    response = await make_request('https://organizedprogrammers-specsplitter.hf.space/extract_text/structured', "POST", {"spec_id": spec_id})
     if not response:
         return "Unable to extract specification text"
     return "\n".join([f"{k}: {v}" for k, v in response.keys()])
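One aside on the unchanged return line above (it predates this commit, so the diff leaves it as is): it iterates response.keys() while unpacking two values per element, which splits each key string into characters and raises ValueError for any section key that is not exactly two characters long. If the intent is to join section/content pairs, dict.items() is the usual call; a minimal sketch with placeholder data:

# Sketch only: the join that response.items() would produce for a structured reply.
response = {"1.": "Scope", "2.2.1": "References"}  # placeholder section -> content mapping
print("\n".join(f"{section}: {content}" for section, content in response.items()))
# prints each "section: content" pair on its own line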
@@ -93,8 +99,27 @@ async def search_google_patents(queries: List[str], n_results: int) -> str:
     Returns a list of patents from queries, for each query, {n_results} patents will be retrieved
     Args: queries -> list of string, n_results -> integer [by default: 10]
     """
-    response = await make_request("https://organizedprogrammers-serpent.hf.space/serp/search_patents", {"queries": queries, "n_results": n_results})
+    response = await make_request("https://organizedprogrammers-serpent.hf.space/serp/search_patents", "POST", {"queries": queries, "n_results": n_results})
     if not response:
         return "Unable to fetch patents"
     return "\n".join(f"[Patent ID: {patent['id']} | Title: {patent['title']} | Body: {patent['body']}]" for patent in response.results)
+
+@server.tool()
+async def scrap_google_patents(patent_ids: List[str]) -> str:
+    """
+    Scrap patents from one or many patents from Google Patents
+    Returns a list of patents with their title, abstract, description, claims, field of invention and background
+    Args: patent_ids -> list of strings corresponding to Google Patent ID [min. 1]
+    """
+    if len(patent_ids) > 1:
+        response = await make_request("https://organizedprogrammers-serpent.hf.space/scrap/scrap_patents_bulk", "POST", {"patent_ids": patent_ids})
+        if not response:
+            return "Unable to scrap patents"
+        return "\n---\n".join([f"Title: {pat['title']}\nAbstract: {pat['abstract']}\nDescription: {pat['description']}\nClaims: {pat['claims']}\nField of invention{pat['field_of_invention']}\nBackground: {pat['background']}" for pat in response['patents']])
+    elif len(patent_ids) == 1:
+        response = await make_request("https://organizedprogrammers-serpent.hf.space/scrap/scrap_patent/"+patent_ids[0], "GET")
+        if not response:
+            return "Unable to scrap patent"
+        return f"Title: {response['title']}\nAbstract: {response['abstract']}\nDescription: {response['description']}\nClaims: {response['claims']}\nField of invention{response['field_of_invention']}\nBackground: {response['background']}"
+
 app = server.streamable_http_app
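Finally, a hedged end-to-end sketch of calling the new tool once the Space is serving this app. It assumes the streamable-HTTP client helpers from the official MCP Python SDK (streamablehttp_client and ClientSession); the endpoint URL and patent ID are placeholders for wherever the server is actually reachable.

# Sketch only: invoking scrap_google_patents over streamable HTTP.
# URL and patent ID are placeholders; client helpers come from the MCP Python SDK.
import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

async def main():
    async with streamablehttp_client("http://localhost:8000/mcp") as (read, write, _):
        async with ClientSession(read, write) as session:
            await session.initialize()
            result = await session.call_tool(
                "scrap_google_patents",
                {"patent_ids": ["US9876543B2"]},  # placeholder Google Patents ID
            )
            # A single ID goes through the tool's GET branch; two or more IDs
            # are sent to the bulk POST endpoint instead.
            print(result.content)

asyncio.run(main())

The same session.call_tool pattern applies to the other tools registered on the server, such as search_google_patents and get_spec_text.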