Alexandre Gazola committed
Commit · 30003b4
Parent(s): 0426c84
pergunta do xadrez (chess question)
Files changed:
- analyse_chess_position_tool.py +37 -0
- constants.py +1 -0
- convert_to_fen_tool.py +0 -40
- image_to_text_tool.py +6 -4
- langchain_agent.py +2 -1
analyse_chess_position_tool.py
ADDED
@@ -0,0 +1,37 @@
+import requests
+
+def get_best_move(fen: str) -> str:
+    """
+    Given the description of a chessboard using FEN notation, returns the next best move.
+
+    Args:
+        fen (str): The description of the chessboard position in FEN notation.
+
+    Returns:
+        str: The description of the next best move.
+    """
+    url = "https://lichess.org/api/cloud-eval"
+    params = {
+        "fen": fen,
+        "multiPv": 1,  # only top move
+        "syzygy": 5    # enable tablebase for 5-piece positions
+    }
+
+    response = requests.get(url, params=params)
+
+    if response.status_code != 200:
+        return f"API error: {response.status_code} - {response.text}"
+
+    data = response.json()
+    if not data.get("pvs"):
+        return "No best move found."
+
+    best_line = data["pvs"][0]
+    score = best_line["cp"] if "cp" in best_line else f"Mate in {best_line.get('mate')}"
+    move = best_line["moves"].split()[0]
+
+    # Optional: threshold to define a "guaranteed win"
+    if "mate" in best_line or best_line.get("cp", 0) > 500:
+        return move
+
+    return f"No clearly winning move. Best suggestion: {move} (score: {score})"
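A minimal usage sketch for the new tool (not part of the commit; the FEN below is the standard starting position, and the returned suggestion depends on whatever evaluation Lichess has cached for that position):

# Usage sketch (illustrative): call the new tool directly with a FEN string.
from analyse_chess_position_tool import get_best_move

start_fen = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"
print(get_best_move(start_fen))
# Prints either a bare UCI move such as "e2e4" (mate found or cp > 500),
# or "No clearly winning move. Best suggestion: e2e4 (score: 18)".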
constants.py
CHANGED
@@ -21,6 +21,7 @@ PROMPT_LIMITADOR_LLM = """
 Do not attempt to:
 - Access external websites or databases without the tools that I provide.
 - Use any internal knowledge base beyond what I provide.
+- You do not know how to interpret an image on your own. If you need to analyse an image, use the tool that converts the image into text.
 - Categorize food items based on your own knowledge. Use the provided tools. Run the provided tools on every item of the grocery list!
 - Make assumptions or inferences based on information not explicitly given.
 - Utilize any built-in tools or functions that I have not specifically presented.
convert_to_fen_tool.py
DELETED
@@ -1,40 +0,0 @@
-import cv2
-import numpy as np
-import constants
-from langchain_core.prompts import ChatPromptTemplate
-from langchain_google_genai import ChatGoogleGenerativeAI
-from langgraph.graph import StateGraph, END
-from typing import TypedDict, List, Dict
-import google.generativeai as genai
-
-def image_to_fen(image_bytes):
-
-    genai.configure(api_key=constants.API_KEY)
-
-    model = genai.GenerativeModel(constants.MODEL)
-
-    response = model.generate_content([
-        {"mime_type": "image/jpeg", "data": image_bytes},
-        "Describe the chessboard in this image and provide the FEN notation."
-    ])
-
-    print(response.text)
-
-    return ''
-
-if __name__ == '__main__':
-    # Example usage:
-    # 1. Load an image from a file (replace with your image path)
-    image_path = r"C:\Users\agazo\Downloads\cca530fc-4052-43b2-b130-b30968d8aa44_file.png"  # Replace with a valid image path
-    try:
-        with open(image_path, "rb") as image_file:
-            image_bytes = image_file.read()
-    except FileNotFoundError:
-        print(f"Error: File not found at {image_path}. Please make sure the path is correct and the file exists.")
-        exit()
-
-    # 2. Call the function
-    fen = image_to_fen(image_bytes)
-    print(f"FEN: {fen}")
-
-# expected FEN result: 1K6/1PP5/P2RBBqP/4n3/Q7/p2b4/1pp3pp/1k2r3 w - - 0 1
image_to_text_tool.py
CHANGED
@@ -3,11 +3,12 @@ import google.generativeai as genai
 from langchain_core.tools import tool
 from PIL import Image
 import io
+import base64
 
 @tool
-def image_to_text(image_bytes, instructions):
+def image_to_text(image_base64_str, instructions):
     """
-    Generates a text describing an image.
+    Generates a text describing an image provided as a base64 string.
 
     Args:
         image_bytes (bytes): the bytes of the image to be described.
@@ -16,6 +17,8 @@ def image_to_text(image_bytes, instructions):
 
     Returns:
         str: A string describing the image according to the instructions given.
     """
+
+    image_bytes = base64.b64decode(image_base64_str)
 
     genai.configure(api_key=constants.API_KEY)
     model = genai.GenerativeModel(constants.MODEL)
@@ -51,5 +54,4 @@ if __name__ == '__main__':
     )
     print(f"FEN: {text}")
 
-# expected FEN result;
-# 3r2k1/pp4pp/4b2p/Q7/3n4/PqBBR2P/PP4P1/K7 w - - 0 1
+# expected FEN result: `3r2k1/pp3pp1/4b2p/7Q/3n4/PqBBR2P/5PP1/6K1 w - - 0 1`
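A hedged caller sketch for the reworked tool (the image path is hypothetical; since image_to_text is a LangChain @tool, it is invoked with a dict of arguments):

# Caller sketch (illustrative): base64-encode an image file and invoke the tool.
import base64
from image_to_text_tool import image_to_text

with open("chessboard.png", "rb") as f:  # hypothetical image path
    image_base64_str = base64.b64encode(f.read()).decode("utf-8")

description = image_to_text.invoke({
    "image_base64_str": image_base64_str,
    "instructions": "Describe the chessboard and provide the FEN notation.",
})
print(description)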
langchain_agent.py
CHANGED
@@ -27,7 +27,8 @@ class LangChainAgent:
             image_to_text,
             internet_search,
             get_botanical_classification,
-            parse_excel
+            parse_excel,
+            get_best_move
         ]
 
         prompt = ChatPromptTemplate.from_messages([
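A sketch of how the extended tools list might be wired into the agent (model name, system prompt, and agent construction are assumptions; only the tool names mirror this diff). Unlike the other tools, get_best_move is a plain function in this commit, so it is wrapped explicitly here:

# Wiring sketch under assumptions: model name, prompt text, and agent setup are illustrative.
from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.tools import StructuredTool
from langchain_google_genai import ChatGoogleGenerativeAI

from analyse_chess_position_tool import get_best_move
from image_to_text_tool import image_to_text

llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")  # assumed model name
tools = [image_to_text, StructuredTool.from_function(get_best_move)]

prompt = ChatPromptTemplate.from_messages([
    ("system", "Answer using only the provided tools."),  # assumed system prompt
    ("human", "{input}"),
    ("placeholder", "{agent_scratchpad}"),
])

agent = create_tool_calling_agent(llm, tools, prompt)
executor = AgentExecutor(agent=agent, tools=tools)
print(executor.invoke({"input": "What is the best move for white in this FEN: <fen>?"}))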