Files changed (4)
  1. .python-version +0 -1
  2. Dockerfile +6 -2
  3. app.py +15 -3
  4. mcp_server.py +11 -2
.python-version DELETED
@@ -1 +0,0 @@
-3.11
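Note: with .python-version removed, tools that read this file (e.g. pyenv) no longer pin Python 3.11 for local development; inside the container the version is still fixed by the Dockerfile base image changed below.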
 
 
Dockerfile CHANGED
@@ -1,4 +1,4 @@
-FROM python:3.11
+FROM python:3.11-slim
 
 # Set working directory
 WORKDIR /app
@@ -6,6 +6,7 @@ WORKDIR /app
 # Install system dependencies
 RUN apt-get update && apt-get install -y \
     git \
+    mc \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy project files
@@ -14,6 +15,7 @@ COPY server.py .
 COPY mcp_server.py .
 COPY env.example .
 COPY README.md .
+COPY tag.log .
 
 # Install Python dependencies
 RUN pip install --no-cache-dir -e .
@@ -24,10 +26,12 @@ USER appuser
 
 # Expose port
 EXPOSE 8000
+EXPOSE 443
 
 # Health check
 HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
     CMD curl -f http://localhost:8000/ || exit 1
 
+ENTRYPOINT ["python", "app.py"]
 # Run the application
-CMD ["python", "app.py"]
+CMD ["-c"]
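Note on the last two lines: with an exec-form ENTRYPOINT and CMD, Docker appends the CMD value to the ENTRYPOINT, so this image now starts as `python app.py -c`. If the `-c` argument is not something app.py expects, an empty CMD (or keeping a single `CMD ["python", "app.py"]` without an ENTRYPOINT) would avoid passing it.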
app.py CHANGED
@@ -11,13 +11,21 @@ import uvicorn
 from pydantic import BaseModel
 from huggingface_hub.inference._mcp.agent import Agent
 from dotenv import load_dotenv
+# add logging
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(filename='/home/user/tag.log', encoding='utf-8', level=logging.DEBUG)
+logger.debug('This message should go to the log file')
+logger.info('So should this')
+logger.warning('And this, too')
+logger.error('And non-ASCII stuff, too, like Øresund and Malmö')
 
 load_dotenv()
 
 # Configuration
 WEBHOOK_SECRET = os.getenv("WEBHOOK_SECRET", "your-webhook-secret")
 HF_TOKEN = os.getenv("HF_TOKEN")
-HF_MODEL = os.getenv("HF_MODEL", "Qwen/Qwen2.5-72B-Instruct")
+HF_MODEL = os.getenv("HF_MODEL", "microsoft/DialoGPT-medium")
 # Use a valid provider literal from the documentation
 DEFAULT_PROVIDER: Literal["hf-inference"] = "hf-inference"
 HF_PROVIDER = os.getenv("HF_PROVIDER", DEFAULT_PROVIDER)
@@ -83,13 +91,14 @@ app.add_middleware(CORSMiddleware, allow_origins=["*"])
 async def get_agent():
     """Get or create Agent instance"""
     print("🤖 get_agent() called...")
+    logger.info('get_agent() called...')
     global agent_instance
     if agent_instance is None and HF_TOKEN:
         print("🔧 Creating new Agent instance...")
         print(f"🔑 HF_TOKEN present: {bool(HF_TOKEN)}")
         print(f"🤖 Model: {HF_MODEL}")
         print(f"🔗 Provider: {DEFAULT_PROVIDER}")
-
+        logger.info('Creating new Agent instance...')
         try:
             agent_instance = Agent(
                 model=HF_MODEL,
@@ -108,7 +117,9 @@ async def get_agent():
                 ],
             )
             print("✅ Agent instance created successfully")
+            logger.info('Agent instance created successfully')
             print("🔧 Loading tools...")
+            logger.info('Loading tools...')
             await agent_instance.load_tools()
             print("✅ Tools loaded successfully")
         except Exception as e:
@@ -518,7 +529,8 @@ app = gr.mount_gradio_app(app, gradio_app, path="/gradio")
 
 
 if __name__ == "__main__":
+    logger.info('🚀 Starting HF Tagging Bot...')
     print("🚀 Starting HF Tagging Bot...")
     print("📊 Dashboard: http://localhost:7860/gradio")
     print("🔗 Webhook: http://localhost:7860/webhook")
-    uvicorn.run("app:app", host="0.0.0.0", port=7860, reload=True)
+    uvicorn.run("app:app", host="0.0.0.0", port=7860, reload=False)
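For context, a minimal self-contained sketch of the file-based logging these lines set up; the DEBUG level and utf-8 encoding come from the diff, while the LOG_FILE override and the format string are assumptions added here for illustration (the PR itself hard-codes /home/user/tag.log):

import logging
import os

# The PR hard-codes /home/user/tag.log; LOG_FILE is a hypothetical override for local runs.
log_path = os.getenv("LOG_FILE", "tag.log")

logging.basicConfig(
    filename=log_path,
    encoding="utf-8",  # supported since Python 3.9
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)
logger = logging.getLogger(__name__)
logger.info("Logging configured, writing to %s", log_path)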
mcp_server.py CHANGED
@@ -10,6 +10,13 @@ from huggingface_hub import HfApi, model_info, ModelCard, ModelCardData
 from huggingface_hub.utils import HfHubHTTPError
 from dotenv import load_dotenv
 
+# adding logger
+import logging
+loggerS = logging.getLogger(__name__)
+logging.basicConfig(filename='/home/user/Stag.log', encoding='utf-8', level=logging.DEBUG)
+# for testing logger
+loggerS.debug('MCP-server, This message should go to the log file')
+
 load_dotenv()
 
 # Configuration
@@ -26,7 +33,7 @@ mcp = FastMCP("hf-tagging-bot")
 def get_current_tags(repo_id: str) -> str:
     """Get current tags from a HuggingFace model repository"""
     print(f"🔧 get_current_tags called with repo_id: {repo_id}")
-
+    loggerS.info('🔧 MCP Server - get_current_tags called with repo_id: %s', repo_id)
     if not hf_api:
         error_result = {"error": "HF token not configured"}
         json_str = json.dumps(error_result)
@@ -35,6 +42,7 @@ def get_current_tags(repo_id: str) -> str:
 
     try:
         print(f"📡 Fetching model info for: {repo_id}")
+        loggerS.info('📡 MCP Server - Fetching model info for: %s', repo_id)
         info = model_info(repo_id=repo_id, token=HF_TOKEN)
         current_tags = info.tags if info.tags else []
         print(f"🏷️ Found {len(current_tags)} tags: {current_tags}")
@@ -61,7 +69,7 @@ def get_current_tags(repo_id: str) -> str:
 def add_new_tag(repo_id: str, new_tag: str) -> str:
     """Add a new tag to a HuggingFace model repository via PR"""
     print(f"🔧 add_new_tag called with repo_id: {repo_id}, new_tag: {new_tag}")
-
+    loggerS.info('Server - Add a new tag')
     if not hf_api:
         error_result = {"error": "HF token not configured"}
         json_str = json.dumps(error_result)
@@ -181,4 +189,5 @@ This PR adds the `{new_tag}` tag to the model repository.
 
 
 if __name__ == "__main__":
+    loggerS.info('main mcp-server')
     mcp.run()
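The loggerS.info calls above use %-style lazy formatting: each positional argument needs a matching %s placeholder and is passed as a bare value (not wrapped in braces), and the logger only builds the final string if the record is actually emitted. A minimal sketch of the pattern (repo_id here is a placeholder value for illustration, not taken from the server):

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("mcp-server")

repo_id = "some-user/some-model"  # placeholder for illustration

# Each argument is substituted into its %s placeholder only when the record is emitted.
log.info("get_current_tags called with repo_id: %s", repo_id)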