Commit 2e94196
Parent(s): 59c54cf

Refactor Docker setup: remove Ollama integration, update FastAPI service, enhance health checks, and improve README documentation

Files changed:
- Dockerfile (+14 -22)
- Dockerfile.fastapi (+4 -16)
- Makefile (+0 -10)
- README-docker-compose.md (+13 -24)
- docker-compose.prod.yml (+2 -18)
- docker-compose.yml (+0 -49)
- fastapi_startup_script.sh (+2 -20)
- start.sh (+1 -57)

Dockerfile (CHANGED)

@@ -1,9 +1,7 @@
-FROM …
+FROM python:3.11-slim
 
-# Install …
+# Install system dependencies with security updates
 RUN apt-get update && apt-get install -y \
-    python3 \
-    python3-pip \
     curl \
     && apt-get upgrade -y \
     && apt-get clean \
@@ -17,35 +15,29 @@ WORKDIR /app
 
 # Copy requirements and install Python packages
 COPY requirements.txt .
-RUN pip3 install --no-cache-dir …
+RUN pip3 install --no-cache-dir --upgrade pip && \
+    pip3 install --no-cache-dir -r requirements.txt
 
 # Copy your FastAPI app
 COPY fast.py .
 COPY app.py .
-COPY start.sh .
-RUN chmod +x start.sh
 
-# Create …
-RUN mkdir -p /…
-    chown -R appuser:appuser /…
-    chmod -R 755 /home/appuser/.ollama
-
-# Change ownership to non-root user
-RUN chown -R appuser:appuser /app
+# Create logs directory and change ownership
+RUN mkdir -p /app/logs && \
+    chown -R appuser:appuser /app
 
 # Set environment variables
-ENV OLLAMA_HOST=0.0.0.0:11434
-ENV OLLAMA_ORIGINS=*
 ENV HOME=/home/appuser
-ENV OLLAMA_HOME=/home/appuser/.ollama
 
-# Expose …
+# Expose port
 EXPOSE 7860
-EXPOSE 11434
 
 # Switch to non-root user
 USER appuser
 
-# …
-
-CMD …
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:7860/health || exit 1
+
+# Start FastAPI application
+CMD ["python3", "app.py"]
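
The refactored Dockerfile's HEALTHCHECK curls a /health route and its CMD expects app.py to start the server itself; neither file is shown in this commit. Below is only a minimal sketch of what app.py might look like under those assumptions (route and port taken from the Dockerfile, everything else illustrative):

```python
# Hypothetical sketch of app.py; the real application code is not part of this diff.
from fastapi import FastAPI

app = FastAPI(title="Fashion Analyzer")

@app.get("/health")
def health() -> dict:
    # The Dockerfile HEALTHCHECK curls this route and expects HTTP 200.
    return {"status": "ok"}

if __name__ == "__main__":
    # Matches CMD ["python3", "app.py"]: bind to the port HF Spaces expects.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
```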

Dockerfile.fastapi (CHANGED)

@@ -7,18 +7,9 @@ RUN apt-get update && apt-get install -y \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
 
-# Install Ollama
-RUN curl -fsSL https://ollama.com/install.sh | sh
-
 # Create non-root user for security
 RUN groupadd -r appuser && useradd -r -g appuser -m appuser
 
-# Create Ollama directories with proper permissions
-RUN mkdir -p /home/appuser/.ollama && \
-    mkdir -p /home/appuser/.ollama/models && \
-    chown -R appuser:appuser /home/appuser/.ollama && \
-    chmod -R 755 /home/appuser/.ollama
-
 # Set working directory
 WORKDIR /app
 
@@ -27,17 +18,14 @@ COPY requirements.txt .
 RUN pip3 install --no-cache-dir --upgrade pip && \
     pip3 install --no-cache-dir -r requirements.txt
 
-# Copy FastAPI application
+# Copy FastAPI application
 COPY fast.py .
-COPY fastapi_startup_script.sh .
-RUN chmod +x fastapi_startup_script.sh
 
 # Create logs directory and change ownership
 RUN mkdir -p /app/logs && \
     chown -R appuser:appuser /app
 
-# Set environment variables
-ENV OLLAMA_HOME=/home/appuser/.ollama
+# Set environment variables
 ENV HOME=/home/appuser
 
 # Switch to non-root user
@@ -50,5 +38,5 @@ EXPOSE 7860
 HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
     CMD curl -f http://localhost:7860/health || exit 1
 
-# Start …
-CMD ["…
+# Start FastAPI application
+CMD ["python3", "-m", "uvicorn", "fast:app", "--host", "0.0.0.0", "--port", "7860"]
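
Both Dockerfiles keep curl in the apt-get install list solely because the HEALTHCHECK shells out to it. Should curl ever be dropped from the slim image, a Python probe could do the same job; the sketch below is an alternative idea, not something this commit adds, and healthcheck.py is a hypothetical file name.

```python
# healthcheck.py (hypothetical): a curl-free equivalent of the HEALTHCHECK command,
# usable as: HEALTHCHECK ... CMD ["python3", "healthcheck.py"]
import sys
import urllib.request

try:
    with urllib.request.urlopen("http://localhost:7860/health", timeout=5) as resp:
        # Exit 0 only on HTTP 200, mirroring `curl -f ... || exit 1`.
        sys.exit(0 if resp.status == 200 else 1)
except Exception:
    sys.exit(1)
```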

Makefile (CHANGED)

@@ -14,7 +14,6 @@ help:
 	@echo " status - Show service status"
 	@echo " health - Check application health"
 	@echo " shell-api - Open shell in FastAPI container"
-	@echo " shell-ollama - Open shell in Ollama container"
 
 # Build all services
 build:
@@ -48,8 +47,6 @@ status:
 
 # Check application health
 health:
-	@echo "Checking Ollama health..."
-	@curl -s http://localhost:11434/api/tags > /dev/null && echo "✅ Ollama: Healthy" || echo "❌ Ollama: Unhealthy"
 	@echo "Checking FastAPI health..."
 	@curl -s http://localhost:7860/health > /dev/null && echo "✅ FastAPI: Healthy" || echo "❌ FastAPI: Unhealthy"
 
@@ -57,16 +54,9 @@ health:
 shell-api:
 	docker-compose exec fastapi bash
 
-# Open shell in Ollama container
-shell-ollama:
-	docker-compose exec ollama bash
-
 # Development commands
 dev-build:
 	docker-compose build --no-cache
 
 dev-logs-api:
 	docker-compose logs -f fastapi
-
-dev-logs-ollama:
-	docker-compose logs -f ollama

README-docker-compose.md (CHANGED)

@@ -1,26 +1,25 @@
 # Fashion Analyzer - Docker Compose Setup
 
-This project provides a secure, containerized fashion analysis application using …
+This project provides a secure, containerized fashion analysis application using Hugging Face Transformers and FastAPI with Docker Compose.
 
 ## 🏗️ Architecture
 
-The application consists of …
+The application consists of a single service:
 
-1. **…
-2. **FastAPI Service**: Provides the web API and user interface
-3. **Model Loader**: One-time service to download the required LLaVA model
+1. **FastAPI Service**: Provides the web API and user interface with integrated Hugging Face transformers models for fashion analysis
 
 ## 🔒 Security Features
 
-- ✅ **Pinned …
+- ✅ **Pinned Python version** (3.11-slim) - Secure base image
 - ✅ **Non-root user execution** - Enhanced container security
 - ✅ **Security updates** - Latest package updates applied
 - ✅ **Health checks** - Service monitoring and restart policies
-- ✅ **…
+- ✅ **Minimal dependencies** - Reduced attack surface
 
 ## 🚀 Quick Start
 
 ### Prerequisites
+
 - Docker Engine 20.10+
 - Docker Compose 2.0+
 
@@ -42,7 +41,6 @@ docker-compose ps
 - **Web Interface**: http://localhost:7860
 - **API Documentation**: http://localhost:7860/docs
 - **Health Check**: http://localhost:7860/health
-- **Ollama API**: http://localhost:11434
 
 ### 3. Stop the Application
 
@@ -50,7 +48,7 @@ docker-compose ps
 # Stop all services
 docker-compose down
 
-# Stop and remove volumes (removes …
+# Stop and remove volumes (removes logs)
 docker-compose down -v
 ```
 
@@ -72,11 +70,9 @@ AI/
 ### Environment Variables (.env)
 
 ```env
-OLLAMA_HOST=0.0.0.0:11434
-OLLAMA_ORIGINS=*
-OLLAMA_BASE_URL=http://ollama:11434
-OLLAMA_PORT=11434
 FASTAPI_PORT=7860
+ENVIRONMENT=development
+LOG_LEVEL=info
 ```
 
 ### Custom Configuration
@@ -106,7 +102,6 @@ docker-compose logs -f
 
 # Specific service
 docker-compose logs -f fastapi
-docker-compose logs -f ollama
 ```
 
 ### Debugging
@@ -114,7 +109,6 @@ docker-compose logs -f ollama
 ```bash
 # Execute commands in running containers
 docker-compose exec fastapi bash
-docker-compose exec ollama bash
 
 # Check service health
 docker-compose exec fastapi curl http://localhost:7860/health
@@ -124,22 +118,17 @@ docker-compose exec fastapi curl http://localhost:7860/health
 
 ### Health Checks
 
-
-- **Ollama**: Checks API availability
-- **FastAPI**: Checks application health and Ollama connectivity
-
-### Service Dependencies
+The FastAPI service includes health checks:
 
-- FastAPI …
-- Model loader runs after Ollama is ready
+- **FastAPI**: Checks application health and model availability
 
 ## 🛠️ Troubleshooting
 
 ### Common Issues
 
 1. **Port conflicts**: Change ports in `.env` file
-2. **Model …
-3. **…
+2. **Model loading fails**: Check internet connection and available memory
+3. **Application startup slow**: Transformers models need time to download and load
 
 ### Reset Everything
 
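
The new troubleshooting entries ("model loading fails", "application startup slow") stem from Hugging Face transformers checkpoints being downloaded on first use. The actual model selection lives in fast.py and is not visible in this diff; the snippet below only illustrates the pattern, with the checkpoint name and image path as assumptions.

```python
# Illustrative only; the repository's real model choice is not shown in this commit.
from transformers import pipeline

# The first call downloads the weights, which is why startup can be slow and
# needs both network access and enough memory (see the 4G limit in
# docker-compose.prod.yml).
captioner = pipeline("image-to-text", model="Salesforce/blip-image-captioning-base")

# "outfit.jpg" is a placeholder path for an image to analyze.
print(captioner("outfit.jpg")[0]["generated_text"])
```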

docker-compose.prod.yml (CHANGED)

@@ -1,31 +1,15 @@
-version: '3.8'
-
 # Production overrides for docker-compose.yml
 # Usage: docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d
 
 services:
-  ollama:
-    restart: always
-    deploy:
-      resources:
-        limits:
-          memory: 8G
-        reservations:
-          memory: 4G
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "10m"
-        max-file: "3"
-
   fastapi:
     restart: always
     deploy:
      resources:
        limits:
-          memory: …
+          memory: 4G  # Increased for transformers models
        reservations:
-          memory: …
+          memory: 2G
     logging:
       driver: "json-file"
       options:

docker-compose.yml (CHANGED)

@@ -1,26 +1,4 @@
-version: "3.8"
-
 services:
-  ollama:
-    image: ollama/ollama:0.9.2
-    container_name: ollama-server
-    restart: unless-stopped
-    ports:
-      - "11434:11434"
-    volumes:
-      - ollama_data:/root/.ollama
-    environment:
-      - OLLAMA_HOST=0.0.0.0:11434
-      - OLLAMA_ORIGINS=*
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-      start_period: 30s
-    networks:
-      - fashion-analyzer
-
   fastapi:
     build:
       context: .
@@ -29,38 +7,11 @@ services:
     restart: unless-stopped
     ports:
       - "7860:7860"
-    environment:
-      - OLLAMA_BASE_URL=http://ollama:11434
-    depends_on:
-      - ollama
     networks:
       - fashion-analyzer
     volumes:
       - ./logs:/app/logs
 
-  model-loader:
-    image: ollama/ollama:0.9.2
-    container_name: model-loader
-    restart: "no"
-    environment:
-      - OLLAMA_HOST=http://ollama:11434
-    depends_on:
-      - ollama
-    networks:
-      - fashion-analyzer
-    command: >
-      sh -c "
-        echo 'Waiting for Ollama server to be ready...' &&
-        sleep 10 &&
-        echo 'Pulling LLaVA model for vision analysis...' &&
-        ollama pull llava:7b &&
-        echo 'Model pulled successfully!'
-      "
-
-volumes:
-  ollama_data:
-    driver: local
-
 networks:
   fashion-analyzer:
     driver: bridge
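
With the Ollama service and its environment block gone, the only runtime settings left are the ones the README's .env example documents (FASTAPI_PORT, ENVIRONMENT, LOG_LEVEL). How the application consumes them is not shown in this diff; one plausible sketch, assuming plain os.getenv lookups in app.py:

```python
# Hypothetical configuration snippet; variable names come from the README's .env
# example, the defaults are assumptions.
import os

FASTAPI_PORT = int(os.getenv("FASTAPI_PORT", "7860"))
ENVIRONMENT = os.getenv("ENVIRONMENT", "development")
LOG_LEVEL = os.getenv("LOG_LEVEL", "info")

if __name__ == "__main__":
    import uvicorn
    # Reload only in development; log level follows the .env setting.
    uvicorn.run("fast:app", host="0.0.0.0", port=FASTAPI_PORT,
                log_level=LOG_LEVEL, reload=(ENVIRONMENT == "development"))
```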

fastapi_startup_script.sh (CHANGED)

@@ -3,27 +3,9 @@
 # Debug: Check current user and permissions
 echo "Current user: $(whoami)"
 echo "Home directory: $HOME"
-echo "Ollama home: $OLLAMA_HOME"
-echo "Checking .ollama directory permissions:"
-ls -la /home/appuser/.ollama/ || echo "Directory doesn't exist"
 
-# …
-echo "Starting …
-ollama serve &
-
-# Wait for Ollama to start
-echo "Waiting for Ollama server to start..."
-while ! curl -s http://localhost:11434 > /dev/null; do
-    sleep 1
-done
-
-echo "Ollama server started!"
-
-# Pull the LLaVA model for vision analysis
-echo "Pulling LLaVA model for vision analysis..."
-ollama pull llava:7b
-
-echo "Model pulled successfully!"
+# Note: This script is deprecated as the application now uses transformers instead of Ollama
+echo "Starting FastAPI server with transformers-based fashion analysis..."
 
 # Start FastAPI on port 7860 (HF Spaces requirement)
 echo "Starting FastAPI server on port 7860..."

start.sh (CHANGED)

@@ -6,66 +6,10 @@ echo "Current user: $(whoami)"
 echo "User ID: $(id)"
 echo "Current directory: $(pwd)"
 echo "Home directory: $HOME"
-echo "Ollama home: $OLLAMA_HOME"
 echo "PATH: $PATH"
-echo "Which ollama: $(which ollama)"
-echo "Ollama version: $(ollama --version 2>&1 || echo 'Failed to get version')"
-
-# Check Ollama directory permissions
-echo "Checking Ollama directory:"
-ls -la $HOME/.ollama/ 2>/dev/null || echo "Ollama directory doesn't exist"
-echo "Checking if we can write to Ollama directory:"
-touch $HOME/.ollama/test_write 2>/dev/null && rm $HOME/.ollama/test_write && echo "Write test: SUCCESS" || echo "Write test: FAILED"
 echo "========================="
 
-
-echo "Setting up Ollama directory..."
-mkdir -p $HOME/.ollama
-chmod 755 $HOME/.ollama
-echo "Ollama directory setup complete."
-
-# Start Ollama server in background
-echo "Starting Ollama server..."
-
-# Try different ways to start Ollama in case of PATH issues
-if command -v ollama >/dev/null 2>&1; then
-    echo "Using ollama from PATH"
-    ollama serve &
-elif [ -f "/usr/local/bin/ollama" ]; then
-    echo "Using ollama from /usr/local/bin/"
-    /usr/local/bin/ollama serve &
-elif [ -f "/usr/bin/ollama" ]; then
-    echo "Using ollama from /usr/bin/"
-    /usr/bin/ollama serve &
-else
-    echo "ERROR: Could not find ollama binary!"
-    exit 1
-fi
-
-# Wait for Ollama to start
-echo "Waiting for Ollama server to start..."
-while ! curl -s http://localhost:11434 > /dev/null; do
-    sleep 1
-done
-
-echo "Ollama server started!"
-
-# Pull the LLaVA model for vision analysis
-echo "Pulling LLaVA model for vision analysis..."
-
-# Use the same approach for pulling models
-if command -v ollama >/dev/null 2>&1; then
-    ollama pull llava:7b
-elif [ -f "/usr/local/bin/ollama" ]; then
-    /usr/local/bin/ollama pull llava:7b
-elif [ -f "/usr/bin/ollama" ]; then
-    /usr/bin/ollama pull llava:7b
-else
-    echo "ERROR: Could not find ollama binary for pulling model!"
-    exit 1
-fi
-
-echo "Model pulled successfully!"
+echo "Starting Fashion Analyzer with transformers-based AI models..."
 
 # Start FastAPI on port 7860 (HF Spaces requirement)
 echo "Starting FastAPI server on port 7860..."