Spaces:
Running
Running
Commit
·
9b19015
1
Parent(s):
67bae95
feat: github workflows action
Browse files

Changed files:
- .github/workflows/ci-production.yml +61 -0
- Dockerfile +17 -0
- requirements.txt +2 -1
.github/workflows/ci-production.yml
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Deploy to Google Vertex AI
|
2 |
+
|
3 |
+
on:
|
4 |
+
push:
|
5 |
+
branches:
|
6 |
+
- production
|
7 |
+
|
8 |
+
jobs:
|
9 |
+
setup-build-publish-deploy:
|
10 |
+
name: Setup, Build, Publish, and Deploy
|
11 |
+
runs-on: ubuntu-latest
|
12 |
+
|
13 |
+
steps:
|
14 |
+
- name: Checkout code
|
15 |
+
uses: actions/checkout@v2
|
16 |
+
|
17 |
+
- name: Set up Cloud SDK
|
18 |
+
uses: google-github-actions/[email protected]
|
19 |
+
with:
|
20 |
+
project_id: ${{ secrets.GCP_PROJECT_ID }}
|
21 |
+
service_account_key: ${{ secrets.GCP_DEPLOYER_SA_KEY }}
|
22 |
+
export_default_credentials: true
|
23 |
+
|
24 |
+
- name: Authenticate Docker
|
25 |
+
run: |
|
26 |
+
gcloud auth configure-docker ${{ secrets.GCP_REPO_REGION }}-docker.pkg.dev --quiet
|
27 |
+
|
28 |
+
- name: Build image
|
29 |
+
working-directory: ./
|
30 |
+
run: docker build . --file Dockerfile --tag ${{ secrets.GCP_REPO_REGION }}-docker.pkg.dev/${{ secrets.GCP_PROJECT_ID }}/interview-ai-detector/model-prediction
|
31 |
+
|
32 |
+
- name: Push image
|
33 |
+
run: docker push ${{ secrets.GCP_REPO_REGION }}-docker.pkg.dev/${{ secrets.GCP_PROJECT_ID }}/interview-ai-detector/model-prediction
|
34 |
+
|
35 |
+
- name: Create model
|
36 |
+
id: create_model
|
37 |
+
run: |
|
38 |
+
MODEL_ID=$(gcloud ai models upload \
|
39 |
+
--region=${{ secrets.GCP_VERTEX_AI_REGION }} \
|
40 |
+
--display-name="interview-ai-detector-model" \
|
41 |
+
--container-image-uri="${{ secrets.GCP_REPO_REGION }}-docker.pkg.dev/${{ secrets.GCP_PROJECT_ID }}/interview-ai-detector/model-prediction:latest" \
|
42 |
+
--format="value(model)")
|
43 |
+
echo "MODEL_ID=${MODEL_ID}" >> $GITHUB_ENV
|
44 |
+
|
45 |
+
- name: Create Vertex AI endpoint
|
46 |
+
id: create_endpoint
|
47 |
+
run: |
|
48 |
+
ENDPOINT_ID=$(gcloud ai endpoints create \
|
49 |
+
--region=${{ secrets.GCP_VERTEX_AI_REGION }} \
|
50 |
+
--display-name="interview-ai-detector-endpoint" \
|
51 |
+
--format="value(name)")
|
52 |
+
echo "ENDPOINT_ID=${ENDPOINT_ID}" >> $GITHUB_ENV
|
53 |
+
|
54 |
+
- name: Deploy model to endpoint
|
55 |
+
run: |
|
56 |
+
gcloud ai endpoints deploy-model ${{ env.ENDPOINT_ID }} \
|
57 |
+
--region ${{ secrets.GCP_VERTEX_AI_REGION }} \
|
58 |
+
--model ${{ env.MODEL_ID }} \
|
59 |
+
--display-name interview-ai-detector-deployment \
|
60 |
+
--machine-type n1-standard-4 \
|
61 |
+
--accelerator count=1,type=nvidia-tesla-t4
|
Dockerfile
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Use an official Python runtime as a base image
FROM python:3.12-slim

# Set the working directory in the container
WORKDIR /app

# Copy only the dependency manifest first so the pip-install layer is cached
# and re-runs only when requirements.txt changes, not on every code edit.
COPY requirements.txt /app/requirements.txt

# Install any needed packages specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application code into the container at /app
COPY . /app

# Make port 8080 available to the world outside this container
EXPOSE 8080

# Run gunicorn with Uvicorn workers
CMD ["gunicorn", "-k", "uvicorn.workers.UvicornWorker", "-w", "4", "-b", "0.0.0.0:8080", "prediction:app"]
|
requirements.txt
CHANGED
@@ -6,4 +6,5 @@ textstat
|
|
6 |
scikit-learn==1.4.1.post1
|
7 |
transformers
|
8 |
fastapi
|
9 |
-
uvicorn
|
|
|
|
6 |
scikit-learn==1.4.1.post1
|
7 |
transformers
|
8 |
fastapi
|
9 |
+
uvicorn
|
10 |
+
gunicorn
|