Merge pull request #7 from YakobusIP/production
core-model-prediction/cloudbuild.yaml → cloudbuild-endpoint.yaml
RENAMED
@@ -1,49 +1,53 @@
 steps:
-  - name: "docker"
-    dir: "core-model-prediction"
-    args:
-      [
-        "builds",
-        "submit",
-        "--tag",
-        "gcr.io/$PROJECT_ID/interview-ai-detector/model-prediction",
-      ]
-
   - name: "gcr.io/google.com/cloudsdktool/cloud-sdk"
     entrypoint: "bash"
+    id: upload-model
+    timeout: 600s
     args:
       - "-c"
       - |
-
-        --region="
+        gcloud ai models upload \
+        --region="us-central1" \
         --container-ports=8080 \
-        --container-image-uri="
+        --container-image-uri="us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest" \
         --container-predict-route="/predict" \
         --container-health-route="/health" \
-        --display-name="interview-ai-detector-model"
-
-
+        --display-name="interview-ai-detector-model"
+
+  - name: "gcr.io/google.com/cloudsdktool/cloud-sdk"
+    entrypoint: "bash"
+    id: fetch-model
+    waitFor: upload-model
+    timeout: 600s
+    args:
+      - "-c"
+      - |
+        MODEL_ID=$(gcloud ai models list --region=us-central1 --format="value(name)" | head -n 1)
+        echo MODEL_ID > /workspace/model_id.txt
 
   - name: "gcr.io/google.com/cloudsdktool/cloud-sdk"
     entrypoint: "bash"
+    id: create-endpoint
+    waitFor: fetch-model
     args:
       - "-c"
       - |
         ENDPOINT_ID=$(gcloud ai endpoints create \
-        --region="
+        --region="us-central1" \
         --display-name="interview-ai-detector-endpoint" \
         --format="value(name)")
-        echo
+        echo ENDPOINT_ID > /workspace/endpoint_id.txt
 
   - name: "gcr.io/google.com/cloudsdktool/cloud-sdk"
     entrypoint: "bash"
+    waitFor: create-endpoint
     args:
       - "-c"
       - |
-        gcloud ai endpoints deploy-model "$
-        --region="
-        --model="$
+        gcloud ai endpoints deploy-model "${$(cat /workspace/endpoint_id.txt)}" \
+        --region="us-central1" \
+        --model="${$(cat /workspace/model_id.txt)}" \
         --display-name="interview-ai-detector-deployment" \
         --machine-type="n1-standard-4" \
         --accelerator="count=1,type=nvidia-tesla-t4" \
-        --service-account="
+        --service-account="vertex-ai-user-managed-sa@steady-climate-416810.iam.gserviceaccount.com"
cloudbuild-model.yaml
ADDED
@@ -0,0 +1,20 @@
+steps:
+  - name: "gcr.io/cloud-builders/docker"
+    dir: "core-model-prediction"
+    args:
+      [
+        "build",
+        "-t",
+        "us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest",
+        ".",
+      ]
+
+  - name: "gcr.io/cloud-builders/docker"
+    args:
+      [
+        "push",
+        "us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest",
+      ]
+
+images:
+  - "us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest"
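
This new config only builds and pushes the prediction image; the Vertex AI upload and deployment live in cloudbuild-endpoint.yaml, so the two builds need to run in order (image first, then deploy). A hypothetical driver sketch, assuming both configs sit at the repository root and gcloud is authenticated; the project ID is inferred from the service account above and may differ.

# Hypothetical driver (not part of this PR): run the two Cloud Build configs in order,
# so the image exists in Artifact Registry before cloudbuild-endpoint.yaml uploads it.
# Paths and project ID are assumptions; adjust to the actual repository layout.
import subprocess

def submit(config: str, project: str) -> None:
    # "gcloud builds submit --config <file>" runs a Cloud Build config against local source.
    subprocess.run(
        ["gcloud", "builds", "submit", "--config", config, "--project", project, "."],
        check=True,
    )

if __name__ == "__main__":
    project = "steady-climate-416810"  # assumed from the service account in cloudbuild-endpoint.yaml
    submit("cloudbuild-model.yaml", project)     # build and push the container image
    submit("cloudbuild-endpoint.yaml", project)  # upload the model, create the endpoint, deploy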
core-model-prediction/prediction.py
CHANGED
@@ -64,7 +64,7 @@ def process_instance(data: PredictRequest):
     return {
         "prediction_class": "AI" if secondary_model_prediction == 1 else "HUMAN",
         "details": {
-            "main_model_probability": main_model_probability,
+            "main_model_probability": str(main_model_probability),
             "final_prediction": secondary_model_prediction
         }
     }
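
The str() cast above is presumably there because main_model_probability comes out of the model as a NumPy scalar, which the standard JSON encoder used for the prediction response cannot serialize. A small illustration of the behavior, with float() shown as an alternative that keeps the field numeric (the value here is made up):

# Minimal illustration (assumption: main_model_probability is a NumPy scalar such as np.float32).
import json
import numpy as np

main_model_probability = np.float32(0.7134)

try:
    json.dumps({"main_model_probability": main_model_probability})
except TypeError as exc:
    # np.float32 is not JSON serializable by the standard encoder.
    print(exc)

# Either cast works; float() keeps the field numeric instead of turning it into a string.
print(json.dumps({"main_model_probability": str(main_model_probability)}))
print(json.dumps({"main_model_probability": float(main_model_probability)}))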
core-model-prediction/random_forest_dependencies.py
CHANGED
@@ -5,7 +5,6 @@ from collections import Counter
 class RandomForestDependencies:
     def __init__(self):
         self.gemma2bdependencies = Gemma2BDependencies()
-        self.random_forest_features = []
 
     def calculate_features(self, question: str, answer: str, probability: float, backspace_count: int, typing_duration: int, letter_click_counts: dict[str, int]):
         cosine_similarity = self.gemma2bdependencies.calculate_cosine_similarity(
@@ -15,7 +14,7 @@ class RandomForestDependencies:
         letter_discrepancy = self.calculate_letter_discrepancy(
             answer, letter_click_counts)
 
-        self.random_forest_features = [
+        return [
             cosine_similarity, probability, backspace_count_normalized,
             typing_duration_normalized, letter_discrepancy
         ]
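
With this change calculate_features returns the feature vector directly instead of stashing it on the instance, so the caller no longer depends on mutable state shared across requests. A hedged sketch of what the call site presumably looks like after the change; the surrounding names are assumptions for illustration, not copied from the repository.

# Assumed call-site shape after the change: the returned list is passed straight
# to the secondary model instead of being read back from the RandomForestDependencies instance.
# All names below (data.*, random_forest_dependencies, random_forest_model) are illustrative.
features = random_forest_dependencies.calculate_features(
    question=data.question,
    answer=data.answer,
    probability=main_model_probability,
    backspace_count=data.backspace_count,
    typing_duration=data.typing_duration,
    letter_click_counts=data.letter_click_counts,
)
secondary_model_prediction = random_forest_model.predict(features)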
core-model-prediction/random_forest_model.py
CHANGED
@@ -12,4 +12,4 @@ class RandomForestModel:
         return self.scaler.transform(np.array(secondary_model_features).astype(np.float32).reshape(1, -1))
 
     def predict(self, secondary_model_features: List[float]):
-        return self.model.predict(self.preprocess_input(secondary_model_features))[0]
+        return int(self.model.predict(self.preprocess_input(secondary_model_features))[0])
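
As with the probability cast in prediction.py, wrapping the prediction in int() is presumably about serialization: predict returns a NumPy array whose elements are NumPy integer scalars, which the standard JSON encoder rejects, while int() yields a plain Python int. A minimal illustration of the distinction (assuming the underlying model behaves like a scikit-learn classifier, as the class name suggests):

# Minimal illustration (assumption: self.model.predict(...)[0] yields a NumPy integer scalar).
import json
import numpy as np

raw = np.array([1], dtype=np.int64)[0]   # what predict(...)[0] typically yields: np.int64
print(type(raw))                          # <class 'numpy.int64'>

try:
    json.dumps({"final_prediction": raw})
except TypeError as exc:
    print(exc)                            # np.int64 is not JSON serializable

print(json.dumps({"final_prediction": int(raw)}))  # a plain Python int serializes fine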