Commit 14ad06d
Parent(s): 33eefe1
Reapply "fix: typo on hf_token env name"
This reverts commit bade57b054b7d7b05dde76a077c7b6b9a4feabdc.
- cloudbuild.yaml +8 -12
- core-model-prediction/Dockerfile +1 -1
cloudbuild.yaml
CHANGED
@@ -1,16 +1,12 @@
 steps:
   - name: "gcr.io/cloud-builders/docker"
     dir: "core-model-prediction"
-
-
-
-
-
-
-        "us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest",
-        ".",
-      ]
-    secretEnv: ["HF_TOKEN"]
+    entrypoint: "bash"
+    secretEnv: ["_HF_TOKEN"]
+    args:
+      - -c
+      - |
+        docker build -t us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest --build-arg=HF_TOKEN=$$_HF_TOKEN .
 
   - name: "gcr.io/cloud-builders/docker"
     args:
@@ -24,5 +20,5 @@ images:
 
 availableSecrets:
   secretManager:
-    - versionName: "projects/${PROJECT_ID}/secrets/HF_TOKEN/versions/
-      env: "
+    - versionName: "projects/${PROJECT_ID}/secrets/HF_TOKEN/versions/latest"
+      env: "_HF_TOKEN"
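Assembled from the added lines above, the relevant parts of cloudbuild.yaml after this commit would read roughly as follows. This is a readability sketch only; the unchanged second build step and the images: section are omitted.

steps:
  - name: "gcr.io/cloud-builders/docker"
    dir: "core-model-prediction"
    entrypoint: "bash"
    # _HF_TOKEN is injected into this step's environment via availableSecrets below
    secretEnv: ["_HF_TOKEN"]
    args:
      - -c
      - |
        docker build -t us-central1-docker.pkg.dev/${PROJECT_ID}/interview-ai-detector/model-prediction:latest --build-arg=HF_TOKEN=$$_HF_TOKEN .

availableSecrets:
  secretManager:
    # maps the Secret Manager secret HF_TOKEN to the step env var name _HF_TOKEN
    - versionName: "projects/${PROJECT_ID}/secrets/HF_TOKEN/versions/latest"
      env: "_HF_TOKEN"

The double dollar sign escapes Cloud Build's own substitution, so bash receives the literal $_HF_TOKEN and resolves it from the step's environment at run time; that is also why the step switches to entrypoint: "bash" instead of invoking docker directly.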
core-model-prediction/Dockerfile
CHANGED
@@ -21,7 +21,7 @@ RUN python -m nltk.downloader punkt wordnet averaged_perceptron_tagger
 RUN unzip /root/nltk_data/corpora/wordnet.zip -d /root/nltk_data/corpora/
 
 # Print masked HF Token for debugging
-RUN echo $
+RUN echo $HF_TOKEN
 
 # Download HuggingFace model
 RUN python -c "from transformers import AutoTokenizer, AutoModelForCausalLM; \
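The --build-arg=HF_TOKEN=... flag in cloudbuild.yaml only takes effect if the Dockerfile declares a matching build argument. That declaration sits outside the hunk shown above, so the line below is an assumed sketch of how it would typically look, not a quote of this repository's Dockerfile:

# Assumed to appear earlier in the Dockerfile (not part of the diff above):
# declare the build argument so the value passed via --build-arg=HF_TOKEN=...
# is visible to later RUN instructions in this build stage.
ARG HF_TOKEN

With ARG HF_TOKEN in place, $HF_TOKEN resolves inside RUN instructions of the same stage, which is what the echo line in the diff relies on.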