Use LangChain to create the model
app.py CHANGED
@@ -37,6 +37,14 @@ model = AutoModelForCausalLM.from_pretrained(
     low_cpu_mem_usage=True
 )
 
+# model_id = "gpt2"
+tokenizer = AutoTokenizer.from_pretrained(model_id)
+# model = AutoModelForCausalLM.from_pretrained(model_id)
+pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=10)
+hf = HuggingFacePipeline(pipeline=pipe)
+
+
+
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 tokenizer.use_default_system_prompt = False
 
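For context, the added lines wrap the existing transformers text-generation pipeline in LangChain's HuggingFacePipeline so the model can be driven through the LangChain LLM interface. Below is a minimal, self-contained sketch of how these pieces fit together; the import paths (transformers, langchain_community), the "gpt2" placeholder model, and the .invoke() call are assumptions for illustration and are not part of this commit.

# Minimal sketch: hand a transformers pipeline to LangChain.
# Assumes the langchain-community package is installed; import paths and the
# "gpt2" placeholder are illustrative, not taken from app.py.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain_community.llms import HuggingFacePipeline

model_id = "gpt2"  # placeholder; the Space defines its own model_id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, low_cpu_mem_usage=True)

# Build the text-generation pipeline and wrap it for LangChain.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=10)
hf = HuggingFacePipeline(pipeline=pipe)

# The wrapper can then be called like any other LangChain LLM.
print(hf.invoke("Hello, my name is"))

Note that the commit leaves the original tokenizer lines in place after the new block, so the tokenizer is loaded twice in the resulting app.py.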