name: Phi 3
model: phi3:mini
version: 1

# Results Preferences
top_p: 0.95
temperature: 0.7
frequency_penalty: 0
presence_penalty: 0
max_tokens: 8192 # Infer from base config.json -> max_position_embeddings
stream: true # true | false
stop: ["<|end|>"]

# Engine / Model Settings
engine: llama-cpp
prompt_template: "<|user|>\n{prompt}<|end|>\n<|assistant|>\n"
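
A minimal sketch of how a client might consume this file: load the YAML, then substitute the user's message into `prompt_template` before passing the request to the llama-cpp engine. The file name `model.yml`, the PyYAML dependency, and the `render_prompt` helper are illustrative assumptions, not part of this repository.

```python
# Minimal sketch (assumption, not repository code): read the config above
# and render a chat prompt from its prompt_template.
# Assumes the config is saved as model.yml and PyYAML is installed.
import yaml

with open("model.yml") as f:
    cfg = yaml.safe_load(f)  # keys: name, model, top_p, temperature, prompt_template, ...

def render_prompt(user_message: str) -> str:
    # prompt_template is "<|user|>\n{prompt}<|end|>\n<|assistant|>\n"
    return cfg["prompt_template"].format(prompt=user_message)

print(render_prompt("Hello!"))
# <|user|>
# Hello!<|end|>
# <|assistant|>
```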