metastable-void committed on
Commit
1ff7179
·
unverified ·
1 Parent(s): ff3e19f

consider time context in prompt

Browse files
Files changed (1) hide show
  1. app.py +21 -2
app.py CHANGED
@@ -24,7 +24,7 @@ if torch.cuda.is_available():
24
  model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava17"
25
  base_model_id = "llm-jp/llm-jp-3-1.8b-instruct"
26
  tokenizer = AutoTokenizer.from_pretrained(base_model_id, trust_remote_code=True)
27
- tokenizer.chat_template = "{{bos_token}}{% for message in messages %}{% if message['role'] == 'user' %}{{ '\\n\\n### 前の投稿:\\n' + message['content'] + '' }}{% elif message['role'] == 'system' %}{{ '以下は、SNS上の投稿です。あなたはSNSの投稿生成botとして、次に続く投稿を考えなさい。説明はせず、投稿の内容のみを鉤括弧をつけずに答えよ。' }}{% elif message['role'] == 'assistant' %}{{ '\\n\\n### 次の投稿:\\n' + message['content'] + eos_token }}{% endif %}{% if loop.last and add_generation_prompt %}{{ '\\n\\n### 次の投稿:\\n' }}{% endif %}{% endfor %}"
28
  model = AutoModelForCausalLM.from_pretrained(
29
  base_model_id,
30
  trust_remote_code=True,
@@ -49,8 +49,27 @@ def generate(
49
  top_k: int = 50,
50
  repetition_penalty: float = 1.0,
51
  ) -> Iterator[str]:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  messages = [
53
- {"role": "system", "content": "あなたはSNSの投稿生成botで、次に続く投稿を考えてください。"},
54
  {"role": "user", "content": message},
55
  ]
56
 
 
24
  model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava17"
25
  base_model_id = "llm-jp/llm-jp-3-1.8b-instruct"
26
  tokenizer = AutoTokenizer.from_pretrained(base_model_id, trust_remote_code=True)
27
+ tokenizer.chat_template = "{{bos_token}}{% for message in messages %}{% if message['role'] == 'user' %}{{ '\\n\\n### 前の投稿:\\n' + message['content'] + '' }}{% elif message['role'] == 'system' %}{{ '以下は、SNS上の投稿です。あなたはSNSの投稿生成botとして、次に続く投稿を考えなさい。説明はせず、投稿の内容のみを鉤括弧をつけずに答えよ。' + message['content'] }}{% elif message['role'] == 'assistant' %}{{ '\\n\\n### 次の投稿:\\n' + message['content'] + eos_token }}{% endif %}{% if loop.last and add_generation_prompt %}{{ '\\n\\n### 次の投稿:\\n' }}{% endif %}{% endfor %}"
28
  model = AutoModelForCausalLM.from_pretrained(
29
  base_model_id,
30
  trust_remote_code=True,
 
49
  top_k: int = 50,
50
  repetition_penalty: float = 1.0,
51
  ) -> Iterator[str]:
52
+ from datetime import datetime, timezone, timedelta
53
+
54
+ d=datetime.now(timezone(timedelta(hours=9), 'JST'))
55
+ m=d.month
56
+ if m < 3 or m > 11:
57
+ season = '冬'
58
+ elif m < 6:
59
+ season = '春'
60
+ elif m < 9:
61
+ season = '夏'
62
+ else:
63
+ season = '秋'
64
+
65
+ h=d.hour
66
+ go = '午前' if h < 12 else '午後'
67
+ h = h % 12
68
+ minute = d.minute
69
+ time = go + str(h) + '時' + str(minute) + '分'
70
+
71
  messages = [
72
+ {"role": "system", "content": "なお今は日本の" + season + "で、時刻は" + time + "であるものとする。"},
73
  {"role": "user", "content": message},
74
  ]
75