-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path example_chat_chain.py
More file actions
44 lines (37 loc) · 1.12 KB
/
example_chat_chain.py
File metadata and controls
44 lines (37 loc) · 1.12 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
from hyperchain.prompt_templates import ChatTemplate
from hyperchain.chain import LLMChain
from hyperchain.llm_runners import OpenAIChatRunner
# Base conversation: a system prompt plus the user's first question.
_initial_messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "{question}"},
]
chat_template = ChatTemplate(_initial_messages)

# Extend the base template with the model's first answer and a follow-up
# question; `ChatTemplate + list-of-message-dicts` yields a new template.
_followup_messages = [
    {"role": "system", "content": "{answer}"},
    {"role": "user", "content": "{question2}"},
]
chat_template_2 = chat_template + _followup_messages

# Full-transcript template, ending with the model's second answer.
chat_template_result = chat_template_2 + [{"role": "user", "content": "{answer2}"}]
# Runner shared by both chain links.  The API key can be hard-coded here
# or supplied via an environment variable instead.
chat_runner = OpenAIChatRunner(
    api_key="ENTER API KEY HERE OR IN ENV VARIABLE",
    model_params={"max_tokens": 600},
)

# First link answers {question} and publishes its output under the name
# "answer"; the second link's template (chat_template_2) consumes it.
_first_link = LLMChain(
    template=chat_template, llm_runner=chat_runner, output_name="answer"
)
_second_link = LLMChain(template=chat_template_2, llm_runner=chat_runner)
llm_chain = _first_link + _second_link

# Execute both links; the chain fills {question} and {question2}.
response = llm_chain.run(
    question="How are you?", question2="What can you assist me with?"
)

# Render the full conversation: the first answer comes from the previous
# chain step's result, the second from the final result.
print(
    chat_template_result.format(
        question="How are you?",
        answer=response.previous_result.answer,
        question2="What can you assist me with?",
        answer2=response.result,
    )
)