-
Notifications
You must be signed in to change notification settings - Fork 14
Expand file tree
/
Copy path03_context_client.py
More file actions
51 lines (39 loc) · 1.31 KB
/
03_context_client.py
File metadata and controls
51 lines (39 loc) · 1.31 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import os
import ollama
# Base URL of the Ollama server; override via the OLLAMA_URL env var.
OLLAMA_URL = os.environ.get("OLLAMA_URL", "http://localhost:11434")
# Load the feature context once at import time.  A context manager closes the
# file handle promptly (the original leaked it), and the encoding is pinned so
# behavior does not depend on the platform default.
with open("context.csv", encoding="utf-8") as _context_file:
    CONTEXT = _context_file.read()
def ask_ollama(prompt):
    """Stream a chat answer for *prompt* from the Ollama server and print it.

    The CSV feature context (module-level ``CONTEXT``) is injected as an
    assistant message so the model can answer questions about it.  Response
    chunks are printed incrementally as they arrive; nothing is returned.
    """
    # Point the client at the configured Ollama API base URL.
    client = ollama.Client(host=OLLAMA_URL)
    model = "llama3.1"
    # Fixed typo in the system prompt: "Analize" -> "Analyze".
    system_prompt = "Analyze the assistant content and give short answers"
    # Conversation: system instruction, injected context, then the question.
    messages = [
        {"role": "system", "content": system_prompt},
        {
            "role": "assistant",
            "content": f"Here is the list of feature, version and state for all the Magic System features.\n {CONTEXT}",
        },
        {"role": "user", "content": prompt},
    ]
    # Request a streaming response so output appears as it is generated.
    stream = client.chat(model=model, messages=messages, stream=True)
    # Each chunk carries a partial message; print any content immediately.
    for chunk in stream:
        message_chunk = chunk.get("message", {}).get("content", "")
        if message_chunk:
            print(message_chunk, end="")
    # Terminate the streamed line once the response is complete.
    print()
def main():
    """Interactive loop: read prompts and forward them to ask_ollama.

    Loops until the user enters one of the quit words ("exit", "quit", "q").
    """
    while True:
        # Strip once up front: the original let whitespace-only input (e.g.
        # "   ") slip past the emptiness check and be sent to the model.
        prompt = input("Enter your prompt: ").strip()
        if not prompt:
            print("Prompt cannot be empty.")
            continue
        if prompt in {"exit", "quit", "q"}:
            print("Exiting...")
            break
        ask_ollama(prompt)
        # Visual separator between answers.
        print("-" * 50)
# Run the interactive loop only when executed as a script, not on import.
if __name__ == "__main__":
    main()