-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
90 lines (71 loc) · 2.72 KB
/
main.py
File metadata and controls
90 lines (71 loc) · 2.72 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import argparse
import os
from dotenv import load_dotenv
from google.genai import Client
from google.genai.types import (
Content,
GenerateContentConfig,
GenerateContentResponse,
Part,
)
from functions.functions import available_functions
# Gemini model identifier used for every generate_content call.
MODEL = "gemini-2.5-flash"
# System instruction sent with each request. It constrains the agent to the
# four tool operations exposed via `available_functions` and tells the model
# that all paths are relative (the working directory is injected elsewhere
# for security). NOTE: this is a runtime string the API consumes verbatim.
SYSTEM_PROMPT = """
You are a helpful AI coding agent.
When a user asks a question or makes a request, make a function call plan. You can perform the following operations:
- List files and directories
- Read a file's contents
- Write text contents to a file
- Run a Python file
All paths you provide should be relative to the working directory. You do not need to specify the working directory in your function calls as it is automatically injected for security reasons.
"""
def main() -> None:
    """CLI entry point: send one user prompt to Gemini and print the reply.

    Loads GEMINI_API_KEY from the environment (via .env), parses the prompt
    and an optional --verbose flag from argv, issues a single
    generate_content request, then prints the model's text response and any
    function calls it requested.

    Raises:
        RuntimeError: if GEMINI_API_KEY is missing or empty.
    """
    load_dotenv()
    gemini_api_key = os.environ.get("GEMINI_API_KEY", "")
    if not gemini_api_key:
        raise RuntimeError(
            'Required environment variable "GEMINI_API_KEY" was missing or empty!'
        )
    parser = argparse.ArgumentParser(description="Chatbot")
    parser.add_argument("user_prompt", type=str, help="User prompt")
    parser.add_argument("--verbose", action="store_true", help="Enable verbose output")
    args = parser.parse_args()
    prompt = args.user_prompt
    verbose = args.verbose
    if verbose:
        # Security fix: never echo the raw API key. Show only a masked
        # suffix so the user can still tell which key is loaded.
        masked_key = f"...{gemini_api_key[-4:]}" if len(gemini_api_key) > 4 else "****"
        print(f'GEMINI_API_KEY: "{masked_key}"')
        print(f'Model: "{MODEL}"')
        print(f'Prompt: "{prompt}"')
    messages = [Content(role="user", parts=[Part(text=prompt)])]
    client = Client(api_key=gemini_api_key)
    response = _generate_content(client, MODEL, messages)
    # usage_metadata presence is validated inside _generate_content, so
    # these attribute accesses are safe here.
    prompt_tokens = response.usage_metadata.prompt_token_count
    response_tokens = response.usage_metadata.candidates_token_count
    if response.text:
        print(f'Response: "{response.text}"')
    if response.function_calls:
        for function in response.function_calls:
            print(f"Calling function: {function.name}({function.args})")
    if verbose:
        print(f"Prompt tokens: {prompt_tokens}")
        print(f"Response tokens: {response_tokens}")
def _generate_content(
    client: Client, model: str, contents: list[Content]
) -> GenerateContentResponse:
    """Call the Gemini API once and validate the response shape.

    Args:
        client: An authenticated google-genai Client.
        model: Model identifier to query (e.g. ``MODEL``).
        contents: Conversation history as a list of Content messages.
            (Annotation fixed: the caller passes ``list[Content]``, not str.)

    Returns:
        The raw GenerateContentResponse, guaranteed non-empty, containing
        either text or function calls, and carrying usage metadata.

    Raises:
        RuntimeError: if the response is falsy, has neither text nor
            function calls, or lacks usage metadata.
    """
    response = client.models.generate_content(
        model=model,
        contents=contents,
        config=GenerateContentConfig(
            system_instruction=SYSTEM_PROMPT,
            tools=[available_functions],
        ),
    )
    if not response:
        raise RuntimeError("expected response")
    if not response.text and not response.function_calls:
        raise RuntimeError("expected response text or function_calls")
    if not response.usage_metadata:
        raise RuntimeError("expected response usage metadata")
    return response
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()