-
Notifications
You must be signed in to change notification settings - Fork 0
feat: release updates #7
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
66a0193
000ccbc
8690511
276f319
7636b7b
8510876
05e89cd
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -103,4 +103,6 @@ typings/ | |
| # TernJS port file | ||
| .tern-port | ||
|
|
||
| .env* | ||
|
|
||
| TODO.md | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,36 @@ | ||
| import requests | ||
|
|
||
def deepseek_summary(issues, prompt, key, model="deepseek-chat"):
    """Summarize *issues* using the DeepSeek chat-completions API.

    Args:
        issues: The issue text to summarize (appended to *prompt*).
        prompt: Instruction text prepended to *issues*.
        key: DeepSeek API key used as a Bearer token.
        model: DeepSeek model name (default "deepseek-chat").

    Returns:
        The non-empty summary string produced by the model.

    Raises:
        ValueError: If the HTTP request fails or the response payload is
            missing choices, a message, or non-empty content.
    """
    url = "https://api.deepseek.com/chat/completions"
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {key}"
    }

    prompt = f"{prompt} {issues}"
    data = {
        "model": model,
        "messages": [
            {"role": "user", "content": prompt}
        ],
        "stream": False
    }

    # timeout prevents the call from hanging forever on a stalled connection
    response = requests.post(url, headers=headers, json=data, timeout=60)

    if response.status_code == 200:
        # response.json() returns a plain dict, so item access is required.
        # (The original used attribute access — result.choices — which
        # raises AttributeError on a dict before any validation could run.)
        result = response.json()
        choices = result.get("choices")
        if not choices:
            raise ValueError("No response choices available")
        if not choices[0]:
            raise ValueError("First choice is null")
        message = choices[0].get("message")
        if not message:
            raise ValueError("Message is null")

        summary = message.get("content")
        if not summary:
            raise ValueError("Summary is null or empty.")

        return summary
    else:
        raise ValueError("Request failed with status code: " + str(response.status_code))
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -0,0 +1,14 @@ | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| from google import genai | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
def gemini_summary(issues, prompt, key, model="gemini-2.0-flash"):
    """Summarize *issues* with Google's Gemini generate_content API.

    Args:
        issues: The issue text to summarize (appended to *prompt*).
        prompt: Instruction text prepended to *issues*.
        key: Google Gen AI API key.
        model: Gemini model name (default "gemini-2.0-flash").

    Returns:
        The non-empty summary text returned by the model.

    Raises:
        ValueError: If the response is falsy or carries no text.
    """
    api = genai.Client(api_key=key)

    combined = f"{prompt} {issues}"
    reply = api.models.generate_content(model=model, contents=combined)

    # Guard against a falsy response object or an empty .text payload.
    if not (reply and reply.text):
        raise ValueError("Summary is null or empty.")

    return reply.text
|
Comment on lines
+3
to
+14
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion Add error handling and input validation. The function needs additional error handling and input validation:
Apply this diff to improve error handling and validation: def gemini_summary(issues, prompt, key, model="gemini-2.0-flash"):
+ if not issues or not prompt or not key:
+ raise ValueError("Required parameters (issues, prompt, key) cannot be empty.")
+
client = genai.Client(api_key=key)
prompt = f"{prompt} {issues}"
- response = client.models.generate_content(
- model=model, contents=prompt
- )
+ try:
+ response = client.models.generate_content(
+ model=model, contents=prompt
+ )
+ except Exception as e:
+ raise ValueError(f"Failed to generate content: {str(e)}")
if not response or not response.text:
raise ValueError("Summary is null or empty.")📝 Committable suggestion
Suggested change
|
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Fix response parsing and add request error handling.
Apply this diff to fix the issues:
📝 Committable suggestion