diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3daf9b1..08fd286 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,8 +25,10 @@ jobs: id: summary uses: ./ with: - openAiKey: ${{ secrets.OPENAI_KEY }} - openAiOrg: ${{ secrets.OPENAI_ORG }} + #deepseekKey: ${{ secrets.DEEPSEEK_KEY }} + geminiKey: ${{ secrets.GEMINI_KEY }} + #openAiKey: ${{ secrets.OPENAI_KEY }} + #openAiOrg: ${{ secrets.OPENAI_ORG }} #anthropicKey: ${{ secrets.ANTHROPIC_KEY }} #notionKey: ${{ secrets.NOTION_KEY }} #notionDbId: ${{ secrets.NOTION_DB_ID }} diff --git a/.gitignore b/.gitignore index 8df304f..d756c15 100644 --- a/.gitignore +++ b/.gitignore @@ -103,4 +103,6 @@ typings/ # TernJS port file .tern-port +.env* + TODO.md \ No newline at end of file diff --git a/README.md b/README.md index 01b1c61..0c72b3e 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,10 @@ In the future we will allow more customization of the task/issue management tool - **`openAiModel`** (optional): OpenAI model to use (e.g. gpt-4o, gpt-3.5-turbo). Default: gpt-4o - **`anthropicKey`** (optional): Anthropic API key. Ignored if empty. - **`anthropicModel`** (optional): Anthropic model to use (e.g. claude-3-opus-20240229, claude-3-5-sonnet-20240620). Default: claude-3-5-sonnet-20240620. +- **`deepseekKey`** (optional): Deepseek API key. If empty it will be ignored. +- **`deepseekModel`** (optional): Deepseek model to use (e.g. deepseek-chat, deepseek-reasoner). Default: deepseek-chat. +- **`geminiKey`** (optional): Gemini API key. If empty it will be ignored. +- **`geminiModel`** (optional): Gemini model to use (e.g. gemini-2.0-flash, gemini-1.5-pro). Default: gemini-2.0-flash. - **`linearKey`** (optional): Linear API key. Ignored if empty. - **`linearViewId`** (optional): Linear view ID. This allows you to specify a custom view for fetching issues that can be modified to your needs. - **`notionKey`** (optional): Notion API key. 
@@ -117,5 +121,6 @@ jobs: - [ ] Add support for other task/issue management tools - [ ] Add support for other LLM providers - [x] different model selection - - [ ] DeepSeek + - [x] DeepSeek + - [x] Gemini - [ ] Qwen diff --git a/action.yml b/action.yml index fbf2398..64db9c4 100644 --- a/action.yml +++ b/action.yml @@ -21,6 +21,20 @@ inputs: description: "Anthropic model to use (e.g. claude-3-opus-20240229, claude-3-5-sonnet-20240620). Default: claude-3-5-sonnet-20240620" required: false default: "claude-3-5-sonnet-20240620" + deepseekKey: + description: "Deepseek API key. If empty it will be ignored." + required: false + deepseekModel: + description: "Deepseek model to use (e.g. deepseek-chat, deepseek-reasoner). Default: deepseek-chat" + required: false + default: "deepseek-chat" + geminiKey: + description: "Gemini API key. If empty it will be ignored." + required: false + geminiModel: + description: "Gemini model to use (e.g. gemini-2.0-flash, gemini-1.5-pro). Default: gemini-2.0-flash" + required: false + default: "gemini-2.0-flash" + linearKey: description: "Linear API key. If empty it will be ignored." 
required: false @@ -87,7 +101,7 @@ runs: pip install -r ${{ github.action_path }}/requirements.txt else echo "requirements.txt not found, installing dependencies directly" - pip install 'requests==2.32.3' 'anthropic==0.40.0' 'openai==1.55.3' 'nltk==3.9.1' 'markdown-it-py==3.0.0' + pip install 'requests==2.32.3' 'anthropic==0.40.0' 'openai==1.55.3' 'nltk==3.9.1' 'markdown-it-py==3.0.0' 'google-genai==1.2.0' fi - name: 🤖 Run @@ -109,6 +123,10 @@ runs: OPENAI_MODEL: ${{ inputs.openAiModel }} ANTHROPIC_KEY: ${{ inputs.anthropicKey }} ANTHROPIC_MODEL: ${{ inputs.anthropicModel }} + DEEPSEEK_KEY: ${{ inputs.deepseekKey }} + DEEPSEEK_MODEL: ${{ inputs.deepseekModel }} + GEMINI_KEY: ${{ inputs.geminiKey }} + GEMINI_MODEL: ${{ inputs.geminiModel }} NOTION_KEY: ${{ inputs.notionKey }} LINEAR_KEY: ${{ inputs.linearKey }} LINEAR_VIEW_ID: ${{ inputs.linearViewId }} diff --git a/requirements.txt b/requirements.txt index 4b89f82..eefe015 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,5 @@ anthropic==0.40.0 openai==1.55.3 pytest==8.3.3 nltk==3.9.1 -markdown-it-py==3.0.0 \ No newline at end of file +markdown-it-py==3.0.0 +google-genai==1.2.0 \ No newline at end of file diff --git a/src/deepseek_summary.py b/src/deepseek_summary.py new file mode 100644 index 0000000..601dc23 --- /dev/null +++ b/src/deepseek_summary.py @@ -0,0 +1,36 @@ +import requests + +def deepseek_summary(issues, prompt, key, model="deepseek-chat"): + url = "https://api.deepseek.com/chat/completions" + headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {key}" + } + + prompt = f"{prompt} {issues}" + data = { + "model": model, + "messages": [ + {"role": "user", "content": prompt} + ], + "stream": False + } + + response = requests.post(url, headers=headers, json=data) + + if response.status_code == 200: + result = response.json() + if not result.get("choices"): + raise ValueError("No response choices available") + if not result["choices"][0]: + raise ValueError("First choice is 
null") + if not result["choices"][0].get("message"): + raise ValueError("Message is null") + + summary = result["choices"][0]["message"].get("content") + if not summary: + raise ValueError("Summary is null or empty.") + + return summary + else: + raise ValueError("Request failed with status code: " + str(response.status_code)) \ No newline at end of file diff --git a/src/gemini_summary.py b/src/gemini_summary.py new file mode 100644 index 0000000..c78f5f6 --- /dev/null +++ b/src/gemini_summary.py @@ -0,0 +1,14 @@ +from google import genai + +def gemini_summary(issues, prompt, key, model="gemini-2.0-flash"): + client = genai.Client(api_key=key) + + prompt = f"{prompt} {issues}" + response = client.models.generate_content( + model=model, contents=prompt + ) + + if not response or not response.text: + raise ValueError("Summary is null or empty.") + + return response.text diff --git a/src/main.py b/src/main.py index 18ca9fa..a0f32bc 100644 --- a/src/main.py +++ b/src/main.py @@ -3,6 +3,8 @@ from claude_summary import claude_summary from linear import linear from openai_summary import openai_summary +from deepseek_summary import deepseek_summary +from gemini_summary import gemini_summary from notion import notion from helpers import is_empty @@ -15,6 +17,10 @@ def main(): OPENAI_MODEL = os.environ.get("OPENAI_MODEL") ANTHROPIC_KEY = os.environ.get("ANTHROPIC_KEY") ANTHROPIC_MODEL = os.environ.get("ANTHROPIC_MODEL") + DEEPSEEK_KEY = os.environ.get("DEEPSEEK_KEY") + DEEPSEEK_MODEL = os.environ.get("DEEPSEEK_MODEL") + GEMINI_KEY = os.environ.get("GEMINI_KEY") + GEMINI_MODEL = os.environ.get("GEMINI_MODEL") NOTION_KEY = os.environ.get("NOTION_KEY") NOTION_DB_ID = os.environ.get("NOTION_DB_ID") LINEAR_KEY = os.environ.get("LINEAR_KEY") @@ -40,6 +46,10 @@ def main(): release_notes = claude_summary(issues, PROMPT, ANTHROPIC_KEY, ANTHROPIC_MODEL) elif(not is_empty(OPENAI_KEY) and not is_empty(OPENAI_ORG)): release_notes = openai_summary(issues, PROMPT, OPENAI_KEY, OPENAI_ORG, OPENAI_MODEL) + elif(not 
is_empty(DEEPSEEK_KEY)): + release_notes = deepseek_summary(issues, PROMPT, DEEPSEEK_KEY, DEEPSEEK_MODEL) + elif(not is_empty(GEMINI_KEY)): + release_notes = gemini_summary(issues, PROMPT, GEMINI_KEY, GEMINI_MODEL) if(not is_empty(NOTION_KEY) and not is_empty(release_notes)): notion(release_notes, COMMITS, NOTION_KEY, NOTION_DB_ID, VERSION, CHANGELOG, PR_LINK)