33
44import os
55import sys
6- from typing import Optional , Tuple , cast
6+ from typing import Optional , Tuple
77
88import click
9- from langchain_anthropic import ChatAnthropic
109from langchain_core .language_models .chat_models import BaseChatModel
11- from langchain_ollama import ChatOllama
12- from langchain_openai import ChatOpenAI
1310
1411# Keep SecretStr import in case it's needed elsewhere or for future refinement
1512
def _initialize_llm(model: str) -> BaseChatModel:
    """Create the chat model used to generate commit messages.

    Only the Google Gemini 3 preview models are supported; any other model
    name is rejected up front.

    Args:
        model: Model identifier, e.g. "gemini-3-flash-preview".

    Returns:
        A configured ``ChatGoogleGenerativeAI`` instance.

    Raises:
        click.ClickException: if the model is not in the allowed list, the
            ``langchain-google-genai`` package is not installed, no Google
            API key is configured, or the provider client fails to
            initialize.
    """
    google_api_key_str = _get_google_api_key()

    # Enforce the strict Gemini-3-only policy.
    allowed_models = ["gemini-3-flash-preview", "gemini-3-pro-preview"]
    if model not in allowed_models:
        raise click.ClickException(
            f"🚫 Unsupported model: {model}. "
            f"Only Google Gemini 3 models are allowed: {', '.join(allowed_models)}"
        )

    if ChatGoogleGenerativeAI is None:
        raise click.ClickException(
            "Error: 'langchain-google-genai' is not installed. "
            "Run 'pip install commitai[test]' or "
            "'pip install langchain-google-genai'"
        )
    if not google_api_key_str:
        raise click.ClickException(
            "Error: Google API Key not found. Set GOOGLE_API_KEY, "
            "GEMINI_API_KEY, or GOOGLE_GENERATIVE_AI_API_KEY."
        )

    try:
        return ChatGoogleGenerativeAI(
            model=model,
            google_api_key=google_api_key_str,
            convert_system_message_to_human=True,
        )
    except Exception as e:
        # Wrap only unexpected provider/initialization failures. The
        # validation errors above already carry precise messages and must
        # not be re-wrapped into this generic one (the original code's
        # broad try/except swallowed and re-wrapped them).
        raise click.ClickException(f"Error initializing AI model: {e}") from e
@@ -143,9 +127,9 @@ def _handle_commit(commit_message: str, commit_flag: bool) -> None:
143127 else :
144128 raise click .ClickException ("Aborted by user." )
145129 except click .Abort :
146- raise click .ClickException ("Aborted by user." )
130+ raise click .ClickException ("Aborted by user." ) from None
147131 except Exception as e :
148- raise click .ClickException (f"Error handling user input: { e } " )
132+ raise click .ClickException (f"Error handling user input: { e } " ) from e
149133
150134 if not final_commit_message :
151135 raise click .ClickException ("Aborting commit due to empty commit message." )
@@ -195,21 +179,41 @@ def cli() -> None:
195179 "-m" ,
196180 default = "gemini-3-flash-preview" ,
197181 help = (
198- "Set the engine model (default: gemini-3-flash-preview). Examples: 'gemini-3-flash-preview', 'gemini-3-pro-preview', "
199- "'gpt-4', 'claude-3-opus'. Ensure API key env var is set "
200- "(OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_API_KEY/GEMINI_API_KEY/GOOGLE_GENERATIVE_AI_API_KEY)."
182+ "Set the engine model (default: gemini-3-flash-preview). "
183+ "Only Google Gemini 3 models are supported "
184+ "('gemini-3-flash-preview', 'gemini-3-pro-preview'). "
185+ "Ensure GOOGLE_API_KEY is set."
201186 ),
202187)
188+ @click .option (
189+ "--deep" ,
190+ "-d" ,
191+ is_flag = True ,
192+ help = "Use the deeper reasoning model (gemini-3-pro-preview)." ,
193+ )
203194def generate_message (
204195 description : Tuple [str , ...],
205196 commit : bool ,
206197 review : bool ,
207198 template : Optional [str ],
208199 add : bool ,
209200 model : str ,
201+ deep : bool ,
210202) -> None :
211203 explanation = " " .join (description )
212204
205+     # Model selection logic:
206+     # 1. Default is gemini-3-flash-preview.
207+     # 2. If --deep is passed and the user kept the default model,
208+     #    upgrade to gemini-3-pro-preview; an explicitly chosen -m
209+     #    model is respected as-is.
210+     if deep and model == "gemini-3-flash-preview":
211+         model = "gemini-3-pro-preview"
216+
213217 llm = _initialize_llm (model )
214218
215219 if add :
@@ -231,7 +235,8 @@ def generate_message(
231235 # Optional pre-generation review
232236 if review :
233237 click .secho (
234- "\n \n 🔎 Reviewing the staged changes before generating a commit message...\n " ,
238+ "\n \n 🔎 Reviewing the staged changes before "
239+ "generating a commit message...\n " ,
235240 fg = "blue" ,
236241 bold = True ,
237242 )
@@ -254,8 +259,10 @@ def generate_message(
254259 fg = "yellow" ,
255260 )
256261
257- # Check for template from env or file if not provided via CLI (though CLI overrides or is deprecated)
258- # The agent/chain prompt Logic usually handles 'template' variable if passed in input.
262+ # Check for template from env or file if not provided via CLI
263+ # (though CLI overrides or is deprecated)
264+ # The agent/chain prompt Logic usually handles 'template' variable
265+ # if passed in input.
259266 # We need to fetch the template content if it exists to pass to agent.
260267
261268 final_template_content = template
@@ -265,7 +272,8 @@ def generate_message(
265272
266273 click .clear ()
267274 click .secho (
268- "\n \n 🧠 internal-monologue: Analyzing changes, checking for sensitive data, and summarizing...\n \n " ,
275+ "\n \n 🧠 internal-monologue: Analyzing changes, "
276+ "checking for sensitive data, and summarizing...\n \n " ,
269277 fg = "blue" ,
270278 bold = True ,
271279 )
@@ -327,9 +335,15 @@ def create_template_command(template_content: Tuple[str, ...]) -> None:
327335@click .option (
328336 "--model" ,
329337 "-m" ,
330- default = "gpt-5 " ,
338+ default = "gemini-3-flash-preview " ,
331339 help = "Set the engine model to be used." ,
332340)
341+ @click .option (
342+ "--deep" ,
343+ "-d" ,
344+ is_flag = True ,
345+ help = "Use the deeper reasoning model (gemini-3-pro-preview)." ,
346+ )
333347@click .pass_context
334348def commitai_alias (
335349 ctx : click .Context ,
@@ -338,6 +352,7 @@ def commitai_alias(
338352 commit : bool ,
339353 review : bool ,
340354 model : str ,
355+ deep : bool ,
341356) -> None :
342357 """Alias for the 'generate' command."""
343358 ctx .forward (
@@ -347,6 +362,7 @@ def commitai_alias(
347362 commit = commit ,
348363 review = review ,
349364 model = model ,
365+ deep = deep ,
350366 )
351367
352368
0 commit comments