@@ -749,6 +749,7 @@ def cleaned_args(args: dict):
749749 KNOWN_LLM_CONFIG_FIELDS ,
750750 ignore_underscored = True ,
751751 )
752+ ret = {}
752753 logger .debug (f"Options: via_streaming: { via_streaming } , stream: { stream } " )
753754 logger .debug (f"Initial completion kwargs: { cleaned_args (completion_kwargs )} " )
754755 if recursive_call_info is None :
@@ -760,6 +761,19 @@ def cleaned_args(args: dict):
760761 if llm .get ("api_url" ):
761762 completion_kwargs ["api_base" ] = llm ["api_url" ]
762763 if tools is not None :
764+ # Check whether this LLM supports function calling; if not, it won't work with tools
765+ if not litellm .supports_function_calling (model = llm ["llm" ]):
766+ ret ["error" ] = f"Model { llmalias } does not support function_calling, cannot use with tools"
767+ ret ["answer" ] = ""
768+ ret ["ok" ] = False
769+ return ret
770+ #
771+ if not "tools" in litellm .get_supported_openai_params (model = llm ["llm" ]):
772+ ret ["error" ] = f"Model { llmalias } does not support 'tools' parameter, cannot use with tools"
773+ ret ["answer" ] = ""
774+ ret ["ok" ] = False
775+ return ret
776+
763777 # add tooling-related arguments to completion_kwargs
764778 completion_kwargs ["tools" ] = tools
765779 if not self .supports_function_calling (llmalias ):
@@ -787,7 +801,6 @@ def cleaned_args(args: dict):
787801 completion_kwargs ["stream" ] = True
788802 completion_kwargs ["stream_options" ] = {"include_usage" : True }
789803 logger .debug (f"completion kwargs after detecting stream: { cleaned_args (completion_kwargs )} " )
790- ret = {}
791804 # before adding the kwargs, save the recursive_call_info and remove it from kwargs
792805 if debug :
793806 logger .debug (f"Received recursive call info: { recursive_call_info } " )
0 commit comments