[Feature] OpenAI Function Calling (#1224)
This commit is contained in:
@@ -74,3 +74,32 @@ def test_get_llm_model_answer_without_system_prompt(config, mocker):
|
||||
model_kwargs={"top_p": config.top_p},
|
||||
api_key=os.environ["OPENAI_API_KEY"],
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    "mock_return, expected",
    [
        ([{"test": "test"}], '{"test": "test"}'),
        ([], "Input could not be mapped to the function!"),
    ],
)
def test_get_llm_model_answer_with_tools(config, mocker, mock_return, expected):
    """When tools are supplied, OpenAILlm binds them to the chat model and
    returns the parsed tool output (or a fallback message when nothing maps)."""
    # Patch the chat model and the two langchain helpers the llm relies on.
    chat_model_mock = mocker.patch("embedchain.llm.openai.ChatOpenAI")
    convert_tool_mock = mocker.patch("langchain_core.utils.function_calling.convert_to_openai_tool")
    tools_parser_mock = mocker.patch("langchain.output_parsers.openai_tools.JsonOutputToolsParser")
    # The chained call (bind -> pipe -> invoke) yields the parametrized tool result.
    chat_model_mock.return_value.bind.return_value.pipe.return_value.invoke.return_value = mock_return

    llm = OpenAILlm(config, tools={"test": "test"})
    answer = llm.get_llm_model_answer("Test query")

    # The chat model must be constructed from the config plus the env API key.
    chat_model_mock.assert_called_once_with(
        model=config.model,
        temperature=config.temperature,
        max_tokens=config.max_tokens,
        model_kwargs={"top_p": config.top_p},
        api_key=os.environ["OPENAI_API_KEY"],
    )
    # The supplied tools dict is converted exactly once, and a parser is built.
    convert_tool_mock.assert_called_once_with({"test": "test"})
    tools_parser_mock.assert_called_once()

    assert answer == expected
|
||||
|
||||
Reference in New Issue
Block a user