Fixes pytests openai: args change and pathlib reference for pricing file (#1602)

This commit is contained in:
Pranav Puranik
2024-07-31 09:56:23 -05:00
committed by GitHub
parent 47afe52296
commit abd4ec64eb
3 changed files with 25 additions and 38 deletions

View File

@@ -114,7 +114,8 @@ def test_get_llm_model_answer_without_system_prompt(config, mocker):
model=config.model,
temperature=config.temperature,
max_tokens=config.max_tokens,
model_kwargs={"top_p": config.top_p},
model_kwargs={},
top_p=config.top_p,
api_key=os.environ["OPENAI_API_KEY"],
base_url=os.environ["OPENAI_API_BASE"],
http_client=None,
@@ -133,7 +134,8 @@ def test_get_llm_model_answer_with_special_headers(config, mocker):
model=config.model,
temperature=config.temperature,
max_tokens=config.max_tokens,
model_kwargs={"top_p": config.top_p},
model_kwargs={},
top_p=config.top_p,
api_key=os.environ["OPENAI_API_KEY"],
base_url=os.environ["OPENAI_API_BASE"],
default_headers={"test": "test"},
@@ -153,7 +155,8 @@ def test_get_llm_model_answer_with_model_kwargs(config, mocker):
model=config.model,
temperature=config.temperature,
max_tokens=config.max_tokens,
model_kwargs={"top_p": config.top_p, "response_format": {"type": "json_object"}},
model_kwargs={"response_format": {"type": "json_object"}},
top_p=config.top_p,
api_key=os.environ["OPENAI_API_KEY"],
base_url=os.environ["OPENAI_API_BASE"],
http_client=None,
@@ -181,7 +184,8 @@ def test_get_llm_model_answer_with_tools(config, mocker, mock_return, expected):
model=config.model,
temperature=config.temperature,
max_tokens=config.max_tokens,
model_kwargs={"top_p": config.top_p},
model_kwargs={},
top_p=config.top_p,
api_key=os.environ["OPENAI_API_KEY"],
base_url=os.environ["OPENAI_API_BASE"],
http_client=None,
@@ -218,7 +222,8 @@ def test_get_llm_model_answer_with_http_client_proxies(env_config, mocker):
model=config.model,
temperature=config.temperature,
max_tokens=config.max_tokens,
model_kwargs={"top_p": config.top_p},
model_kwargs={},
top_p=config.top_p,
api_key=os.environ["OPENAI_API_KEY"],
base_url=os.environ["OPENAI_API_BASE"],
http_client=mock_http_client_instance,
@@ -252,7 +257,8 @@ def test_get_llm_model_answer_with_http_async_client_proxies(env_config, mocker)
model=config.model,
temperature=config.temperature,
max_tokens=config.max_tokens,
model_kwargs={"top_p": config.top_p},
model_kwargs={},
top_p=config.top_p,
api_key=os.environ["OPENAI_API_KEY"],
base_url=os.environ["OPENAI_API_BASE"],
http_client=None,