Update max_tokens and formatting (#2273)

This commit is contained in:
Dev Khant
2025-02-28 15:59:34 +05:30
committed by GitHub
parent 6acb00731d
commit b131c4bfc4
25 changed files with 31 additions and 32 deletions

View File

@@ -24,7 +24,7 @@ config = {
"config": {
"model": "arn:aws:bedrock:us-east-1:123456789012:model/your-model-name",
"temperature": 0.2,
"max_tokens": 1500,
"max_tokens": 2000,
}
}
}

View File

@@ -19,7 +19,7 @@ config = {
"config": {
"model": "deepseek-chat", # default model
"temperature": 0.2,
"max_tokens": 1500,
"max_tokens": 2000,
"top_p": 1.0
}
}

View File

@@ -19,7 +19,7 @@ config = {
"config": {
"model": "gemini-1.5-flash-latest",
"temperature": 0.2,
"max_tokens": 1500,
"max_tokens": 2000,
}
}
}

View File

@@ -19,7 +19,7 @@ config = {
"config": {
"model": "gemini/gemini-pro",
"temperature": 0.2,
"max_tokens": 1500,
"max_tokens": 2000,
}
}
}

View File

@@ -17,7 +17,7 @@ config = {
"config": {
"model": "mixtral-8x7b-32768",
"temperature": 0.1,
"max_tokens": 1000,
"max_tokens": 2000,
}
}
}

View File

@@ -14,7 +14,7 @@ config = {
"config": {
"model": "gpt-4o-mini",
"temperature": 0.2,
"max_tokens": 1500,
"max_tokens": 2000,
}
}
}

View File

@@ -18,7 +18,7 @@ config = {
"config": {
"model": "gpt-4o",
"temperature": 0.2,
"max_tokens": 1500,
"max_tokens": 2000,
}
}
}

View File

@@ -15,7 +15,7 @@ config = {
"config": {
"model": "mistralai/Mixtral-8x7B-Instruct-v0.1",
"temperature": 0.2,
"max_tokens": 1500,
"max_tokens": 2000,
}
}
}

View File

@@ -21,7 +21,7 @@ config = {
"config": {
"model": "grok-2-latest",
"temperature": 0.1,
"max_tokens": 1000,
"max_tokens": 2000,
}
}
}