From 0483145a3bf3e1d94092a39d28bc7fda82d7e1da Mon Sep 17 00:00:00 2001
From: box-apimgmt
Date: Thu, 24 Oct 2024 08:22:34 -0700
Subject: [PATCH] feat: add AWS params

---
 openapi.json | 41 ++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 40 insertions(+), 1 deletion(-)

diff --git a/openapi.json b/openapi.json
index 9eec65b3..b4a7b65a 100644
--- a/openapi.json
+++ b/openapi.json
@@ -14,7 +14,7 @@
       "url": "http://www.apache.org/licenses/LICENSE-2.0"
     },
     "version": "2.0.0",
-    "x-box-commit-hash": "e83bf201d8"
+    "x-box-commit-hash": "30e62e9e7d"
   },
   "servers": [
     {
@@ -24505,6 +24505,9 @@
             },
             {
               "$ref": "#/components/schemas/AiLlmEndpointParamsGoogle"
+            },
+            {
+              "$ref": "#/components/schemas/AiLlmEndpointParamsAWS"
             }
           ]
         }
@@ -24989,6 +24992,42 @@
           "base"
         ]
       },
+      "AiLlmEndpointParamsAWS": {
+        "description": "AI LLM endpoint params AWS object",
+        "type": "object",
+        "properties": {
+          "type": {
+            "description": "The type of the AI LLM endpoint params object for AWS.\nThis parameter is **required**.",
+            "type": "string",
+            "example": "aws_params",
+            "enum": [
+              "aws_params"
+            ],
+            "nullable": false
+          },
+          "temperature": {
+            "description": "What sampling temperature to use, between 0 and 1. Higher values like 0.8 will make the output more random, \nwhile lower values like 0.2 will make it more focused and deterministic. \nWe generally recommend altering this or `top_p` but not both.",
+            "type": "number",
+            "example": 0.5,
+            "maximum": 1,
+            "minimum": 0,
+            "nullable": true
+          },
+          "top_p": {
+            "description": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results \nof the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability \nmass are considered. We generally recommend altering this or temperature but not both.",
+            "type": "number",
+            "example": 0.5,
+            "maximum": 1,
+            "minimum": 0,
+            "nullable": true
+          }
+        },
+        "required": [
+          "type"
+        ],
+        "title": "AI LLM endpoint params AWS",
+        "x-box-resource-id": "ai_llm_endpoint_params_aws"
+      },
       "AiLlmEndpointParamsGoogle": {
        "description": "AI LLM endpoint params Google object",
        "type": "object",
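
Note: a minimal instance of the new `AiLlmEndpointParamsAWS` schema, built only from the `example` and `enum` values defined in this patch, would look like the snippet below. Where this object is actually supplied in a request (presumably wherever the `oneOf` extended above is referenced, such as an AI agent's endpoint-params field) is not shown in this patch, so the placement is an assumption; only the field names, types, and ranges come from the schema itself.

    {
      "type": "aws_params",
      "temperature": 0.5,
      "top_p": 0.5
    }

Per the schema, `type` is the only required field, and `temperature` and `top_p` are nullable numbers constrained to the range 0 to 1, with the description recommending that callers alter one or the other but not both.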