feat: add AI LLM endpoint AWS params (box/box-openapi#478) (#388)
box-sdk-build authored Oct 29, 2024
1 parent 9262570 commit d2fd1ec
Showing 9 changed files with 218 additions and 41 deletions.
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
{ "engineHash": "2efc8ab", "specHash": "e798cb1", "version": "1.7.0" }
{ "engineHash": "2efc8ab", "specHash": "90cf4e4", "version": "1.7.0" }
10 changes: 5 additions & 5 deletions src/schemas/aiAgentBasicGenTool.generated.ts
@@ -1,5 +1,5 @@
-import { serializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
-import { deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { serializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { deserializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { serializeAiAgentBasicTextToolTextGen } from './aiAgentBasicTextToolTextGen.generated.js';
@@ -8,7 +8,7 @@ import { serializeAiAgentLongTextToolTextGenEmbeddingsField } from './aiAgentLon
import { deserializeAiAgentLongTextToolTextGenEmbeddingsField } from './aiAgentLongTextToolTextGen.generated.js';
import { serializeAiAgentLongTextToolTextGen } from './aiAgentLongTextToolTextGen.generated.js';
import { deserializeAiAgentLongTextToolTextGen } from './aiAgentLongTextToolTextGen.generated.js';
-import { AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { AiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { AiAgentBasicTextToolTextGen } from './aiAgentBasicTextToolTextGen.generated.js';
import { AiAgentLongTextToolTextGenEmbeddingsField } from './aiAgentLongTextToolTextGen.generated.js';
@@ -101,10 +101,10 @@ export function deserializeAiAgentBasicGenTool(
: val.num_tokens_for_completion;
const llmEndpointParams:
| undefined
-| AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
+| AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
val.llm_endpoint_params == void 0
? void 0
-: deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
+: deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
val.llm_endpoint_params
);
return {
10 changes: 5 additions & 5 deletions src/schemas/aiAgentBasicTextTool.generated.ts
@@ -1,8 +1,8 @@
-import { serializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
-import { deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { serializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { deserializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
-import { AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { AiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { BoxSdkError } from '../box/errors.js';
import { SerializedData } from '../serialization/json.js';
@@ -86,10 +86,10 @@ export function deserializeAiAgentBasicTextTool(
: val.num_tokens_for_completion;
const llmEndpointParams:
| undefined
-| AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
+| AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
val.llm_endpoint_params == void 0
? void 0
-: deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
+: deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
val.llm_endpoint_params
);
return {
14 changes: 7 additions & 7 deletions src/schemas/aiAgentBasicTextToolBase.generated.ts
@@ -1,6 +1,6 @@
-import { serializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
-import { deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
-import { AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { BoxSdkError } from '../box/errors.js';
import { SerializedData } from '../serialization/json.js';
import { sdIsEmpty } from '../serialization/json.js';
@@ -18,7 +18,7 @@ export interface AiAgentBasicTextToolBase {
readonly numTokensForCompletion?: number;
/**
* The parameters for the LLM endpoint specific to OpenAI / Google models. */
-readonly llmEndpointParams?: AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi;
+readonly llmEndpointParams?: AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi;
readonly rawData?: SerializedData;
}
export function serializeAiAgentBasicTextToolBase(
@@ -33,7 +33,7 @@ export function serializeAiAgentBasicTextToolBase(
['llm_endpoint_params']:
val.llmEndpointParams == void 0
? void 0
-: serializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
+: serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
val.llmEndpointParams
),
};
@@ -68,10 +68,10 @@ export function deserializeAiAgentBasicTextToolBase(
: val.num_tokens_for_completion;
const llmEndpointParams:
| undefined
-| AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
+| AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
val.llm_endpoint_params == void 0
? void 0
-: deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
+: deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
val.llm_endpoint_params
);
return {
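With this commit, the llmEndpointParams field on the agent tool schemas accepts the new AWS variant alongside the existing Google and OpenAI variants. A minimal sketch of the intended usage follows — it assumes the combined union type (defined in aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.ts, not shown in this excerpt) is a plain union that includes AiLlmEndpointParamsAws, that the remaining AiAgentBasicTextToolBase fields are optional, and that the field values shown are purely illustrative:

import {
  AiAgentBasicTextToolBase,
  serializeAiAgentBasicTextToolBase,
} from './aiAgentBasicTextToolBase.generated.js';
import { AiLlmEndpointParamsAws } from './aiLlmEndpointParamsAws.generated.js';

// Hypothetical agent tool config that opts into the AWS endpoint params.
const tool: AiAgentBasicTextToolBase = {
  numTokensForCompletion: 8000,
  llmEndpointParams: new AiLlmEndpointParamsAws({ temperature: 0.2, topP: 0.9 }),
};

// llm_endpoint_params is now serialized through the widened
// serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi helper.
const serialized = serializeAiAgentBasicTextToolBase(tool);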
10 changes: 5 additions & 5 deletions src/schemas/aiAgentBasicTextToolTextGen.generated.ts
@@ -1,8 +1,8 @@
-import { serializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
-import { deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { serializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { deserializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
-import { AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { AiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { BoxSdkError } from '../box/errors.js';
import { SerializedData } from '../serialization/json.js';
@@ -89,10 +89,10 @@ export function deserializeAiAgentBasicTextToolTextGen(
: val.num_tokens_for_completion;
const llmEndpointParams:
| undefined
-| AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
+| AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
val.llm_endpoint_params == void 0
? void 0
-: deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
+: deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
val.llm_endpoint_params
);
return {
10 changes: 5 additions & 5 deletions src/schemas/aiAgentLongTextTool.generated.ts
@@ -1,10 +1,10 @@
-import { serializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
-import { deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { serializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { deserializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { serializeAiAgentBasicTextTool } from './aiAgentBasicTextTool.generated.js';
import { deserializeAiAgentBasicTextTool } from './aiAgentBasicTextTool.generated.js';
-import { AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { AiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { AiAgentBasicTextTool } from './aiAgentBasicTextTool.generated.js';
import { BoxSdkError } from '../box/errors.js';
@@ -178,10 +178,10 @@ export function deserializeAiAgentLongTextTool(
: val.num_tokens_for_completion;
const llmEndpointParams:
| undefined
-| AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
+| AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
val.llm_endpoint_params == void 0
? void 0
-: deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
+: deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
val.llm_endpoint_params
);
return {
10 changes: 5 additions & 5 deletions src/schemas/aiAgentLongTextToolTextGen.generated.ts
@@ -1,10 +1,10 @@
-import { serializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
-import { deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { serializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { serializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { deserializeAiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { serializeAiAgentBasicTextToolTextGen } from './aiAgentBasicTextToolTextGen.generated.js';
import { deserializeAiAgentBasicTextToolTextGen } from './aiAgentBasicTextToolTextGen.generated.js';
-import { AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
+import { AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi } from './aiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi.generated.js';
import { AiAgentBasicTextToolBase } from './aiAgentBasicTextToolBase.generated.js';
import { AiAgentBasicTextToolTextGen } from './aiAgentBasicTextToolTextGen.generated.js';
import { BoxSdkError } from '../box/errors.js';
@@ -186,10 +186,10 @@ export function deserializeAiAgentLongTextToolTextGen(
: val.num_tokens_for_completion;
const llmEndpointParams:
| undefined
-| AiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
+| AiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi =
val.llm_endpoint_params == void 0
? void 0
-: deserializeAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
+: deserializeAiLlmEndpointParamsAwsOrAiLlmEndpointParamsGoogleOrAiLlmEndpointParamsOpenAi(
val.llm_endpoint_params
);
return {
166 changes: 166 additions & 0 deletions src/schemas/aiLlmEndpointParamsAws.generated.ts
@@ -0,0 +1,166 @@
import { BoxSdkError } from '../box/errors.js';
import { SerializedData } from '../serialization/json.js';
import { sdIsEmpty } from '../serialization/json.js';
import { sdIsBoolean } from '../serialization/json.js';
import { sdIsNumber } from '../serialization/json.js';
import { sdIsString } from '../serialization/json.js';
import { sdIsList } from '../serialization/json.js';
import { sdIsMap } from '../serialization/json.js';
export type AiLlmEndpointParamsAwsTypeField = 'aws_params';
export class AiLlmEndpointParamsAws {
/**
* The type of the AI LLM endpoint params object for AWS.
* This parameter is **required**. */
readonly type: AiLlmEndpointParamsAwsTypeField =
'aws_params' as AiLlmEndpointParamsAwsTypeField;
/**
* What sampling temperature to use, between 0 and 1. Higher values like 0.8 will make the output more random,
* while lower values like 0.2 will make it more focused and deterministic.
* We generally recommend altering this or `top_p` but not both. */
readonly temperature?: number;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the results
* of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability
* mass are considered. We generally recommend altering this or temperature but not both. */
readonly topP?: number;
readonly rawData?: SerializedData;
constructor(
fields: Omit<AiLlmEndpointParamsAws, 'type'> &
Partial<Pick<AiLlmEndpointParamsAws, 'type'>>
) {
if (fields.type) {
this.type = fields.type;
}
if (fields.temperature) {
this.temperature = fields.temperature;
}
if (fields.topP) {
this.topP = fields.topP;
}
if (fields.rawData) {
this.rawData = fields.rawData;
}
}
}
export interface AiLlmEndpointParamsAwsInput {
/**
* The type of the AI LLM endpoint params object for AWS.
* This parameter is **required**. */
readonly type?: AiLlmEndpointParamsAwsTypeField;
/**
* What sampling temperature to use, between 0 and 1. Higher values like 0.8 will make the output more random,
* while lower values like 0.2 will make it more focused and deterministic.
* We generally recommend altering this or `top_p` but not both. */
readonly temperature?: number;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the results
* of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability
* mass are considered. We generally recommend altering this or temperature but not both. */
readonly topP?: number;
readonly rawData?: SerializedData;
}
export function serializeAiLlmEndpointParamsAwsTypeField(
val: AiLlmEndpointParamsAwsTypeField
): SerializedData {
return val;
}
export function deserializeAiLlmEndpointParamsAwsTypeField(
val: SerializedData
): AiLlmEndpointParamsAwsTypeField {
if (val == 'aws_params') {
return val;
}
throw new BoxSdkError({
message: "Can't deserialize AiLlmEndpointParamsAwsTypeField",
});
}
export function serializeAiLlmEndpointParamsAws(
val: AiLlmEndpointParamsAws
): SerializedData {
return {
['type']: serializeAiLlmEndpointParamsAwsTypeField(val.type),
['temperature']: val.temperature == void 0 ? void 0 : val.temperature,
['top_p']: val.topP == void 0 ? void 0 : val.topP,
};
}
export function deserializeAiLlmEndpointParamsAws(
val: SerializedData
): AiLlmEndpointParamsAws {
if (!sdIsMap(val)) {
throw new BoxSdkError({
message: 'Expecting a map for "AiLlmEndpointParamsAws"',
});
}
if (val.type == void 0) {
throw new BoxSdkError({
message:
'Expecting "type" of type "AiLlmEndpointParamsAws" to be defined',
});
}
const type: AiLlmEndpointParamsAwsTypeField =
deserializeAiLlmEndpointParamsAwsTypeField(val.type);
if (!(val.temperature == void 0) && !sdIsNumber(val.temperature)) {
throw new BoxSdkError({
message:
'Expecting number for "temperature" of type "AiLlmEndpointParamsAws"',
});
}
const temperature: undefined | number =
val.temperature == void 0 ? void 0 : val.temperature;
if (!(val.top_p == void 0) && !sdIsNumber(val.top_p)) {
throw new BoxSdkError({
message: 'Expecting number for "top_p" of type "AiLlmEndpointParamsAws"',
});
}
const topP: undefined | number = val.top_p == void 0 ? void 0 : val.top_p;
return {
type: type,
temperature: temperature,
topP: topP,
} satisfies AiLlmEndpointParamsAws;
}
export function serializeAiLlmEndpointParamsAwsInput(
val: AiLlmEndpointParamsAwsInput
): SerializedData {
return {
['type']:
val.type == void 0
? void 0
: serializeAiLlmEndpointParamsAwsTypeField(val.type),
['temperature']: val.temperature == void 0 ? void 0 : val.temperature,
['top_p']: val.topP == void 0 ? void 0 : val.topP,
};
}
export function deserializeAiLlmEndpointParamsAwsInput(
val: SerializedData
): AiLlmEndpointParamsAwsInput {
if (!sdIsMap(val)) {
throw new BoxSdkError({
message: 'Expecting a map for "AiLlmEndpointParamsAwsInput"',
});
}
const type: undefined | AiLlmEndpointParamsAwsTypeField =
val.type == void 0
? void 0
: deserializeAiLlmEndpointParamsAwsTypeField(val.type);
if (!(val.temperature == void 0) && !sdIsNumber(val.temperature)) {
throw new BoxSdkError({
message:
'Expecting number for "temperature" of type "AiLlmEndpointParamsAwsInput"',
});
}
const temperature: undefined | number =
val.temperature == void 0 ? void 0 : val.temperature;
if (!(val.top_p == void 0) && !sdIsNumber(val.top_p)) {
throw new BoxSdkError({
message:
'Expecting number for "top_p" of type "AiLlmEndpointParamsAwsInput"',
});
}
const topP: undefined | number = val.top_p == void 0 ? void 0 : val.top_p;
return {
type: type,
temperature: temperature,
topP: topP,
} satisfies AiLlmEndpointParamsAwsInput;
}
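The new schema mirrors the existing Google and OpenAI endpoint-params variants: a fixed type discriminant of 'aws_params' plus optional temperature and top_p values, with generated serialize/deserialize helpers that validate the wire shape. A short round-trip sketch using only the symbols defined above — the values are illustrative and the imports pull straight from the generated module for brevity:

import {
  AiLlmEndpointParamsAws,
  serializeAiLlmEndpointParamsAws,
  deserializeAiLlmEndpointParamsAws,
} from './aiLlmEndpointParamsAws.generated.js';

// Construct the AWS params; `type` defaults to 'aws_params'.
const awsParams = new AiLlmEndpointParamsAws({ temperature: 0.2, topP: 0.9 });

// Serialize to the wire shape: { type: 'aws_params', temperature: 0.2, top_p: 0.9 }.
const wire = serializeAiLlmEndpointParamsAws(awsParams);

// Deserialization checks the discriminant and numeric fields, throwing BoxSdkError on mismatch.
const roundTripped = deserializeAiLlmEndpointParamsAws(wire);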