feat: Add aiAgent info to AiResponse (box/box-openapi#485) (#304)
box-sdk-build authored Dec 9, 2024
1 parent 8bd79ab commit b614a6f
Showing 13 changed files with 140 additions and 13 deletions.
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
{ "engineHash": "f073ce3", "specHash": "544d370", "version": "0.5.0" }
{ "engineHash": "a839036", "specHash": "d7dfe68", "version": "0.5.0" }
32 changes: 32 additions & 0 deletions BoxSdkGen.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions Sources/Managers/Ai/AiManager.swift
@@ -51,7 +51,8 @@ public class AiManager {
}

/// Sends an AI request to supported Large Language Models (LLMs) and extracts metadata in the form of key-value pairs.
/// Freeform metadata extraction does not require any metadata template setup before sending the request.
/// In this request, both the prompt and the output can be freeform.
/// Metadata template setup before sending the request is not required.
///
/// - Parameters:
/// - requestBody: Request body of createAiExtract method
@@ -65,7 +66,8 @@
}

/// Sends an AI request to supported Large Language Models (LLMs) and returns extracted metadata as a set of key-value pairs.
/// For this request, you need to use an already defined metadata template or define a schema yourself.
/// For this request, you either need a metadata template or a list of fields you want to extract.
/// Provide **either** a metadata template or a list of fields to define the structure of the extracted metadata.
/// To learn more about creating templates, see [Creating metadata templates in the Admin Console](https://support.box.com/hc/en-us/articles/360044194033-Customizing-Metadata-Templates)
/// or use the [metadata template API](g://metadata/templates/create).
///
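For context, a minimal usage sketch of the two extract calls documented above. It assumes an authenticated BoxClient named `client`, an illustrative file ID, and the generated request-body types AiExtract, AiExtractStructured, AiItemBase, and AiExtractStructuredMetadataTemplateField; treat these names as assumptions rather than confirmed signatures.

import BoxSdkGen

// Sketch only: field names and types below are assumed from the SDK's
// generated schemas and may differ slightly in a given release.
func runExtracts(client: BoxClient) async throws {
    // Freeform extraction: a free-text prompt, no metadata template needed.
    let freeform = try await client.ai.createAiExtract(
        requestBody: AiExtract(
            prompt: "Extract the vendor name and the invoice total.",
            items: [AiItemBase(id: "12345", type: AiItemBaseTypeField.file)]
        )
    )
    print(freeform.answer)

    // Structured extraction: the output shape comes from a metadata
    // template (or, alternatively, a list of fields) in the request body.
    let structured = try await client.ai.createAiExtractStructured(
        requestBody: AiExtractStructured(
            items: [AiItemBase(id: "12345", type: AiItemBaseTypeField.file)],
            metadataTemplate: AiExtractStructuredMetadataTemplateField(
                templateKey: "invoiceTemplate",
                scope: "enterprise"
            )
        )
    )
    print(structured)
}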
@@ -1,6 +1,6 @@
import Foundation

/// AI agent tool used to handle basic text.
/// AI agent processor used to handle basic text.
public class AiAgentBasicTextTool: AiAgentBasicTextToolBase {
private enum CodingKeys: String, CodingKey {
case systemMessage = "system_message"
@@ -1,6 +1,6 @@
import Foundation

/// AI agent tool used to handle basic text.
/// AI agent processor used to handle basic text.
public class AiAgentBasicTextToolBase: Codable {
private enum CodingKeys: String, CodingKey {
case model
@@ -1,6 +1,6 @@
import Foundation

/// AI agent tool used to handle basic text.
/// AI agent processor used to handle basic text.
public class AiAgentBasicTextToolTextGen: AiAgentBasicTextToolBase {
private enum CodingKeys: String, CodingKey {
case systemMessage = "system_message"
38 changes: 38 additions & 0 deletions Sources/Schemas/AiAgentInfo/AiAgentInfo.swift
@@ -0,0 +1,38 @@
import Foundation

/// The information on the models and processors used in the request.
public class AiAgentInfo: Codable {
private enum CodingKeys: String, CodingKey {
case models
case processor
}

/// The models used for the request
public let models: [AiAgentInfoModelsField]?

/// The processor used for the request
public let processor: String?

/// Initializer for an AiAgentInfo.
///
/// - Parameters:
/// - models: The models used for the request
/// - processor: The processor used for the request
public init(models: [AiAgentInfoModelsField]? = nil, processor: String? = nil) {
self.models = models
self.processor = processor
}

required public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
models = try container.decodeIfPresent([AiAgentInfoModelsField].self, forKey: .models)
processor = try container.decodeIfPresent(String.self, forKey: .processor)
}

public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encodeIfPresent(models, forKey: .models)
try container.encodeIfPresent(processor, forKey: .processor)
}

}
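For orientation, a short construction sketch for the new schema, using the companion AiAgentInfoModelsField type shown in the next file; the module name BoxSdkGen and all field values are illustrative assumptions.

import BoxSdkGen

// Build the new schema directly; all values here are illustrative.
let agentInfo = AiAgentInfo(
    models: [
        AiAgentInfoModelsField(
            name: "example__gpt_4o_mini",
            provider: "openai",
            supportedPurpose: "multi_purpose"
        )
    ],
    processor: "basic_text"
)
print(agentInfo.processor ?? "unknown")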
45 changes: 45 additions & 0 deletions Sources/Schemas/AiAgentInfo/AiAgentInfoModelsField.swift
@@ -0,0 +1,45 @@
import Foundation

public class AiAgentInfoModelsField: Codable {
private enum CodingKeys: String, CodingKey {
case name
case provider
case supportedPurpose = "supported_purpose"
}

/// The name of the model used for the request
public let name: String?

/// The provider that owns the model used for the request
public let provider: String?

/// The supported purpose utilized by the model used for the request
public let supportedPurpose: String?

/// Initializer for an AiAgentInfoModelsField.
///
/// - Parameters:
/// - name: The name of the model used for the request
/// - provider: The provider that owns the model used for the request
/// - supportedPurpose: The supported purpose utilized by the model used for the request
public init(name: String? = nil, provider: String? = nil, supportedPurpose: String? = nil) {
self.name = name
self.provider = provider
self.supportedPurpose = supportedPurpose
}

required public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
name = try container.decodeIfPresent(String.self, forKey: .name)
provider = try container.decodeIfPresent(String.self, forKey: .provider)
supportedPurpose = try container.decodeIfPresent(String.self, forKey: .supportedPurpose)
}

public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encodeIfPresent(name, forKey: .name)
try container.encodeIfPresent(provider, forKey: .provider)
try container.encodeIfPresent(supportedPurpose, forKey: .supportedPurpose)
}

}
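Because both new classes are plain Codable types, a standard JSONDecoder round-trip works; a decoding sketch with an illustrative payload (module name BoxSdkGen assumed):

import Foundation
import BoxSdkGen

// The CodingKeys above map "supported_purpose" to `supportedPurpose`.
// The JSON payload is illustrative.
let payload = """
{
  "models": [
    { "name": "example-model", "provider": "example-provider", "supported_purpose": "multi_purpose" }
  ],
  "processor": "basic_text"
}
""".data(using: .utf8)!

do {
    let info = try JSONDecoder().decode(AiAgentInfo.self, from: payload)
    print(info.models?.first?.supportedPurpose ?? "n/a")  // prints "multi_purpose"
} catch {
    print("Decoding failed: \(error)")
}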
@@ -1,6 +1,6 @@
import Foundation

/// AI agent tool used to handle longer text.
/// AI agent processor used to handle longer text.
public class AiAgentLongTextTool: AiAgentBasicTextTool {
private enum CodingKeys: String, CodingKey {
case embeddings
@@ -1,6 +1,6 @@
import Foundation

/// AI agent tool used to handle longer text.
/// AI agent processor used to handle longer text.
public class AiAgentLongTextToolTextGen: AiAgentBasicTextToolTextGen {
private enum CodingKeys: String, CodingKey {
case embeddings
9 changes: 8 additions & 1 deletion Sources/Schemas/AiResponse/AiResponse.swift
@@ -6,6 +6,7 @@ public class AiResponse: Codable {
case answer
case createdAt = "created_at"
case completionReason = "completion_reason"
case aiAgentInfo = "ai_agent_info"
}

/// The answer provided by the LLM.
@@ -17,30 +18,36 @@
/// The reason the response finishes.
public let completionReason: String?

/// Information about the models and processors used to handle the request.
public let aiAgentInfo: AiAgentInfo?

/// Initializer for a AiResponse.
///
/// - Parameters:
/// - answer: The answer provided by the LLM.
/// - createdAt: The ISO date formatted timestamp of when the answer to the prompt was created.
/// - completionReason: The reason the response finishes.
public init(answer: String, createdAt: Date, completionReason: String? = nil) {
/// - aiAgentInfo: Information about the models and processors used to handle the request.
public init(answer: String, createdAt: Date, completionReason: String? = nil, aiAgentInfo: AiAgentInfo? = nil) {
self.answer = answer
self.createdAt = createdAt
self.completionReason = completionReason
self.aiAgentInfo = aiAgentInfo
}

required public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
answer = try container.decode(String.self, forKey: .answer)
createdAt = try Utils.Dates.dateTimeFromString(dateTime: try container.decode(String.self, forKey: .createdAt))
completionReason = try container.decodeIfPresent(String.self, forKey: .completionReason)
aiAgentInfo = try container.decodeIfPresent(AiAgentInfo.self, forKey: .aiAgentInfo)
}

public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(answer, forKey: .answer)
try container.encode(Utils.Dates.dateTimeToString(dateTime: createdAt), forKey: .createdAt)
try container.encodeIfPresent(completionReason, forKey: .completionReason)
try container.encodeIfPresent(aiAgentInfo, forKey: .aiAgentInfo)
}

}
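With the new field in place, callers can inspect which models and processor handled a request. A small sketch, assuming `response` came back from one of the AI manager calls and the module name BoxSdkGen:

import BoxSdkGen

// Report the agent details attached to any AiResponse (or AiResponseFull).
func logAgentInfo(_ response: AiResponse) {
    guard let info = response.aiAgentInfo else {
        print("No agent info returned")
        return
    }
    print("Processor: \(info.processor ?? "unknown")")
    for model in info.models ?? [] {
        print("Model: \(model.name ?? "?") (provider: \(model.provider ?? "?"))")
    }
}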
5 changes: 3 additions & 2 deletions Sources/Schemas/AiResponseFull/AiResponseFull.swift
@@ -15,11 +15,12 @@ public class AiResponseFull: AiResponse {
/// - answer: The answer provided by the LLM.
/// - createdAt: The ISO date formatted timestamp of when the answer to the prompt was created.
/// - completionReason: The reason the response finishes.
/// - aiAgentInfo: Information about the models and processors used to handle the request.
/// - citations: The citations of the LLM's answer reference.
public init(answer: String, createdAt: Date, completionReason: String? = nil, citations: [AiCitation]? = nil) {
public init(answer: String, createdAt: Date, completionReason: String? = nil, aiAgentInfo: AiAgentInfo? = nil, citations: [AiCitation]? = nil) {
self.citations = citations

super.init(answer: answer, createdAt: createdAt, completionReason: completionReason)
super.init(answer: answer, createdAt: createdAt, completionReason: completionReason, aiAgentInfo: aiAgentInfo)
}

required public init(from decoder: Decoder) throws {
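The updated subclass initializer forwards the new parameter to super.init; a construction sketch with illustrative values (module name BoxSdkGen assumed):

import Foundation
import BoxSdkGen

// Illustrative values only.
let full = AiResponseFull(
    answer: "The invoice total is $42.00.",
    createdAt: Date(),
    completionReason: "done",
    aiAgentInfo: AiAgentInfo(processor: "basic_text"),
    citations: nil
)
print(full.aiAgentInfo?.processor ?? "none")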
6 changes: 4 additions & 2 deletions docs/Ai.md
@@ -103,7 +103,8 @@ The response depends on the agent configuration requested in this endpoint.
## Extract metadata (freeform)

Sends an AI request to supported Large Language Models (LLMs) and extracts metadata in the form of key-value pairs.
Freeform metadata extraction does not require any metadata template setup before sending the request.
In this request, both the prompt and the output can be freeform.
Metadata template setup before sending the request is not required.

This operation is performed by calling function `createAiExtract`.

@@ -133,7 +134,8 @@ A response including the answer from the LLM.
## Extract metadata (structured)

Sends an AI request to supported Large Language Models (LLMs) and returns extracted metadata as a set of key-value pairs.
For this request, you need to use an already defined metadata template or define a schema yourself.
For this request, you either need a metadata template or a list of fields you want to extract.
Provide **either** a metadata template or a list of fields to define the structure of the extracted metadata.
To learn more about creating templates, see [Creating metadata templates in the Admin Console](https://support.box.com/hc/en-us/articles/360044194033-Customizing-Metadata-Templates)
or use the [metadata template API](g://metadata/templates/create).

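Complementing the template-based call sketched earlier under AiManager.swift, the same operation can also be driven by a list of fields instead of a template. A sketch, assuming an authenticated BoxClient named `client` and the generated types AiExtractStructured, AiExtractStructuredFieldsField, and AiItemBase; these names are assumptions, not confirmed signatures.

import BoxSdkGen

// Sketch only: type and parameter names are assumed from the generated SDK.
func extractWithFields(client: BoxClient) async throws {
    let response = try await client.ai.createAiExtractStructured(
        requestBody: AiExtractStructured(
            items: [AiItemBase(id: "12345", type: AiItemBaseTypeField.file)],
            fields: [
                AiExtractStructuredFieldsField(
                    key: "vendor",
                    description: "The vendor name on the invoice",
                    type: "string"
                )
            ]
        )
    )
    print(response)
}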
