diff --git a/README.md b/README.md
index 34c5912..fd189da 100644
--- a/README.md
+++ b/README.md
@@ -106,113 +106,13 @@ let result = try await Anthropic(apiKey: "your_claude_api_key")
 )
 ```
 
-## Amazon Web Services Bedrock
+## Extensions
 
-This library provides support for the [Anthropic Bedrock API](https://aws.amazon.com/bedrock/claude/) through a separate package.
+By adding an extension Swift package, you can access the Anthropic Claude API through AWS Bedrock or Google Vertex AI. The following services are supported:
 
-```swift
-let package = Package(
-    name: "MyPackage",
-    products: [...],
-    targets: [
-        .target(
-            "YouAppModule",
-            dependencies: [
-                .product(name: "AnthropicSwiftSDK-Bedrock", package: "AnthropicSwiftSDK")
-            ]
-        )
-    ],
-    dependencies: [
-        .package(url: "https://github.com/fumito-ito/AnthropicSwiftSDK.git", .upToNextMajor(from: "0.5.0"))
-    ]
-)
-```
-
-To create an `AnthropicBedrockClient` from a `BedrockRuntimeClient` with a `Model` to access Claude on Bedrock.
-The API usage is the same as the normal AnthropicClient.
-
-```swift
-let client = try BedrockRuntimeClient(region: "us-west-2")
-let anthropic = client.useAnthropic()
-
-let response = try await anthropic.messages.createMessage(Message(role: .user, content: [.text("This is test text")]), maxTokens: 1024)
-for content in response.content {
-    switch content {
-    case .text(let text):
-        print(text)
-    case .image(let imageContent):
-        // handle base64 encoded image content
-    }
-}
-```
-
-Of course, `Streaming Message API` works in the same way.
-
-```swift
-let client = try BedrockRuntimeClient(region: "us-west-2")
-let anthropic = client.useAnthropic()
-
-let stream = try await anthropic.messages.streamMessage([Message(role: .user, content: [.text("This is test text")])], maxTokens: 1024)
-for try await chunk in stream {
-    switch chunk.type {
-    case .messageStart:
-        // handle message start object with casting chunk into `StreamingMessageStartResponse`
-    }
-}
-```
-
-## Google Vertex AI
-
-This library provides support for the [Anthropic Vertex AI](https://cloud.google.com/blog/products/ai-machine-learning/announcing-anthropics-claude-3-models-in-google-cloud-vertex-ai?hl=en) through a separate package.
-
-```swift
-let package = Package(
-    name: "MyPackage",
-    products: [...],
-    targets: [
-        .target(
-            "YouAppModule",
-            dependencies: [
-                .product(name: "AnthropicSwiftSDK-VertexAI", package: "AnthropicSwiftSDK")
-            ]
-        )
-    ],
-    dependencies: [
-        .package(url: "https://github.com/fumito-ito/AnthropicSwiftSDK.git", .upToNextMajor(from: "0.5.0"))
-    ]
-)
-```
-
-To create an `AnthropicVertexAIClient` with a `Model` to access Claude on VertexAI.
-The API usage is the same as the normal AnthropicClient.
-
-```swift
-let anthropic = AnthropicVertexAIClient(projectId: "your-project-id", accessToken: "access-token-for-vertexai", region = .usCentral1)
-
-let response = try await anthropic.messages.createMessage(Message(role: .user, content: [.text("This is test text")]), maxTokens: 1024)
-for content in response.content {
-    switch content {
-    case .text(let text):
-        print(text)
-    case .image(let imageContent):
-        // handle base64 encoded image content
-    }
-}
-```
-
-Of course, `Streaming Message API` works in the same way.
-
-```swift
-let anthropic = AnthropicVertexAIClient(projectId: "your-project-id", accessToken: "access-token-for-vertexai", region = .usCentral1)
-
-let stream = try await anthropic.messages.streamMessage([Message(role: .user, content: [.text("This is test text")])], maxTokens: 1024)
-for try await chunk in stream {
-    switch chunk.type {
-    case .messageStart:
-        // handle message start object with casting chunk into `StreamingMessageStartResponse`
-    }
-}
-```
+- [Amazon Web Services Bedrock](https://github.com/fumito-ito/AnthropicSwiftSDK-Bedrock)
+- Google Vertex AI
+  - T.B.D.
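+
+As a sketch of the Bedrock extension's usage (mirroring the example previously documented in this README), a `BedrockRuntimeClient` is wrapped with `useAnthropic()` and then used through the same Messages API as the core client; see the extension repository for the authoritative setup and API:
+
+```swift
+// Minimal sketch: wrap an AWS Bedrock runtime client, then call the
+// Messages API exactly as with the core AnthropicClient.
+let client = try BedrockRuntimeClient(region: "us-west-2")
+let anthropic = client.useAnthropic()
+
+let response = try await anthropic.messages.createMessage(
+    Message(role: .user, content: [.text("This is test text")]),
+    maxTokens: 1024
+)
+
+// Text blocks can be read from the response content as usual.
+for case .text(let text) in response.content {
+    print(text)
+}
+```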
 
 ## Contributing