From e4e2dacc3aa9e2e542608d9780977e8873628d93 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Tue, 19 Nov 2024 18:29:25 +0000
Subject: [PATCH] feat: Updated OpenAPI spec

---
 .../Jina.ClassificationClient.Classify.g.cs   |  1 +
 .../Jina.ClassificationClient.Train.g.cs      |  1 +
 .../Jina.IClassificationClient.Classify.g.cs  |  1 +
 .../Jina.IClassificationClient.Train.g.cs     |  1 +
 .../Jina.Models.ClassificationAPIInput.g.cs   |  2 ++
 .../Jina.Models.ImageEmbeddingInput.g.cs      | 15 +++++++++++++-
 .../Jina.Models.MixedEmbeddingInput.g.cs      | 15 +++++++++++++-
 .../Jina.Models.TextEmbeddingInput.g.cs       |  2 ++
 .../Jina.Models.TrainingAPIInput.g.cs         |  2 ++
 .../Jina/Generated/JsonSerializerContext.g.cs |  1 +
 src/libs/Jina/openapi.yaml                    | 20 +++++++++++++------
 11 files changed, 53 insertions(+), 8 deletions(-)

diff --git a/src/libs/Jina/Generated/Jina.ClassificationClient.Classify.g.cs b/src/libs/Jina/Generated/Jina.ClassificationClient.Classify.g.cs
index ad6d1bf..e4bb48c 100644
--- a/src/libs/Jina/Generated/Jina.ClassificationClient.Classify.g.cs
+++ b/src/libs/Jina/Generated/Jina.ClassificationClient.Classify.g.cs
@@ -190,6 +190,7 @@ partial void ProcessClassifyResponseContent(
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
diff --git a/src/libs/Jina/Generated/Jina.ClassificationClient.Train.g.cs b/src/libs/Jina/Generated/Jina.ClassificationClient.Train.g.cs
index aa85ca4..3bc2d85 100644
--- a/src/libs/Jina/Generated/Jina.ClassificationClient.Train.g.cs
+++ b/src/libs/Jina/Generated/Jina.ClassificationClient.Train.g.cs
@@ -190,6 +190,7 @@ partial void ProcessTrainResponseContent(
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
diff --git a/src/libs/Jina/Generated/Jina.IClassificationClient.Classify.g.cs b/src/libs/Jina/Generated/Jina.IClassificationClient.Classify.g.cs
index f588f51..49bec81 100644
--- a/src/libs/Jina/Generated/Jina.IClassificationClient.Classify.g.cs
+++ b/src/libs/Jina/Generated/Jina.IClassificationClient.Classify.g.cs
@@ -23,6 +23,7 @@ public partial interface IClassificationClient
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
diff --git a/src/libs/Jina/Generated/Jina.IClassificationClient.Train.g.cs b/src/libs/Jina/Generated/Jina.IClassificationClient.Train.g.cs
index e5e22fc..4612e4e 100644
--- a/src/libs/Jina/Generated/Jina.IClassificationClient.Train.g.cs
+++ b/src/libs/Jina/Generated/Jina.IClassificationClient.Train.g.cs
@@ -23,6 +23,7 @@ public partial interface IClassificationClient
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
diff --git a/src/libs/Jina/Generated/Jina.Models.ClassificationAPIInput.g.cs b/src/libs/Jina/Generated/Jina.Models.ClassificationAPIInput.g.cs
index 5aa5ff1..d133933 100644
--- a/src/libs/Jina/Generated/Jina.Models.ClassificationAPIInput.g.cs
+++ b/src/libs/Jina/Generated/Jina.Models.ClassificationAPIInput.g.cs
@@ -14,6 +14,7 @@ public sealed partial class ClassificationAPIInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
@@ -59,6 +60,7 @@ public sealed partial class ClassificationAPIInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
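The hunks above all repeat one model table: each identifier with its parameter count and native embedding dimension, with `jina-clip-v2` (865M, 1024) newly added. As a small self-contained sketch (not generated code; the class name is illustrative), that table can be captured for client-side checks. The zh, code, and v3 entries come from the openapi.yaml descriptions later in this patch:

```csharp
using System.Collections.Generic;

// Illustrative sketch only (not generated code): the model table from the
// doc comments and schema descriptions in this patch, keyed by model id.
public static class JinaModelCatalog
{
    // model id -> (parameter count, native embedding dimension)
    public static readonly IReadOnlyDictionary<string, (string Params, int Dimension)> Models =
        new Dictionary<string, (string Params, int Dimension)>
        {
            ["jina-clip-v1"] = ("223M", 768),
            ["jina-clip-v2"] = ("865M", 1024), // added by this patch
            ["jina-embeddings-v2-base-en"] = ("137M", 768),
            ["jina-embeddings-v2-base-es"] = ("161M", 768),
            ["jina-embeddings-v2-base-de"] = ("161M", 768),
            ["jina-embeddings-v2-base-zh"] = ("161M", 768),
            ["jina-embeddings-v2-base-code"] = ("137M", 768),
            ["jina-embeddings-v3"] = ("570M", 1024),
        };
}
```

Keeping the native dimension next to the model id is useful when validating a requested `dimensions` value against the model's output size (see the truncation sketch after the embedding-input diffs below).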
diff --git a/src/libs/Jina/Generated/Jina.Models.ImageEmbeddingInput.g.cs b/src/libs/Jina/Generated/Jina.Models.ImageEmbeddingInput.g.cs
index 300cfe1..92cb9a2 100644
--- a/src/libs/Jina/Generated/Jina.Models.ImageEmbeddingInput.g.cs
+++ b/src/libs/Jina/Generated/Jina.Models.ImageEmbeddingInput.g.cs
@@ -14,6 +14,7 @@ public sealed partial class ImageEmbeddingInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// For more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).
         ///
         [global::System.Text.Json.Serialization.JsonPropertyName("model")]
@@ -41,6 +42,12 @@ public sealed partial class ImageEmbeddingInput
         [global::System.Text.Json.Serialization.JsonPropertyName("normalized")]
         public bool? Normalized { get; set; }

+        ///
+        /// Used to specify output embedding size. If set, output embeddings will be truncated to the size specified.
+        ///
+        [global::System.Text.Json.Serialization.JsonPropertyName("dimensions")]
+        public int? Dimensions { get; set; }
+
         ///
         /// Additional properties that are not explicitly defined in the schema
         ///
@@ -54,6 +61,7 @@ public sealed partial class ImageEmbeddingInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// For more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).
         ///
         ///
@@ -65,17 +73,22 @@ public sealed partial class ImageEmbeddingInput
         ///
         /// Flag to determine if the embeddings should be normalized to have a unit L2 norm
         ///
+        ///
+        /// Used to specify output embedding size. If set, output embeddings will be truncated to the size specified.
+        ///
         [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
         public ImageEmbeddingInput(
             string model,
             global::Jina.AnyOf> input,
             global::Jina.AnyOf>? embeddingType,
-            bool? normalized)
+            bool? normalized,
+            int? dimensions)
         {
             this.Model = model ?? throw new global::System.ArgumentNullException(nameof(model));
             this.Input = input;
             this.EmbeddingType = embeddingType;
             this.Normalized = normalized;
+            this.Dimensions = dimensions;
         }

         ///
diff --git a/src/libs/Jina/Generated/Jina.Models.MixedEmbeddingInput.g.cs b/src/libs/Jina/Generated/Jina.Models.MixedEmbeddingInput.g.cs
index d6ca7d2..aabd25d 100644
--- a/src/libs/Jina/Generated/Jina.Models.MixedEmbeddingInput.g.cs
+++ b/src/libs/Jina/Generated/Jina.Models.MixedEmbeddingInput.g.cs
@@ -14,6 +14,7 @@ public sealed partial class MixedEmbeddingInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// For more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).<br/>
         ///
         [global::System.Text.Json.Serialization.JsonPropertyName("model")]
@@ -40,6 +41,12 @@ public sealed partial class MixedEmbeddingInput
         [global::System.Text.Json.Serialization.JsonPropertyName("normalized")]
         public bool? Normalized { get; set; }

+        ///
+        /// Used to specify output embedding size. If set, output embeddings will be truncated to the size specified.
+        ///
+        [global::System.Text.Json.Serialization.JsonPropertyName("dimensions")]
+        public int? Dimensions { get; set; }
+
         ///
         /// Additional properties that are not explicitly defined in the schema
         ///
@@ -53,6 +60,7 @@ public sealed partial class MixedEmbeddingInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// For more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).
         ///
         ///
@@ -64,17 +72,22 @@ public sealed partial class MixedEmbeddingInput
         ///
         /// Flag to determine if the embeddings should be normalized to have a unit L2 norm
         ///
+        ///
+        /// Used to specify output embedding size. If set, output embeddings will be truncated to the size specified.
+        ///
         [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
         public MixedEmbeddingInput(
             string model,
             global::System.Collections.Generic.IList> input,
             global::Jina.AnyOf>? embeddingType,
-            bool? normalized)
+            bool? normalized,
+            int? dimensions)
         {
             this.Model = model ?? throw new global::System.ArgumentNullException(nameof(model));
             this.Input = input ?? throw new global::System.ArgumentNullException(nameof(input));
             this.EmbeddingType = embeddingType;
             this.Normalized = normalized;
+            this.Dimensions = dimensions;
         }

         ///
diff --git a/src/libs/Jina/Generated/Jina.Models.TextEmbeddingInput.g.cs b/src/libs/Jina/Generated/Jina.Models.TextEmbeddingInput.g.cs
index 1bb7f98..15ff128 100644
--- a/src/libs/Jina/Generated/Jina.Models.TextEmbeddingInput.g.cs
+++ b/src/libs/Jina/Generated/Jina.Models.TextEmbeddingInput.g.cs
@@ -14,6 +14,7 @@ public sealed partial class TextEmbeddingInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
@@ -85,6 +86,7 @@ public sealed partial class TextEmbeddingInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
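The `ImageEmbeddingInput` and `MixedEmbeddingInput` diffs above add a `dimensions` property and constructor parameter, documented as truncating the output embeddings to the requested size. The service applies this server-side; purely as an illustration of that documented behaviour (not the service implementation), the effect is:

```csharp
using System;

// Illustration of the documented `dimensions` semantic ("output embeddings will
// be truncated to the size specified"). The Jina API applies this server-side
// when `dimensions` is set; this helper only mirrors the idea locally.
public static class EmbeddingTruncation
{
    public static float[] Truncate(float[] embedding, int dimensions)
    {
        if (dimensions <= 0 || dimensions >= embedding.Length)
            return embedding; // nothing to cut

        var truncated = new float[dimensions];
        Array.Copy(embedding, truncated, dimensions);
        return truncated;
    }
}
```

Requesting `dimensions: 512` from `jina-clip-v2`, whose native dimension is 1024, therefore yields 512-component vectors.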
diff --git a/src/libs/Jina/Generated/Jina.Models.TrainingAPIInput.g.cs b/src/libs/Jina/Generated/Jina.Models.TrainingAPIInput.g.cs
index 555fe72..e304902 100644
--- a/src/libs/Jina/Generated/Jina.Models.TrainingAPIInput.g.cs
+++ b/src/libs/Jina/Generated/Jina.Models.TrainingAPIInput.g.cs
@@ -14,6 +14,7 @@ public sealed partial class TrainingAPIInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
@@ -68,6 +69,7 @@ public sealed partial class TrainingAPIInput
         /// The identifier of the model.<br/>
         /// Available models and corresponding param size and dimension:<br/>
         /// - `jina-clip-v1`, 223M, 768<br/>
+        /// - `jina-clip-v2`, 865M, 1024<br/>
         /// - `jina-embeddings-v2-base-en`, 137M, 768<br/>
         /// - `jina-embeddings-v2-base-es`, 161M, 768<br/>
         /// - `jina-embeddings-v2-base-de`, 161M, 768<br/>
diff --git a/src/libs/Jina/Generated/JsonSerializerContext.g.cs b/src/libs/Jina/Generated/JsonSerializerContext.g.cs
index f56fe56..2946341 100644
--- a/src/libs/Jina/Generated/JsonSerializerContext.g.cs
+++ b/src/libs/Jina/Generated/JsonSerializerContext.g.cs
@@ -56,6 +56,7 @@ namespace Jina
             typeof(global::Jina.JsonConverters.AnyOfJsonConverter>),
             typeof(global::Jina.JsonConverters.AnyOfJsonConverter>, global::Jina.TextExampleDoc, global::Jina.ImageExampleDoc>),
             typeof(global::Jina.JsonConverters.AnyOfJsonConverter),
+            typeof(global::Jina.JsonConverters.UnixTimestampJsonConverter),
         })]
     [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::Jina.JsonSerializerContextTypes))]
diff --git a/src/libs/Jina/openapi.yaml b/src/libs/Jina/openapi.yaml
index ffb98c1..9e720da 100644
--- a/src/libs/Jina/openapi.yaml
+++ b/src/libs/Jina/openapi.yaml
@@ -2,7 +2,7 @@ openapi: 3.0.1
 info:
   title: The Jina Embedding Serving API
   description: This is the UniversalAPI to access all the Jina embedding models
-  version: 0.1.89
+  version: 0.1.92
 servers:
   - url: https://api.jina.ai/
 paths:
@@ -411,7 +411,7 @@ components:
      model:
        title: Model
        type: string
-       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-embeddings-v2-base-en`,\t137M,\t768\n- `jina-embeddings-v2-base-es`,\t161M,\t768\n- `jina-embeddings-v2-base-de`,\t161M,\t768\n- `jina-embeddings-v2-base-zh`,\t161M,\t768\n- `jina-embeddings-v2-base-code`,\t137M,\t768\n- `jina-embeddings-v3`,\t570M,\t1024\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2307.11224).\n"
+       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-clip-v2`,\t865M,\t1024\n- `jina-embeddings-v2-base-en`,\t137M,\t768\n- `jina-embeddings-v2-base-es`,\t161M,\t768\n- `jina-embeddings-v2-base-de`,\t161M,\t768\n- `jina-embeddings-v2-base-zh`,\t161M,\t768\n- `jina-embeddings-v2-base-code`,\t137M,\t768\n- `jina-embeddings-v3`,\t570M,\t1024\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2307.11224).\n"
      classifier_id:
        title: Classifier Id
        type: string
@@ -583,7 +583,7 @@ components:
      model:
        title: Model
        type: string
-       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).\n"
+       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-clip-v2`,\t865M,\t1024\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).\n"
      input:
        title: Input
        anyOf:
@@ -614,6 +614,10 @@ components:
        title: Normalized
        type: boolean
        description: Flag to determine if the embeddings should be normalized to have a unit L2 norm
+     dimensions:
+       title: Dimensions
+       type: integer
+       description: 'Used to specify output embedding size. If set, output embeddings will be truncated to the size specified.'
     additionalProperties: false
     description: The input to the API for text embedding. <br/> OpenAI compatible example:
@@ -668,7 +672,7 @@ components:
      model:
        title: Model
        type: string
-       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).\n"
+       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-clip-v2`,\t865M,\t1024\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2405.20204).\n"
      input:
        title: Input
        type: array
@@ -700,6 +704,10 @@ components:
        title: Normalized
        type: boolean
        description: Flag to determine if the embeddings should be normalized to have a unit L2 norm
+     dimensions:
+       title: Dimensions
+       type: integer
+       description: 'Used to specify output embedding size. If set, output embeddings will be truncated to the size specified.'
     additionalProperties: false
     description: The input to the API for text embedding. OpenAI compatible example:
@@ -927,7 +935,7 @@ components:
      model:
        title: Model
        type: string
-       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-embeddings-v2-base-en`,\t137M,\t768\n- `jina-embeddings-v2-base-es`,\t161M,\t768\n- `jina-embeddings-v2-base-de`,\t161M,\t768\n- `jina-embeddings-v2-base-zh`,\t161M,\t768\n- `jina-embeddings-v2-base-code`,\t137M,\t768\n- `jina-embeddings-v3`,\t570M,\t1024\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2307.11224).\n"
+       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-clip-v2`,\t865M,\t1024\n- `jina-embeddings-v2-base-en`,\t137M,\t768\n- `jina-embeddings-v2-base-es`,\t161M,\t768\n- `jina-embeddings-v2-base-de`,\t161M,\t768\n- `jina-embeddings-v2-base-zh`,\t161M,\t768\n- `jina-embeddings-v2-base-code`,\t137M,\t768\n- `jina-embeddings-v3`,\t570M,\t1024\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2307.11224).\n"
      input:
        title: Input
        anyOf:
@@ -1017,7 +1025,7 @@ components:
      model:
        title: Model
        type: string
-       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-embeddings-v2-base-en`,\t137M,\t768\n- `jina-embeddings-v2-base-es`,\t161M,\t768\n- `jina-embeddings-v2-base-de`,\t161M,\t768\n- `jina-embeddings-v2-base-zh`,\t161M,\t768\n- `jina-embeddings-v2-base-code`,\t137M,\t768\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2307.11224).\n\nYou can provide only either `model` or `classifier_id`"
+       description: "The identifier of the model.\n\nAvailable models and corresponding param size and dimension:\n- `jina-clip-v1`,\t223M,\t768\n- `jina-clip-v2`,\t865M,\t1024\n- `jina-embeddings-v2-base-en`,\t137M,\t768\n- `jina-embeddings-v2-base-es`,\t161M,\t768\n- `jina-embeddings-v2-base-de`,\t161M,\t768\n- `jina-embeddings-v2-base-zh`,\t161M,\t768\n- `jina-embeddings-v2-base-code`,\t137M,\t768\n\nFor more information, please checkout our [technical blog](https://arxiv.org/abs/2307.11224).\n\nYou can provide only either `model` or `classifier_id`"
      classifier_id:
        title: Classifier Id
        type: string
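The updated `openapi.yaml` above describes the image-embedding request body (`model`, `input`, `embedding_type`, `normalized`, and the new `dimensions`) and lists `https://api.jina.ai/` as the server. A minimal raw-HTTP sketch of such a request follows; the `/v1/embeddings` path, the exact shape of the `input` items, and the `JINA_API_KEY` variable are assumptions not confirmed by this patch, so treat it as illustrative rather than as the generated client's API:

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;

// Illustrative raw request against the schema in the updated openapi.yaml.
// Assumptions not confirmed by this patch: the endpoint path, the exact shape
// of the `input` items, and reading the API key from JINA_API_KEY.
public static class EmbeddingRequestSketch
{
    public static async Task<string> PostImageEmbeddingAsync()
    {
        using var http = new HttpClient { BaseAddress = new Uri("https://api.jina.ai/") };
        http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(
            "Bearer", Environment.GetEnvironmentVariable("JINA_API_KEY"));

        // Field names come from the ImageEmbeddingInput schema: model, input,
        // normalized, and the `dimensions` property added by this patch.
        const string body = @"{
          ""model"": ""jina-clip-v2"",
          ""input"": [""https://example.com/sample.jpg""],
          ""normalized"": true,
          ""dimensions"": 512
        }";

        using var content = new StringContent(body, Encoding.UTF8, "application/json");
        using var response = await http.PostAsync("v1/embeddings", content); // path is an assumption
        return await response.Content.ReadAsStringAsync();
    }
}
```

The generated `ImageEmbeddingInput` and `MixedEmbeddingInput` classes carry the same field names through their `JsonPropertyName` attributes, so a typed request should serialize to an equivalent body.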