+ * interface Assistant {
+ *
+ * {@code @SystemMessage}("You are a helpful assistant")
+ * String chat(String userMessage);
+ * }
+ *
+ * The system message can contain template variables,
+ * which will be resolved with values from method parameters annotated with @{@link V}.
+ *
+ * interface Assistant {
+ *
+ * {@code @SystemMessage}("You are a {{characteristic}} assistant")
+ * String chat(@UserMessage String userMessage, @V("characteristic") String characteristic);
+ * }
+ *
+ * @see UserMessage
+ */
+@Documented
+@Retention(RUNTIME)
+@Target({ TYPE, METHOD })
+public @interface SystemMessage {
+
+ /**
+ * Prompt template can be defined in one line or multiple lines.
+ * If the template is defined in multiple lines, the lines will be joined with a delimiter defined below.
+ */
+ String[] value() default "";
+
+ String delimiter() default "\n";
+
+ /**
+ * The resource from which to read the prompt template.
+ * If no resource is specified, the prompt template is taken from {@link #value()}.
+ * If the resource is not found, an {@link IllegalConfigurationException} is thrown.
+ *
+ * The resource will be read by calling {@link Class#getResourceAsStream(String)}
+ * on the AI Service class (interface).
+ */
+ String fromResource() default "";
+}
diff --git a/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/TokenStream.java b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/TokenStream.java
new file mode 100644
index 0000000..76e9713
--- /dev/null
+++ b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/TokenStream.java
@@ -0,0 +1,59 @@
+package org.eclipse.microprofile.ai.llm;
+
+import java.util.function.Consumer;
+
+/**
+ * Represents a token stream from language model to which you can subscribe and receive updates
+ * when a new token is available, when language model finishes streaming, or when an error occurs during streaming.
+ * It is intended to be used as a return type in AI Service.
+ */
+public interface TokenStream {
+
+ /**
+ * The provided consumer will be invoked when/if contents have been retrieved using {@link RetrievalAugmentor}.
+ *
+ * The invocation happens before any call is made to the language model.
+ *
+ * @param contentHandler lambda that consumes all retrieved contents
+ * @return token stream instance used to configure or start stream processing
+ */
+ // TokenStream onRetrieved(Consumer<List<Content>> contentHandler);
+
+ /**
+ * The provided consumer will be invoked every time a new token from a language model is available.
+ *
+ * @param tokenHandler lambda that consumes tokens of the response
+ * @return token stream instance used to configure or start stream processing
+ */
+ TokenStream onNext(Consumer<String> tokenHandler);
+
+ /**
+ * Will send a request to LLM and start response streaming.
+ */
+ void start();
+}
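As a usage illustration (not part of this diff), an AI Service method could declare TokenStream as its return type and the caller subscribes before starting the stream; the interface and prompt below are hypothetical and rely only on the onNext/start methods shown above:

// Hypothetical streaming AI Service (sketch only).
interface StreamingAssistant {

    @SystemMessage("You are a helpful assistant")
    TokenStream chat(@UserMessage String userMessage);
}

// Caller side: register a token consumer, then start the streaming request.
void printAnswer(StreamingAssistant assistant) {
    assistant.chat("Tell me a joke")
            .onNext(token -> System.out.print(token)) // invoked for every new token
            .start();                                 // sends the request to the LLM
}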
diff --git a/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/TokenStreamAdapter.java b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/TokenStreamAdapter.java
new file mode 100644
index 0000000..fd7a70b
--- /dev/null
+++ b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/TokenStreamAdapter.java
@@ -0,0 +1,14 @@
+package org.eclipse.microprofile.ai.llm;
+
+import java.lang.reflect.Type;
+
+/**
+ * Adapts a {@link TokenStream} to another return type supported by an AI Service method.
+ *
+ * @author Buhake Sindi
+ * @since 11 October 2024
+ */
+public interface TokenStreamAdapter {
+
+ boolean canAdaptTokenStreamTo(Type type);
+
+ Object adapt(TokenStream tokenStream);
+}
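For illustration, a sketch of an adapter (assumed to live alongside the API types) that exposes a TokenStream as a java.util.concurrent.Flow.Publisher of String tokens. It uses only the onNext/start methods shown above; back-pressure and completion signals are ignored, so this is illustrative rather than a fully compliant Publisher:

import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.concurrent.Flow;

public class FlowPublisherTokenStreamAdapter implements TokenStreamAdapter {

    @Override
    public boolean canAdaptTokenStreamTo(Type type) {
        // Accept methods declared to return Flow.Publisher<String>.
        if (type instanceof ParameterizedType) {
            ParameterizedType parameterized = (ParameterizedType) type;
            return Flow.Publisher.class.equals(parameterized.getRawType())
                    && String.class.equals(parameterized.getActualTypeArguments()[0]);
        }
        return false;
    }

    @Override
    public Object adapt(TokenStream tokenStream) {
        // Forward each token to the subscriber and start streaming on subscription.
        return (Flow.Publisher<String>) subscriber -> tokenStream.onNext(subscriber::onNext).start();
    }
}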
diff --git a/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/Tool.java b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/Tool.java
new file mode 100644
index 0000000..8655b33
--- /dev/null
+++ b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/Tool.java
@@ -0,0 +1,36 @@
+package org.eclipse.microprofile.ai.llm;
+
+import static java.lang.annotation.ElementType.METHOD;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * Java methods annotated with {@code @Tool} are considered tools/functions that the language model can execute/call.
+ * The tool/function calling capability of the LLM (see, e.g., the OpenAI
+ * function calling documentation)
+ * is used under the hood.
+ * If the LLM decides to call the tool, the arguments are automatically parsed and injected as method arguments.
+ */
+@Documented
+@Retention(RUNTIME)
+@Target(METHOD)
+public @interface Tool {
+
+ /**
+ * Name of the tool. If not provided, the method name will be used.
+ *
+ * @return name of the tool.
+ */
+ String name() default "";
+
+ /**
+ * Description of the tool.
+ * It should be clear and descriptive to allow the language model to understand the tool's purpose and its intended use.
+ *
+ * @return description of the tool.
+ */
+ String[] value() default "";
+}
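An illustrative set of tool methods; the class, method and parameter names below are hypothetical examples, not part of this change:

public class CalculatorTools {

    @Tool("Adds two integers")
    public int add(int a, int b) {
        return a + b;
    }

    @Tool(name = "currentTemperature", value = "Returns the current temperature, in Celsius, for the given city")
    public double currentTemperature(String city) {
        // A real implementation would call a weather service here.
        return 21.5;
    }
}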
diff --git a/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/ToolMemoryId.java b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/ToolMemoryId.java
new file mode 100644
index 0000000..af46700
--- /dev/null
+++ b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/ToolMemoryId.java
@@ -0,0 +1,19 @@
+package org.eclipse.microprofile.ai.llm;
+
+import static java.lang.annotation.ElementType.PARAMETER;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * If a {@link Tool} method parameter is annotated with this annotation,
+ * the memory id (the value of the parameter annotated with {@code @MemoryId} in the AI Service) will be injected automatically.
+ */
+@Documented
+@Retention(RUNTIME)
+@Target(PARAMETER)
+public @interface ToolMemoryId {
+
+}
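A hypothetical tool showing how the memory id of the current conversation could be injected (the class, method and the String memory id type are assumptions for this sketch):

public class OrderTools {

    @Tool("Returns the status of the caller's most recent order")
    public String lastOrderStatus(@ToolMemoryId String memoryId) {
        // memoryId receives the value of the @MemoryId parameter of the AI Service invocation.
        return "No open orders found for conversation " + memoryId;
    }
}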
diff --git a/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/UserMessage.java b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/UserMessage.java
new file mode 100644
index 0000000..172d081
--- /dev/null
+++ b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/UserMessage.java
@@ -0,0 +1,63 @@
+package org.eclipse.microprofile.ai.llm;
+
+import static java.lang.annotation.ElementType.METHOD;
+import static java.lang.annotation.ElementType.PARAMETER;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * Specifies either a complete user message or a user message template to be used each time an AI service is invoked.
+ * The user message can contain template variables,
+ * which will be resolved with values from method parameters annotated with @{@link V}.
+ *
+ * An example:
+ *
+ * interface Assistant {
+ *
+ * {@code @UserMessage}("Say hello to {{name}}")
+ * String greet(@V("name") String name);
+ * }
+ *
+ * {@code @UserMessage} can also be used with method parameters:
+ *
+ * interface Assistant {
+ *
+ * {@code @SystemMessage}("You are a {{characteristic}} assistant")
+ * String chat(@UserMessage String userMessage, @V("characteristic") String characteristic);
+ * }
+ *
+ * In this case {@code String userMessage} can contain unresolved template variables (e.g. "{{characteristic}}"),
+ * which will be resolved using the values of method parameters annotated with @{@link V}.
+ *
+ * @see SystemMessage
+ */
+@Documented
+@Retention(RUNTIME)
+@Target({ METHOD, PARAMETER })
+public @interface UserMessage {
+
+ /**
+ * Prompt template can be defined in one line or multiple lines.
+ * If the template is defined in multiple lines, the lines will be joined with a delimiter defined below.
+ */
+ String[] value() default "";
+
+ String delimiter() default "\n";
+
+ /**
+ * The resource from which to read the prompt template.
+ * If no resource is specified, the prompt template is taken from {@link #value()}.
+ * If the resource is not found, an {@link IllegalConfigurationException} is thrown.
+ *
+ * The resource will be read by calling {@link Class#getResourceAsStream(String)}
+ * on the AI Service class (interface).
+ */
+ String fromResource() default "";
+}
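As an additional sketch, a template can also be loaded from a classpath resource; the interface and the resource name greeting-prompt.txt below are hypothetical:

interface Greeter {

    // The template is read via Greeter.class.getResourceAsStream("greeting-prompt.txt").
    @UserMessage(fromResource = "greeting-prompt.txt")
    String greet(@V("name") String name);
}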
diff --git a/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/UserName.java b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/UserName.java
new file mode 100644
index 0000000..4e93838
--- /dev/null
+++ b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/UserName.java
@@ -0,0 +1,18 @@
+package org.eclipse.microprofile.ai.llm;
+
+import static java.lang.annotation.ElementType.PARAMETER;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * The value of a method parameter annotated with @UserName will be injected into the field 'name' of a UserMessage.
+ */
+@Documented
+@Retention(RUNTIME)
+@Target(PARAMETER)
+public @interface UserName {
+
+}
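A minimal sketch (hypothetical interface) showing the annotation in use:

interface Assistant {

    @UserMessage("Say hello to the user")
    String greet(@UserName String userName); // userName becomes the 'name' of the user message
}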
diff --git a/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/V.java b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/V.java
new file mode 100644
index 0000000..4110a4a
--- /dev/null
+++ b/mp-ai-api/src/main/java/org/eclipse/microprofile/ai/llm/V.java
@@ -0,0 +1,42 @@
+package org.eclipse.microprofile.ai.llm;
+
+import static java.lang.annotation.ElementType.PARAMETER;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * When a parameter of a method in an AI Service is annotated with {@code @V},
+ * it becomes a prompt template variable. Its value will be injected into prompt templates defined
+ * via @{@link UserMessage}, @{@link SystemMessage}.
+ *
+ * Example:
+ *
+ * {@code @UserMessage("Hello, my name is {{name}}. I am {{age}} years old.")}
+ * String chat(@V("name") String name, @V("age") int age);
+ *
+ * Example:
+ *
+ * {@code @UserMessage("Hello, my name is {{name}}. I am {{age}} years old.")}
+ * String chat(@V String name, @V int age);
+ *
+ * @see UserMessage
+ * @see SystemMessage
+ */
+@Documented
+@Retention(RUNTIME)
+@Target(PARAMETER)
+public @interface V {
+
+ /**
+ * Name of a variable (placeholder) in a prompt template.
+ */
+ String value();
+}
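A sketch combining the annotations defined in this change; the interface and prompts are hypothetical:

interface TranslatorAssistant {

    @SystemMessage("You are a professional translator into {{language}}")
    @UserMessage("Translate the following text: {{text}}")
    String translate(@V("text") String text, @V("language") String language);
}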
diff --git a/pom.xml b/pom.xml
index 881e44b..1b24b2d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,20 +1,20 @@
+~ Copyright 2017 Red Hat, Inc.
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~ http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->