diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index c7649fbab..850e24a99 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -340,9 +340,9 @@ def get_chat_prompt_template(self) -> PromptTemplate:
             + CHAT_DEFAULT_TEMPLATE,
         )
 
-    def get_inline_completion_prompt_template(self) -> PromptTemplate:
+    def get_completion_prompt_template(self) -> PromptTemplate:
         """
-        Produce a prompt template optimised for code or text completion.
+        Produce a prompt template optimised for inline code or text completion.
         The template should take variables: prefix, suffix, language, filename.
         """
         if self.is_chat_provider:
diff --git a/packages/jupyter-ai/jupyter_ai/completions/handlers/default.py b/packages/jupyter-ai/jupyter_ai/completions/handlers/default.py
index 847e1a63a..552d23791 100644
--- a/packages/jupyter-ai/jupyter_ai/completions/handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/completions/handlers/default.py
@@ -31,7 +31,7 @@ def create_llm_chain(
         model_parameters = self.get_model_parameters(provider, provider_params)
         llm = provider(**provider_params, **model_parameters)
 
-        prompt_template = llm.get_inline_completion_prompt_template()
+        prompt_template = llm.get_completion_prompt_template()
         self.llm = llm
 
         self.llm_chain = prompt_template | llm | StrOutputParser()
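
For context, here is a minimal runnable sketch of how the renamed method's output is consumed in `create_llm_chain()`. Only the variable names (`prefix`, `suffix`, `language`, `filename`) and the `prompt_template | llm | StrOutputParser()` pipeline come from the diff; the template text and the `FakeListLLM` stand-in model are illustrative assumptions, not the provider's actual defaults.

```python
# Illustrative sketch only: mirrors the shape of the chain built in
# create_llm_chain() after the rename. FakeListLLM is a placeholder for a
# real provider; the template wording is an assumption, not jupyter-ai's.
from langchain_community.llms.fake import FakeListLLM
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate

# A completion template taking the four variables named in the docstring:
# prefix, suffix, language, filename.
prompt_template = PromptTemplate.from_template(
    "You are completing {language} code in {filename}.\n"
    "Code before the cursor:\n{prefix}\n"
    "Code after the cursor:\n{suffix}\n"
    "Continue from the prefix:"
)

llm = FakeListLLM(responses=["print('hello')"])  # placeholder model

# Same LCEL pipeline as in create_llm_chain().
llm_chain = prompt_template | llm | StrOutputParser()

print(
    llm_chain.invoke(
        {
            "prefix": "def greet():\n    ",
            "suffix": "",
            "language": "python",
            "filename": "example.py",
        }
    )
)
```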