feat: Sequential chain with QnA (#52)
* VectorStoreIndexCreator, error fixes, tweaking, Documents QnA test

* changing Web and Pdf sources to use proper document

* bugfixes and massive simplification of DocumentQnA test

* bugfix

* Sequential chain with QnA
TesAnti authored Nov 5, 2023
1 parent fcb2cf1 commit ea43091
Showing 3 changed files with 137 additions and 49 deletions.
6 changes: 3 additions & 3 deletions src/libs/LangChain.Core/Indexes/VectorStoreIndexWrapper.cs
@@ -9,19 +9,19 @@ namespace LangChain.Indexes;

public class VectorStoreIndexWrapper
{
private readonly VectorStore _vectorStore;
public VectorStore Store { get; }

public VectorStoreIndexWrapper(VectorStore vectorStore)
{
_vectorStore = vectorStore;
Store = vectorStore;
}

public Task<string?> QueryAsync(string question, BaseCombineDocumentsChain llm, string inputKey= "question", string outputKey= "output_text")
{
var chain = new RetrievalQaChain(
new RetrievalQaChainInput(
llm,
_vectorStore.AsRetreiver())
Store.AsRetreiver())
{
InputKey= inputKey,
OutputKey= outputKey,
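(Note: the change above swaps the private _vectorStore field for a public Store property, so callers can wire their own chains against the index's retriever instead of going through QueryAsync. A minimal sketch of both paths; the index and stuffDocumentsChain variables here are illustrative, not part of this diff.)

// Simple path: the wrapper builds a RetrievalQaChain internally.
var answer = index.QueryAsync("What is my dog's name?", stuffDocumentsChain).Result;

// Custom path: the new Store property exposes the vector store, so a chain
// can be assembled by hand against its retriever (as CreateChain1 does below).
var qa = new RetrievalQaChain(
    new RetrievalQaChainInput(stuffDocumentsChain, index.Store.AsRetreiver())
    {
        InputKey = "question",
        OutputKey = "output_text",
    });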
@@ -0,0 +1,60 @@
using LangChain.Abstractions.Embeddings.Base;
using LangChain.Databases.InMemory;
using LangChain.Docstore;
using LangChain.Indexes;
using LangChain.Prompts;
using LangChain.TextSplitters;

namespace LangChain.Providers.LLamaSharp.IntegrationTests;

public partial class LLamaSharpTests
{

IEmbeddings CreateEmbeddings()
{
var embeddings = new LLamaSharpEmbeddings(new LLamaSharpConfiguration
{
PathToModelFile = ModelPath,
Temperature = 0
});
return embeddings;

}

IChatModel CreateInstructionModel()
{
var model = new LLamaSharpModelInstruction(new LLamaSharpConfiguration
{
PathToModelFile = ModelPath,
Temperature = 0
});
return model;

}
IChatModel CreateChatModel()
{
var model = new LLamaSharpModelChat(new LLamaSharpConfiguration
{
PathToModelFile = ModelPath,
Temperature = 0
});
return model;

}
VectorStoreIndexWrapper CreateVectorStoreIndex(IEmbeddings embeddings, string[] texts)
{
InMemoryVectorStore vectorStore = new InMemoryVectorStore(embeddings);
var textSplitter = new CharacterTextSplitter();
VectorStoreIndexCreator indexCreator = new VectorStoreIndexCreator(vectorStore, textSplitter);
var index = indexCreator.FromDocumentsAsync(texts.Select(x => new Document(x)).ToList()).Result;
return index;
}

PromptTemplate CreatePromptTemplate()
{
string prompt = "Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.\n\n{context}\n\nQuestion: {question}\nHelpful Answer:";
var template = new PromptTemplate(new PromptTemplateInput(prompt, new List<string>() { "context", "question" }));
return template;
}

}
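These helpers back the QnA tests below. A rough sketch of how they compose into a single documents-QnA query (it approximates the collapsed body of DocumentsQuestionAnsweringTest rather than reproducing it exactly):

var embeddings = CreateEmbeddings();
var model = CreateInstructionModel();

// Split, embed and store the texts in the in-memory vector database.
var index = CreateVectorStoreIndex(embeddings, new[]
{
    "I spent entire day watching TV",
    "My dog name is Bob",
});

// The template declares {context} and {question}; StuffDocumentsChain fills
// {context} by concatenating the retrieved documents.
var llmChain = new LlmChain(new LlmChainInput(model, CreatePromptTemplate()));
var stuffChain = new StuffDocumentsChain(new StuffDocumentsChainInput(llmChain)
{
    DocumentVariableName = "context",
});

var answer = index.QueryAsync("What is my dog's name?", stuffChain).Result;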
@@ -1,6 +1,9 @@
using LangChain.Abstractions.Embeddings.Base;
using LangChain.Abstractions.Chains.Base;
using LangChain.Abstractions.Embeddings.Base;
using LangChain.Chains.CombineDocuments;
using LangChain.Chains.LLM;
using LangChain.Chains.RetrievalQA;
using LangChain.Chains.Sequentials;
using LangChain.Databases;
using LangChain.Databases.InMemory;
using LangChain.Docstore;
@@ -15,7 +18,7 @@
namespace LangChain.Providers.LLamaSharp.IntegrationTests;

[TestClass]
public class LLamaSharpTests
public partial class LLamaSharpTests
{
string ModelPath=>HuggingFaceModelDownloader.Instance.GetModel("TheBloke/Thespis-13B-v0.5-GGUF", "thespis-13b-v0.5.Q2_K.gguf","main").Result;
[TestMethod]
@@ -106,48 +109,6 @@ public void EmbeddingsTestWithInMemory()
Assert.AreEqual("My dog name is Bob", closest.PageContent);
}


#region Helpers
IEmbeddings CreateEmbeddings()
{
var embeddings = new LLamaSharpEmbeddings(new LLamaSharpConfiguration
{
PathToModelFile = ModelPath,
Temperature = 0
});
return embeddings;

}

IChatModel CreateInstructionModel()
{
var model = new LLamaSharpModelInstruction(new LLamaSharpConfiguration
{
PathToModelFile = ModelPath,
Temperature = 0
});
return model;

}

VectorStoreIndexWrapper CreateVectorStoreIndex(IEmbeddings embeddings, string[] texts)
{
InMemoryVectorStore vectorStore = new InMemoryVectorStore(embeddings);
var textSplitter = new CharacterTextSplitter();
VectorStoreIndexCreator indexCreator = new VectorStoreIndexCreator(vectorStore, textSplitter);
var index = indexCreator.FromDocumentsAsync(texts.Select(x => new Document(x)).ToList()).Result;
return index;
}

PromptTemplate CreatePromptTemplate()
{
string prompt = "Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.\n\n{context}\n\nQuestion: {question}\nHelpful Answer:";
var template = new PromptTemplate(new PromptTemplateInput(prompt, new List<string>() { "context", "question" }));
return template;
}
#endregion


[TestMethod]
#if CONTINUOUS_INTEGRATION_BUILD
[Ignore]
@@ -186,10 +147,77 @@ public void DocumentsQuestionAnsweringTest()
).Result;




Assert.IsTrue(answer.Contains("Bob"));
}

IChain CreateChain1(IChatModel model,IEmbeddings embeddings)
{

string[] texts = new string[]
{
"I spent entire day watching TV",
"My dog name is Bob",
"This icecream is delicious",
"It is cold in space"
};

var index = CreateVectorStoreIndex(embeddings, texts);
var template = CreatePromptTemplate();

var llmchain = new LlmChain(new LlmChainInput(model, template)
{
OutputKey = "pet_sentence",
});

var stuffDocumentsChain = new StuffDocumentsChain(new StuffDocumentsChainInput(llmchain)
{
DocumentVariableName = "context",

});

var chain = new RetrievalQaChain(
new RetrievalQaChainInput(
stuffDocumentsChain,
index.Store.AsRetreiver())
{
InputKey = "question",
OutputKey = "pet_sentence",
}
);

return chain;
}

[TestMethod]
#if CONTINUOUS_INTEGRATION_BUILD
[Ignore]
#endif
public void SequentialChainTest()
{
// setup
var embeddings = CreateEmbeddings();
var model = CreateInstructionModel();

var chain1 = CreateChain1(model, embeddings);

var prompt =
@"Human will provide you with sentence about pet. You need to answer with pet name.
Human: My dog name is Jack
Answer: Jack
Human: I think the best name for a pet is ""Jerry""
Answer: Jerry
Human: {pet_sentence}
Answer:";
var template = new PromptTemplate(new PromptTemplateInput(prompt, new List<string>() { "pet_sentence" }));
var chain2 = new LlmChain(new LlmChainInput(model, template));

var sequence = new SequentialChain(
new SequentialChainInput(
new [] { chain1, chain2 },
inputVariables:new[]{"question"}));

var answer=sequence.Run("What is the good name for a pet?").Result;

Assert.AreEqual("Bob", answer);
}
}
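A note on how the keys line up in SequentialChainTest: the sequence is declared with "question" as its input variable, which chain1's RetrievalQaChain consumes (InputKey = "question") and answers into "pet_sentence"; chain2's prompt declares "pet_sentence" as its only variable, so its completion becomes the value returned by Run. Roughly:

// "What is the good name for a pet?"      (sequence input, key "question")
//   -> chain1: RetrievalQaChain           retrieves "My dog name is Bob" and
//              OutputKey = "pet_sentence" answers with the pet sentence
//   -> chain2: LlmChain                   prompt variable {pet_sentence},
//                                         extracts the name
//   -> "Bob"                              (result of sequence.Run(...))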
