From a2adc4ebe47a61a38a8ff2a84fcf2668cd9600e1 Mon Sep 17 00:00:00 2001 From: marcus Date: Tue, 27 Feb 2024 12:40:31 -0800 Subject: [PATCH] Deprecate the sampler --- llama-cpp-2/src/context/sample.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/llama-cpp-2/src/context/sample.rs b/llama-cpp-2/src/context/sample.rs index 08d3b59e..f9ef4c78 100644 --- a/llama-cpp-2/src/context/sample.rs +++ b/llama-cpp-2/src/context/sample.rs @@ -8,6 +8,7 @@ use llama_cpp_sys_2::llama_context; /// struct to hold params for sampling #[derive(Debug)] +#[deprecated(since = "0.1.32", note = "this does not scale well with many parameters and does not allow changing the order of sampling steps.")] pub struct Sampler<'grammar> { token_data_array: LlamaTokenDataArray, grammar: Option<&'grammar mut LlamaGrammar>, @@ -15,6 +16,7 @@ pub struct Sampler<'grammar> { } impl<'grammar> Sampler<'grammar> { + #[deprecated(since = "0.1.32", note = "this does not scale well with many parameters and does not allow changing the order of sampling steps.")] fn sample(self, llama_context: &mut LlamaContext) -> LlamaToken { match self { Sampler { @@ -58,6 +60,7 @@ impl<'grammar> Sampler<'grammar> { /// Create a new sampler. #[must_use] + #[deprecated(since = "0.1.32", note = "this does not scale well with many parameters and does not allow changing the order of sampling steps.")] pub fn new(llama_token_data_array: LlamaTokenDataArray) -> Self { Self { token_data_array: llama_token_data_array, @@ -68,6 +71,7 @@ impl<'grammar> Sampler<'grammar> { /// Set the grammar for sampling.
#[must_use] + #[deprecated(since = "0.1.32", note = "this does not scale well with many parameters and does not allow changing the order of sampling steps.")] pub fn with_grammar(mut self, grammar: &'grammar mut LlamaGrammar) -> Self { self.grammar = Some(grammar); self @@ -87,6 +91,7 @@ impl<'grammar> Sampler<'grammar> { /// .with_temperature(0.5); /// ``` #[must_use] + #[deprecated(since = "0.1.32", note = "this does not scale well with many parameters and does not allow changing the order of sampling steps.")] pub fn with_temperature(mut self, temperature: f32) -> Self { if temperature == 0.0 { return self; @@ -102,6 +107,7 @@ impl LlamaContext<'_> { /// # Panics /// /// - sampler contains no tokens + #[deprecated(since = "0.1.32", note = "this does not scale well with many parameters and does not allow changing the order of sampling steps.")] pub fn sample(&mut self, sampler: Sampler) -> LlamaToken { sampler.sample(self) }