
Commit

Merge pull request #66 from utilityai/n_threads
exposed n_threads
MarcusDunn authored Feb 3, 2024
2 parents ee4eb35 + d6cd7c4 commit 7c95eab
Showing 1 changed file with 27 additions and 0 deletions.
llama-cpp-2/src/context/params.rs: 27 additions & 0 deletions
@@ -241,6 +241,33 @@ impl LlamaContextParams {
    pub fn rope_freq_scale(&self) -> f32 {
        self.context_params.rope_freq_scale
    }

    /// Get the number of threads.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let params = llama_cpp_2::context::params::LlamaContextParams::default();
    /// assert_eq!(params.n_threads(), 4);
    /// ```
    pub fn n_threads(&self) -> u32 {
        self.context_params.n_threads
    }

    /// Set the number of threads.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use llama_cpp_2::context::params::LlamaContextParams;
    /// let params = LlamaContextParams::default()
    ///     .with_n_threads(8);
    /// assert_eq!(params.n_threads(), 8);
    /// ```
    pub fn with_n_threads(mut self, n_threads: u32) -> Self {
        self.context_params.n_threads = n_threads;
        self
    }
}

/// Default parameters for `LlamaContext`. (as defined in llama.cpp by `llama_context_default_params`)
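A minimal usage sketch of the accessors exposed by this commit (not part of the diff; it assumes only the `LlamaContextParams` builder shown above plus `std::thread::available_parallelism` from the Rust standard library):

use llama_cpp_2::context::params::LlamaContextParams;

fn main() {
    // Pick a thread count from the host, falling back to the library default of 4.
    let n_threads = std::thread::available_parallelism()
        .map(|n| n.get() as u32)
        .unwrap_or(4);

    // Builder-style configuration using the setter added in this commit.
    let params = LlamaContextParams::default().with_n_threads(n_threads);
    assert_eq!(params.n_threads(), n_threads);
}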
