Skip to content

Commit

Permalink
GO-4318 Try to fix potential leaks
Browse files Browse the repository at this point in the history
  • Loading branch information
fat-fellow committed Oct 22, 2024
1 parent 8e1b879 commit 6d6df20
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 30 deletions.
1 change: 0 additions & 1 deletion rust/src/c_util/mod.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
mod util;

pub use self::util::set_error;
pub use self::util::assert_str;
pub use self::util::assert_string;
pub use self::util::assert_pointer;
pub use self::util::convert_document_as_json;
Expand Down
29 changes: 11 additions & 18 deletions rust/src/c_util/util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,6 @@ fn process_c_str<'a>(str_ptr: *const c_char, error_buffer: *mut *mut c_char) ->
}
}

pub fn assert_str<'a>(str_ptr: *const c_char, error_buffer: *mut *mut c_char) -> Option<&'a str> {
    // Validate and convert the raw C string via the shared helper; on failure the
    // helper has already written a message into `error_buffer`, so the error
    // payload carries no extra information and can be collapsed to `None`.
    process_c_str(str_ptr, error_buffer).ok()
}

pub fn assert_string(str_ptr: *const c_char, error_buffer: *mut *mut c_char) -> Option<String> {
match process_c_str(str_ptr, error_buffer) {
Ok(valid_str) => Some(valid_str.to_owned()),
Expand Down Expand Up @@ -112,15 +105,15 @@ pub fn process_string_slice<'a, F>(
mut func: F,
) -> Result<(), ()>
where
F: FnMut(&'a str) -> Result<(), ()>,
F: FnMut(String) -> Result<(), ()>,
{
let slice = match assert_pointer(ptr, error_buffer) {
Some(ptr) => unsafe { slice::from_raw_parts(ptr, len) },
None => return Err(()),
};

for &item in slice {
let value = match assert_str(item, error_buffer) {
let value = match assert_string(item, error_buffer) {
Some(value) => value,
None => return Err(()),
};
Expand All @@ -133,14 +126,14 @@ where
Ok(())
}

pub fn schema_apply_for_field<'a, T, K, F: FnMut(Field, &'a str) -> Result<T, ()>>(
pub fn schema_apply_for_field<'a, T, K, F: FnMut(Field, String) -> Result<T, ()>>(
error_buffer: *mut *mut c_char,
schema: Schema,
field_name: &'a str,
field_name: String,
mut func: F,
) -> Result<T, ()>
{
match schema.get_field(field_name) {
match schema.get_field(field_name.as_str()) {
Ok(field) => func(field, field_name),
Err(err) => {
set_error(&err.to_string(), error_buffer);
Expand Down Expand Up @@ -270,7 +263,7 @@ pub fn delete_docs(
delete_ids_len: usize,
error_buffer: *mut *mut c_char,
context: &mut TantivyContext,
field_name: &str,
field_name: String,
) {
let schema = context.index.schema();

Expand All @@ -289,7 +282,7 @@ pub fn delete_docs(
};

if process_string_slice(delete_ids_ptr, error_buffer, delete_ids_len, |id_value| {
let _ = context.writer.delete_term(Term::from_field_text(field, id_value));
let _ = context.writer.delete_term(Term::from_field_text(field, id_value.as_str()));
Ok(())
}).is_err() {
rollback(error_buffer, &mut context.writer, "Failed to process string slice");
Expand Down Expand Up @@ -328,8 +321,8 @@ pub fn add_field(
error_buffer: *mut *mut c_char,
doc: &mut Document,
index: &Index,
field_name: &str,
field_value: &str,
field_name: String,
field_value: String,
) {
let schema = index.schema();
let field = match schema_apply_for_field::<Field, (), _>
Expand Down Expand Up @@ -367,14 +360,14 @@ pub fn search(
return Err(());
}

let query = match assert_str(query_ptr, error_buffer) {
let query = match assert_string(query_ptr, error_buffer) {
Some(value) => value,
None => return Err(())
};

let query_parser = QueryParser::for_index(&context.index, fields);

let query = match query_parser.parse_query(query) {
let query = match query_parser.parse_query(query.as_str()) {
Ok(query) => query,
Err(err) => {
set_error(&err.to_string(), error_buffer);
Expand Down
10 changes: 5 additions & 5 deletions rust/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use std::ptr;
use logcall::logcall;
use tantivy::{schema::*};

use crate::c_util::{add_and_consume_documents, add_field, assert_pointer, assert_str, assert_string, box_from, convert_document_as_json, create_context_with_schema, delete_docs, drop_any, get_doc, search, set_error, start_lib_init};
use crate::c_util::{add_and_consume_documents, add_field, assert_pointer, assert_string, box_from, convert_document_as_json, create_context_with_schema, delete_docs, drop_any, get_doc, search, set_error, start_lib_init};
use crate::tantivy_util::{add_text_field, Document, register_edge_ngram_tokenizer, register_ngram_tokenizer, register_raw_tokenizer, register_simple_tokenizer, SearchResult, TantivyContext};

mod tantivy_util;
Expand Down Expand Up @@ -158,12 +158,12 @@ pub extern "C" fn context_register_text_analyzer_simple(
None => return
};

let lang = match assert_str(lang_str_ptr, error_buffer) {
let lang = match assert_string(lang_str_ptr, error_buffer) {
Some(value) => value,
None => return
};

register_simple_tokenizer(text_limit, &context.index, tokenizer_name.as_str(), lang);
register_simple_tokenizer(text_limit, &context.index, tokenizer_name.as_str(), lang.as_str());
}

#[logcall]
Expand Down Expand Up @@ -216,7 +216,7 @@ pub extern "C" fn context_delete_documents(
None => return
};

let field_name = match assert_str(field_name_ptr, error_buffer) {
let field_name = match assert_string(field_name_ptr, error_buffer) {
Some(value) => value,
None => return
};
Expand Down Expand Up @@ -343,7 +343,7 @@ pub extern "C" fn document_add_field(
None => return
};

add_field(error_buffer, doc, &context.index, field_name.as_str(), field_value.as_str());
add_field(error_buffer, doc, &context.index, field_name, field_value);
}

#[logcall]
Expand Down
12 changes: 6 additions & 6 deletions rust/src/tantivy_util/document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,16 @@ use crate::tantivy_util::{Document, extract_text_from_owned_value};

pub fn convert_document_to_json<'a>(
doc: &&mut Document,
field_to_name: HashMap<Field, &'a str>,
) -> HashMap<&'a str, serde_json::Value> {
let mut result_json: HashMap<&str, serde_json::Value> = HashMap::new();
field_to_name: HashMap<Field, String>,
) -> HashMap<String, serde_json::Value> {
let mut result_json: HashMap<String, serde_json::Value> = HashMap::new();

let _ = serde_json::to_value(doc.score).is_ok_and(
|score| result_json.insert("score", score).is_some()
|score| result_json.insert("score".to_string(), score).is_some()
);

let _ = serde_json::to_value(&doc.highlights).is_ok_and(
|highlights| result_json.insert("highlights", highlights).is_some()
|highlights| result_json.insert("highlights".to_string(), highlights).is_some()
);

let doc = &doc.tantivy_doc;
Expand All @@ -22,7 +22,7 @@ pub fn convert_document_to_json<'a>(
Some(key) => {
let _ = extract_text_from_owned_value(&field_value.value).is_some_and(
|value| serde_json::to_value(value).is_ok_and(
|value| result_json.insert(key, value).is_some())
|value| result_json.insert(key.to_string(), value).is_some())
);
}
None => {}
Expand Down

0 comments on commit 6d6df20

Please sign in to comment.