Skip to content

Commit

Permalink
GO-4318 Try to fix potential leaks
Browse files Browse the repository at this point in the history
  • Loading branch information
fat-fellow committed Oct 22, 2024
1 parent 1ed7daa commit 8e1b879
Show file tree
Hide file tree
Showing 5 changed files with 62 additions and 35 deletions.
1 change: 1 addition & 0 deletions rust/src/c_util/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ mod util;

pub use self::util::set_error;
pub use self::util::assert_str;
pub use self::util::assert_string;
pub use self::util::assert_pointer;
pub use self::util::convert_document_as_json;
pub use self::util::start_lib_init;
Expand Down
50 changes: 34 additions & 16 deletions rust/src/c_util/util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ use std::{fs, panic, slice};
use std::collections::HashMap;
use std::ffi::{CStr, CString};
use std::os::raw::c_char;
use std::panic::PanicInfo;
use std::path::Path;
use std::sync::Mutex;
use lazy_static::lazy_static;
Expand Down Expand Up @@ -36,23 +35,38 @@ fn write_buffer(error_buffer: *mut *mut c_char, err_str: CString) {
}
}

pub fn assert_str<'a>(str_ptr: *const c_char, error_buffer: *mut *mut c_char) -> Option<&'a str> {
let result = unsafe {
fn process_c_str<'a>(str_ptr: *const c_char, error_buffer: *mut *mut c_char) -> Result<&'a str, String> {
unsafe {
if str_ptr.is_null() {
set_error(POINTER_IS_NULL, error_buffer);
return None;
return Err(POINTER_IS_NULL.to_owned());
}
CStr::from_ptr(str_ptr)
}.to_str();
match result {
Ok(str) => Some(str),
Err(err) => {
set_error(&err.to_string(), error_buffer);
return None;
match CStr::from_ptr(str_ptr).to_str() {
Ok(valid_str) => Ok(valid_str),
Err(err) => {
let error_message = err.to_string();
set_error(&error_message, error_buffer);
Err(error_message)
}
}
}
}

/// Borrows the C string at `str_ptr` as a UTF-8 `&str`, or returns `None`.
///
/// On failure (null pointer or invalid UTF-8) an error message has already
/// been written to `error_buffer` by `process_c_str`, so the `Err` payload
/// can be discarded here.
pub fn assert_str<'a>(str_ptr: *const c_char, error_buffer: *mut *mut c_char) -> Option<&'a str> {
    // `ok()` collapses the Result into an Option; the error text was
    // delivered to the caller via error_buffer already.
    process_c_str(str_ptr, error_buffer).ok()
}

/// Copies the C string at `str_ptr` into an owned `String`, or returns `None`.
///
/// Unlike `assert_str`, the returned value does not borrow the caller's
/// buffer, so it remains valid after the FFI caller frees `str_ptr`.
/// On failure (null pointer or invalid UTF-8) an error message has already
/// been written to `error_buffer` by `process_c_str`.
pub fn assert_string(str_ptr: *const c_char, error_buffer: *mut *mut c_char) -> Option<String> {
    process_c_str(str_ptr, error_buffer)
        .map(|valid_str| valid_str.to_owned())
        .ok()
}


pub fn assert_pointer<'a, T>(ptr: *mut T, error_buffer: *mut *mut c_char) -> Option<&'a mut T> {
let result = unsafe {
if ptr.is_null() {
Expand Down Expand Up @@ -159,7 +173,7 @@ pub fn convert_document_as_json(
Ok(json!(doc_json).to_string())
}

pub fn start_lib_init(log_level: &str, clear_on_panic: bool) {
pub fn start_lib_init(log_level: String, clear_on_panic: bool) {
let old_hook = panic::take_hook();
if clear_on_panic {
panic::set_hook(Box::new(move |panic_info| {
Expand All @@ -185,13 +199,17 @@ pub fn start_lib_init(log_level: &str, clear_on_panic: bool) {
).try_init();
}

pub fn create_context_with_schema(error_buffer: *mut *mut c_char, schema: Schema, path: &str) -> Result<*mut TantivyContext, ()> {
pub fn create_context_with_schema(
error_buffer: *mut *mut c_char,
schema: Schema,
path: String,
) -> Result<*mut TantivyContext, ()> {
match FTS_PATH.lock() {
Ok(mut fts_path) => *fts_path = path.to_string(),
Ok(mut fts_path) => *fts_path = path.clone(),
Err(e) => debug!("Failed to set path: {}", e),
};

match fs::create_dir_all(Path::new(path)) {
match fs::create_dir_all(Path::new(path.as_str())) {
Err(e) => {
debug!("Failed to create directories: {}", e);
set_error(&e.to_string(), error_buffer);
Expand All @@ -200,7 +218,7 @@ pub fn create_context_with_schema(error_buffer: *mut *mut c_char, schema: Schema
_ => {}
}

let dir = match MmapDirectory::open(path.to_owned()) {
let dir = match MmapDirectory::open(path) {
Ok(dir) => dir,
Err(err) => {
set_error(&err.to_string(), error_buffer);
Expand Down
34 changes: 17 additions & 17 deletions rust/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use std::ptr;
use logcall::logcall;
use tantivy::{schema::*};

use crate::c_util::{add_and_consume_documents, add_field, assert_pointer, assert_str, box_from, convert_document_as_json, create_context_with_schema, delete_docs, drop_any, get_doc, search, set_error, start_lib_init};
use crate::c_util::{add_and_consume_documents, add_field, assert_pointer, assert_str, assert_string, box_from, convert_document_as_json, create_context_with_schema, delete_docs, drop_any, get_doc, search, set_error, start_lib_init};
use crate::tantivy_util::{add_text_field, Document, register_edge_ngram_tokenizer, register_ngram_tokenizer, register_raw_tokenizer, register_simple_tokenizer, SearchResult, TantivyContext};

mod tantivy_util;
Expand Down Expand Up @@ -33,12 +33,12 @@ pub extern "C" fn schema_builder_add_text_field(
None => return
};

let tokenizer_name = match assert_str(tokenizer_name_ptr, error_buffer) {
let tokenizer_name = match assert_string(tokenizer_name_ptr, error_buffer) {
Some(value) => value,
None => return
};

let field_name = match assert_str(field_name_ptr, error_buffer) {
let field_name = match assert_string(field_name_ptr, error_buffer) {
Some(value) => value,
None => return
};
Expand All @@ -50,7 +50,7 @@ pub extern "C" fn schema_builder_add_text_field(
_ => return set_error("index_record_option_const is wrong", error_buffer)
};

add_text_field(stored, is_text, is_fast, builder, tokenizer_name, field_name, index_record_option);
add_text_field(stored, is_text, is_fast, builder, tokenizer_name.as_str(), field_name.as_str(), index_record_option);
}

#[logcall]
Expand Down Expand Up @@ -79,7 +79,7 @@ pub extern "C" fn context_create_with_schema(
None => return ptr::null_mut(),
};

let path = match assert_str(path_ptr, error_buffer) {
let path = match assert_string(path_ptr, error_buffer) {
Some(value) => value,
None => return ptr::null_mut(),
};
Expand All @@ -105,12 +105,12 @@ pub extern "C" fn context_register_text_analyzer_ngram(
None => return
};

let tokenizer_name = match assert_str(tokenizer_name_ptr, error_buffer) {
let tokenizer_name = match assert_string(tokenizer_name_ptr, error_buffer) {
Some(value) => value,
None => return
};

match register_ngram_tokenizer(min_gram, max_gram, prefix_only, &context.index, tokenizer_name) {
match register_ngram_tokenizer(min_gram, max_gram, prefix_only, &context.index, tokenizer_name.as_str()) {
Err(err) => return set_error(&err.to_string(), error_buffer),
_ => return
};
Expand All @@ -131,12 +131,12 @@ pub extern "C" fn context_register_text_analyzer_edge_ngram(
None => return
};

let tokenizer_name = match assert_str(tokenizer_name_ptr, error_buffer) {
let tokenizer_name = match assert_string(tokenizer_name_ptr, error_buffer) {
Some(value) => value,
None => return
};

register_edge_ngram_tokenizer(min_gram, max_gram, limit, &context.index, tokenizer_name);
register_edge_ngram_tokenizer(min_gram, max_gram, limit, &context.index, tokenizer_name.as_str());
}

#[logcall]
Expand All @@ -153,7 +153,7 @@ pub extern "C" fn context_register_text_analyzer_simple(
None => return
};

let tokenizer_name = match assert_str(tokenizer_name_ptr, error_buffer) {
let tokenizer_name = match assert_string(tokenizer_name_ptr, error_buffer) {
Some(value) => value,
None => return
};
Expand All @@ -163,7 +163,7 @@ pub extern "C" fn context_register_text_analyzer_simple(
None => return
};

register_simple_tokenizer(text_limit, &context.index, tokenizer_name, lang);
register_simple_tokenizer(text_limit, &context.index, tokenizer_name.as_str(), lang);
}

#[logcall]
Expand All @@ -178,12 +178,12 @@ pub extern "C" fn context_register_text_analyzer_raw(
None => return
};

let tokenizer_name = match assert_str(tokenizer_name_ptr, error_buffer) {
let tokenizer_name = match assert_string(tokenizer_name_ptr, error_buffer) {
Some(value) => value,
None => return
};

register_raw_tokenizer(&context.index, tokenizer_name);
register_raw_tokenizer(&context.index, tokenizer_name.as_str());
}

#[logcall]
Expand Down Expand Up @@ -333,17 +333,17 @@ pub extern "C" fn document_add_field(
None => return
};

let field_name = match assert_str(field_name_ptr, error_buffer) {
let field_name = match assert_string(field_name_ptr, error_buffer) {
Some(value) => value,
None => return
};

let field_value = match assert_str(field_value_ptr, error_buffer) {
let field_value = match assert_string(field_value_ptr, error_buffer) {
Some(value) => value,
None => return
};

add_field(error_buffer, doc, &context.index, field_name, field_value);
add_field(error_buffer, doc, &context.index, field_name.as_str(), field_value.as_str());
}

#[logcall]
Expand Down Expand Up @@ -403,7 +403,7 @@ pub unsafe extern "C" fn init_lib(
error_buffer: *mut *mut c_char,
clear_on_panic: bool
) {
let log_level = match assert_str(log_level_ptr, error_buffer) {
let log_level = match assert_string(log_level_ptr, error_buffer) {
Some(value) => value,
None => return
};
Expand Down
10 changes: 9 additions & 1 deletion rust/src/tantivy_util/scheme_builder.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,14 @@
use tantivy::schema::{FAST, IndexRecordOption, SchemaBuilder, STORED, STRING, TEXT, TextFieldIndexing};

pub fn add_text_field(stored: bool, is_text: bool, is_fast: bool, builder: &mut SchemaBuilder, tokenizer_name: &str, field_name: &str, index_record_option: IndexRecordOption) {
pub fn add_text_field(
stored: bool,
is_text: bool,
is_fast: bool,
builder: &mut SchemaBuilder,
tokenizer_name: &str,
field_name: &str,
index_record_option: IndexRecordOption,
) {
let mut text_options = if is_text { TEXT } else { STRING };
text_options = if stored { text_options | STORED } else { text_options };
text_options = if is_fast { text_options | FAST } else { text_options };
Expand Down
2 changes: 1 addition & 1 deletion tantivy_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -352,7 +352,7 @@ func fx(
minGram uintptr,
isFastId bool,
) (*tantivy_go.Schema, *tantivy_go.TantivyContext) {
err := tantivy_go.LibInit("debug")
err := tantivy_go.LibInit(true, "debug")
assert.NoError(t, err)
builder, err := tantivy_go.NewSchemaBuilder()
require.NoError(t, err)
Expand Down

0 comments on commit 8e1b879

Please sign in to comment.