engine: add extra metrics on the compression ratio of write.
Signed-off-by: lucasliang <[email protected]>
LykxSassinator committed Apr 19, 2024
1 parent e505d63 commit b792fe6
Showing 5 changed files with 32 additions and 17 deletions.
3 changes: 2 additions & 1 deletion src/engine.rs
@@ -142,7 +142,7 @@ where
             return Ok(0);
         }
         let start = Instant::now();
-        let len = log_batch.finish_populate(
+        let (len, compression_ratio) = log_batch.finish_populate(
             self.cfg.batch_compression_threshold.0 as usize,
             self.cfg.compression_level,
         )?;
@@ -225,6 +225,7 @@ where
         now = end;
         ENGINE_WRITE_DURATION_HISTOGRAM.observe(now.saturating_duration_since(start).as_secs_f64());
         ENGINE_WRITE_SIZE_HISTOGRAM.observe(len as f64);
+        ENGINE_WRITE_COMPRESSION_RATIO_HISTOGRAM.observe(compression_ratio);
         Ok(len)
     }

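As an aside (not part of the commit): the engine observes the ratio unconditionally, so uncompressed writes are recorded as 0.0 and any aggregate over this histogram mixes compressed and uncompressed batches. A minimal sketch of reading the histogram back, assuming only the get_sample_sum()/get_sample_count() accessors of the prometheus crate's Histogram:

// Sketch only: summarize the new histogram, e.g. in a test or a debug dump.
// Zero-valued samples come from writes where compression was skipped.
let count = ENGINE_WRITE_COMPRESSION_RATIO_HISTOGRAM.get_sample_count();
let sum = ENGINE_WRITE_COMPRESSION_RATIO_HISTOGRAM.get_sample_sum();
if count > 0 {
    println!("mean write compression ratio: {:.3}", sum / count as f64);
}
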
2 changes: 1 addition & 1 deletion src/file_pipe_log/mod.rs
@@ -218,7 +218,7 @@ pub mod debug {
         let log_file_format = LogFileContext::new(file_id, Version::default());
         for batch in bs.iter_mut() {
             let offset = writer.offset() as u64;
-            let len = batch
+            let (len, _) = batch
                 .finish_populate(1 /* compression_threshold */, None)
                 .unwrap();
             batch.prepare_write(&log_file_format).unwrap();
24 changes: 13 additions & 11 deletions src/log_batch.rs
@@ -767,28 +767,28 @@ impl LogBatch {
         &mut self,
         compression_threshold: usize,
         compression_level: Option<usize>,
-    ) -> Result<usize> {
+    ) -> Result<(usize, f64)> {
         let _t = StopWatch::new(perf_context!(log_populating_duration));
         debug_assert!(self.buf_state == BufState::Open);
         if self.is_empty() {
             self.buf_state = BufState::Encoded(self.buf.len(), 0);
-            return Ok(0);
+            return Ok((0, 0.0));
         }
         self.buf_state = BufState::Incomplete;

         // entries
-        let (header_offset, compression_type) = if compression_threshold > 0
+        let (header_offset, compression_type, compression_ratio) = if compression_threshold > 0
             && self.buf.len() >= LOG_BATCH_HEADER_LEN + compression_threshold
         {
             let buf_len = self.buf.len();
-            lz4::append_compress_block(
+            let compression_ratio = lz4::append_compress_block(
                 &mut self.buf,
                 LOG_BATCH_HEADER_LEN,
                 compression_level.unwrap_or(lz4::DEFAULT_LZ4_COMPRESSION_LEVEL),
             )?;
-            (buf_len - LOG_BATCH_HEADER_LEN, CompressionType::Lz4)
+            (buf_len - LOG_BATCH_HEADER_LEN, CompressionType::Lz4, compression_ratio)
         } else {
-            (0, CompressionType::None)
+            (0, CompressionType::None, 0.0)
         };

         // checksum
@@ -830,7 +830,7 @@ impl LogBatch {
         }

         self.buf_state = BufState::Encoded(header_offset, footer_roffset - LOG_BATCH_HEADER_LEN);
-        Ok(self.buf.len() - header_offset)
+        Ok((self.buf.len() - header_offset, compression_ratio))
     }

     /// Make preparations for the write of `LogBatch`.
@@ -1328,7 +1328,7 @@ mod tests {
             offset: 0,
         };
         let old_approximate_size = batch.approximate_size();
-        let len = batch.finish_populate(usize::from(compress), None).unwrap();
+        let (len, _) = batch.finish_populate(usize::from(compress), None).unwrap();
         assert!(old_approximate_size >= len);
         assert_eq!(batch.approximate_size(), len);
         let mut batch_handle = mocked_file_block_handle;
@@ -1493,7 +1493,7 @@ mod tests {
         batch1.merge(&mut batch2).unwrap();
         assert!(batch2.is_empty());

-        let len = batch1.finish_populate(0, None).unwrap();
+        let (len, _) = batch1.finish_populate(0, None).unwrap();
         batch1.prepare_write(&file_context).unwrap();
         let encoded = batch1.encoded_bytes();
         assert_eq!(len, encoded.len());
@@ -1549,7 +1549,8 @@ mod tests {
             offset: 0,
         };
         let buf_len = batch.buf.len();
-        let len = batch.finish_populate(1, None).unwrap();
+        let (len, compression_ratio) = batch.finish_populate(1, None).unwrap();
+        assert!(compression_ratio == 0.0);
         assert!(len == 0);
         assert_eq!(batch.buf_state, BufState::Encoded(buf_len, 0));
         let file_context = LogFileContext::new(mocked_file_block_handles.id, Version::V2);
@@ -1671,7 +1672,8 @@ mod tests {
             },
         ];
         let old_approximate_size = batch.approximate_size();
-        let len = batch.finish_populate(1, None).unwrap();
+        let (len, compression_ratio) = batch.finish_populate(1, None).unwrap();
+        assert!(compression_ratio > 0.0);
         assert!(old_approximate_size >= len);
         assert_eq!(batch.approximate_size(), len);
         let checksum = batch.item_batch.checksum;
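
Caller-side view of the updated contract, as a hedged sketch (the threshold value below is illustrative, not an engine default): finish_populate now returns an (encoded_len, compression_ratio) pair, and 0.0 is the sentinel for "no compression applied" (empty batch, or payload below the threshold).

// Sketch only: interpreting the new return value of finish_populate.
let (len, compression_ratio) = log_batch.finish_populate(8192, None)?;
if compression_ratio > 0.0 {
    // Entries were LZ4-compressed; ratio = compressed_len / uncompressed_len.
} else {
    // Batch was empty or below the threshold, so it stays uncompressed.
}
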
6 changes: 6 additions & 0 deletions src/metrics.rs
@@ -201,6 +201,12 @@ lazy_static! {
         exponential_buckets(256.0, 1.8, 22).unwrap()
     )
     .unwrap();
+    pub static ref ENGINE_WRITE_COMPRESSION_RATIO_HISTOGRAM: Histogram = register_histogram!(
+        "raft_engine_write_compression_ratio",
+        "Bucketed histogram of Raft Engine write compression ratio",
+        exponential_buckets(0.0005, 1.8, 16).unwrap()
+    )
+    .unwrap();
     pub static ref LOG_ALLOCATE_DURATION_HISTOGRAM: Histogram = register_histogram!(
         "raft_engine_allocate_log_duration_seconds",
         "Bucketed histogram of Raft Engine allocate log duration",
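
For reference, the bucket layout chosen above spans roughly 0.0005 up to about 3.4, which brackets typical LZ4 compressed/uncompressed ratios (usually well below 1.0) while still catching pathological values above 1.0. A small sketch, assuming only the prometheus crate helper already used in metrics.rs:

// Sketch: print the 16 bucket upper bounds used by the new histogram.
fn main() {
    let buckets = prometheus::exponential_buckets(0.0005, 1.8, 16).unwrap();
    println!("{:?}", buckets); // 0.0005, 0.0009, 0.0016, ..., ~1.87, ~3.37
}
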
14 changes: 10 additions & 4 deletions src/util.rs
@@ -226,9 +226,10 @@ pub mod lz4 {
     pub const DEFAULT_LZ4_COMPRESSION_LEVEL: usize = 1;

     /// Compress content in `buf[skip..]`, and append output to `buf`.
-    pub fn append_compress_block(buf: &mut Vec<u8>, skip: usize, level: usize) -> Result<()> {
+    pub fn append_compress_block(buf: &mut Vec<u8>, skip: usize, level: usize) -> Result<f64> {
         let buf_len = buf.len();
         let content_len = buf_len - skip;
+        let mut compression_ratio = 0.0;
         if content_len > 0 {
             if content_len > i32::MAX as usize {
                 return Err(Error::InvalidArgument(format!(
@@ -256,10 +257,11 @@ pub mod lz4 {
                 if compressed == 0 {
                     return Err(Error::Other(box_err!("Compression failed")));
                 }
+                compression_ratio = compressed as f64 / content_len as f64;
                 buf.set_len(buf_len + 4 + compressed as usize);
             }
         }
-        Ok(())
+        Ok(compression_ratio)
     }

     pub fn decompress_block(src: &[u8]) -> Result<Vec<u8>> {
@@ -301,8 +303,12 @@ pub mod lz4 {
         let vecs: Vec<Vec<u8>> = vec![b"".to_vec(), b"123".to_vec(), b"12345678910".to_vec()];
         for mut vec in vecs.into_iter() {
             let uncompressed_len = vec.len();
-            super::append_compress_block(&mut vec, 0, super::DEFAULT_LZ4_COMPRESSION_LEVEL)
-                .unwrap();
+            let compression_ratio =
+                super::append_compress_block(&mut vec, 0, super::DEFAULT_LZ4_COMPRESSION_LEVEL)
+                    .unwrap();
+            if uncompressed_len == 0 {
+                assert_eq!(compression_ratio, 0.0);
+            }
             let res = super::decompress_block(&vec[uncompressed_len..]).unwrap();
             assert_eq!(res, vec[..uncompressed_len].to_owned());
         }
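
To spell out the convention introduced above: the ratio is compressed_len / uncompressed_len, so smaller values mean better compression, and 0.0 doubles as the "nothing was compressed" result (as the updated test asserts for empty input). A standalone sketch that mirrors, rather than reuses, the in-tree logic:

// Sketch of the ratio convention used by append_compress_block.
fn compression_ratio(compressed_len: usize, uncompressed_len: usize) -> f64 {
    if uncompressed_len == 0 {
        // Matches the empty-input case asserted in the test above.
        0.0
    } else {
        compressed_len as f64 / uncompressed_len as f64
    }
}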
