-
Notifications
You must be signed in to change notification settings - Fork 463
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat(pageserver): filter keys with gc-compaction (#9004)
Part of #8002 Close #8920 Legacy compaction (as well as gc-compaction) relies on the GC process to remove unused layer files, but this relies on many factors (i.e., key partition) to ensure data in a dropped table can be eventually removed. In gc-compaction, we consider the keyspace information when doing the compaction process. If a key is not in the keyspace, we will skip that key and not include it in the final output. However, this is not easy to implement because gc-compaction considers branch points (i.e., retain_lsns) and the retained keyspaces could change across different LSNs. Therefore, for now, we only remove aux v1 keys in the compaction process. ## Summary of changes * Add `FilterIterator` to filter out keys. * Integrate `FilterIterator` with gc-compaction. * Add `collect_gc_compaction_keyspace` for a spec of keyspaces that can be retained during the gc-compaction process. --------- Signed-off-by: Alex Chi Z <[email protected]>
- Loading branch information
Showing
4 changed files
with
244 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,205 @@ | ||
use std::ops::Range; | ||
|
||
use anyhow::bail; | ||
use pageserver_api::{ | ||
key::Key, | ||
keyspace::{KeySpace, SparseKeySpace}, | ||
}; | ||
use utils::lsn::Lsn; | ||
|
||
use crate::repository::Value; | ||
|
||
use super::merge_iterator::MergeIterator; | ||
|
||
/// A filter iterator over merge iterators (and can be easily extended to other types of iterators).
///
/// The iterator will skip any keys not included in the keyspace filter. In other words, the keyspace filter contains the keys
/// to be retained.
pub struct FilterIterator<'a> {
    // Underlying merge iterator producing (key, lsn, value) items.
    inner: MergeIterator<'a>,
    // Key ranges to retain; kept sorted by range start and verified
    // non-overlapping at construction time (see `create`).
    retain_key_filters: Vec<Range<Key>>,
    // Index of the filter range the iteration cursor currently points at;
    // only ever advances, since `next` assumes keys arrive in ascending order.
    current_filter_idx: usize,
}
|
||
impl<'a> FilterIterator<'a> { | ||
pub fn create( | ||
inner: MergeIterator<'a>, | ||
dense_keyspace: KeySpace, | ||
sparse_keyspace: SparseKeySpace, | ||
) -> anyhow::Result<Self> { | ||
let mut retain_key_filters = Vec::new(); | ||
retain_key_filters.extend(dense_keyspace.ranges); | ||
retain_key_filters.extend(sparse_keyspace.0.ranges); | ||
retain_key_filters.sort_by(|a, b| a.start.cmp(&b.start)); | ||
// Verify key filters are non-overlapping and sorted | ||
for window in retain_key_filters.windows(2) { | ||
if window[0].end > window[1].start { | ||
bail!( | ||
"Key filters are overlapping: {:?} and {:?}", | ||
window[0], | ||
window[1] | ||
); | ||
} | ||
} | ||
Ok(Self { | ||
inner, | ||
retain_key_filters, | ||
current_filter_idx: 0, | ||
}) | ||
} | ||
|
||
pub async fn next(&mut self) -> anyhow::Result<Option<(Key, Lsn, Value)>> { | ||
while let Some(item) = self.inner.next().await? { | ||
while self.current_filter_idx < self.retain_key_filters.len() | ||
&& item.0 >= self.retain_key_filters[self.current_filter_idx].end | ||
{ | ||
// [filter region] [filter region] [filter region] | ||
// ^ item | ||
// ^ current filter | ||
self.current_filter_idx += 1; | ||
// [filter region] [filter region] [filter region] | ||
// ^ item | ||
// ^ current filter | ||
} | ||
if self.current_filter_idx >= self.retain_key_filters.len() { | ||
// We already exhausted all filters, so we should return now | ||
// [filter region] [filter region] [filter region] | ||
// ^ item | ||
// ^ current filter (nothing) | ||
return Ok(None); | ||
} | ||
if self.retain_key_filters[self.current_filter_idx].contains(&item.0) { | ||
// [filter region] [filter region] [filter region] | ||
// ^ item | ||
// ^ current filter | ||
return Ok(Some(item)); | ||
} | ||
// If the key is not contained in the key retaining filters, continue to the next item. | ||
// [filter region] [filter region] [filter region] | ||
// ^ item | ||
// ^ current filter | ||
} | ||
Ok(None) | ||
} | ||
} | ||
|
||
#[cfg(test)]
mod tests {
    use super::*;

    use itertools::Itertools;
    use pageserver_api::key::Key;
    use utils::lsn::Lsn;

    use crate::{
        tenant::{
            harness::{TenantHarness, TIMELINE_ID},
            storage_layer::delta_layer::test::produce_delta_layer,
        },
        DEFAULT_PG_VERSION,
    };

    /// Asserts that draining `filter_iter` yields exactly the `(key, lsn, value)`
    /// tuples in `expect`, in order, and that both sides end at the same position.
    async fn assert_filter_iter_equal(
        filter_iter: &mut FilterIterator<'_>,
        expect: &[(Key, Lsn, Value)],
    ) {
        let mut expect_iter = expect.iter();
        loop {
            let o1 = filter_iter.next().await.unwrap();
            let o2 = expect_iter.next();
            // Both streams must run out at the same point; a length mismatch
            // fails here rather than silently truncating the comparison.
            assert_eq!(o1.is_some(), o2.is_some());
            if o1.is_none() && o2.is_none() {
                break;
            }
            let (k1, l1, v1) = o1.unwrap();
            let (k2, l2, v2) = o2.unwrap();
            assert_eq!(&k1, k2);
            assert_eq!(l1, *l2);
            assert_eq!(&v1, v2);
        }
    }

    /// End-to-end check: feed a delta layer of 100 sequential keys through
    /// `FilterIterator` and verify that only keys inside the filter ranges
    /// survive, across two different filter configurations.
    #[tokio::test]
    async fn filter_keyspace_iterator() {
        use crate::repository::Value;
        use bytes::Bytes;

        let harness = TenantHarness::create("filter_iterator_filter_keyspace_iterator")
            .await
            .unwrap();
        let (tenant, ctx) = harness.load().await;

        let tline = tenant
            .create_test_timeline(TIMELINE_ID, Lsn(0x10), DEFAULT_PG_VERSION, &ctx)
            .await
            .unwrap();

        // Helper: a fixed key prefix with `id` placed in field6, giving a
        // family of distinct test keys indexed by `id`.
        fn get_key(id: u32) -> Key {
            let mut key = Key::from_hex("000000000033333333444444445500000000").unwrap();
            key.field6 = id;
            key
        }
        const N: usize = 100;
        // 100 image values at keys 0..100, with LSNs cycling through ten
        // values (0x20..=0x140) so the layer spans multiple LSNs.
        let test_deltas1 = (0..N)
            .map(|idx| {
                (
                    get_key(idx as u32),
                    Lsn(0x20 * ((idx as u64) % 10 + 1)),
                    Value::Image(Bytes::from(format!("img{idx:05}"))),
                )
            })
            .collect_vec();
        let resident_layer_1 = produce_delta_layer(&tenant, &tline, test_deltas1.clone(), &ctx)
            .await
            .unwrap();

        let merge_iter = MergeIterator::create(
            &[resident_layer_1.get_as_delta(&ctx).await.unwrap()],
            &[],
            &ctx,
        );

        // The third range extends past the data (keys stop at 99) and the
        // fourth lies entirely beyond it, exercising partially- and
        // fully-unmatched filter ranges.
        let mut filter_iter = FilterIterator::create(
            merge_iter,
            KeySpace {
                ranges: vec![
                    get_key(5)..get_key(10),
                    get_key(20)..get_key(30),
                    get_key(90)..get_key(110),
                    get_key(1000)..get_key(2000),
                ],
            },
            SparseKeySpace(KeySpace::default()),
        )
        .unwrap();
        let mut result = Vec::new();
        result.extend(test_deltas1[5..10].iter().cloned());
        result.extend(test_deltas1[20..30].iter().cloned());
        result.extend(test_deltas1[90..100].iter().cloned());
        assert_filter_iter_equal(&mut filter_iter, &result).await;

        // Second pass with a freshly created merge iterator (the previous one
        // was consumed): first range starts at the very first key and the
        // last range ends inside the data.
        let merge_iter = MergeIterator::create(
            &[resident_layer_1.get_as_delta(&ctx).await.unwrap()],
            &[],
            &ctx,
        );

        let mut filter_iter = FilterIterator::create(
            merge_iter,
            KeySpace {
                ranges: vec![
                    get_key(0)..get_key(10),
                    get_key(20)..get_key(30),
                    get_key(90)..get_key(95),
                ],
            },
            SparseKeySpace(KeySpace::default()),
        )
        .unwrap();
        let mut result = Vec::new();
        result.extend(test_deltas1[0..10].iter().cloned());
        result.extend(test_deltas1[20..30].iter().cloned());
        result.extend(test_deltas1[90..95].iter().cloned());
        assert_filter_iter_equal(&mut filter_iter, &result).await;
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
2969952
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
4977 tests run: 4812 passed, 1 failed, 164 skipped (full report)
Failures on Postgres 17
test_hot_standby_feedback
: debug-x86-64Flaky tests (3)
Postgres 17
test_pageserver_compaction_smoke
: release-x86-64, release-arm64Postgres 16
test_neon_cli_basics
: release-arm64Test coverage report is not available
2969952 at 2024-09-23T20:07:42.447Z :recycle: