rebase and add auto-upgrade if aux flag is set
Signed-off-by: Alex Chi Z <[email protected]>
skyzh committed May 9, 2024
1 parent f4b1524 commit 2ee9021
Showing 1 changed file with 142 additions and 35 deletions.
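
In plain terms, the change keys the serialization format in TimelineMetadata::to_bytes on the aux flag: a timeline whose header is still at METADATA_OLD_FORMAT_VERSION_V2 keeps the legacy fixed-size V2 encoding only while last_aux_file_policy is None; once the policy is set (or the header is already at METADATA_FORMAT_VERSION), the body is written in the V3 JSON format, so old metadata is auto-upgraded on its next write. The standalone Rust sketch below illustrates only that decision rule; the FormatVersion enum and format_to_write function are simplified stand-ins for illustration, not the pageserver types.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum FormatVersion {
    V2,
    V3,
}

// Which on-disk format to_bytes would emit, given the header's current version
// and whether last_aux_file_policy is set (simplified sketch).
fn format_to_write(header_version: FormatVersion, aux_policy_set: bool) -> FormatVersion {
    match (header_version, aux_policy_set) {
        // Legacy metadata with no aux-file policy keeps the fixed-size V2 encoding.
        (FormatVersion::V2, false) => FormatVersion::V2,
        // A V2 header with the aux flag set, or a V3 header, is written as V3:
        // the metadata is auto-upgraded on the next write.
        _ => FormatVersion::V3,
    }
}

fn main() {
    assert_eq!(format_to_write(FormatVersion::V2, false), FormatVersion::V2);
    assert_eq!(format_to_write(FormatVersion::V2, true), FormatVersion::V3); // auto-upgrade
    assert_eq!(format_to_write(FormatVersion::V3, true), FormatVersion::V3);
}
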
pageserver/src/tenant/metadata.rs
@@ -95,6 +95,21 @@ struct TimelineMetadataBodyV2 {
pg_version: u32,
}

impl TimelineMetadataBodyV2 {
pub fn from_v3(body: &TimelineMetadataBodyV3) -> Self {
assert!(body.last_aux_file_policy.is_none(), "cannot downgrade");
Self {
disk_consistent_lsn: body.disk_consistent_lsn,
prev_record_lsn: body.prev_record_lsn,
ancestor_lsn: body.ancestor_lsn,
ancestor_timeline: body.ancestor_timeline,
latest_gc_cutoff_lsn: body.latest_gc_cutoff_lsn,
initdb_lsn: body.initdb_lsn,
pg_version: body.pg_version,
}
}
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
struct TimelineMetadataBodyV1 {
disk_consistent_lsn: Lsn,
@@ -224,37 +239,39 @@ impl TimelineMetadata {
}

pub fn to_bytes(&self) -> Result<Vec<u8>, SerializeError> {
match self.hdr.format_version {
METADATA_OLD_FORMAT_VERSION_V2 => {
let body_bytes = self.body.ser()?;
let metadata_size = METADATA_HDR_SIZE + body_bytes.len();
let hdr = TimelineMetadataHeader {
size: metadata_size as u16,
format_version: METADATA_OLD_FORMAT_VERSION_V2,
checksum: crc32c::crc32c(&body_bytes),
};
let hdr_bytes = hdr.ser()?;
let mut metadata_bytes = vec![0u8; METADATA_MAX_SIZE];
metadata_bytes[0..METADATA_HDR_SIZE].copy_from_slice(&hdr_bytes);
metadata_bytes[METADATA_HDR_SIZE..metadata_size].copy_from_slice(&body_bytes);
Ok(metadata_bytes)
}
METADATA_FORMAT_VERSION => {
let body_bytes = serde_json::to_vec(&self.body)
.map_err(|e| SerializeError::BadInput(e.into()))?;
let metadata_size = METADATA_HDR_SIZE + body_bytes.len();
let hdr = TimelineMetadataHeader {
size: metadata_size as u16,
format_version: METADATA_FORMAT_VERSION,
checksum: crc32c::crc32c(&body_bytes),
};
let hdr_bytes = hdr.ser()?;
let mut metadata_bytes = Vec::new();
metadata_bytes.extend(hdr_bytes);
metadata_bytes.extend(body_bytes);
Ok(metadata_bytes)
}
_ => unreachable!(),
if self.hdr.format_version == METADATA_OLD_FORMAT_VERSION_V2
&& self.body.last_aux_file_policy.is_none()
{
let body_bytes = TimelineMetadataBodyV2::from_v3(&self.body).ser()?;
let metadata_size = METADATA_HDR_SIZE + body_bytes.len();
let hdr = TimelineMetadataHeader {
size: metadata_size as u16,
format_version: METADATA_OLD_FORMAT_VERSION_V2,
checksum: crc32c::crc32c(&body_bytes),
};
let hdr_bytes = hdr.ser()?;
let mut metadata_bytes = vec![0u8; METADATA_MAX_SIZE];
metadata_bytes[0..METADATA_HDR_SIZE].copy_from_slice(&hdr_bytes);
metadata_bytes[METADATA_HDR_SIZE..metadata_size].copy_from_slice(&body_bytes);
Ok(metadata_bytes)
} else if self.hdr.format_version == METADATA_FORMAT_VERSION
|| self.hdr.format_version == METADATA_OLD_FORMAT_VERSION_V2
{
let body_bytes =
serde_json::to_vec(&self.body).map_err(|e| SerializeError::BadInput(e.into()))?;
let metadata_size = METADATA_HDR_SIZE + body_bytes.len();
let hdr = TimelineMetadataHeader {
size: metadata_size as u16,
format_version: METADATA_FORMAT_VERSION,
checksum: crc32c::crc32c(&body_bytes),
};
let hdr_bytes = hdr.ser()?;
let mut metadata_bytes = Vec::new();
metadata_bytes.extend(hdr_bytes);
metadata_bytes.extend(body_bytes);
Ok(metadata_bytes)
} else {
unreachable!()
}
}

@@ -558,6 +575,90 @@ mod tests {

#[test]
fn test_roundtrip_metadata_v2() {
let metadata_v2 = TimelineMetadata {
hdr: TimelineMetadataHeader {
checksum: 0,
size: 0,
format_version: METADATA_OLD_FORMAT_VERSION_V2,
},
body: TimelineMetadataBodyV3 {
disk_consistent_lsn: Lsn(0x200),
prev_record_lsn: Some(Lsn(0x100)),
ancestor_timeline: Some(TIMELINE_ID),
ancestor_lsn: Lsn(0),
latest_gc_cutoff_lsn: Lsn(0),
initdb_lsn: Lsn(0),
pg_version: 16,
last_aux_file_policy: None,
},
};

let metadata_bytes = metadata_v2
.to_bytes()
.expect("Should serialize correct metadata to bytes");

// This should deserialize to the latest version format
let deserialized_metadata = TimelineMetadata::from_bytes(&metadata_bytes)
.expect("Should deserialize its own bytes");

let expected_metadata = TimelineMetadata::new(
Lsn(0x200),
Some(Lsn(0x100)),
Some(TIMELINE_ID),
Lsn(0),
Lsn(0),
Lsn(0),
16,
None,
);

assert_eq!(deserialized_metadata.body, expected_metadata.body);
}

#[test]
fn test_roundtrip_metadata_v3() {
let metadata_v3 = TimelineMetadata {
hdr: TimelineMetadataHeader {
checksum: 0,
size: 0,
format_version: METADATA_FORMAT_VERSION,
},
body: TimelineMetadataBodyV3 {
disk_consistent_lsn: Lsn(0x200),
prev_record_lsn: Some(Lsn(0x100)),
ancestor_timeline: Some(TIMELINE_ID),
ancestor_lsn: Lsn(0),
latest_gc_cutoff_lsn: Lsn(0),
initdb_lsn: Lsn(0),
pg_version: 16,
last_aux_file_policy: Some(AuxFilePolicy::V2),
},
};

let metadata_bytes = metadata_v3
.to_bytes()
.expect("Should serialize correct metadata to bytes");

// This should deserialize to the latest version format
let deserialized_metadata = TimelineMetadata::from_bytes(&metadata_bytes)
.expect("Should deserialize its own bytes");

let expected_metadata = TimelineMetadata::new(
Lsn(0x200),
Some(Lsn(0x100)),
Some(TIMELINE_ID),
Lsn(0),
Lsn(0),
Lsn(0),
16,
Some(AuxFilePolicy::V2),
);

assert_eq!(deserialized_metadata.body, expected_metadata.body);
}

#[test]
fn test_roundtrip_metadata_aux_auto_upgrade() {
let metadata_v2 = TimelineMetadata {
hdr: TimelineMetadataHeader {
checksum: 0,
@@ -595,6 +696,10 @@ mod tests {
Some(AuxFilePolicy::V2),
);

assert_eq!(
deserialized_metadata.hdr.format_version,
METADATA_FORMAT_VERSION
);
assert_eq!(deserialized_metadata.body, expected_metadata.body);
}

@@ -609,6 +714,7 @@ mod tests {
Lsn(0),
// Any version will do here, so use the default
crate::DEFAULT_PG_VERSION,
None,
);
original_metadata.hdr.format_version = METADATA_OLD_FORMAT_VERSION_V2;

@@ -649,8 +755,7 @@ mod tests {

let expected_metadata = {
let mut temp_metadata = original_metadata;
let body_bytes = temp_metadata
.body
let body_bytes = TimelineMetadataBodyV2::from_v3(&temp_metadata.body)
.ser()
.expect("Cannot serialize the metadata body");
let metadata_size = METADATA_HDR_SIZE + body_bytes.len();
Expand Down Expand Up @@ -682,6 +787,7 @@ mod tests {
latest_gc_cutoff_lsn: Lsn(0),
initdb_lsn: Lsn(0),
pg_version: 16,
last_aux_file_policy: Some(AuxFilePolicy::V2),
},
};

@@ -691,7 +797,7 @@ mod tests {

assert_eq!(
&metadata_bytes[..METADATA_HDR_SIZE],
&[202, 106, 183, 219, 0, 205, 0, 5]
&[194, 55, 211, 146, 0, 233, 0, 5]
);
let json_value: serde_json::Value =
serde_json::from_slice(&metadata_bytes[METADATA_HDR_SIZE..]).unwrap();
@@ -704,7 +810,8 @@ mod tests {
"initdb_lsn": "0/0",
"latest_gc_cutoff_lsn": "0/0",
"pg_version": 16,
"prev_record_lsn": "0/100"
"prev_record_lsn": "0/100",
"last_aux_file_policy": "V2"
})
);
}