Commit 764d964

Merge remote-tracking branch 'origin/main' into arpad/compaction_split_delta_lsn

arpad-m committed May 10, 2024
2 parents d529aa5 + 6351313
Showing 29 changed files with 490 additions and 173 deletions.
2 changes: 1 addition & 1 deletion .config/nextest.toml
@@ -1,2 +1,2 @@
[profile.default]
slow-timeout = { period = "20s", terminate-after = 3 }
slow-timeout = { period = "60s", terminate-after = 3 }
4 changes: 1 addition & 3 deletions .github/actionlint.yml
@@ -1,11 +1,9 @@
self-hosted-runner:
labels:
-  - arm64
-  - dev
- gen3
- large
# Remove `macos-14` from the list after https://github.com/rhysd/actionlint/pull/392 is merged.
- macos-14
+  - large-arm64
- small
- us-east-2
config-variables:
2 changes: 1 addition & 1 deletion .github/workflows/build-build-tools-image.yml
@@ -39,7 +39,7 @@ jobs:
matrix:
arch: [ x64, arm64 ]

-runs-on: ${{ fromJson(format('["self-hosted", "dev", "{0}"]', matrix.arch)) }}
+runs-on: ${{ fromJson(format('["self-hosted", "gen3", "{0}"]', matrix.arch == 'arm64' && 'large-arm64' || 'large')) }}

(The `cond && a || b` construct is the GitHub Actions expression idiom for a ternary: arm64 builds land on `large-arm64` runners and x64 builds on `large`, with the `gen3` label replacing `dev`.)

env:
IMAGE_TAG: ${{ inputs.image-tag }}
3 changes: 3 additions & 0 deletions .github/workflows/build_and_test.yml
@@ -341,6 +341,9 @@ jobs:
env:
NEXTEST_RETRIES: 3
run: |
+#nextest does not yet support running doctests
+cargo test --doc $CARGO_FLAGS $CARGO_FEATURES
for io_engine in std-fs tokio-epoll-uring ; do
NEON_PAGESERVER_UNIT_TEST_VIRTUAL_FILE_IOENGINE=$io_engine ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES
done
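The loop in this hunk runs the unit-test suite once per virtual-file IO engine by exporting NEON_PAGESERVER_UNIT_TEST_VIRTUAL_FILE_IOENGINE. A minimal sketch of the consuming side of that contract, assuming tests read the variable and fall back to std-fs when it is unset (the helper name here is hypothetical, not the pageserver's actual API):

```rust
use std::env;

// Hypothetical helper: pick the IO engine under test from the same
// variable the CI loop exports, defaulting to std-fs when unset.
fn io_engine_for_tests() -> String {
    env::var("NEON_PAGESERVER_UNIT_TEST_VIRTUAL_FILE_IOENGINE")
        .unwrap_or_else(|_| "std-fs".to_owned())
}

fn main() {
    println!("testing with io_engine = {}", io_engine_for_tests());
}
```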
33 changes: 21 additions & 12 deletions .github/workflows/neon_extra_builds.yml
@@ -136,7 +136,7 @@ jobs:
check-linux-arm-build:
needs: [ check-permissions, build-build-tools-image ]
timeout-minutes: 90
-runs-on: [ self-hosted, dev, arm64 ]
+runs-on: [ self-hosted, large-arm64 ]

env:
# Use release build only, to have less debug info around
@@ -232,20 +232,20 @@

- name: Run cargo build
run: |
-mold -run cargo build --locked $CARGO_FLAGS $CARGO_FEATURES --bins --tests
+mold -run cargo build --locked $CARGO_FLAGS $CARGO_FEATURES --bins --tests -j$(nproc)
- name: Run cargo test
env:
NEXTEST_RETRIES: 3
run: |
-cargo nextest run $CARGO_FEATURES
+cargo nextest run $CARGO_FEATURES -j$(nproc)
# Run separate tests for real S3
export ENABLE_REAL_S3_REMOTE_STORAGE=nonempty
export REMOTE_STORAGE_S3_BUCKET=neon-github-ci-tests
export REMOTE_STORAGE_S3_REGION=eu-central-1
# Avoid `$CARGO_FEATURES` since there's no `testing` feature in the e2e tests now
-cargo nextest run --package remote_storage --test test_real_s3
+cargo nextest run --package remote_storage --test test_real_s3 -j$(nproc)
# Run separate tests for real Azure Blob Storage
# XXX: replace region with `eu-central-1`-like region
@@ -255,12 +255,12 @@
export REMOTE_STORAGE_AZURE_CONTAINER="${{ vars.REMOTE_STORAGE_AZURE_CONTAINER }}"
export REMOTE_STORAGE_AZURE_REGION="${{ vars.REMOTE_STORAGE_AZURE_REGION }}"
# Avoid `$CARGO_FEATURES` since there's no `testing` feature in the e2e tests now
-cargo nextest run --package remote_storage --test test_real_azure
+cargo nextest run --package remote_storage --test test_real_azure -j$(nproc)
check-codestyle-rust-arm:
needs: [ check-permissions, build-build-tools-image ]
timeout-minutes: 90
-runs-on: [ self-hosted, dev, arm64 ]
+runs-on: [ self-hosted, large-arm64 ]

container:
image: ${{ needs.build-build-tools-image.outputs.image }}
@@ -269,6 +269,11 @@
password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
options: --init

+strategy:
+  fail-fast: false
+  matrix:
+    build_type: [ debug, release ]

(The new matrix splits this single job into parallel debug and release runs; the `if: matrix.build_type == ...` conditions below assign each step to one of them.)

steps:
- name: Fix git ownership
run: |
@@ -305,31 +310,35 @@
exit 1
fi
echo "CLIPPY_COMMON_ARGS=${CLIPPY_COMMON_ARGS}" >> $GITHUB_ENV
- name: Run cargo clippy (debug)
+if: matrix.build_type == 'debug'
run: cargo hack --feature-powerset clippy $CLIPPY_COMMON_ARGS
- name: Run cargo clippy (release)
+if: matrix.build_type == 'release'
run: cargo hack --feature-powerset clippy --release $CLIPPY_COMMON_ARGS

- name: Check documentation generation
-run: cargo doc --workspace --no-deps --document-private-items
+if: matrix.build_type == 'release'
+run: cargo doc --workspace --no-deps --document-private-items -j$(nproc)
env:
RUSTDOCFLAGS: "-Dwarnings -Arustdoc::private_intra_doc_links"

# Use `${{ !cancelled() }}` to run quick tests after the longer clippy run
- name: Check formatting
-if: ${{ !cancelled() }}
+if: ${{ !cancelled() && matrix.build_type == 'release' }}
run: cargo fmt --all -- --check

# https://github.com/facebookincubator/cargo-guppy/tree/bec4e0eb29dcd1faac70b1b5360267fc02bf830e/tools/cargo-hakari#2-keep-the-workspace-hack-up-to-date-in-ci
- name: Check rust dependencies
-if: ${{ !cancelled() }}
+if: ${{ !cancelled() && matrix.build_type == 'release' }}
run: |
cargo hakari generate --diff # workspace-hack Cargo.toml is up-to-date
cargo hakari manage-deps --dry-run # all workspace crates depend on workspace-hack
# https://github.com/EmbarkStudios/cargo-deny
- name: Check rust licenses/bans/advisories/sources
-if: ${{ !cancelled() }}
+if: ${{ !cancelled() && matrix.build_type == 'release' }}
run: cargo deny check

gather-rust-build-stats:
@@ -338,7 +347,7 @@
contains(github.event.pull_request.labels.*.name, 'run-extra-build-stats') ||
contains(github.event.pull_request.labels.*.name, 'run-extra-build-*') ||
github.ref_name == 'main'
-runs-on: [ self-hosted, gen3, large ]
+runs-on: [ self-hosted, large ]
container:
image: ${{ needs.build-build-tools-image.outputs.image }}
credentials:
@@ -369,7 +378,7 @@
run: make walproposer-lib -j$(nproc)

- name: Produce the build stats
-run: cargo build --all --release --timings
+run: cargo build --all --release --timings -j$(nproc)

- name: Upload the build stats
id: upload-stats
2 changes: 1 addition & 1 deletion libs/utils/src/poison.rs
@@ -3,7 +3,7 @@
//! # Example
//!
//! ```
-//! # tokio_test::block_on(async {
+//! # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
//! use utils::poison::Poison;
//! use std::time::Duration;
//!
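`tokio_test::block_on` builds a bare single-threaded runtime; the replacement spells the construction out and calls `enable_all()`, which turns on the IO and time drivers that the doctest's `Duration`-based code needs. A standalone sketch of the same runtime setup (assuming a `tokio` dependency with the `rt` and `time` features):

```rust
use std::time::Duration;

fn main() {
    // Same runtime construction as the updated doctest: a single-threaded
    // runtime with the IO and time drivers enabled.
    let rt = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .unwrap();
    rt.block_on(async {
        // Without enable_all() (or enable_time()), this sleep would panic
        // because no time driver is running.
        tokio::time::sleep(Duration::from_millis(10)).await;
        println!("time driver available");
    });
}
```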
8 changes: 7 additions & 1 deletion pageserver/compaction/src/compact_tiered.rs
@@ -106,7 +106,13 @@ pub async fn compact_tiered<E: CompactionJobExecutor>(
ctx,
)
.await?;
-    if target_file_size == u64::MAX {
+    if current_level_target_height == u64::MAX {
+        // our target height includes all possible lsns
+        info!(
+            level = current_level_no,
+            depth = depth,
+            "compaction loop reached max current_level_target_height"
+        );
break;
}
current_level_no += 1;
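The added check exits the compaction loop once the level's target height saturates at `u64::MAX`, meaning the level already spans every representable LSN and there is nothing higher to compact into. A toy sketch of why a saturation sentinel like this terminates the loop; the tenfold growth rule is an assumption for illustration, not the real tiered-compaction math:

```rust
fn main() {
    // Each level covers `fanout` times the LSN height of the previous one;
    // saturating_mul pins the value at u64::MAX instead of overflowing.
    let fanout: u64 = 10;
    let mut target_height: u64 = 1 << 60;
    let mut level = 0u32;
    loop {
        if target_height == u64::MAX {
            // The level covers all possible LSNs; going deeper is pointless.
            println!("reached max target height at level {level}");
            break;
        }
        target_height = target_height.saturating_mul(fanout);
        level += 1;
    }
}
```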
89 changes: 82 additions & 7 deletions pageserver/ctl/src/draw_timeline_dir.rs
@@ -28,6 +28,8 @@
//! # From an `index_part.json` in S3
//! (jq -r '.layer_metadata | keys[]' | cargo run -p pagectl draw-timeline ) < index_part.json-00000016 > out.svg
//!
+//! # enrich with lines for gc_cutoff and a child branch point
+//! cat <(jq -r '.historic_layers[] | .layer_file_name' < layers.json) <(echo -e 'gc_cutoff:0000001CE3FE32C9\nbranch:0000001DE3FE32C9') | cargo run --bin pagectl draw-timeline >| out.svg
//! ```
//!
//! ## Viewing
@@ -48,7 +50,7 @@
//! ```
//!
-use anyhow::Result;
+use anyhow::{Context, Result};
use pageserver::repository::Key;
use pageserver::METADATA_FILE_NAME;
use std::cmp::Ordering;
@@ -90,6 +92,33 @@ fn parse_filename(name: &str) -> (Range<Key>, Range<Lsn>) {
(keys, lsns)
}

+#[derive(Clone, Copy)]
+enum LineKind {
+    GcCutoff,
+    Branch,
+}
+
+impl From<LineKind> for Fill {
+    fn from(value: LineKind) -> Self {
+        match value {
+            LineKind::GcCutoff => Fill::Color(rgb(255, 0, 0)),
+            LineKind::Branch => Fill::Color(rgb(0, 255, 0)),
+        }
+    }
+}
+
+impl FromStr for LineKind {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> std::prelude::v1::Result<Self, Self::Err> {
+        Ok(match s {
+            "gc_cutoff" => LineKind::GcCutoff,
+            "branch" => LineKind::Branch,
+            _ => anyhow::bail!("unsupported linekind: {s}"),
+        })
+    }
+}

pub fn main() -> Result<()> {
// Parse layer filenames from stdin
struct Layer {
@@ -99,8 +128,29 @@ pub fn main() -> Result<()> {
}
let mut files: Vec<Layer> = vec![];
let stdin = io::stdin();
-    for line in stdin.lock().lines() {
+
+    let mut lines: Vec<(Lsn, LineKind)> = vec![];
+
+    for (lineno, line) in stdin.lock().lines().enumerate() {
+        let lineno = lineno + 1;
+
let line = line.unwrap();
+        if let Some((kind, lsn)) = line.split_once(':') {
+            let (kind, lsn) = LineKind::from_str(kind)
+                .context("parse kind")
+                .and_then(|kind| {
+                    if lsn.contains('/') {
+                        Lsn::from_str(lsn)
+                    } else {
+                        Lsn::from_hex(lsn)
+                    }
+                    .map(|lsn| (kind, lsn))
+                    .context("parse lsn")
+                })
+                .with_context(|| format!("parse {line:?} on {lineno}"))?;
+            lines.push((lsn, kind));
+            continue;
+        }
let line = PathBuf::from_str(&line).unwrap();
let filename = line.file_name().unwrap();
let filename = filename.to_str().unwrap();
@@ -117,8 +167,9 @@
}

// Collect all coordinates
-    let mut keys: Vec<Key> = vec![];
-    let mut lsns: Vec<Lsn> = vec![];
+    let mut keys: Vec<Key> = Vec::with_capacity(files.len());
+    let mut lsns: Vec<Lsn> = Vec::with_capacity(files.len() + lines.len());
+
for Layer {
key_range: keyr,
lsn_range: lsnr,
@@ -131,6 +182,8 @@
lsns.push(lsnr.end);
}

+    lsns.extend(lines.iter().map(|(lsn, _)| *lsn));

// Analyze
let key_map = build_coordinate_compression_map(keys);
let lsn_map = build_coordinate_compression_map(lsns);
@@ -144,10 +197,13 @@
println!(
"{}",
BeginSvg {
-            w: key_map.len() as f32,
+            w: (key_map.len() + 10) as f32,
h: stretch * lsn_map.len() as f32
}
);

+    let xmargin = 0.05; // Horizontal margin to disambiguate overlapping deltas

for Layer {
filename,
key_range: keyr,
@@ -169,7 +225,6 @@
let mut lsn_diff = (lsn_end - lsn_start) as f32;
let mut fill = Fill::None;
let mut ymargin = 0.05 * lsn_diff; // Height-dependent margin to disambiguate overlapping deltas
-        let xmargin = 0.05; // Height-dependent margin to disambiguate overlapping deltas
let mut lsn_offset = 0.0;

// Fill in and thicken rectangle if it's an
@@ -189,7 +244,7 @@
println!(
" {}",
rectangle(
-                key_start as f32 + stretch * xmargin,
+                5.0 + key_start as f32 + stretch * xmargin,
stretch * (lsn_max as f32 - (lsn_end as f32 - ymargin - lsn_offset)),
key_diff as f32 - stretch * 2.0 * xmargin,
stretch * (lsn_diff - 2.0 * ymargin)
@@ -200,6 +255,26 @@
.comment(filename)
);
}

+    for (lsn, kind) in lines {
+        let lsn_start = *lsn_map.get(&lsn).unwrap();
+        let lsn_end = lsn_start;
+        let stretch = 2.0;
+        let lsn_diff = 0.3;
+        let lsn_offset = -lsn_diff / 2.0;
+        let ymargin = 0.05;
+        println!(
+            "{}",
+            rectangle(
+                0.0f32 + stretch * xmargin,
+                stretch * (lsn_map.len() as f32 - (lsn_end as f32 - ymargin - lsn_offset)),
+                (key_map.len() + 10) as f32,
+                stretch * (lsn_diff - 2.0 * ymargin)
+            )
+            .fill(kind)
+        );
+    }

println!("{}", EndSvg);

eprintln!("num_images: {}", num_images);
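Both axes of the SVG go through `build_coordinate_compression_map`, which is what keeps layers readable despite the huge, uneven gaps between raw key and LSN values: each distinct coordinate is mapped to its rank, so adjacent distinct values render one unit apart. A self-contained sketch of that idea (the real helper's signature and return type may differ):

```rust
use std::collections::BTreeMap;

// Map each distinct coordinate to its rank among all observed values.
fn build_coordinate_compression_map(mut coords: Vec<u64>) -> BTreeMap<u64, usize> {
    coords.sort_unstable();
    coords.dedup();
    coords.into_iter().enumerate().map(|(rank, v)| (v, rank)).collect()
}

fn main() {
    // Raw coordinates spanning many orders of magnitude compress to 0, 1, 2, 3.
    let map = build_coordinate_compression_map(vec![7, 100, 100, 50_000, 9_999_999]);
    assert_eq!(map[&7], 0);
    assert_eq!(map[&100], 1);
    assert_eq!(map[&50_000], 2);
    assert_eq!(map[&9_999_999], 3);
    println!("compressed {} distinct coordinates", map.len());
}
```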
2 changes: 1 addition & 1 deletion pageserver/src/basebackup.rs
@@ -601,7 +601,7 @@ where
// add zenith.signal file
let mut zenith_signal = String::new();
if self.prev_record_lsn == Lsn(0) {
-        if self.lsn == self.timeline.get_ancestor_lsn() {
+        if self.timeline.is_ancestor_lsn(self.lsn) {
write!(zenith_signal, "PREV LSN: none")
.map_err(|e| BasebackupError::Server(e.into()))?;
} else {
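The fix swaps a bare equality test against `get_ancestor_lsn()` for a `Timeline::is_ancestor_lsn()` call, so the "does this LSN belong to the ancestor?" decision lives in one place instead of at each call site. A hypothetical sketch of that encapsulation; the comparison below is an assumption for illustration, the real method's semantics are defined in the pageserver:

```rust
#[derive(Clone, Copy, PartialEq, PartialOrd)]
struct Lsn(u64);

struct Timeline {
    ancestor_lsn: Lsn,
}

impl Timeline {
    // Hypothetical: treat any LSN at or before the branch point as
    // belonging to the ancestor; callers no longer hand-roll `==`.
    fn is_ancestor_lsn(&self, lsn: Lsn) -> bool {
        lsn <= self.ancestor_lsn
    }
}

fn main() {
    let tl = Timeline { ancestor_lsn: Lsn(0x1000) };
    assert!(tl.is_ancestor_lsn(Lsn(0x0800)));
    assert!(!tl.is_ancestor_lsn(Lsn(0x2000)));
}
```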