Merge #173
173: Run doctests on nightly builds r=JLockerman a=JLockerman



Co-authored-by: Joshua Lockerman <[email protected]>
bors[bot] and JLockerman authored Jun 29, 2021
2 parents cbfd6d0 + b5b587f commit 414e660
Showing 5 changed files with 83 additions and 30 deletions.
36 changes: 14 additions & 22 deletions .github/workflows/nightly_build.yml
@@ -4,6 +4,7 @@ on:
pull_request:
paths:
- 'docker/nightly/**'
- '.github/workflows/nightly_build.yml'
workflow_dispatch:
schedule:
# we build at 8am UTC, 3am Eastern, midnight Pacific
@@ -36,29 +37,20 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

# - name: Build
# id: tester_build
# uses: docker/build-push-action@v2
# with:
# target: toolkit-tools
# push: false
# load: true
# context: .
# file: ./docker/nightly/Dockerfile
# tags: timescaledev/timescaledb-toolkit:tester
# cache-from: type=local,src=/tmp/.buildx-cache
# cache-to: type=local,dest=/tmp/.buildx-cache

# - name: Run Tests
# run: |
# docker run -d --name toolkit_test -e POSTGRES_HOST_AUTH_METHOD=trust timescaledev/timescaledb-toolkit:tester
# docker exec toolkit_test /bin/bash -c 'PATH=\"${PATH}:/root/.cargo/bin\" \
# && cd /rust/timescaledb-analytics/crates \
# && cargo test --release --workspace --exclude timescaledb_toolkit'
# docker stop toolkit_test
# TODO we also want to run our extension tests, but we don't want to restart the DB, need a pgx patch
- name: Build
uses: docker/build-push-action@v2
with:
push: false
load: true
context: .
file: ./docker/nightly/Dockerfile
tags: timescaledev/timescaledb-toolkit:nightly

#TODO run doctests
- name: Run Doc Tests
run: |
docker run -d --name toolkit_test -e POSTGRES_HOST_AUTH_METHOD=trust -p 5432:5432 timescaledev/timescaledb-toolkit:nightly
cargo run --manifest-path ./tools/sql-doctester/Cargo.toml -- -h localhost -s "CREATE EXTENSION timescaledb_toolkit; SET SESSION TIMEZONE TO 'UTC'" -p 5432 docs
#TODO can/should we run our other tests also?

- name: Push
id: image_build
12 changes: 6 additions & 6 deletions docs/percentile_approximation.md
@@ -63,7 +63,7 @@ FROM response_times
GROUP BY 1, 2
ORDER BY 3 DESC LIMIT 15;
```
```output
```output, precision(2: 7)
bucket | api_id | avg | median
------------------------+--------+-------------------+--------------------
2020-01-01 00:00:00+00 | 12 | 963.7133258896499 | 718.5239744582293
@@ -103,7 +103,7 @@ GROUP BY 1, 2
ORDER BY 3 DESC LIMIT 15;
```

```output
```output, precision(2: 7)
bucket | api_id | avg | median
------------------------+--------+--------------------+--------------------
2020-01-01 09:00:00+00 | 9 | 11508.507742106061 | 568.5874170077199
@@ -134,7 +134,7 @@ FROM response_times
GROUP BY 1, 2
ORDER BY 4 DESC, 2, 1 LIMIT 15;
```
```output
```output, precision(2: 7)
bucket | api_id | avg | median
------------------------+--------+--------------------+--------------------
2020-01-01 00:00:00+00 | 12 | 963.71332588965 | 718.5239744582293
@@ -175,7 +175,7 @@ FROM response_times
GROUP BY 1, 2
ORDER BY 5 DESC LIMIT 15;
```
```output
```output, precision(2: 7)
bucket | api_id | avg | true_median | approx_median
------------------------+--------+--------------------+--------------------+--------------------
2020-01-01 00:00:00+00 | 12 | 963.71332588965 | 718.5239744582293 | 717.572650368603
@@ -222,7 +222,7 @@ SELECT
FROM response_times_hourly
ORDER BY 4 DESC, 2, 1 LIMIT 15;
```
```output
```output, precision(2: 7)
bucket | api_id | avg | approx_median
------------------------+--------+--------------------+--------------------
2020-01-01 00:00:00+00 | 12 | 963.71332588965 | 717.572650368603
@@ -278,7 +278,7 @@ FROM response_times_hourly
GROUP BY api_id
ORDER BY api_id;
```
```output
```output, precision(1: 7)
api_id | avg | approx_median
--------+--------------------+--------------------
1 | 71.55322907526073 | 54.57028044425955
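
The `precision(2: 7)` and `precision(1: 7)` annotations added to the output blocks above appear to tell sql-doctester to compare only the first N bytes of the named column (zero-based, judging by the runner change below, so column 2 is the `avg` column in the four-column tables). That keeps these doctests stable when floating-point results differ only in their trailing digits; a worked comparison is sketched after the runner.rs diff below.
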
2 changes: 2 additions & 0 deletions tools/sql-doctester/src/main.rs
@@ -1,5 +1,6 @@
use std::{
borrow::Cow,
collections::HashMap,
ffi::OsStr,
fs,
io::{self, Write},
@@ -92,6 +93,7 @@ pub struct Test {
output: Vec<Vec<String>>,
transactional: bool,
ignore_output: bool,
precision_limits: HashMap<usize, usize>,
}

fn extract_tests(root: &str) -> Vec<TestFile> {
44 changes: 43 additions & 1 deletion tools/sql-doctester/src/parser.rs
@@ -1,3 +1,5 @@
use std::collections::HashMap;

use pulldown_cmark::{
CodeBlockKind::Fenced,
CowStr, Event, Parser,
@@ -86,6 +88,7 @@ pub fn extract_tests_from_string(s: &str, file_stem: &str) -> TestFile {
output: Vec::new(),
transactional: code_block_info.transactional,
ignore_output: code_block_info.ignore_output,
precision_limits: code_block_info.precision_limits,
};

// consume the lines of the test
@@ -108,6 +111,14 @@ pub fn extract_tests_from_string(s: &str, file_stem: &str) -> TestFile {

// output, consume it
BlockKind::Output => {
if !test.precision_limits.is_empty()
&& !code_block_info.precision_limits.is_empty() {
panic!(
"cannot have precision limits on both test and output.\n{}:{} {:?}",
file_stem, current_line, heading_stack
)
}
test.precision_limits = code_block_info.precision_limits;
let _ = parser.next();
break;
}
@@ -162,6 +173,7 @@ struct CodeBlockInfo {
kind: BlockKind,
transactional: bool,
ignore_output: bool,
precision_limits: HashMap<usize, usize>,
}

#[derive(Clone, Copy)]
@@ -178,6 +190,7 @@ fn parse_code_block_info(info: &str) -> CodeBlockInfo {
kind: BlockKind::Other,
transactional: true,
ignore_output: false,
precision_limits: HashMap::new(),
};

for token in tokens {
@@ -191,6 +204,27 @@ fn parse_code_block_info(info: &str) -> CodeBlockInfo {
"ignore-output" => info.ignore_output = true,
"output" => info.kind = BlockKind::Output,
s if s.to_ascii_lowercase() == "sql" => info.kind = BlockKind::SQL,
p if p.starts_with("precision") => {
// syntax `precision(col: bytes)`
let precision_err = || -> ! {
panic!("invalid syntax for `precision(col: bytes)` found `{}`", p)
};
let arg = &p["precision".len()..];
if arg.as_bytes().first() != Some(&b'(') || arg.as_bytes().last() != Some(&b')') {
precision_err()
}
let arg = &arg[1..arg.len()-1];
let args: Vec<_> = arg.split(':').collect();
if args.len() != 2 {
precision_err()
}
let column = args[0].trim().parse().unwrap_or_else(|_| precision_err());
let length = args[1].trim().parse().unwrap_or_else(|_| precision_err());
let old = info.precision_limits.insert(column, length);
if old.is_some() {
panic!("duplicate precision for column {}", column)
}
},
_ => {}
}
}
@@ -200,6 +234,8 @@ fn parse_code_block_info(info: &str) -> CodeBlockInfo {

#[cfg(test)]
mod test {
use std::collections::HashMap;


#[test]
fn extract() {
@@ -231,7 +267,7 @@ select * from foo
```SQL,non-transactional
select * from bar
```
```output
```output, precision(1: 3)
a | b
---+---
1 | 2
@@ -265,6 +301,7 @@ select * from qat
output: vec![],
transactional: true,
ignore_output: false,
precision_limits: HashMap::new(),
},
Test {
location: "/test/file.md:9".to_string(),
@@ -273,6 +310,7 @@ select * from qat
output: vec![vec!["value".to_string()]],
transactional: true,
ignore_output: false,
precision_limits: HashMap::new(),
},
Test {
location: "/test/file.md:24".to_string(),
@@ -281,6 +319,7 @@ select * from qat
output: vec![vec!["1".to_string(), "2".to_string()]],
transactional: false,
ignore_output: false,
precision_limits: [(1,3)].iter().cloned().collect(),
},
Test {
location: "/test/file.md:34".to_string(),
@@ -289,6 +328,7 @@ select * from qat
output: vec![],
transactional: true,
ignore_output: true,
precision_limits: HashMap::new(),
},
Test {
location: "/test/file.md:39".to_string(),
@@ -297,6 +337,7 @@ select * from qat
output: vec![],
transactional: true,
ignore_output: false,
precision_limits: HashMap::new(),
},
Test {
location: "/test/file.md:44".to_string(),
@@ -305,6 +346,7 @@ select * from qat
output: vec![],
transactional: true,
ignore_output: false,
precision_limits: HashMap::new(),
},
],
};
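
Judging from the parser changes above, the annotation grammar is `precision(col: bytes)`: `col` is a column index, `bytes` the number of leading bytes kept for comparison; malformed tokens and duplicate columns panic, and (per the Output-block handling earlier in this file) the annotation may sit on either the SQL block or its output block, but not both. A minimal stand-alone sketch of that token handling — the function name and `Err`-based error handling here are hypothetical, not the crate's API:

```rust
use std::collections::HashMap;

/// Hypothetical restatement of the `precision(col: bytes)` token handling shown
/// above, returning `Err` instead of panicking so the examples below read easily.
fn parse_precision(token: &str) -> Result<(usize, usize), String> {
    let bad = || format!("invalid syntax for `precision(col: bytes)` found `{}`", token);
    let arg = token.strip_prefix("precision").ok_or_else(|| bad())?;
    let arg = arg
        .strip_prefix('(')
        .and_then(|a| a.strip_suffix(')'))
        .ok_or_else(|| bad())?;
    let (col, bytes) = arg.split_once(':').ok_or_else(|| bad())?;
    let col = col.trim().parse::<usize>().map_err(|_| bad())?;
    let bytes = bytes.trim().parse::<usize>().map_err(|_| bad())?;
    Ok((col, bytes))
}

fn main() {
    // An info string such as `output, precision(2: 7)` yields the token "precision(2: 7)".
    let mut limits: HashMap<usize, usize> = HashMap::new();
    let (col, bytes) = parse_precision("precision(2: 7)").unwrap();
    assert!(limits.insert(col, bytes).is_none()); // a duplicate column would be an error
    assert_eq!(limits[&2], 7);

    // Malformed forms are rejected.
    assert!(parse_precision("precision 2: 7").is_err());
    assert!(parse_precision("precision(2 7)").is_err());
}
```
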
19 changes: 18 additions & 1 deletion tools/sql-doctester/src/runner.rs
@@ -209,7 +209,24 @@ fn validate_output(output: Vec<SimpleQueryMessage>, test: &Test) -> Result<(), T
"output has a different number of rows than expected.",
)));
}
if test.output != rows {

fn clamp_len<'s>(mut col: &'s str, idx: usize, test: &Test) -> &'s str {
let max_len = test.precision_limits.get(&idx);
if let Some(&max_len) = max_len {
if col.len() > max_len {
col = &col[..max_len]
}
}
col
}

let all_eq = test.output.iter().zip(rows.iter()).all(|(out, row)| {
out.len() == row.len()
&& out.iter().zip(row.iter()).enumerate().all(|(i, (o, r))| {
clamp_len(o, i, test) == clamp_len(r, i, test)
})
});
if !all_eq {
return Err(TestError::OutputError(output_error(
"output has a different values than expected.",
)));
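
A worked example of the clamped comparison above, using the column-2 limit of 7 bytes that the percentile docs now declare: the second row's `avg` differs only past the seventh byte, so the rows still compare equal. The values are hypothetical and `clamp` is a stand-alone restatement of `clamp_len`, not the crate's API:

```rust
use std::collections::HashMap;

/// Truncate a column value to its configured byte limit, if any.
fn clamp(col: &str, idx: usize, limits: &HashMap<usize, usize>) -> String {
    match limits.get(&idx) {
        Some(&max) if col.len() > max => col[..max].to_string(),
        _ => col.to_string(),
    }
}

fn main() {
    // precision(2: 7): only the first 7 bytes of column 2 (zero-based) must match.
    let limits: HashMap<usize, usize> = [(2, 7)].iter().cloned().collect();

    let expected = ["2020-01-01 00:00:00+00", "12", "963.7133258896499"];
    // A hypothetical re-run whose avg differs only in trailing digits.
    let actual = ["2020-01-01 00:00:00+00", "12", "963.7133901234567"];

    let all_eq = expected.len() == actual.len()
        && expected
            .iter()
            .zip(actual.iter())
            .enumerate()
            .all(|(i, (e, a))| clamp(e, i, &limits) == clamp(a, i, &limits));
    assert!(all_eq); // both avg values clamp to "963.713"
}
```
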
