From 69911a6a0dc314ea8e2aebc36a7e51427a7258f8 Mon Sep 17 00:00:00 2001
From: Arno Strouwen
Date: Mon, 11 Dec 2023 20:55:48 +0100
Subject: [PATCH] typos CI

---
 .github/dependabot.yml           |  3 +++
 .github/workflows/SpellCheck.yml | 13 +++++++++++++
 .typos.toml                      |  2 ++
 docs/src/optimizations.md        |  2 +-
 docs/src/randomforest.md         |  2 +-
 docs/src/surrogate.md            |  2 +-
 src/GEK.jl                       |  2 +-
 src/GEKPLS.jl                    |  6 +++---
 src/Kriging.jl                   |  2 +-
 src/Optimization.jl              |  2 +-
 10 files changed, 27 insertions(+), 9 deletions(-)
 create mode 100644 .github/workflows/SpellCheck.yml
 create mode 100644 .typos.toml

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 700707ced..1e8a051e2 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,3 +5,6 @@ updates:
     directory: "/" # Location of package manifests
     schedule:
       interval: "weekly"
+    ignore:
+      - dependency-name: "crate-ci/typos"
+        update-types: ["version-update:semver-patch"]
diff --git a/.github/workflows/SpellCheck.yml b/.github/workflows/SpellCheck.yml
new file mode 100644
index 000000000..599253c8c
--- /dev/null
+++ b/.github/workflows/SpellCheck.yml
@@ -0,0 +1,13 @@
+name: Spell Check
+
+on: [pull_request]
+
+jobs:
+  typos-check:
+    name: Spell Check with Typos
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout Actions Repository
+        uses: actions/checkout@v3
+      - name: Check spelling
+        uses: crate-ci/typos@v1.16.23
\ No newline at end of file
diff --git a/.typos.toml b/.typos.toml
new file mode 100644
index 000000000..9a032fd3f
--- /dev/null
+++ b/.typos.toml
@@ -0,0 +1,2 @@
+[default.extend-words]
+ND = "ND"
\ No newline at end of file
diff --git a/docs/src/optimizations.md b/docs/src/optimizations.md
index 8a248e06f..90b517867 100644
--- a/docs/src/optimizations.md
+++ b/docs/src/optimizations.md
@@ -28,5 +28,5 @@ surrogate_optimize(obj::Function,sop1::SOP,lb::Number,ub::Number,surrSOP::Abstra
 To add another optimization method, you just need to define a new SurrogateOptimizationAlgorithm and write its corresponding algorithm, overloading the following:
 
 ```
-surrogate_optimize(obj::Function,::NewOptimizatonType,lb,ub,surr::AbstractSurrogate,sample_type::SamplingAlgorithm;maxiters=100,num_new_samples=100)
+surrogate_optimize(obj::Function,::NewOptimizationType,lb,ub,surr::AbstractSurrogate,sample_type::SamplingAlgorithm;maxiters=100,num_new_samples=100)
 ```
diff --git a/docs/src/randomforest.md b/docs/src/randomforest.md
index bd336a191..8609bb851 100644
--- a/docs/src/randomforest.md
+++ b/docs/src/randomforest.md
@@ -32,7 +32,7 @@ plot!(f, label="True function", xlims=(lower_bound, upper_bound), legend=:top)
 
 With our sampled points we can build the Random forests surrogate using the `RandomForestSurrogate` function.
 
-`randomforest_surrogate` behaves like an ordinary function which we can simply plot. Addtionally you can specify the number of trees created
+`randomforest_surrogate` behaves like an ordinary function which we can simply plot. Additionally you can specify the number of trees created
 using the parameter num_round
 
 ```@example RandomForestSurrogate_tutorial
diff --git a/docs/src/surrogate.md b/docs/src/surrogate.md
index 5af888fda..0260fc781 100644
--- a/docs/src/surrogate.md
+++ b/docs/src/surrogate.md
@@ -48,7 +48,7 @@ It's great that you want to add another surrogate to the library!
 You will need to:
 
 1. Define a new mutable struct and a constructor function
-2. Define add\_point!(your\_surrogate::AbstactSurrogate,x\_new,y\_new)
+2. Define add\_point!(your\_surrogate::AbstractSurrogate,x\_new,y\_new)
 3. Define your\_surrogate(value) for the approximation
 
 **Example**
diff --git a/src/GEK.jl b/src/GEK.jl
index d3f1f7042..75da4ca3b 100644
--- a/src/GEK.jl
+++ b/src/GEK.jl
@@ -93,7 +93,7 @@ end
 
 function GEK(x, y, lb::Number, ub::Number; p = 1.0, theta = 1.0)
     if length(x) != length(unique(x))
-        println("There exists a repetion in the samples, cannot build Kriging.")
+        println("There exists a repetition in the samples, cannot build Kriging.")
         return
     end
     mu, b, sigma, inverse_of_R = _calc_gek_coeffs(x, y, p, theta)
diff --git a/src/GEKPLS.jl b/src/GEKPLS.jl
index 33dc2242f..3d3246ac0 100644
--- a/src/GEKPLS.jl
+++ b/src/GEKPLS.jl
@@ -201,8 +201,8 @@ function _ge_compute_pls(X, y, n_comp, grads, delta_x, xlimits, extra_points)
         bb_vals = bb_vals .* grads[i, :]'
         _y = y[i, :] .+ sum(bb_vals, dims = 2)
 
-        #_pls.fit(_X, _y) # relic from sklearn versiom; retained for future reference.
-        #coeff_pls[:, :, i] = _pls.x_rotations_ #relic from sklearn versiom; retained for future reference.
+        #_pls.fit(_X, _y) # relic from sklearn version; retained for future reference.
+        #coeff_pls[:, :, i] = _pls.x_rotations_ #relic from sklearn version; retained for future reference.
 
         coeff_pls[:, :, i] = _modified_pls(_X, _y, n_comp) #_modified_pls returns the equivalent of SKLearn's _pls.x_rotations_
         if extra_points != 0
@@ -304,7 +304,7 @@ end
 ######end of bb design######
 
 """
-We substract the mean from each variable. Then, we divide the values of each
+We subtract the mean from each variable. Then, we divide the values of each
 variable by its standard deviation.
 
 Parameters
diff --git a/src/Kriging.jl b/src/Kriging.jl
index e0b3a3671..57500a62e 100644
--- a/src/Kriging.jl
+++ b/src/Kriging.jl
@@ -104,7 +104,7 @@ Constructor for type Kriging.
 function Kriging(x, y, lb::Number, ub::Number; p = 2.0,
                  theta = 0.5 / max(1e-6 * abs(ub - lb), std(x))^p)
     if length(x) != length(unique(x))
-        println("There exists a repetion in the samples, cannot build Kriging.")
+        println("There exists a repetition in the samples, cannot build Kriging.")
         return
     end
 
diff --git a/src/Optimization.jl b/src/Optimization.jl
index 81e8e3523..c8839c878 100755
--- a/src/Optimization.jl
+++ b/src/Optimization.jl
@@ -1701,7 +1701,7 @@ function surrogate_optimize(obj::Function, sopd::SOP, lb, ub, surrSOPD::Abstract
             new_points_y[i] = y_best
         end
 
-        #new_points[i] is splitted in new_points_x and new_points_y now contains:
+        #new_points[i] is split in new_points_x and new_points_y now contains:
         #[x_1,y_1; x_2,y_2,...,x_{num_new_samples},y_{num_new_samples}]
 
         #2.4 Adaptive learning and tabu archive