diff --git a/README.md b/README.md
index 0506d96..d26261a 100644
--- a/README.md
+++ b/README.md
@@ -77,8 +77,6 @@ struct Network : public net::Model {
     }
 
     net::optimizer::SGD optimizer {/*learning rate*/ 0.1};
-
-    void step() { optimizer.step(); }
 };
 
 int main() {
@@ -94,7 +92,7 @@ int main() {
     std::cout << loss_function.loss() << std::endl;
 
     loss_function.backward(); // backpropagate the gradients
-    network.step() // triggers the optimizer.
+    network.optimizer.step(); // triggers the optimizer.
 }
 
 ```