This repository has been archived by the owner on Jul 24, 2024. It is now read-only.

added a few methods in the optimizer and layers. (#46)
* MNIST example working

* dependency inversion
mr-mapache authored Oct 17, 2023
1 parent 4376e0e commit e8972f6
Showing 6 changed files with 317 additions and 11 deletions.
263 changes: 263 additions & 0 deletions examples/Makefile
@@ -0,0 +1,263 @@
# CMAKE generated file: DO NOT EDIT!
# Generated by "Unix Makefiles" Generator, CMake Version 3.22

# Default target executed when no arguments are given to make.
default_target: all
.PHONY : default_target

# Allow only one "make -f Makefile2" at a time, but pass parallelism.
.NOTPARALLEL:

#=============================================================================
# Special targets provided by cmake.

# Disable implicit rules so canonical targets will work.
.SUFFIXES:

# Disable VCS-based implicit rules.
% : %,v

# Disable VCS-based implicit rules.
% : RCS/%

# Disable VCS-based implicit rules.
% : RCS/%,v

# Disable VCS-based implicit rules.
% : SCCS/s.%

# Disable VCS-based implicit rules.
% : s.%

.SUFFIXES: .hpux_make_needs_suffix_list

# Command-line flag to silence nested $(MAKE).
$(VERBOSE)MAKESILENT = -s

#Suppress display of executed commands.
$(VERBOSE).SILENT:

# A target that is always out of date.
cmake_force:
.PHONY : cmake_force

#=============================================================================
# Set environment variables for the build.

# The shell in which to execute make rules.
SHELL = /bin/sh

# The CMake executable.
CMAKE_COMMAND = /usr/bin/cmake

# The command to remove a file.
RM = /usr/bin/cmake -E rm -f

# Escaping for special characters.
EQUALS = =

# The top-level source directory on which CMake was run.
CMAKE_SOURCE_DIR = /home/eric/tmp/deep-learning-library/examples

# The top-level build directory on which CMake was run.
CMAKE_BINARY_DIR = /home/eric/tmp/deep-learning-library/examples

#=============================================================================
# Targets provided globally by CMake.

# Special rule for the target edit_cache
edit_cache:
	@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "No interactive CMake dialog available..."
	/usr/bin/cmake -E echo No\ interactive\ CMake\ dialog\ available.
.PHONY : edit_cache

# Special rule for the target edit_cache
edit_cache/fast: edit_cache
.PHONY : edit_cache/fast

# Special rule for the target rebuild_cache
rebuild_cache:
	@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..."
	/usr/bin/cmake --regenerate-during-build -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)
.PHONY : rebuild_cache

# Special rule for the target rebuild_cache
rebuild_cache/fast: rebuild_cache
.PHONY : rebuild_cache/fast

# The main all target
all: cmake_check_build_system
	$(CMAKE_COMMAND) -E cmake_progress_start /home/eric/tmp/deep-learning-library/examples/CMakeFiles /home/eric/tmp/deep-learning-library/examples//CMakeFiles/progress.marks
	$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 all
	$(CMAKE_COMMAND) -E cmake_progress_start /home/eric/tmp/deep-learning-library/examples/CMakeFiles 0
.PHONY : all

# The main clean target
clean:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 clean
.PHONY : clean

# The main clean target
clean/fast: clean
.PHONY : clean/fast

# Prepare targets for installation.
preinstall: all
	$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 preinstall
.PHONY : preinstall

# Prepare targets for installation.
preinstall/fast:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 preinstall
.PHONY : preinstall/fast

# clear depends
depend:
	$(CMAKE_COMMAND) -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1
.PHONY : depend

#=============================================================================
# Target rules for targets named cabernet-examples-functions

# Build rule for target.
cabernet-examples-functions: cmake_check_build_system
	$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 cabernet-examples-functions
.PHONY : cabernet-examples-functions

# fast build rule for target.
cabernet-examples-functions/fast:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-functions.dir/build.make CMakeFiles/cabernet-examples-functions.dir/build
.PHONY : cabernet-examples-functions/fast

#=============================================================================
# Target rules for targets named cabernet-examples-layers

# Build rule for target.
cabernet-examples-layers: cmake_check_build_system
	$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 cabernet-examples-layers
.PHONY : cabernet-examples-layers

# fast build rule for target.
cabernet-examples-layers/fast:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-layers.dir/build.make CMakeFiles/cabernet-examples-layers.dir/build
.PHONY : cabernet-examples-layers/fast

#=============================================================================
# Target rules for targets named cabernet-examples-operations

# Build rule for target.
cabernet-examples-operations: cmake_check_build_system
	$(MAKE) $(MAKESILENT) -f CMakeFiles/Makefile2 cabernet-examples-operations
.PHONY : cabernet-examples-operations

# fast build rule for target.
cabernet-examples-operations/fast:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-operations.dir/build.make CMakeFiles/cabernet-examples-operations.dir/build
.PHONY : cabernet-examples-operations/fast

functions.o: functions.cpp.o
.PHONY : functions.o

# target to build an object file
functions.cpp.o:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-functions.dir/build.make CMakeFiles/cabernet-examples-functions.dir/functions.cpp.o
.PHONY : functions.cpp.o

functions.i: functions.cpp.i
.PHONY : functions.i

# target to preprocess a source file
functions.cpp.i:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-functions.dir/build.make CMakeFiles/cabernet-examples-functions.dir/functions.cpp.i
.PHONY : functions.cpp.i

functions.s: functions.cpp.s
.PHONY : functions.s

# target to generate assembly for a file
functions.cpp.s:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-functions.dir/build.make CMakeFiles/cabernet-examples-functions.dir/functions.cpp.s
.PHONY : functions.cpp.s

layers.o: layers.cpp.o
.PHONY : layers.o

# target to build an object file
layers.cpp.o:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-layers.dir/build.make CMakeFiles/cabernet-examples-layers.dir/layers.cpp.o
.PHONY : layers.cpp.o

layers.i: layers.cpp.i
.PHONY : layers.i

# target to preprocess a source file
layers.cpp.i:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-layers.dir/build.make CMakeFiles/cabernet-examples-layers.dir/layers.cpp.i
.PHONY : layers.cpp.i

layers.s: layers.cpp.s
.PHONY : layers.s

# target to generate assembly for a file
layers.cpp.s:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-layers.dir/build.make CMakeFiles/cabernet-examples-layers.dir/layers.cpp.s
.PHONY : layers.cpp.s

operations.o: operations.cpp.o
.PHONY : operations.o

# target to build an object file
operations.cpp.o:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-operations.dir/build.make CMakeFiles/cabernet-examples-operations.dir/operations.cpp.o
.PHONY : operations.cpp.o

operations.i: operations.cpp.i
.PHONY : operations.i

# target to preprocess a source file
operations.cpp.i:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-operations.dir/build.make CMakeFiles/cabernet-examples-operations.dir/operations.cpp.i
.PHONY : operations.cpp.i

operations.s: operations.cpp.s
.PHONY : operations.s

# target to generate assembly for a file
operations.cpp.s:
	$(MAKE) $(MAKESILENT) -f CMakeFiles/cabernet-examples-operations.dir/build.make CMakeFiles/cabernet-examples-operations.dir/operations.cpp.s
.PHONY : operations.cpp.s

# Help Target
help:
	@echo "The following are some of the valid targets for this Makefile:"
	@echo "... all (the default if no target is provided)"
	@echo "... clean"
	@echo "... depend"
	@echo "... edit_cache"
	@echo "... rebuild_cache"
	@echo "... cabernet-examples-functions"
	@echo "... cabernet-examples-layers"
	@echo "... cabernet-examples-operations"
	@echo "... functions.o"
	@echo "... functions.i"
	@echo "... functions.s"
	@echo "... layers.o"
	@echo "... layers.i"
	@echo "... layers.s"
	@echo "... operations.o"
	@echo "... operations.i"
	@echo "... operations.s"
.PHONY : help



#=============================================================================
# Special targets to cleanup operation of make.

# Special rule to run CMake to check the build system integrity.
# No rule that depends on this can have commands that come from listfiles
# because they might be regenerated.
cmake_check_build_system:
	$(CMAKE_COMMAND) -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0
.PHONY : cmake_check_build_system

Binary file added examples/cabernet-examples-layers
12 changes: 6 additions & 6 deletions examples/layers.cpp
@@ -14,8 +14,8 @@ cmake --build . --target cabernet-examples-layers
 struct Autoencoder : public net::Model<Autoencoder> {
 
     Autoencoder() {
-        encoder.configure_optimizer(std::dynamic_pointer_cast<net::base::Optimizer>(encoder_optimizer));
-        decoder.configure_optimizer(std::dynamic_pointer_cast<net::base::Optimizer>(decoder_optimizer));
+        encoder_optimizer.add_parameter(encoder.parameters());
+        decoder_optimizer.add_parameter(decoder.parameters());
     }
 
     net::layer::Sequence encoder {
@@ -38,16 +38,16 @@ struct Autoencoder : public net::Model<Autoencoder> {
     }
 
     void step() {
-        encoder_optimizer->step();
-        decoder_optimizer->step();
+        encoder_optimizer.step();
+        decoder_optimizer.step();
     }
     /* you can add different optimizers to different layers
        or the same, doesn't matter, the optimizer has a shared pointer
        to it's implementation so you can pass instances of it with value
        semantics without making deep copies */
 
-    std::shared_ptr<net::optimizer::SGD> encoder_optimizer = std::make_shared<net::optimizer::SGD>(/*learning rate*/ 0.1);
-    std::shared_ptr<net::optimizer::SGD> decoder_optimizer = std::make_shared<net::optimizer::SGD>(/*learning rate*/ 0.2);
+    net::optimizer::SGD encoder_optimizer{0.01};
+    net::optimizer::SGD decoder_optimizer{0.01};
 };
 
 int main() {
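The comment kept in the diff above says the optimizer holds a shared pointer to its implementation, so net::optimizer::SGD behaves like a cheap, copyable handle. Below is a minimal sketch of what that implies, assuming only the API visible in this commit (SGD{learning_rate}, add_parameter, parameters(), step()); the single-layer Sequence and its sizes are placeholders borrowed from examples/model.cpp.

#include <CaberNet.h>  // public header used by the examples in this commit

int main() {
    net::layer::Sequence layers {
        net::layer::Linear(784, 128)          // sizes copied from examples/model.cpp, chosen only for illustration
    };

    net::optimizer::SGD sgd{0.01};            // value-semantic handle around a shared implementation
    net::optimizer::SGD same_state = sgd;     // copying the handle does not deep-copy optimizer state, per the comment above

    same_state.add_parameter(layers.parameters());   // register parameters through either copy
    sgd.step();                                       // the update runs against the same shared implementation
}
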
9 changes: 6 additions & 3 deletions examples/model.cpp
@@ -1,9 +1,10 @@
 #include <CaberNet.h>
 
 struct Network : public net::Model<Network> {
-    Network() : net::Model<Network>(
-        std::make_shared<net::optimizer::SGD>(/*learning rate*/ 0.1)
-    ) {}
+
+    Network() {
+        optimizer.add_parameter(layers.parameters());
+    }
 
     net::layer::Sequence layers {
         net::layer::Linear(784, 128),
@@ -15,6 +16,8 @@ struct Network : public net::Model<Network> {
     net::Tensor<float> forward(net::Tensor<float> x) {
         return layers(x);
     }
+
+    net::optimizer::SGD optimizer{0.01};
 };
 
 int main() {
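This diff shows the "dependency inversion" named in the commit message: instead of injecting a std::shared_ptr optimizer through the net::Model base and pushing it into each layer, the model now owns an SGD by value and pulls the layers' parameters into it. A rough sketch of that wiring and a single update step, using only calls visible in this commit; the Sequence is trimmed to one layer, and the forward/backward pass is elided because that part of the API does not appear in this diff.

#include <CaberNet.h>

struct Network : public net::Model<Network> {
    Network() { optimizer.add_parameter(layers.parameters()); }      // pull: the optimizer asks the layers for their parameters

    net::layer::Sequence layers { net::layer::Linear(784, 128) };    // trimmed to one layer for brevity
    net::Tensor<float> forward(net::Tensor<float> x) { return layers(x); }

    net::optimizer::SGD optimizer{0.01};
};

int main() {
    Network network;
    // forward pass, loss and backward pass omitted: that API is not part of this diff
    network.optimizer.step();    // SGD updates the registered parameters in place
}
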
39 changes: 37 additions & 2 deletions include/CaberNet/layers.h
@@ -20,9 +20,14 @@ class Linear : public Model<Linear> {
         size_type input_features,
         size_type output_features,
         initializer distribution = initializer::He );
 
     Tensor<float> forward(Tensor<float> x);
 
     void set_optimizer(std::shared_ptr<net::base::Optimizer> optimizer);
+
+    std::vector<internal::Tensor*> parameters() const {
+        return { weight_.internal(), bias_.internal() };
+    }
+
     private:
     Tensor<float> weight_;
@@ -31,20 +36,35 @@
 
 struct ReLU : public Model<ReLU> {
     ReLU() = default;
+
+    std::vector<internal::Tensor*> parameters()const {
+        return {};
+    }
+
     Tensor<float> forward(Tensor<float> input);
     void set_optimizer(std::shared_ptr<net::base::Optimizer> optimizer) { return; }
 };
 
 struct Softmax : public Model<Softmax> {
     int axis;
     Softmax(int axis);
+
+    std::vector<internal::Tensor*> parameters()const {
+        return {};
+    }
+
     Tensor<float> forward(Tensor<float> input);
     void set_optimizer(std::shared_ptr<net::base::Optimizer> optimizer) { return; }
 };
 
 struct LogSoftmax : public Model<LogSoftmax> {
     int axis;
     LogSoftmax(int axis);
+
+    std::vector<internal::Tensor*> parameters()const {
+        return {};
+    }
+
     Tensor<float> forward(Tensor<float> input);
     void set_optimizer(std::shared_ptr<net::base::Optimizer> optimizer) { return; }
 };
@@ -70,9 +90,24 @@ class Sequence : public Model<Sequence> {
         }
         return input;
     }
+
+    std::vector<internal::Tensor*> parameters() const {
+
+        std::vector<internal::Tensor*> parameter_list;
+        for (auto& layer : layers_) {
+            std::visit([&parameter_list](auto&& argument) {
+                for(auto parameter : argument.parameters()) {
+                    parameter_list.push_back(parameter);
+                }
+
+            }, layer);
+        }
+
+        return parameter_list;
+    }
 
     void set_optimizer(std::shared_ptr<net::base::Optimizer> optimizer) {
         for (auto& layer : layers_) {
             std::cout << "visited" << std::endl;
             std::visit([optimizer](auto&& argument) { argument.set_optimizer(optimizer); }, layer);
         }
     }
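The main addition in this header is Sequence::parameters(): the contained layers live in a std::variant-based list, and std::visit flattens each layer's parameters() into one vector an optimizer can register. A self-contained C++17 sketch of that collection pattern follows, with stand-in Tensor, Linear, and ReLU types rather than the real CaberNet classes.

#include <iostream>
#include <variant>
#include <vector>

struct Tensor { int id; };   // stand-in for internal::Tensor

struct Linear {
    Tensor weight{0}, bias{1};
    std::vector<Tensor*> parameters() { return { &weight, &bias }; }
};

struct ReLU {
    std::vector<Tensor*> parameters() { return {}; }   // activations own no trainable parameters
};

int main() {
    std::vector<std::variant<Linear, ReLU>> layers { Linear{}, ReLU{}, Linear{} };

    // Same shape as Sequence::parameters() above: visit each variant and flatten.
    std::vector<Tensor*> parameter_list;
    for (auto& layer : layers) {
        std::visit([&parameter_list](auto&& argument) {
            for (auto* parameter : argument.parameters())
                parameter_list.push_back(parameter);
        }, layer);
    }

    std::cout << parameter_list.size() << " parameters collected\n";   // prints: 4 parameters collected
}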
