diff --git a/doc/HalideCMakePackage.md b/doc/HalideCMakePackage.md
index ed0a073ab400..41da53348569 100644
--- a/doc/HalideCMakePackage.md
+++ b/doc/HalideCMakePackage.md
@@ -474,7 +474,9 @@ verbatim.
 If `GRADIENT_DESCENT` is set, then the module will be built suitably for
 gradient descent calculation in TensorFlow or PyTorch. See
 `Generator::build_gradient_module()` for more documentation. This corresponds to
-passing `-d 1` at the generator command line.
+passing `-d 1` at the generator command line. Note that you *must* also specify
+an autoscheduler (via the `AUTOSCHEDULER` argument) whenever this option is
+set.
 
 If the `C_BACKEND` option is set, this command will invoke the configured C++
 compiler on a generated source. Note that a `.runtime` target is _not_
diff --git a/src/AbstractGenerator.cpp b/src/AbstractGenerator.cpp
index d94232e55b61..57c8f76e8a9a 100644
--- a/src/AbstractGenerator.cpp
+++ b/src/AbstractGenerator.cpp
@@ -218,8 +218,7 @@ Module AbstractGenerator::build_gradient_module(const std::string &function_name
     if (!asp.name.empty()) {
         auto_schedule_results = grad_pipeline.apply_autoscheduler(context.target(), asp);
     } else {
-        user_warning << "Autoscheduling is not enabled in build_gradient_module(), so the resulting "
-                        "gradient module will be unscheduled; this is very unlikely to be what you want.\n";
+        user_error << "An autoscheduler must be specified when producing a gradient-descent module.\n";
     }
 
     Module result = grad_pipeline.compile_to_module(gradient_inputs, function_name, context.target(), linkage_type);
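
For context, here is a minimal sketch of how a consumer project's `CMakeLists.txt` would satisfy the new requirement. The project, generator, and target names below are hypothetical, and `Halide::Adams2019` is just one of the autoscheduler plugins shipped with the Halide package:

```cmake
cmake_minimum_required(VERSION 3.22)
project(grad_example)

find_package(Halide REQUIRED)

# Build the generator executable from its source. The generator is assumed
# to be registered as "my_filter" via HALIDE_REGISTER_GENERATOR.
add_halide_generator(my_filter.generator SOURCES my_filter_generator.cpp)

# GRADIENT_DESCENT (i.e. passing -d 1) now requires AUTOSCHEDULER; omitting
# it would trip the user_error added above when the generator runs.
add_halide_library(my_filter_grad FROM my_filter.generator
                   GENERATOR my_filter
                   GRADIENT_DESCENT
                   AUTOSCHEDULER Halide::Adams2019)
```

With this change, dropping the `AUTOSCHEDULER` line turns what used to be a warning (an unscheduled gradient module) into a hard error at generator-execution time.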