Program Listing for File autograd.h
↰ Return to documentation for file (torch/csrc/autograd/autograd.h)
#pragma once
#include <torch/csrc/autograd/variable.h>
namespace torch::autograd {
// When inputs are provided and a given input is not a leaf,
// the current implementation will call its grad_fn (even though it is not
// strictly needed to get these gradients). It is an implementation detail
// on which the user should not rely. See
// https://github.com/pytorch/pytorch/pull/60521#issuecomment-867061780 for
// more details.
TORCH_API void backward(
    const variable_list& tensors,
    const variable_list& grad_tensors = {},
    std::optional<bool> retain_graph = std::nullopt,
    bool create_graph = false,
    const variable_list& inputs = {});
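The declaration above is the C++ entry point for reverse-mode differentiation. A minimal usage sketch follows; it is not part of autograd.h, the tensor names are illustrative, and it assumes the LibTorch headers and library are available.

#include <torch/torch.h>

void backward_example() {
  auto x = torch::ones({2, 2}, torch::requires_grad());
  auto y = (x * x).sum();
  // Accumulates d(y)/d(x) into x.grad(). grad_tensors can be omitted for a
  // scalar output, and the graph is freed unless retain_graph is true.
  torch::autograd::backward(/*tensors=*/{y});
  // x.grad() now holds 2 * x.
}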
TORCH_API variable_list grad(
    const variable_list& outputs,
    const variable_list& inputs,
    const variable_list& grad_outputs = {},
    std::optional<bool> retain_graph = std::nullopt,
    bool create_graph = false,
    bool allow_unused = false);
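Where backward() accumulates into the .grad() fields of the inputs, grad() returns the computed gradients directly. A minimal sketch, not part of autograd.h, with illustrative names and the default arguments left in place:

#include <torch/torch.h>

void grad_example() {
  auto x = torch::randn({3}, torch::requires_grad());
  auto y = x.pow(2).sum();
  // Returns d(y)/d(x) as a variable_list without touching x.grad().
  // With allow_unused = false, every input must be reachable from the outputs.
  auto grads = torch::autograd::grad(/*outputs=*/{y}, /*inputs=*/{x});
  // grads[0] holds 2 * x.
}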
namespace forward_ad {
TORCH_API uint64_t enter_dual_level();
TORCH_API void exit_dual_level(uint64_t level);
} // namespace forward_ad
} // namespace torch::autograd
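The forward_ad helpers manage the nesting level used by forward-mode AD: enter_dual_level() opens a new level and returns its index, and exit_dual_level() closes it. A minimal pairing sketch, not part of autograd.h; the dual-tensor computation inside the level is omitted.

#include <torch/torch.h>

void forward_ad_example() {
  // Open a new forward-AD level; dual tensors created while it is active
  // are associated with this level.
  uint64_t level = torch::autograd::forward_ad::enter_dual_level();
  // ... create dual tensors and run the forward computation here ...
  // Exit the level once the tangents have been read back.
  torch::autograd::forward_ad::exit_dual_level(level);
}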