
Sacado Speed: Gradient of Determinant Using Expansion by Minors

Operation Sequence
Note that the expansion by minors operation sequence does not depend on the matrix whose determinant is being evaluated. Yet there does not seem to be a way to reuse the operation sequence to compute derivatives for multiple matrices.
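
Below is a minimal sketch (not part of the speed test) of the Sacado::Rad usage pattern that the routine further down relies on: the recording and the reverse sweep are tied to a single evaluation, so the function must be re-evaluated for every new set of argument values before ADvar::Gradcomp() is called again. The function f = x * y is chosen only for illustration.

# include <cassert>
# include <Sacado.hpp>

int main(void)
{	typedef Sacado::Rad::ADvar<double> ADScalar;

	ADScalar x = 2.0, y = 3.0;   // first set of independent values
	ADScalar f = x * y;          // evaluate (and record) f = x * y
	ADScalar::Gradcomp();        // reverse sweep over this recording
	assert( x.adj() == 3.0 );    // partial of f w.r.t. x is y
	assert( y.adj() == 2.0 );    // partial of f w.r.t. y is x

	// a new set of independent values requires a new evaluation;
	// the previous recording cannot be replayed
	x = 5.0;
	y = 7.0;
	f = x * y;
	ADScalar::Gradcomp();
	assert( x.adj() == 7.0 );
	assert( y.adj() == 5.0 );

	return 0;
}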

compute_det_minor
Routine that computes the gradient of the determinant using Sacado:
 
# include <vector>
# include <Sacado.hpp>
# include <cppad/speed/det_by_minor.hpp>
# include <cppad/speed/uniform_01.hpp>

bool compute_det_minor(
	size_t                     size     , 
	size_t                     repeat   , 
	CppAD::vector<double>     &matrix   ,
	CppAD::vector<double>     &gradient )
{
	// -----------------------------------------------------
	// setup

	// object for computing determinant
	typedef Sacado::Rad::ADvar<double>    ADScalar; 
	typedef CppAD::vector<ADScalar>        ADVector; 
	CppAD::det_by_minor<ADScalar>         Det(size);

	size_t i;                // temporary index
	size_t n = size * size;  // number of independent variables
	ADScalar   detA;         // AD value of the determinant
	ADVector   A(n);         // AD version of matrix 
	
	// ------------------------------------------------------
	while(repeat--)
	{	// get the next matrix
		CppAD::uniform_01(n, matrix);

		// set independent variable values
		for(i = 0; i < n; i++)
			A[i] = matrix[i];

		// compute the determinant
		detA = Det(A);

		// compute the gradient of detA (reverse sweep over the recording)
		ADScalar::Gradcomp();

		// copy the gradient, computed by the reverse sweep, to the return vector
		for(i = 0; i < n; i++)
			gradient[i] = A[i].adj(); // partial of detA w.r.t. A[i]
	}
	// ---------------------------------------------------------
	return true;
}
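
A hypothetical stand-alone driver sketch follows; in practice the CppAD speed framework calls compute_det_minor through its own timing harness, so the driver below, including the chosen size, repeat count, and printout, is illustrative only (it assumes CppAD::vector is available through cppad/cppad.hpp).

# include <cstddef>
# include <cstdio>
# include <cppad/cppad.hpp>

extern bool compute_det_minor(
	size_t                     size     ,
	size_t                     repeat   ,
	CppAD::vector<double>     &matrix   ,
	CppAD::vector<double>     &gradient );

int main(void)
{	size_t size   = 3;            // determinant of a 3 x 3 matrix
	size_t repeat = 10;           // number of repetitions
	size_t n      = size * size;  // number of matrix elements
	CppAD::vector<double> matrix(n), gradient(n);

	bool ok = compute_det_minor(size, repeat, matrix, gradient);
	std::printf("ok = %d, gradient[0] = %g\n", int(ok), gradient[0]);
	return int(! ok);
}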

Input File: speed/sacado/det_minor.cpp