Example 2: Skipping a Gradient Component

This example uses the same data as the One-Sided Differences example, but here the second component of the gradient is assumed to be known analytically, so only the first component requires numerical approximation. The input values of the options array specify that numerical differentiation with respect to y_2 is skipped.
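For reference, the function coded in F below and its exact partial derivatives (the values returned by Jacobian and stored in the actual array) are

    f(y_1, y_2) = a*exp(b*y_1) + c*y_1*y_2^2
    df/dy_1    = a*b*exp(b*y_1) + c*y_2^2
    df/dy_2    = 2*c*y_1*y_2

with a = 2.5e6, b = 3.4, and c = 4.5.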
using System;
using Imsl.Math;

public class NumericalDerivativesEx2 : NumericalDerivatives.IJacobian 
{
    static int m = 1, n = 2;
    static double a, b, c;

    // Evaluate f(y) = a*exp(b*y_1) + c*y_1*y_2^2
    public double[] F(int varIndex, double[] y) 
    {
        double[] tmp = new double[m];    
        tmp[0] = a * Math.Exp(b * y[0]) + c * y[0] * y[1] * y[1]; 
        return tmp;
    }

    // Return the analytically known partials; here only the
    // second component of the gradient is supplied
    public double[,] Jacobian(double[] y) 
    {
        double[,] tmp = new double[m, n];
                
        // Numerical differentiation of the second component is
        // skipped, since this partial is known analytically
        tmp[0, 1] = 2.0e0 * c * y[0] * y[1];
                
        return tmp;
    }

    public static void Main(String[] args) 
    {
        NumericalDerivatives.DifferencingMethod[] options = 
            new NumericalDerivatives.DifferencingMethod[n];
        double u;
        double[] y = new double[n];
        double[] valueF = new double[m];
        double[] scale = new double[n];
        double[,] actual = new double[m, n];
        double[] re = new double[2];
    
        // Define data and point of evaluation:
        a = 2.5e6;
        b = 3.4e0;
        c = 4.5e0;
        y[0] = 2.1e0;
        y[1] = 3.2e0;
        
        // Square root of machine precision, used as the unit u
        // for reporting relative errors
        u = Math.Sqrt(2.220446049250313e-016);
        
        // Set scaling:
        scale[0] = 1.0e0;
        // Increase scale to account for large value of a.
        scale[1] = 8.0e3;
        
        // Compute true values of partials.
        actual[0, 0] = a * b * Math.Exp(b * y[0]) + c * y[1] * y[1];
        actual[0, 1] = 2 * c * y[0] * y[1];
        
        options[0] = NumericalDerivatives.DifferencingMethod.OneSided;
        options[1] = NumericalDerivatives.DifferencingMethod.Skip;
        
        // Function value at the evaluation point, passed to SetInitialF
        valueF[0] = a * Math.Exp(b * y[0]) + c * y[0] * y[1] * y[1];
        
        NumericalDerivatives derv = 
            new NumericalDerivatives(new NumericalDerivativesEx2());
        derv.SetDifferencingMethods(options);
        derv.SetScalingFactors(scale);
        derv.SetInitialF(valueF);
        double[,] jacobian = derv.EvaluateJ(y);

        PrintMatrixFormat pmf = new PrintMatrixFormat();
        pmf.NumberFormat = "0.00";
        new PrintMatrix("Numerical gradient:").Print(pmf, jacobian);
        new PrintMatrix("Analytic gradient:").Print(pmf, actual);       

        // Relative errors of the computed partials
        re[0] = (jacobian[0, 0] - actual[0, 0]) / actual[0, 0];
        re[1] = (jacobian[0, 1] - actual[0, 1]) / actual[0, 1];
        
        Console.Out.WriteLine("Relative accuracy:");
        Console.Out.WriteLine("df/dy_1       df/dy_2");
        Console.Out.WriteLine(" {0:F2}u        {1:F2}u", 
            re[0]/u, re[1]/u);
        Console.Out.WriteLine("({0:#.###e+00})  ({1:#.###e+00})", 
            re[0], re[1]);
    }
}

Output

   Numerical gradient:
         0           1    
0  10722141696.00  60.48  

    Analytic gradient:
         0           1    
0  10722141353.42  60.48  

Relative accuracy:
df/dy_1       df/dy_2
 2.14u        0.00u
(3.195e-08)  (0e+00)
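
The first partial is accurate to about 2u, where u = sqrt(eps) is roughly 1.49e-8, the accuracy expected of a one-sided difference; the second partial is exact because it was supplied analytically. As an independent check, the one-sided approximation of df/dy_1 can be reproduced by hand. The sketch below is not part of the library example, and its step rule h = u*|y_1| is an assumption for illustration only; the library's actual step selection may differ.

using System;

public class OneSidedCheck
{
    public static void Main()
    {
        double a = 2.5e6, b = 3.4e0, c = 4.5e0;
        double y1 = 2.1e0, y2 = 3.2e0;

        // f as a function of y_1 only, with y_2 held fixed
        Func<double, double> f = t => a * Math.Exp(b * t) + c * t * y2 * y2;

        // Step proportional to the square root of machine precision
        double u = Math.Sqrt(2.220446049250313e-16);
        double h = u * Math.Abs(y1);   // assumed step rule, for illustration

        double approx = (f(y1 + h) - f(y1)) / h;   // one-sided difference
        double exact = a * b * Math.Exp(b * y1) + c * y2 * y2;

        Console.Out.WriteLine("one-sided: {0:E6}", approx);
        Console.Out.WriteLine("analytic:  {0:E6}", exact);
        Console.Out.WriteLine("rel. err.: {0:E2}", (approx - exact) / exact);
    }
}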
