Example 2: Backwards Mode Multivariate Autodiff

use f64ad_core::ComplexField;
use f64ad_core::f64ad::GlobalComputationGraphs;

fn main() {
    // Create a computation graph.
    let computation_graph = GlobalComputationGraphs::get(None, None);

    // Spawn f64ad variables from the computation graph.
    let v0 = computation_graph.spawn_variable(2.0);
    let v1 = computation_graph.spawn_variable(4.0);
    let v2 = computation_graph.spawn_variable(6.0);
    let v3 = computation_graph.spawn_variable(8.0);

    // compute some result using our variables
    let result = v0.sin() * v1 + 5.0 * v2.log(v3);
    println!("Result: {:?}", result);

    // compute derivatives in the backwards direction from `result`. Using backwards mode
    // automatic differentiation makes sense here because the number of outputs (1) is less
    // than the number of input variables (4).
    let derivatives = result.backwards_mode_grad(false);

    // access derivatives for each input variable from our `derivatives` object.
    let d_result_d_v0 = derivatives.wrt(&v0);
    let d_result_d_v1 = derivatives.wrt(&v1);
    let d_result_d_v2 = derivatives.wrt(&v2);
    let d_result_d_v3 = derivatives.wrt(&v3);

    // print results
    println!("d_result_d_v0: {:?}", d_result_d_v0);
    println!("d_result_d_v1: {:?}", d_result_d_v1);
    println!("d_result_d_v2: {:?}", d_result_d_v2);
    println!("d_result_d_v3: {:?}", d_result_d_v3);
}

Output

Result: f64ad_var_f(f64ad_var_f{ value: 7.9454605418379876, node_idx: 8 })
d_result_d_v0: f64(-1.6645873461885696)
d_result_d_v1: f64(0.9092974268256817)
d_result_d_v2: f64(0.40074862246915655)
d_result_d_v3: f64(-0.25898004032460736)
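
As a quick sanity check on the numbers above: the expression is result = sin(v0) * v1 + 5 * log_v3(v2), where v2.log(v3) is the base-v3 logarithm ln(v2) / ln(v3) (consistent with the printed values). The sketch below is a minimal hand-verification that uses only std Rust and hand-derived closed-form partial derivatives of that expression; it does not use f64ad at all, and its printed values should match the derivatives shown above.

fn main() {
    // Same inputs as the f64ad example above.
    let (v0, v1, v2, v3) = (2.0_f64, 4.0_f64, 6.0_f64, 8.0_f64);

    // result = sin(v0) * v1 + 5 * log_v3(v2), with log_v3(v2) = ln(v2) / ln(v3).
    let result = v0.sin() * v1 + 5.0 * (v2.ln() / v3.ln());

    // Closed-form partial derivatives of the expression above.
    let d_result_d_v0 = v1 * v0.cos();                            // d/dv0 = v1 * cos(v0)
    let d_result_d_v1 = v0.sin();                                 // d/dv1 = sin(v0)
    let d_result_d_v2 = 5.0 / (v2 * v3.ln());                     // d/dv2 = 5 / (v2 * ln(v3))
    let d_result_d_v3 = -5.0 * v2.ln() / (v3 * v3.ln().powi(2));  // d/dv3 = -5 * ln(v2) / (v3 * ln(v3)^2)

    println!("result: {}", result);
    println!("d_result_d_v0: {}", d_result_d_v0);
    println!("d_result_d_v1: {}", d_result_d_v1);
    println!("d_result_d_v2: {}", d_result_d_v2);
    println!("d_result_d_v3: {}", d_result_d_v3);
}

Agreement between the two printouts confirms that the reverse pass accumulated the expected gradient through the sin, multiplication, and log nodes.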