@@ -634,6 +634,80 @@ function test_objective_broadcasted_tanh()
634634 return
635635end
636636
function test_objective_reduce_sum()
    # Objective: reduce(+, [x1, x2, x3]), i.e. the plain sum of the variables.
    model = ArrayDiff.Model()
    x = MOI.VariableIndex.(1:3)
    ArrayDiff.set_objective(model, :(reduce(+, [$(x[1]), $(x[2]), $(x[3])])))
    evaluator = ArrayDiff.Evaluator(model, ArrayDiff.Mode(), x)
    MOI.initialize(evaluator, [:Grad])
    # Every node in the expression tree is scalar, so all sizes are empty.
    sizes = evaluator.backend.objective.expr.sizes
    @test sizes.ndims == [0, 0, 0, 0, 0]
    @test sizes.size_offset == [0, 0, 0, 0, 0]
    @test sizes.size == []
    @test sizes.storage_offset == [0, 1, 2, 3, 4, 5]
    v = [1.0, 2.0, 3.0]
    @test MOI.eval_objective(evaluator, v) == 6.0
    # d(sum)/dxᵢ = 1 for every variable.
    g = ones(3)
    MOI.eval_objective_gradient(evaluator, g, v)
    @test g == [1.0, 1.0, 1.0]
    return
end
659+
function test_objective_reduce_prod()
    # Objective: reduce(*, [x1, x2, x3]), i.e. the product of the variables.
    model = ArrayDiff.Model()
    x = MOI.VariableIndex.(1:3)
    ArrayDiff.set_objective(model, :(reduce(*, [$(x[1]), $(x[2]), $(x[3])])))
    evaluator = ArrayDiff.Evaluator(model, ArrayDiff.Mode(), x)
    MOI.initialize(evaluator, [:Grad])
    # Every node in the expression tree is scalar, so all sizes are empty.
    sizes = evaluator.backend.objective.expr.sizes
    @test sizes.ndims == [0, 0, 0, 0, 0]
    @test sizes.size_offset == [0, 0, 0, 0, 0]
    @test sizes.size == []
    @test sizes.storage_offset == [0, 1, 2, 3, 4, 5]
    v = [1.0, 2.0, 3.0]
    @test MOI.eval_objective(evaluator, v) == 6.0
    # d(prod)/dxᵢ = prod / xᵢ (all entries of v are nonzero here).
    g = ones(3)
    MOI.eval_objective_gradient(evaluator, g, v)
    @test g == [6.0 / v[1], 6.0 / v[2], 6.0 / v[3]]
    return
end
682+
function test_objective_reduce_atan()
    # Objective: reduce(atan, [x1, x2, x3]) == atan(atan(x1, x2), x3),
    # a left fold of the two-argument arctangent.
    model = ArrayDiff.Model()
    x = MOI.VariableIndex.(1:3)
    ArrayDiff.set_objective(model, :(reduce(atan, [$(x[1]), $(x[2]), $(x[3])])))
    evaluator = ArrayDiff.Evaluator(model, ArrayDiff.Mode(), x)
    MOI.initialize(evaluator, [:Grad])
    # Every node in the expression tree is scalar, so all sizes are empty.
    sizes = evaluator.backend.objective.expr.sizes
    @test sizes.ndims == [0, 0, 0, 0, 0]
    @test sizes.size_offset == [0, 0, 0, 0, 0]
    @test sizes.size == []
    @test sizes.storage_offset == [0, 1, 2, 3, 4, 5]
    v = [1.0, 2.0, 3.0]
    inner = atan(v[1], v[2])
    @test MOI.eval_objective(evaluator, v) == atan(inner, v[3])
    g = ones(3)
    MOI.eval_objective_gradient(evaluator, g, v)
    # With u = atan(x1, x2): ∂u/∂x1 = x2/(x1²+x2²), ∂u/∂x2 = -x1/(x1²+x2²),
    # and for f = atan(u, x3): ∂f/∂u = x3/(u²+x3²), ∂f/∂x3 = -u/(u²+x3²).
    inner_sq = v[1]^2 + v[2]^2
    outer_sq = v[3]^2 + inner^2
    @test g ≈ [
        v[2] * v[3] / (inner_sq * outer_sq),
        -v[1] * v[3] / (inner_sq * outer_sq),
        -inner / outer_sq,
    ]
    return
end
710+
637711end # module
638712
639713TestArrayDiff. runtests ()
0 commit comments