@@ -121,6 +121,10 @@ public GeneralisedObjectiveFunction<TargetT>
121121 // PoissonLogLikelihoodWithLinearModelForMean();
122122
123123 //! Computes the gradient of the data fit term
124+ /*!
125+ This function is implemented in terms of \c actual_compute_sub_gradient_without_penalty()
126+ by setting \c do_subtraction to \c true.
127+ */
124128 virtual void
125129 compute_sub_gradient_without_penalty (TargetT& gradient,
126130 const TargetT& current_estimate,
@@ -130,12 +134,8 @@ public GeneralisedObjectiveFunction<TargetT>
130134 /*!
131135 This function is used for instance by OSMAPOSL.
132136
133- This computes
134- \f[ {\partial L \over \partial \lambda_v} + P_v =
135- \sum_b P_{bv} {y_b \over Y_b}
136- \f]
137- (see the class general documentation).
138- The sum will however be restricted to a subset.
137+ This function is implemented in terms of \c actual_compute_sub_gradient_without_penalty()
138+ by setting \c do_subtraction to \c false.
139139 */
140140 virtual void
141141 compute_sub_gradient_without_penalty_plus_sensitivity (TargetT& gradient,
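Taken together, the two hunks above turn both public functions into thin wrappers around actual_compute_sub_gradient_without_penalty(), differing only in the do_subtraction flag. The following is a minimal sketch of what that delegation could look like in the corresponding .cxx file; it is illustrative only, and the subset_num parameter and exact signatures are assumptions, since the diff truncates the argument lists.

// Illustrative sketch only (not the actual STIR implementation): both public
// functions forward to actual_compute_sub_gradient_without_penalty(), differing
// only in the do_subtraction flag. subset_num is assumed for illustration.
template <typename TargetT>
void
PoissonLogLikelihoodWithLinearModelForMean<TargetT>::compute_sub_gradient_without_penalty(
    TargetT& gradient, const TargetT& current_estimate, const int subset_num)
{
  // data-fit gradient only: sum_b P_bv (y_b / Y_b - 1)
  this->actual_compute_sub_gradient_without_penalty(gradient, current_estimate, subset_num,
                                                    /* do_subtraction = */ true);
}

template <typename TargetT>
void
PoissonLogLikelihoodWithLinearModelForMean<TargetT>::compute_sub_gradient_without_penalty_plus_sensitivity(
    TargetT& gradient, const TargetT& current_estimate, const int subset_num)
{
  // gradient plus subset sensitivity: sum_b P_bv y_b / Y_b
  this->actual_compute_sub_gradient_without_penalty(gradient, current_estimate, subset_num,
                                                    /* do_subtraction = */ false);
}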
@@ -253,7 +253,20 @@ public GeneralisedObjectiveFunction<TargetT>
253253 */
254254 void compute_sensitivities ();
255255
256+ //! Computes the objective function subset gradient without the penalty
257+ /*!
258+ If \c do_subtraction is \c false, this computes
259+ \f[ {\partial L \over \partial \lambda_v} + P_v =
260+ \sum_b P_{bv} {y_b \over Y_b}
261+ \f]
262+ (see the class general documentation).
263+ The sum will however be restricted to a subset.
256264
265+ If \c do_subtraction is \c true, this function instead computes
266+ \f[ {\partial L \over \partial \lambda_v} =
267+ \sum_b P_{bv} ({y_b \over Y_b} - 1)
268+ \f]
269+ */
257270 virtual void
258271 actual_compute_sub_gradient_without_penalty (TargetT& gradient,
259272 const TargetT& current_estimate,
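To make the two formulas concrete, here is a self-contained sketch (illustrative only, not STIR code) of the accumulation that do_subtraction switches between: over the bins b of one subset, add P_bv * y_b/Y_b to the gradient, and additionally subtract P_bv (the "- 1" term) when do_subtraction is true. The dense matrix and container types are assumptions chosen for brevity.

// Sketch of the per-voxel accumulation described by the two formulas above.
#include <cstddef>
#include <vector>

void accumulate_subset_gradient(std::vector<double>& gradient,                  // indexed by voxel v
                                const std::vector<std::vector<double>>& P,      // P[b][v]
                                const std::vector<double>& measured,            // y_b
                                const std::vector<double>& estimated,           // Y_b
                                const std::vector<std::size_t>& subset_bins,    // bins b in this subset
                                const bool do_subtraction)
{
  for (std::size_t b : subset_bins)
    for (std::size_t v = 0; v < gradient.size(); ++v)
      {
        const double ratio = measured[b] / estimated[b];
        // do_subtraction == true  : sum_b P_bv (y_b/Y_b - 1)
        // do_subtraction == false : sum_b P_bv  y_b/Y_b
        gradient[v] += P[b][v] * (do_subtraction ? (ratio - 1.0) : ratio);
      }
}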