@@ -44,29 +44,50 @@ const find_beta_from_ci = ({ci_lower, ci_upper}) => {
     let result = (f_h - f) / h
     return result
   }
+
+  // backtracking line search
+  // <https://en.wikipedia.org/wiki/Backtracking_line_search>
+  // Once we know the direction, how far to go along it?
+  const get_optimal_step_size_a = ({ a, b, d, is_a }) => {
+    let dir = d > 0 ? 1 : -1
+
+    let step_size_min = 0
+    let loss_s_min = is_a ? loss(a + step_size_min * dir, b) : loss(a, b + step_size_min * dir)
+
+    let step_size_max = 0.1
+    let loss_s_max = is_a ? loss(a + step_size_max * dir, b) : loss(a, b + step_size_max * dir)
+
+    // bisect on the step size: halve the bracket toward the endpoint with the lower loss
+    for (let i = 0; i < 20; i++) {
+      if (loss_s_min < loss_s_max) {
+        step_size_max = (step_size_max + step_size_min) / 2
+        loss_s_max = is_a ? loss(a + step_size_max * dir, b) : loss(a, b + step_size_max * dir)
+      } else {
+        step_size_min = (step_size_max + step_size_min) / 2
+        loss_s_min = is_a ? loss(a + step_size_min * dir, b) : loss(a, b + step_size_min * dir)
+      }
+    }
+    return (step_size_min + step_size_max) / 2
+  }
 
   // gradient descent step
   const gradient_descent = (a_init, b_init) => {
-    let epsilon = 2 ** (-14) // 1/16384
-    let n_a = 2
-    let n_b = 2
     let a = a_init
     let b = b_init
     let max_steps = 2000
     for (let i = 0; i < max_steps; i++) {
       // gradient step for a
       let dir_a = -df_da(a, b)
+      // console.log(dir_a)
       let stepsize_a = 0.0005 // 1/n_a
-      let step_a = stepsize_a * dir_a
+      let step_a = stepsize_a // * dir_a
       a = Math.max(a + step_a, 0)
-      n_a = n_a + 1
 
       // gradient step for b
       let dir_b = -df_db(a, b)
       let stepsize_b = 0.0005 // 1/n_b
-      let step_b = stepsize_b * dir_b
+      let step_b = stepsize_b // * dir_b
       b = Math.max(b + step_b, 0)
-      n_b = n_b + 1
       // console.log(`a: ${a}, b: ${b}`)
     }
     return [a, b]
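
A note on what the new helper is doing: despite the backtracking-line-search reference, `get_optimal_step_size_a` is a bisection over the step size. It brackets the step in [0, 0.1] and repeatedly halves the bracket toward whichever endpoint has the lower loss. Below is a minimal, self-contained sketch of that idea wired into the descent loop; the quadratic `loss` and the forward-difference `df_da` / `df_db` here are hypothetical stand-ins for the ones defined elsewhere in this file.

```js
// Toy loss with its minimum at (a, b) = (2, 3); a stand-in for the
// real loss defined elsewhere (hypothetical, for illustration only).
const loss = (a, b) => (a - 2) ** 2 + (b - 3) ** 2

// Forward-difference partials, mirroring the df_da / df_db pattern above.
const h = 1e-6
const df_da = (a, b) => (loss(a + h, b) - loss(a, b)) / h
const df_db = (a, b) => (loss(a, b + h) - loss(a, b)) / h

// Bisection over the step size along a fixed direction: keep halving
// the bracket [lo, hi] toward whichever endpoint has the lower loss.
const line_search = ({ a, b, d, is_a }) => {
  const dir = d > 0 ? 1 : -1
  const at = (s) => (is_a ? loss(a + s * dir, b) : loss(a, b + s * dir))
  let lo = 0
  let hi = 0.1
  for (let i = 0; i < 20; i++) {
    if (at(lo) < at(hi)) hi = (lo + hi) / 2
    else lo = (lo + hi) / 2
  }
  // return a signed step, so the caller can just add it
  return ((lo + hi) / 2) * dir
}

// Descent loop using the searched step instead of the fixed 0.0005.
let a = 0
let b = 0
for (let i = 0; i < 200; i++) {
  a = Math.max(a + line_search({ a, b, d: -df_da(a, b), is_a: true }), 0)
  b = Math.max(b + line_search({ a, b, d: -df_db(a, b), is_a: false }), 0)
}
console.log(a.toFixed(3), b.toFixed(3)) // approaches 2.000 3.000
```

One design note: because the bracket only ever shrinks, the search can return steps no larger than the initial upper bound of 0.1, so that constant plays the same role as the hard-coded 0.0005 step size in the loop above, just refined adaptively per step.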