# Maximum-likelihood analysis of a binomial proportion.
# Data: s = 72 successes in n = 400 Bernoulli trials.
# The worksheet plots the likelihood and log-likelihood, derives the
# MLE theta-hat = s/n symbolically, and computes the observed (Fisher)
# information and the asymptotic variance of the estimator.
# (Pretty-printed session output from the original transcript is kept
# below as "result:" comments so the file is valid Maple input.)

n := 400; s := 72;

# Likelihood kernel of theta (the binomial coefficient is constant in
# theta and therefore omitted).
plot( theta^s * ( 1 - theta )^( n - s ), theta = 0 .. 1 );

likelihood := ( theta, n, s ) -> theta^s * ( 1 - theta )^( n - s );
plot( likelihood( theta, n, s ), theta = 0 .. 1 );
# Zoom in around the maximum (theta-hat = 72/400 = 0.18).
plot( likelihood( theta, n, s ), theta = 0.12 .. 0.25 );

# Log-likelihood: easier to differentiate and numerically better behaved.
log_likelihood := ( theta, n, s ) -> s * log( theta ) + ( n - s ) * log( 1 - theta );
plot( log_likelihood( theta, n, s ), theta = 0 .. 1 );

# Clear the numeric values so the remaining derivations are symbolic.
unassign( 'n', 's' );

# Score (first derivative); setting it to zero yields the MLE.
diff( log_likelihood( theta, n, s ), theta );
solve( diff( log_likelihood( theta, n, s ), theta ) = 0, theta );
#   result: s/n

# Second derivative; its negative evaluated at theta = s/n is the
# observed information.
diff( log_likelihood( theta, n, s ), theta$2 );
simplify( diff( log_likelihood( theta, n, s ), theta$2 ) );
eval( - diff( log_likelihood( theta, n, s ), theta$2 ), theta = s / n );
simplify( eval( - diff( log_likelihood( theta, n, s ), theta$2 ), theta = s / n ) );
#   result: n^3 / ( s * ( n - s ) )

##############################################################################

# Same analysis packaged as reusable arrow procedures, comparing two
# sample sizes with the same observed proportion.
ll := ( theta, s, n ) -> s * log( theta ) + ( n - s ) * log( 1 - theta );

# the next line is needed for maple in environments such as unix or linux
plotsetup( x11 );

# Log-likelihoods shifted so each peaks at 0: quadrupling the sample
# (288/1600 vs 72/400) visibly sharpens the curve around the MLE.
plot( { ll( theta, 72, 400 ) - evalf( ll( 72 / 400, 72, 400 ) ),
        ll( theta, 288, 1600 ) - evalf( ll( 288 / 1600, 288, 1600 ) ) },
      theta = 0.12 .. 0.25, color = black );

# Score function: d/dtheta of the log-likelihood.
score := ( theta, s, n ) -> simplify( diff( ll( theta, s, n ), theta ) );
score( theta, s, n );
#   result: - ( s - n*theta ) / ( theta * ( -1 + theta ) )
plot( score( theta, 72, 400 ), theta = 0.12 .. 0.25 );

# Second derivative of the log-likelihood.
diff2 := ( theta, s, n ) -> simplify( diff( score( theta, s, n ), theta ) );
diff2( theta, s, n );
#   result: ( -n*theta^2 - s + 2*s*theta ) / ( theta^2 * ( -1 + theta )^2 )

# Observed information: negative second derivative at the MLE theta = s/n.
information := ( s, n ) -> simplify( eval( - diff2( theta, s, n ), theta = s / n ) );
information( s, n );
#   result: - n^3 / ( s * ( -n + s ) )   [ = n^3 / ( s * ( n - s ) ) ]

# Asymptotic variance of the MLE: reciprocal of the information.
variance := ( s, n ) -> 1 / information( s, n );
variance( s, n );
#   result: - s * ( -n + s ) / n^3   [ = s * ( n - s ) / n^3 ]

##############################################################################