
"""
Our own implementation of the Newton algorithm

Unlike the scipy.optimize version, this version of the Newton conjugate
gradient solver uses only one function call to retrieve the
func value, the gradient value and a callable for the Hessian matvec
product. If the function call is very expensive (e.g. for logistic
regression with large design matrix), this approach gives very
significant speedups.
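
A ``grad_hess`` callable passed to ``_newton_cg`` below is expected to
return the gradient together with a callable for the Hessian matvec
product, e.g. (a minimal sketch for a quadratic ``0.5 * x @ A @ x``,
where ``A`` is a made-up matrix, not part of this module)::

    def grad_hess(x, A):
        gradient = A @ x
        hessp = lambda p: A @ p  # the Hessian of the quadratic is A itself
        return gradient, hessp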
"""

import warnings

import numpy as np
import scipy

from ..exceptions import ConvergenceWarning
from .fixes import line_search_wolfe1, line_search_wolfe2


class _LineSearchError(RuntimeError):
    pass


def _line_search_wolfe12(
    f, fprime, xk, pk, gfk, old_fval, old_old_fval, verbose=0, **kwargs
):
    """
    Same as line_search_wolfe1, but fall back to line_search_wolfe2 if a
    suitable step length is not found, and raise an exception if the
    fallback search also fails to find one.

    Raises
    ------
    _LineSearchError
        If no suitable step size is found.

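    Notes
    -----
    A minimal sketch of a call on a one-dimensional quadratic (the
    callables and arrays below are made-up demo inputs, not part of this
    module)::

        import numpy as np

        f = lambda x, *args: float(x @ x)
        fprime = lambda x, *args: 2 * x
        xk, pk = np.array([1.0]), np.array([-1.0])
        step, fc, gc, fval, old_fval, new_grad = _line_search_wolfe12(
            f, fprime, xk, pk, fprime(xk), f(xk), None
        )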
    """
    is_verbose = verbose >= 2
    eps = 16 * np.finfo(np.asarray(old_fval).dtype).eps
    if is_verbose:
        print("  Line Search")
        print(f"    eps=16 * finfo.eps={eps}")
        print("    try line search wolfe1")

    ret = line_search_wolfe1(f, fprime, xk, pk, gfk, old_fval, old_old_fval, **kwargs)

    if is_verbose:
        _not = "not " if ret[0] is None else ""
        print("    wolfe1 line search was " + _not + "successful")

    if ret[0] is None:
        # Deal with relative loss differences around machine precision.
        args = kwargs.get("args", tuple())
        fval = f(xk + pk, *args)
        tiny_loss = np.abs(old_fval * eps)
        loss_improvement = fval - old_fval
        check = np.abs(loss_improvement) <= tiny_loss
        if is_verbose:
            print(
                "    check loss |improvement| <= eps * |loss_old|:"
                f" {np.abs(loss_improvement)} <= {tiny_loss} {check}"
            )
        if check:
            # Check the sum of absolute gradients as an alternative condition.
            sum_abs_grad_old = scipy.linalg.norm(gfk, ord=1)
            grad = fprime(xk + pk, *args)
            sum_abs_grad = scipy.linalg.norm(grad, ord=1)
            check = sum_abs_grad < sum_abs_grad_old
            if is_verbose:
                print(
                    "    check sum(|gradient|) < sum(|gradient_old|):"
                    f" {sum_abs_grad} < {sum_abs_grad_old} {check}"
                )
            if check:
                # Accept the full step.
                ret = (
                    1.0,  # step size
                    ret[1] + 1,  # number of function evaluations
                    ret[2] + 1,  # number of gradient evaluations
                    fval,
                    old_fval,
                    grad,
                )

    if ret[0] is None:
        # The line search failed: try a different one.
        if is_verbose:
            print("    last resort: try line search wolfe2")
        ret = line_search_wolfe2(
            f, fprime, xk, pk, gfk, old_fval, old_old_fval, **kwargs
        )
        if is_verbose:
            _not = "not " if ret[0] is None else ""
            print("    wolfe2 line search was " + _not + "successful")

    if ret[0] is None:
        raise _LineSearchError()

    return ret


def _cg(fhess_p, fgrad, maxiter, tol, verbose=0):
    """
    Solve iteratively the linear system 'fhess_p . xsupi = -fgrad'
    with the conjugate gradient method.

    Parameters
    ----------
    fhess_p : callable
        Function that takes a vector as a parameter and returns the
        matrix product of the Hessian and that vector.

    fgrad : ndarray of shape (n_features,) or (n_features + 1,)
        Gradient vector.

    maxiter : int
        Number of CG iterations.

    tol : float
        Stopping criterion on the sum of absolute residuals.

    verbose : int, default=0
        Verbosity level. Prints inner-solver diagnostics when positive.

    Returns
    -------
    xsupi : ndarray of shape (n_features,) or (n_features + 1,)
        Estimated solution.
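
    Examples
    --------
    A minimal sketch with a fixed symmetric positive definite Hessian
    (``A`` and ``g`` are made-up demo data, not part of this module)::

        import numpy as np

        A = np.array([[4.0, 1.0], [1.0, 3.0]])  # Hessian
        g = np.array([1.0, 2.0])                # gradient

        # step solves A @ step = -g (a Newton-type step), up to tol
        step = _cg(lambda p: A @ p, g, maxiter=10, tol=1e-12)
        # step is approximately -np.linalg.solve(A, g)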
    """
    eps = 16 * np.finfo(np.float64).eps
    xsupi = np.zeros(len(fgrad), dtype=fgrad.dtype)
    ri = np.copy(fgrad)
    psupi = -ri
    i = 0
    dri0 = np.dot(ri, ri)
    # We also keep track of |p_i|^2.
    psupi_norm2 = dri0

    while i <= maxiter:
        if np.sum(np.abs(ri)) <= tol:
            if verbose:
                print(
                    f"  Inner CG solver iteration {i} stopped with\n"
                    f"    sum(|residuals|) <= tol: {np.sum(np.abs(ri))} <= {tol}"
                )
            break

        Ap = fhess_p(psupi)
        # check curvature
        curv = np.dot(psupi, Ap)
        if 0 <= curv <= eps * psupi_norm2:
            # See https://arxiv.org/abs/1803.02924, Algo 3 Capped Conjugate Gradient.
            if verbose:
                print(
                    f"  Inner CG solver iteration {i} stopped with\n"
                    f"    tiny_|p| = eps * ||p||^2, eps = {eps}, "
                    f"squared L2 norm ||p||^2 = {psupi_norm2}\n"
                    f"    curvature <= tiny_|p|: {curv} <= {eps * psupi_norm2}"
                )
            break
        elif curv < 0:
            if i > 0:
                if verbose:
                    print(
                        f"  Inner CG solver iteration {i} stopped with negative "
                        f"curvature, curvature = {curv}"
                    )
                break
            else:
                # fall back to steepest descent direction
                xsupi += dri0 / curv * psupi
                if verbose:
                    print("  Inner CG solver iteration 0 fell back to steepest descent")
                break
        alphai = dri0 / curv
        xsupi += alphai * psupi
        ri += alphai * Ap
        dri1 = np.dot(ri, ri)
        betai = dri1 / dri0
        psupi = -ri + betai * psupi
        # Keep track of |p_i|^2 as well.
        psupi_norm2 = dri1 + betai**2 * psupi_norm2
        i = i + 1
        dri0 = dri1  # update np.dot(ri, ri) for the next iteration

    if verbose and i > maxiter:
        print(
            f"  Inner CG solver stopped reaching maxiter={i - 1} with "
            f"sum(|residuals|) = {np.sum(np.abs(ri))}"
        )
    return xsupi


def _newton_cg(
    grad_hess,
    func,
    grad,
    x0,
    args=(),
    tol=1e-4,
    maxiter=100,
    maxinner=200,
    line_search=True,
    warn=True,
    verbose=0,
):
    """
    Minimization of scalar function of one or more variables using the
    Newton-CG algorithm.

    Parameters
    ----------
    grad_hess : callable
        Should return the gradient and a callable returning the matvec product
        of the Hessian.

    func : callable
        Should return the value of the function.

    grad : callable
        Should return the gradient of the function. This is used by the
        line search functions.

    x0 : array of float
        Initial guess.

    args : tuple, default=()
        Arguments passed to grad_hess, func and grad.

    tol : float, default=1e-4
        Stopping criterion. The iteration will stop when
        ``max{|g_i | i = 1, ..., n} <= tol``
        where ``g_i`` is the i-th component of the gradient.

    maxiter : int, default=100
        Number of Newton iterations.

    maxinner : int, default=200
        Number of CG iterations.

    line_search : bool, default=True
        Whether to use a line search or not.

    warn : bool, default=True
        Whether to warn when the solver did not converge.

    verbose : int, default=0
        Verbosity level. Higher values print more convergence details.

    Returns
    -------
    xk : ndarray of float
        Estimated minimum.
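
    Examples
    --------
    A minimal sketch on a strictly convex quadratic (``A`` and ``b`` are
    made-up demo data, not part of this module)::

        import numpy as np

        A = np.array([[3.0, 1.0], [1.0, 2.0]])  # symmetric positive definite
        b = np.array([1.0, 1.0])

        def func(x):
            return 0.5 * x @ A @ x - b @ x

        def grad(x):
            return A @ x - b

        def grad_hess(x):
            # gradient plus a Hessian matvec; for a quadratic the Hessian is A
            return grad(x), lambda p: A @ p

        xmin, n_iter = _newton_cg(grad_hess, func, grad, x0=np.zeros(2))
        # xmin is approximately np.linalg.solve(A, b)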
    """
    x0 = np.asarray(x0).flatten()
    xk = np.copy(x0)
    k = 0

    if line_search:
        old_fval = func(x0, *args)
        old_old_fval = None
    else:
        old_fval = 0

    is_verbose = verbose > 0

    # Outer loop: our Newton iteration
    while k < maxiter:
        # Compute a search direction pk by applying the CG method to
        #  del2 f(xk) p = - fgrad f(xk) starting from 0.
        fgrad, fhess_p = grad_hess(xk, *args)

        absgrad = np.abs(fgrad)
        max_absgrad = np.max(absgrad)
        check = max_absgrad <= tol
        if is_verbose:
            print(f"Newton-CG iter = {k}")
            print("  Check Convergence")
            print(f"    max |gradient| <= tol: {max_absgrad} <= {tol} {check}")
        if check:
            break

        maggrad = np.sum(absgrad)
        eta = min([0.5, np.sqrt(maggrad)])
        termcond = eta * maggrad

        # Inner loop: solve the Newton update by conjugate gradient, to
        # avoid inverting the Hessian
        xsupi = _cg(fhess_p, fgrad, maxiter=maxinner, tol=termcond, verbose=verbose)

        alphak = 1.0

        if line_search:
            try:
                alphak, fc, gc, old_fval, old_old_fval, gfkp1 = _line_search_wolfe12(
                    func,
                    grad,
                    xk,
                    xsupi,
                    fgrad,
                    old_fval,
                    old_old_fval,
                    verbose=verbose,
                    args=args,
                )
            except _LineSearchError:
                warnings.warn("Line Search failed")
                break

        xk += alphak * xsupi  # upcast if necessary
        k += 1

    if warn and k >= maxiter:
        warnings.warn(
            f"newton-cg failed to converge at loss = {old_fval}. Increase the "
            "number of iterations.",
            ConvergenceWarning,
        )
    elif is_verbose:
        print(f"  Solver did converge at loss = {old_fval}.")
    return xk, k


def _check_optimize_result(solver, result, max_iter=None, extra_warning_msg=None):
    """Check the OptimizeResult for successful convergence

    Parameters
    ----------
    solver : str
       Solver name. Currently only `lbfgs` is supported.

    result : OptimizeResult
       Result of the scipy.optimize.minimize function.

    max_iter : int, default=None
       Expected maximum number of iterations.

    extra_warning_msg : str, default=None
        Extra warning message.

    Returns
    -------
    n_iter : int
       Number of iterations.
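
    Examples
    --------
    A minimal sketch with ``scipy.optimize.minimize`` (the quadratic
    objective is made-up demo data)::

        import numpy as np
        from scipy.optimize import minimize

        result = minimize(lambda x: x @ x, np.ones(3), method="L-BFGS-B")
        n_iter = _check_optimize_result("lbfgs", result, max_iter=15000)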
    """
    if solver == "lbfgs":
        if result.status != 0:
            result_message = result.message
            warning_msg = (
                "{} failed to converge (status={}):\n{}.\n\n"
                "Increase the number of iterations (max_iter) "
                "or scale the data as shown in:\n"
                "    https://scikit-learn.org/stable/modules/"
                "preprocessing.html"
            ).format(solver, result.status, result_message)
            if extra_warning_msg is not None:
                warning_msg += "\n" + extra_warning_msg
            warnings.warn(warning_msg, ConvergenceWarning, stacklevel=2)
        if max_iter is not None:
            # In scipy <= 1.0.0, nit may exceed maxiter for lbfgs.
            # See https://github.com/scipy/scipy/issues/7854
            n_iter_i = min(result.nit, max_iter)
        else:
            n_iter_i = result.nit
    else:
        raise NotImplementedError

    return n_iter_i