
"""Base class for mixture models."""

import warnings
from abc import ABCMeta, abstractmethod
from numbers import Integral, Real
from time import time

import numpy as np
from scipy.special import logsumexp

from .. import cluster
from ..base import BaseEstimator, DensityMixin, _fit_context
from ..cluster import kmeans_plusplus
from ..exceptions import ConvergenceWarning
from ..utils import check_random_state
from ..utils._param_validation import Interval, StrOptions
from ..utils.validation import check_is_fitted, validate_data


def _check_shape(param, param_shape, name):
    """Validate the shape of the input parameter 'param'.

    Parameters
    ----------
    param : array

    param_shape : tuple

    name : str
    zThe parameter 'z' should have the shape of z
, but got N)nparrayshape
ValueError)paramparam_shapenames      U/var/www/html/test/jupyter/venv/lib/python3.11/site-packages/sklearn/mixture/_base.py_check_shaper      sR     HUOOE{k!!jtt[[[%++/
 
 	
 "!    c                      e Zd ZU dZ eeddd          g eeddd          g eeddd          g eeddd          g eeddd          g eh d          gd	gd
gdg eeddd          gd
Ze	e
d<   d Zed             Zd Zed             Zd&dZ ed          d&d            Zd Zed             Zed             Zed             Zd Zd&dZd Zd Zd'dZd Zed              Zed!             Zd" Zd# Z d$ Z!d% Z"dS )(BaseMixturezBase class for mixture models.

    This abstract class specifies an interface for all mixture classes and
    provides basic common methods for mixture models.
       Nleft)closedg        r   >   kmeansrandomrandom_from_data	k-means++random_statebooleanverbose
n_componentstol	reg_covarmax_itern_initinit_paramsr)   
warm_startr+   verbose_interval_parameter_constraintsc                     || _         || _        || _        || _        || _        || _        || _        || _        |	| _        |
| _	        d S Nr,   )selfr-   r.   r/   r0   r1   r2   r)   r3   r+   r4   s              r   __init__zBaseMixture.__init__@   sT     )" &($ 0r   c                     dS )zCheck initial parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)
        N r8   Xs     r   _check_parameterszBaseMixture._check_parametersX   	     	r   c                 \   |j         \  }}| j        dk    rjt          j        || j        f          }t          j        | j        d|                              |          j        }d|t          j	        |          |f<   n| j        dk    rK|
                    || j        f          }||                    d          ddt          j        f         z  }n| j        dk    rWt          j        || j        f          }|                    || j        d	
          }d||t          j	        | j                  f<   n^| j        dk    rSt          j        || j        f          }t          || j        |          \  }}d||t          j	        | j                  f<   |                     ||           dS )a?  Initialize the model parameters.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        random_state : RandomState
            A random number generator instance that controls the random seed
            used for the method chosen to initialize the parameters.
        r%   r"   )
n_clustersr1   r)   r&   sizeaxisNr'   F)rC   replacer(   )r)   )r   r2   r   zerosr-   r
   KMeansfitlabels_arangeuniformsumnewaxischoicer   _initialize)r8   r=   r)   	n_samples_resplabelindicess           r   _initialize_parametersz"BaseMixture._initialize_parametersb   s    w	1x''8Y(9:;;D#0   Q  12D9%%u,--))''i9J-K'LLDDHH!H$$QQQ
]33DD!3338Y(9:;;D")) 15 *  G ;<D")D$566677,,8Y(9:;;D(!)  JAw
 ;<D")D$56667D!!!!!r   c                     dS )zInitialize the model parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        resp : array-like of shape (n_samples, n_components)
        Nr;   )r8   r=   rS   s      r   rP   zBaseMixture._initialize   s	     	r   c                 2    |                      ||           | S )a  Estimate model parameters with the EM algorithm.

        The method fits the model ``n_init`` times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for ``max_iter``
        times until the change of likelihood or lower bound is less than
        ``tol``, otherwise, a ``ConvergenceWarning`` is raised.
        If ``warm_start`` is ``True``, then ``n_init`` is ignored and a single
        initialization is performed upon the first call. Upon consecutive
        calls, training starts where it left off.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        self : object
            The fitted mixture.
        )fit_predictr8   r=   ys      r   rI   zBaseMixture.fit   s    6 	Ar   T)prefer_skip_nested_validationc                 V   t          | |t          j        t          j        gd          }|j        d         | j        k     r%t          d| j         d|j        d                    |                     |           | j        ot          | d           }|r| j
        nd}t          j         }d| _        t          | j                  }|j        \  }}t          |          D ]N}	|                     |	           |r|                     ||           |rt          j         n| j        }
| j        dk    r|                                 }d}hd}t          d| j        dz             D ]}|
}|                     |          \  }}|                     ||           |                     ||          }
|
|z
  }|                     ||           t3          |          | j        k     rd	} n|                     |
|           |
|k    s|t          j         k    r|
}|                                 }|}|| _        P| j        s%| j        dk    rt9          j        d
t<                     |                     |           || _         || _        |                     |          \  }}|!                    d          S )a  Estimate model parameters using X and predict the labels for X.

        The method fits the model n_init times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for `max_iter`
        times until the change of likelihood or lower bound is less than
        `tol`, otherwise, a :class:`~sklearn.exceptions.ConvergenceWarning` is
        raised. After fitting, it predicts the most probable label for the
        input data points.

        .. versionadded:: 0.20

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        r	   )dtypeensure_min_samplesr   z:Expected n_samples >= n_components but got n_components = z, n_samples = 
converged_r"   FTzBest performing initialization did not converge. Try different init parameters, or increase max_iter, tol, or check for degenerate data.rD   )"r   r   float64float32r   r-   r   r>   r3   hasattrr1   infr`   r   r)   range_print_verbose_msg_init_begrV   lower_bound_r0   _get_parameters_e_step_m_step_compute_lower_bound_print_verbose_msg_iter_endabsr.   _print_verbose_msg_init_endwarningswarnr   _set_parametersn_iter_argmax)r8   r=   r[   do_initr1   max_lower_boundr)   rQ   rR   initlower_boundbest_paramsbest_n_iter	convergedn_iterprev_lower_boundlog_prob_normlog_respchanges                      r   rY   zBaseMixture.fit_predict   s   8 $"*bj)AVWXXX71:))),*.*;, , wqz, ,  
 	q!!! F74+F+FG '.Q6')$*;<<w	1&MM !	0 !	0D,,T222 =++A|<<<%,C26''$2CK}!!"2244!	#At}q'899  F'2$.2ll1oo+M8LLH---"&";";Hm"T"TK(+;;F44VVDDD6{{TX--$(	 . 00iHHH00Ow4N4N&1O"&"6"6"8"8K"(K&/DO
  	4=1#4#4M9 #   	[)))"+
 ll1oo8A&&&r   c                 ^    |                      |          \  }}t          j        |          |fS )a  E step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : float
            Mean of the logarithms of the probabilities of each sample in X

        log_responsibility : array, shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        )_estimate_log_prob_respr   mean)r8   r=   r}   r~   s       r   ri   zBaseMixture._e_step!  s1      #'">">q"A"Axw}%%x//r   c                     dS )a*  M step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        log_resp : array-like of shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        Nr;   )r8   r=   r~   s      r   rj   zBaseMixture._m_step4  s	     	r   c                     d S r7   r;   r8   s    r   rh   zBaseMixture._get_parametersB      r   c                     d S r7   r;   )r8   paramss     r   rq   zBaseMixture._set_parametersF  r   r   c                     t          |            t          | |d          }t          |                     |          d          S )a  Compute the log-likelihood of each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        log_prob : array, shape (n_samples,)
            Log-likelihood of each sample in `X` under the current model.
        Fresetr"   rD   )r   r   r   _estimate_weighted_log_probr<   s     r   score_sampleszBaseMixture.score_samplesJ  sG     	$///99!<<1EEEEr   c                 P    |                      |                                          S )a  Compute the per-sample average log-likelihood of the given data X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_dimensions)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        log_likelihood : float
            Log-likelihood of `X` under the Gaussian mixture model.
        )r   r   rZ   s      r   scorezBaseMixture.score]  s$    " !!!$$))+++r   c                     t          |            t          | |d          }|                     |                              d          S )a  Predict the labels for the data samples in X using trained model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        Fr   r"   rD   )r   r   r   rs   r<   s     r   predictzBaseMixture.predictp  sK     	$/////2299q9AAAr   c                     t          |            t          | |d          }|                     |          \  }}t          j        |          S )a  Evaluate the components' density for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        resp : array, shape (n_samples, n_components)
            Density of each Gaussian component for each sample in X.
        Fr   )r   r   r   r   exp)r8   r=   rR   r~   s       r   predict_probazBaseMixture.predict_proba  sM     	$///221558vhr   c                     t                      |dk     rt          d j        z             j        j        \  }t           j                                      | j                  } j	        dk    r:t          j        fdt           j         j        |          D                       }nz j	        dk    r5t          j         fdt           j        |          D                       }n:t          j        fdt           j         j        |          D                       }t          j        d t          |          D                       }||fS )	ay  Generate random samples from the fitted Gaussian distribution.

        Parameters
        ----------
        n_samples : int, default=1
            Number of samples to generate.

        Returns
        -------
        X : array, shape (n_samples, n_features)
            Randomly generated sample.

        y : array, shape (nsamples,)
            Component labels.
        r"   zNInvalid value for 'n_samples': %d . The sampling requires at least one sample.fullc           	      `    g | ]*\  }}}                     ||t          |                    +S r;   )multivariate_normalint).0r   
covariancesamplerngs       r   
<listcomp>z&BaseMixture.sample.<locals>.<listcomp>  sG       2z6 ++D*c&kkJJ  r   tiedc           	      h    g | ].\  }}                     |j        t          |                    /S r;   )r   covariances_r   )r   r   r   r   r8   s      r   r   z&BaseMixture.sample.<locals>.<listcomp>  sH       &v ++D$2CS[[QQ  r   c                 x    g | ]6\  }}}|                     |f           t          j        |          z  z   7S )rB   )standard_normalr   sqrt)r   r   r   r   
n_featuresr   s       r   r   z&BaseMixture.sample.<locals>.<listcomp>  sa        3z6 ))
/C)DDgj))**  r   c                 L    g | ]!\  }}t          j        ||t                     "S ))r^   )r   r   r   )r   jr   s      r   r   z&BaseMixture.sample.<locals>.<listcomp>  s-    VVVyq&RWVQc***VVVr   )r   r   r-   means_r   r   r)   multinomialweights_covariance_typer   vstackzipr   concatenate	enumerate)r8   rQ   rR   n_samples_compr=   r[   r   r   s   `     @@r   r   zBaseMixture.sample  s     	q==$'+'8:  
 ): !233DMBB6))	   69T%67 7   AA !V++	    *-dk>*J*J   AA 	     7:T%67 7	  	 	A NVVIn<U<UVVV
 
 1vr   c                 V    |                      |          |                                 z   S )a  Estimate the weighted log-probabilities, log P(X | Z) + log weights.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        weighted_log_prob : array, shape (n_samples, n_component)
        )_estimate_log_prob_estimate_log_weightsr<   s     r   r   z'BaseMixture._estimate_weighted_log_prob  s)     &&q))D,F,F,H,HHHr   c                     dS )zEstimate log-weights in EM algorithm, E[ log pi ] in VB algorithm.

        Returns
        -------
        log_weight : array, shape (n_components, )
        Nr;   r   s    r   r   z!BaseMixture._estimate_log_weights  r?   r   c                     dS )a9  Estimate the log-probabilities log P(X | Z).

        Compute the log-probabilities per each component for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob : array, shape (n_samples, n_component)
        Nr;   r<   s     r   r   zBaseMixture._estimate_log_prob  s	     	r   c                     |                      |          }t          |d          }t          j        d          5  ||ddt          j        f         z
  }ddd           n# 1 swxY w Y   ||fS )a@  Estimate log probabilities and responsibilities for each sample.

        Compute the log probabilities, weighted log probabilities per
        component and responsibilities for each sample in X with respect to
        the current state of the model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : array, shape (n_samples,)
            log p(X)

        log_responsibilities : array, shape (n_samples, n_components)
            logarithm of the responsibilities
        r"   rD   ignore)underN)r   r   r   errstaterN   )r8   r=   weighted_log_probr}   r~   s        r   r   z#BaseMixture._estimate_log_prob_resp  s    & !<<Q??!"3!<<<[x((( 	H 	H(=BJ+GGH	H 	H 	H 	H 	H 	H 	H 	H 	H 	H 	H 	H 	H 	H 	H h&&s   A""A&)A&c                     | j         dk    rt          d|z             dS | j         dk    r3t          d|z             t                      | _        | j        | _        dS dS )(Print verbose message on initialization.r"   zInitialization %dr	   N)r+   printr   _init_prev_time_iter_prev_time)r8   r1   s     r   rf   z'BaseMixture._print_verbose_msg_init_beg  so    <1%./////\Q%.///#'66D #'#7D    r   c                     || j         z  dk    r^| j        dk    rt          d|z             dS | j        dk    r6t                      }t          d||| j        z
  |fz             || _        dS dS dS )r   r   r"   z  Iteration %dr	   z0  Iteration %d	 time lapse %.5fs	 ll change %.5fN)r4   r+   r   r   r   )r8   r{   diff_llcur_times       r   rl   z'BaseMixture._print_verbose_msg_iter_end  s    D))Q..|q  &/00000""66Hx$*>>HI   (0$$$ /. #"r   c           	          |rdnd}| j         dk    rt          d| d           d
S | j         dk    r3t                      | j        z
  }t          d| d|dd	|dd           d
S d
S )z.Print verbose message on the end of iteration.rz   zdid not converger"   zInitialization .r	   z. time lapse z.5fzs	 lower bound N)r+   r   r   r   )r8   lbinit_has_convergedconverged_msgts        r   rn   z'BaseMixture._print_verbose_msg_init_end*  s    '9Q?Q<14M44455555\Q--A-  aT         r   r7   )r"   )#__name__
__module____qualname____doc__r   r   r   r   r5   dict__annotations__r9   r   r>   rV   rP   rI   r   rY   ri   rj   rh   rq   r   r   r   r   r   r   r   r   r   rf   rl   rn   r;   r   r   r!   r!   *   s          "(AtFCCCDsD8889htS$v>>>?Xh4???@8Haf===>JLLLMM
 (( k;%Xh4GGGH$ $D   1 1 10   ^)" )" )"V 	 	 ^	   < \555g' g' g' 65g'R0 0 0&   ^   ^   ^F F F&, , , ,&B B B$     &< < < <|I I I   ^   ^' ' '48 8 80 0 0
 
 
 
 
r   r!   )	metaclass) r   ro   abcr   r   numbersr   r   r   numpyr   scipy.specialr    r
   baser   r   r   r   
exceptionsr   utilsr   utils._param_validationr   r   utils.validationr   r   r   r!   r;   r   r   <module>r      sc   $ $
  ' ' ' ' ' ' ' ' " " " " " " " "           # # # # # #       < < < < < < < < < < % % % % % % + + + + + + & & & & & & : : : : : : : : = = = = = = = =
 
 
&J J J J J, J J J J J Jr   
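

# ---------------------------------------------------------------------------
# Usage sketch (editor's addition, not part of the scikit-learn module).
# Concrete subclasses of BaseMixture, such as sklearn.mixture.GaussianMixture,
# implement the abstract hooks above (_initialize, _m_step, _estimate_log_prob,
# ...) and expose the fit / predict / score_samples / sample API shown here.
# The data and hyperparameters below are illustrative assumptions only; the
# block is kept behind a __main__ guard so importing this module is unaffected.
if __name__ == "__main__":
    import numpy as np

    from sklearn.mixture import GaussianMixture

    rng = np.random.RandomState(0)
    # Two well-separated 2-D blobs, 100 points each.
    X = np.vstack(
        [
            rng.normal(loc=-3.0, scale=1.0, size=(100, 2)),
            rng.normal(loc=3.0, scale=1.0, size=(100, 2)),
        ]
    )

    gm = GaussianMixture(n_components=2, n_init=2, random_state=0).fit(X)

    print("converged:", gm.converged_)          # set by BaseMixture.fit_predict
    print("means:\n", gm.means_)                # one row per component
    print("labels:", gm.predict(X[:5]))         # most probable component
    print("log-lik:", gm.score_samples(X[:3]))  # per-sample log p(x)

    X_new, y_new = gm.sample(n_samples=5)       # draw from the fitted mixture
    print("sampled labels:", y_new)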