
"""Principal Component Analysis Base Classes"""

from abc import ABCMeta, abstractmethod

import numpy as np
from scipy import linalg

from ..base import BaseEstimator, ClassNamePrefixFeaturesOutMixin, TransformerMixin
from ..utils._array_api import _fill_or_add_to_diagonal, device, get_namespace
from ..utils.validation import check_is_fitted, validate_data


class _BasePCA(
    ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator, metaclass=ABCMeta
):
    """Base class for PCA methods.

    Warning: This class should not be used directly.
    Use derived classes instead.
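
    Examples
    --------
    A minimal sketch of the shared API, shown through the public
    :class:`~sklearn.decomposition.PCA` subclass (this base class is abstract
    and not meant to be instantiated directly); the toy data is arbitrary:

    >>> import numpy as np
    >>> from sklearn.decomposition import PCA
    >>> X = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 7.0], [2.0, 1.0]])
    >>> PCA(n_components=2).fit(X).transform(X).shape
    (4, 2)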
    """

    def get_covariance(self):
        """Compute data covariance with the generative model.

        ``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
        where S**2 contains the explained variances, and sigma2 contains the
        noise variances.

        Returns
        -------
        cov : array of shape=(n_features, n_features)
            Estimated covariance of data.
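
        Examples
        --------
        A small illustrative check with the public
        :class:`~sklearn.decomposition.PCA` subclass; the toy data below is
        arbitrary:

        >>> import numpy as np
        >>> from sklearn.decomposition import PCA
        >>> X = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 7.0], [2.0, 1.0]])
        >>> cov = PCA(n_components=2).fit(X).get_covariance()
        >>> cov.shape
        (2, 2)
        >>> bool(np.allclose(cov, cov.T))
        True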
        """
        xp, _ = get_namespace(self.components_)

        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            components_ = components_ * xp.sqrt(exp_var[:, np.newaxis])
        exp_var_diff = exp_var - self.noise_variance_
        exp_var_diff = xp.where(
            exp_var > self.noise_variance_,
            exp_var_diff,
            xp.asarray(0.0, device=device(exp_var), dtype=exp_var.dtype),
        )
        cov = (components_.T * exp_var_diff) @ components_
        _fill_or_add_to_diagonal(cov, self.noise_variance_, xp)
        return cov

    def get_precision(self):
        """Compute data precision matrix with the generative model.

        Equals the inverse of the covariance but computed with
        the matrix inversion lemma for efficiency.

        Returns
        -------
        precision : array, shape=(n_features, n_features)
            Estimated precision of data.
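
        Examples
        --------
        A small illustrative check with the public
        :class:`~sklearn.decomposition.PCA` subclass; the precision should
        invert the covariance (toy data, arbitrary values):

        >>> import numpy as np
        >>> from sklearn.decomposition import PCA
        >>> X = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 7.0], [2.0, 1.0]])
        >>> pca = PCA(n_components=2).fit(X)
        >>> bool(np.allclose(pca.get_precision() @ pca.get_covariance(), np.eye(2)))
        True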
        """
        xp, is_array_api_compliant = get_namespace(self.components_)

        n_features = self.components_.shape[1]

        # handle corner cases first
        if self.n_components_ == 0:
            return xp.eye(n_features) / self.noise_variance_

        if is_array_api_compliant:
            linalg_inv = xp.linalg.inv
        else:
            linalg_inv = linalg.inv

        if self.noise_variance_ == 0.0:
            return linalg_inv(self.get_covariance())

        # Get precision using the matrix inversion lemma
        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            components_ = components_ * xp.sqrt(exp_var[:, np.newaxis])
        exp_var_diff = exp_var - self.noise_variance_
        exp_var_diff = xp.where(
            exp_var > self.noise_variance_,
            exp_var_diff,
            xp.asarray(0.0, device=device(exp_var)),
        )
        precision = components_ @ components_.T / self.noise_variance_
        _fill_or_add_to_diagonal(precision, 1.0 / exp_var_diff, xp)
        precision = components_.T @ linalg_inv(precision) @ components_
        precision /= -(self.noise_variance_**2)
        _fill_or_add_to_diagonal(precision, 1.0 / self.noise_variance_, xp)
        return precision

    @abstractmethod
    def fit(self, X, y=None):
        """Placeholder for fit. Subclasses should implement this method!

        Fit the model with X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data, where `n_samples` is the number of samples and
            `n_features` is the number of features.

        Returns
        -------
        self : object
            Returns the instance itself.
        """

    def transform(self, X):
        """Apply dimensionality reduction to X.

        X is projected on the first principal components previously extracted
        from a training set.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            New data, where `n_samples` is the number of samples
            and `n_features` is the number of features.

        Returns
        -------
        X_new : array-like of shape (n_samples, n_components)
            Projection of X in the first principal components, where `n_samples`
            is the number of samples and `n_components` is the number of components.
        """
        xp, _ = get_namespace(X, self.components_, self.explained_variance_)

        check_is_fitted(self)

        X = validate_data(
            self,
            X,
            dtype=[xp.float64, xp.float32],
            accept_sparse=("csr", "csc"),
            reset=False,
        )
        return self._transform(X, xp=xp, x_is_centered=False)

    def _transform(self, X, xp, x_is_centered=False):
        X_transformed = X @ self.components_.T
        if not x_is_centered:
            # Apply the centering after the projection to avoid copying the
            # caller's data and to keep sparse inputs sparse.
            X_transformed -= xp.reshape(self.mean_, (1, -1)) @ self.components_.T
        if self.whiten:
            # On rank-deficient data some components can have an explained
            # variance arbitrarily close to zero; clip the scale to avoid
            # non-finite results when whitening.
            scale = xp.sqrt(self.explained_variance_)
            min_scale = xp.finfo(scale.dtype).eps
            scale[scale < min_scale] = min_scale
            X_transformed /= scale
        return X_transformed

    def inverse_transform(self, X):
        """Transform data back to its original space.

        In other words, return an input `X_original` whose transform would be X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_components)
            New data, where `n_samples` is the number of samples
            and `n_components` is the number of components.

        Returns
        -------
        X_original : array-like of shape (n_samples, n_features)
            Original data, where `n_samples` is the number of samples
            and `n_features` is the number of features.

        Notes
        -----
        If whitening is enabled, inverse_transform will compute the
        exact inverse operation, which includes reversing whitening.
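
        Examples
        --------
        An illustrative round trip with the public
        :class:`~sklearn.decomposition.PCA` subclass, keeping all components
        so the reconstruction is exact (toy data, arbitrary values):

        >>> import numpy as np
        >>> from sklearn.decomposition import PCA
        >>> X = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 7.0], [2.0, 1.0]])
        >>> pca = PCA(n_components=2, whiten=True).fit(X)
        >>> bool(np.allclose(pca.inverse_transform(pca.transform(X)), X))
        True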
        """
        xp, _ = get_namespace(X)

        if self.whiten:
            scaled_components = (
                xp.sqrt(self.explained_variance_[:, np.newaxis]) * self.components_
            )
            return X @ scaled_components + self.mean_
        else:
            return X @ self.components_ + self.mean_

    @property
    def _n_features_out(self):
        """Number of transformed output features."""
        return self.components_.shape[0]