
from numbers import Integral, Real

import numpy as np

from ..base import OneToOneFeatureMixin, _fit_context
from ..utils._param_validation import Interval, StrOptions
from ..utils.multiclass import type_of_target
from ..utils.validation import (
    _check_feature_names_in,
    _check_y,
    check_consistent_length,
    check_is_fitted,
)
from ._encoders import _BaseEncoder
from ._target_encoder_fast import _fit_encoding_fast, _fit_encoding_fast_auto_smooth


class TargetEncoder(OneToOneFeatureMixin, _BaseEncoder):
    """Target Encoder for regression and classification targets.

    Each category is encoded based on a shrunk estimate of the average target
    values for observations belonging to the category. The encoding scheme mixes
    the global target mean with the target mean conditioned on the value of the
    category (see [MIC]_).

    When the target type is "multiclass", encodings are based
    on the conditional probability estimate for each class. The target is first
    binarized using the "one-vs-all" scheme via
    :class:`~sklearn.preprocessing.LabelBinarizer`, then the average target
    value for each class and each category is used for encoding, resulting in
    `n_features` * `n_classes` encoded output features.

    :class:`TargetEncoder` considers missing values, such as `np.nan` or `None`,
    as another category and encodes them like any other category. Categories
    that are not seen during :meth:`fit` are encoded with the target mean, i.e.
    `target_mean_`.

    For a demo on the importance of the `TargetEncoder` internal cross-fitting,
    see
    :ref:`sphx_glr_auto_examples_preprocessing_plot_target_encoder_cross_val.py`.
    For a comparison of different encoders, refer to
    :ref:`sphx_glr_auto_examples_preprocessing_plot_target_encoder.py`. Read
    more in the :ref:`User Guide <target_encoder>`.

    .. note::
        `fit(X, y).transform(X)` does not equal `fit_transform(X, y)` because a
        :term:`cross fitting` scheme is used in `fit_transform` for encoding.
        See the :ref:`User Guide <target_encoder>` for details.

    .. versionadded:: 1.3

    Parameters
    ----------
    categories : "auto" or list of shape (n_features,) of array-like, default="auto"
        Categories (unique values) per feature:

        - `"auto"` : Determine categories automatically from the training data.
        - list : `categories[i]` holds the categories expected in the i-th column. The
          passed categories should not mix strings and numeric values within a single
          feature, and should be sorted in case of numeric values.

        The used categories are stored in the `categories_` fitted attribute.

    target_type : {"auto", "continuous", "binary", "multiclass"}, default="auto"
        Type of target.

        - `"auto"` : Type of target is inferred with
          :func:`~sklearn.utils.multiclass.type_of_target`.
        - `"continuous"` : Continuous target
        - `"binary"` : Binary target
        - `"multiclass"` : Multiclass target

        .. note::
            The type of target inferred with `"auto"` may not be the desired target
            type used for modeling. For example, if the target consisted of integers
            between 0 and 100, then :func:`~sklearn.utils.multiclass.type_of_target`
            will infer the target as `"multiclass"`. In this case, setting
            `target_type="continuous"` will specify the target as a regression
            problem. The `target_type_` attribute gives the target type used by the
            encoder.

        .. versionchanged:: 1.4
           Added the option 'multiclass'.

    smooth : "auto" or float, default="auto"
        The amount of mixing of the target mean conditioned on the value of the
        category with the global target mean. A larger `smooth` value will put
        more weight on the global target mean.
        If `"auto"`, then `smooth` is set to an empirical Bayes estimate.
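
        For a fixed `smooth` value, the encoding of a category seen `n_i` times
        with within-category target mean `mean_i` is, up to implementation
        details, the shrunk average
        `(n_i * mean_i + smooth * target_mean) / (n_i + smooth)`,
        i.e. `smooth` behaves like a number of pseudo-observations placed at
        the global target mean.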

    cv : int, default=5
        Determines the number of folds in the :term:`cross fitting` strategy used in
        :meth:`fit_transform`. For classification targets, `StratifiedKFold` is used
        and for continuous targets, `KFold` is used.

    shuffle : bool, default=True
        Whether to shuffle the data in :meth:`fit_transform` before splitting into
        folds. Note that the samples within each split will not be shuffled.

    random_state : int, RandomState instance or None, default=None
        When `shuffle` is True, `random_state` affects the ordering of the
        indices, which controls the randomness of each fold. Otherwise, this
        parameter has no effect.
        Pass an int for reproducible output across multiple function calls.
        See :term:`Glossary <random_state>`.

    Attributes
    ----------
    encodings_ : list of shape (n_features,) or (n_features * n_classes) of \
        ndarray
        Encodings learnt on all of `X`.
        For feature `i`, `encodings_[i]` are the encodings matching the
        categories listed in `categories_[i]`. When `target_type_` is
        "multiclass", the encoding for feature `i` and class `j` is stored in
        `encodings_[j + (i * len(classes_))]`. E.g., for 2 features (f) and
        3 classes (c), encodings are ordered:
        f0_c0, f0_c1, f0_c2, f1_c0, f1_c1, f1_c2,

    categories_ : list of shape (n_features,) of ndarray
        The categories of each input feature determined during fitting or
        specified in `categories`
        (in order of the features in `X` and corresponding with the output
        of :meth:`transform`).

    target_type_ : str
        Type of target.

    target_mean_ : float
        The overall mean of the target. This value is only used in :meth:`transform`
        to encode categories.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

    feature_names_in_ : ndarray of shape (`n_features_in_`,)
        Names of features seen during :term:`fit`. Defined only when `X`
        has feature names that are all strings.

    classes_ : ndarray or None
        If `target_type_` is 'binary' or 'multiclass', holds the label for each class,
        otherwise `None`.

    See Also
    --------
    OrdinalEncoder : Performs an ordinal (integer) encoding of the categorical features.
        Contrary to TargetEncoder, this encoding is not supervised. Treating the
        resulting encoding as a numerical feature therefore leads to arbitrarily
        ordered values, which typically lowers predictive performance
        when used as preprocessing for a classifier or regressor.
    OneHotEncoder : Performs a one-hot encoding of categorical features. This
        unsupervised encoding is better suited for low cardinality categorical
        variables as it generates one new feature per unique category.

    References
    ----------
    .. [MIC] :doi:`Micci-Barreca, Daniele. "A preprocessing scheme for high-cardinality
       categorical attributes in classification and prediction problems"
       SIGKDD Explor. Newsl. 3, 1 (July 2001), 27–32. <10.1145/507533.507538>`

    Examples
    --------
    With `smooth="auto"`, the smoothing parameter is set to an empirical Bayes estimate:

    >>> import numpy as np
    >>> from sklearn.preprocessing import TargetEncoder
    >>> X = np.array([["dog"] * 20 + ["cat"] * 30 + ["snake"] * 38], dtype=object).T
    >>> y = [90.3] * 5 + [80.1] * 15 + [20.4] * 5 + [20.1] * 25 + [21.2] * 8 + [49] * 30
    >>> enc_auto = TargetEncoder(smooth="auto")
    >>> X_trans = enc_auto.fit_transform(X, y)

    >>> # A high `smooth` parameter puts more weight on global mean on the categorical
    >>> # encodings:
    >>> enc_high_smooth = TargetEncoder(smooth=5000.0).fit(X, y)
    >>> enc_high_smooth.target_mean_
    np.float64(44...)
    >>> enc_high_smooth.encodings_
    [array([44..., 44..., 44...])]

    >>> # On the other hand, a low `smooth` parameter puts more weight on target
    >>> # conditioned on the value of the categorical:
    >>> enc_low_smooth = TargetEncoder(smooth=1.0).fit(X, y)
    >>> enc_low_smooth.encodings_
    [array([20..., 80..., 43...])]
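
    Multiclass targets are supported as well. The snippet below is an
    illustrative sketch (the toy arrays `X_mc` and `y_mc` are arbitrary)
    showing that each input feature is expanded into one encoded column per
    class:

    >>> X_mc = np.array([["dog"] * 9 + ["cat"] * 9], dtype=object).T
    >>> y_mc = ["low", "mid", "high"] * 6
    >>> TargetEncoder(target_type="multiclass").fit_transform(X_mc, y_mc).shape
    (18, 3)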
    """

    _parameter_constraints: dict = {
        "categories": [StrOptions({"auto"}), list],
        "target_type": [StrOptions({"auto", "continuous", "binary", "multiclass"})],
        "smooth": [StrOptions({"auto"}), Interval(Real, 0, None, closed="left")],
        "cv": [Interval(Integral, 2, None, closed="left")],
        "shuffle": ["boolean"],
        "random_state": ["random_state"],
    }

    def __init__(
        self,
        categories="auto",
        target_type="auto",
        smooth="auto",
        cv=5,
        shuffle=True,
        random_state=None,
    ):
        self.categories = categories
        self.target_type = target_type
        self.smooth = smooth
        self.cv = cv
        self.shuffle = shuffle
        self.random_state = random_state

    @_fit_context(prefer_skip_nested_validation=True)
    def fit(self, X, y):
        """Fit the :class:`TargetEncoder` to X and y.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data to determine the categories of each feature.

        y : array-like of shape (n_samples,)
            The target data used to encode the categories.

        Returns
        -------
        self : object
            Fitted encoder.
        """
        self._fit_encodings_all(X, y)
        return self

    @_fit_context(prefer_skip_nested_validation=True)
    def fit_transform(self, X, y):
        """Fit :class:`TargetEncoder` and transform X with the target encoding.

        .. note::
            `fit(X, y).transform(X)` does not equal `fit_transform(X, y)` because a
            :term:`cross fitting` scheme is used in `fit_transform` for encoding.
            See the :ref:`User Guide <target_encoder>` for details.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data to determine the categories of each feature.

        y : array-like of shape (n_samples,)
            The target data used to encode the categories.

        Returns
        -------
        X_trans : ndarray of shape (n_samples, n_features) or \
                (n_samples, (n_features * n_classes))
            Transformed input.
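
        Examples
        --------
        A minimal illustrative sketch (toy data, arbitrary names); only the
        output shape is shown because the encoded values depend on the
        internal cross-fitting splits:

        >>> import numpy as np
        >>> from sklearn.preprocessing import TargetEncoder
        >>> X = np.array([["dog"] * 10 + ["cat"] * 10], dtype=object).T
        >>> y = [2.5, 1.0] * 10
        >>> TargetEncoder(cv=2, random_state=0).fit_transform(X, y).shape
        (20, 1)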
        """
        from ..model_selection import KFold, StratifiedKFold

        X_ordinal, X_known_mask, y_encoded, n_categories = self._fit_encodings_all(
            X, y
        )

        # The cv splitter is restricted to (Stratified)KFold so that validation
        # folds do not overlap, otherwise the fit_transform output would not be
        # well specified.
        if self.target_type_ == "continuous":
            cv = KFold(self.cv, shuffle=self.shuffle, random_state=self.random_state)
        else:
            cv = StratifiedKFold(
                self.cv, shuffle=self.shuffle, random_state=self.random_state
            )

        # One output column per (feature, class) pair for multiclass targets,
        # otherwise keep the shape of the ordinal encoding.
        if self.target_type_ == "multiclass":
            X_out = np.empty(
                (X_ordinal.shape[0], X_ordinal.shape[1] * len(self.classes_)),
                dtype=np.float64,
            )
        else:
            X_out = np.empty_like(X_ordinal, dtype=np.float64)

        for train_idx, test_idx in cv.split(X, y):
            X_train, y_train = X_ordinal[train_idx, :], y_encoded[train_idx]
            y_train_mean = np.mean(y_train, axis=0)

            if self.target_type_ == "multiclass":
                encodings = self._fit_encoding_multiclass(
                    X_train, y_train, n_categories, y_train_mean
                )
            else:
                encodings = self._fit_encoding_binary_or_continuous(
                    X_train, y_train, n_categories, y_train_mean
                )
            # Encode the held-out fold with encodings learned on the train fold.
            self._transform_X_ordinal(
                X_out, X_ordinal, ~X_known_mask, test_idx, encodings, y_train_mean
            )
        return X_out

    def transform(self, X):
        """Transform X with the target encoding.

        .. note::
            `fit(X, y).transform(X)` does not equal `fit_transform(X, y)` because a
            :term:`cross fitting` scheme is used in `fit_transform` for encoding.
            See the :ref:`User Guide <target_encoder>` for details.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data to determine the categories of each feature.

        Returns
        -------
        X_trans : ndarray of shape (n_samples, n_features) or \
                (n_samples, (n_features * n_classes))
            Transformed input.
        ignore	allow-nanhandle_unknownensure_all_finiter   r   r   r3   N)
_transformr8   r9   r:   r;   r<   r=   r>   r?   rD   slice
encodings_target_mean_)r&   r-   rE   rF   rI   s        r'   	transformzTargetEncoder.transform4  s    & #'//h+ #2 #
 #
	<
 ,,H#Y_Q%7#dm:L:L%LMj  EE
 M)2:>>>E!!M$KKO	
 	
 	
 r)   c                    ddl m}m} t          ||           |                     |dd           | j        dk    r5d}t          |d	          }||vrt          d
|d| d          || _        n| j        | _        d| _	        | j        dk    r, |            }|
                    |          }|j	        | _	        nI| j        dk    r, |            }|
                    |          }|j	        | _	        nt          |d|           }t          j        |d          | _        |                     |dd          \  }	}
t          j        d | j        D             t          j        t'          | j                            }| j        dk    r|                     |	||| j                  }n|                     |	||| j                  }|| _        |	|
||fS )z(Fit a target encoding with all the data.r   )LabelBinarizerLabelEncoderrR   rS   rT   r   )r   r   r   r.   )
input_namez3Unknown label type: Target type was inferred to be z. Only z are supported.Nr   r   T)	y_numeric	estimatorr   r5   c              3   4   K   | ]}t          |          V  d S r%   )r<   ).0category_for_features     r'   	<genexpr>z3TargetEncoder._fit_encodings_all.<locals>.<genexpr>  s,      TT+?S%&&TTTTTTr)   )r4   count)preprocessingr]   r^   r   _fitr   r
   
ValueErrorr8   r=   rP   r   r9   rA   rZ   rW   fromitercategories_int64r<   rB   rC   rY   )r&   r-   r.   r]   r^   accepted_target_typesinferred_type_of_targetlabel_encoderlabel_binarizerrE   rF   rH   rO   s                r'   r,   z TargetEncoder._fit_encodings_all^  sP   	
 	
 	
 	
 	
 	
 	
 	

 	 1%%%		!H	LLLv%%$J!&4Q3&G&G&G#&.CCC !.! !9N! ! !  
 !8D $ 0D(((LNNM++A..A)2DMM,..,n..O--a00A+4DMMdd;;;AGAA..."&//h+ #2 #
 #
	< {TT4CSTTT(d&''
 
 

 ,,55!	 II ??!	 I $,<77r)   c                     | j         dk    r(t          j        |          }t          |||||          }nt	          |||| j         |          }|S )zLearn target encodings.r   )r   r9   varr   r   )r&   rE   r.   rH   target_mean
y_variancerO   s          r'   rC   z0TargetEncoder._fit_encoding_binary_or_continuous  sh     ;&  J6 II + I r)   c                 8  	
 | j         t          | j                  
g 	t          
          D ]A}|dd|f         }|                     |||||                   }	                    |           B
fdt                    D             }	fd|D             S )aD  Learn multiclass encodings.

        Learn encodings for each class (c) then reorder encodings such that
        the same features (f) are grouped together. `reorder_index` enables
        converting from:
        f0_c0, f1_c0, f0_c1, f1_c1, f0_c2, f1_c2
        to:
        f0_c0, f0_c1, f0_c2, f1_c0, f1_c1, f1_c2
        """
        n_features = self.n_features_in_
        n_classes = len(self.classes_)

        encodings = []
        # Learn one set of encodings per class on the one-vs-all binarized target.
        for i in range(n_classes):
            y_class = y[:, i]
            encoding = self._fit_encoding_binary_or_continuous(
                X_ordinal, y_class, n_categories, target_mean[i]
            )
            encodings.extend(encoding)

        # Reorder from class-major to feature-major (see docstring above).
        reorder_index = (
            idx
            for start in range(n_classes)
            for idx in range(start, (n_classes * n_features), n_classes)
        )
        return [encodings[idx] for idx in reorder_index]

    def _transform_X_ordinal(
        self, X_out, X_ordinal, X_unknown_mask, row_indices, encodings, target_mean
    ):
        """Transform X_ordinal using encodings.

        In the multiclass case, `X_ordinal` and `X_unknown_mask` have column
        (axis=1) size `n_features`, while `encodings` has length of size
        `n_features * n_classes`. `feat_idx` deals with this by repeating
        feature indices by `n_classes`. E.g., for 3 features, 2 classes:
        0,0,1,1,2,2

        Additionally, `target_mean` is of shape (`n_classes`,) so `mean_idx`
        cycles through 0 to `n_classes` - 1, `n_features` times.
        """
        if self.target_type_ == "multiclass":
            n_classes = len(self.classes_)
            for e_idx, encoding in enumerate(encodings):
                # Repeat feature indices by n_classes.
                feat_idx = e_idx // n_classes
                # Cycle through each class.
                mean_idx = e_idx % n_classes
                X_out[row_indices, e_idx] = encoding[X_ordinal[row_indices, feat_idx]]
                X_out[X_unknown_mask[:, feat_idx], e_idx] = target_mean[mean_idx]
        else:
            for e_idx, encoding in enumerate(encodings):
                X_out[row_indices, e_idx] = encoding[X_ordinal[row_indices, e_idx]]
                X_out[X_unknown_mask[:, e_idx], e_idx] = target_mean

    def get_feature_names_out(self, input_features=None):
        """Get output feature names for transformation.

        Parameters
        ----------
        input_features : array-like of str or None, default=None
            Not used, present here for API consistency by convention.

        Returns
        -------
        feature_names_out : ndarray of str objects
            Transformed feature names. `feature_names_in_` is used unless it is
            not defined, in which case the following input feature names are
            generated: `["x0", "x1", ..., "x(n_features_in_ - 1)"]`.
            When `type_of_target_` is "multiclass" the names are of the format
            '<feature_name>_<class_name>'.
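
        Examples
        --------
        An illustrative sketch (toy data, arbitrary values) of the multiclass
        naming scheme with auto-generated input feature names:

        >>> import numpy as np
        >>> from sklearn.preprocessing import TargetEncoder
        >>> X = np.array([["dog"] * 9 + ["cat"] * 9], dtype=object).T
        >>> y = ["low", "mid", "high"] * 6
        >>> enc = TargetEncoder(target_type="multiclass").fit(X, y)
        >>> enc.get_feature_names_out()
        array(['x0_high', 'x0_low', 'x0_mid'], dtype=object)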
        """
        check_is_fitted(self, "n_features_in_")
        feature_names = _check_feature_names_in(self, input_features)
        if self.target_type_ == "multiclass":
            feature_names = [
                f"{feature_name}_{class_name}"
                for feature_name in feature_names
                for class_name in self.classes_
            ]
            return np.asarray(feature_names, dtype=object)
        else:
            return feature_names

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.target_tags.required = True
        return tags