"""Base class for mixture models."""

import warnings
from abc import ABCMeta, abstractmethod
from numbers import Integral, Real
from time import time

import numpy as np
from scipy.special import logsumexp

from .. import cluster
from ..base import BaseEstimator, DensityMixin, _fit_context
from ..cluster import kmeans_plusplus
from ..exceptions import ConvergenceWarning
from ..utils import check_random_state
from ..utils._param_validation import Interval, StrOptions
from ..utils.validation import check_is_fitted, validate_data


def _check_shape(param, param_shape, name):
    """Validate the shape of the input parameter 'param'.

    Parameters
    ----------
    param : array

    param_shape : tuple

    name : str
    """
    param = np.array(param)
    if param.shape != param_shape:
        raise ValueError(
            "The parameter '%s' should have the shape of %s, but got %s"
            % (name, param_shape, param.shape)
        )


class BaseMixture(DensityMixin, BaseEstimator, metaclass=ABCMeta):
    """Base class for mixture models.

    This abstract class specifies an interface for all mixture classes and
    provides basic common methods for mixture models.
    """

    _parameter_constraints: dict = {
        "n_components": [Interval(Integral, 1, None, closed="left")],
        "tol": [Interval(Real, 0.0, None, closed="left")],
        "reg_covar": [Interval(Real, 0.0, None, closed="left")],
        "max_iter": [Interval(Integral, 0, None, closed="left")],
        "n_init": [Interval(Integral, 1, None, closed="left")],
        "init_params": [
            StrOptions({"kmeans", "random", "random_from_data", "k-means++"})
        ],
        "random_state": ["random_state"],
        "warm_start": ["boolean"],
        "verbose": ["verbose"],
        "verbose_interval": [Interval(Integral, 1, None, closed="left")],
    }

    def __init__(
        self,
        n_components,
        tol,
        reg_covar,
        max_iter,
        n_init,
        init_params,
        random_state,
        warm_start,
        verbose,
        verbose_interval,
    ):
        self.n_components = n_components
        self.tol = tol
        self.reg_covar = reg_covar
        self.max_iter = max_iter
        self.n_init = n_init
        self.init_params = init_params
        self.random_state = random_state
        self.warm_start = warm_start
        self.verbose = verbose
        self.verbose_interval = verbose_interval

    @abstractmethod
    def _check_parameters(self, X):
        """Check initial parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)
        """
        pass

    def _initialize_parameters(self, X, random_state):
        """Initialize the model parameters.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        random_state : RandomState
            A random number generator instance that controls the random seed
            used for the method chosen to initialize the parameters.
        """
        n_samples, _ = X.shape

        if self.init_params == "kmeans":
            resp = np.zeros((n_samples, self.n_components))
            label = (
                cluster.KMeans(
                    n_clusters=self.n_components, n_init=1, random_state=random_state
                )
                .fit(X)
                .labels_
            )
            resp[np.arange(n_samples), label] = 1
        elif self.init_params == "random":
            resp = random_state.uniform(size=(n_samples, self.n_components))
            resp /= resp.sum(axis=1)[:, np.newaxis]
        elif self.init_params == "random_from_data":
            resp = np.zeros((n_samples, self.n_components))
            indices = random_state.choice(
                n_samples, size=self.n_components, replace=False
            )
            resp[indices, np.arange(self.n_components)] = 1
        elif self.init_params == "k-means++":
            resp = np.zeros((n_samples, self.n_components))
            _, indices = kmeans_plusplus(
                X,
                self.n_components,
                random_state=random_state,
            )
            resp[indices, np.arange(self.n_components)] = 1

        self._initialize(X, resp)

    @abstractmethod
    def _initialize(self, X, resp):
        """Initialize the model parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        resp : array-like of shape (n_samples, n_components)
        """
        pass

    def fit(self, X, y=None):
        """Estimate model parameters with the EM algorithm.

        The method fits the model ``n_init`` times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for ``max_iter``
        times until the change of likelihood or lower bound is less than
        ``tol``, otherwise, a ``ConvergenceWarning`` is raised.
        If ``warm_start`` is ``True``, then ``n_init`` is ignored and a single
        initialization is performed upon the first call. Upon consecutive
        calls, training starts where it left off.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        self : object
            The fitted mixture.
        """
        # parameters are validated in fit_predict
        self.fit_predict(X, y)
        return self

    @_fit_context(prefer_skip_nested_validation=True)
    def fit_predict(self, X, y=None):
        """Estimate model parameters using X and predict the labels for X.

        The method fits the model n_init times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for `max_iter`
        times until the change of likelihood or lower bound is less than
        `tol`, otherwise, a :class:`~sklearn.exceptions.ConvergenceWarning` is
        raised. After fitting, it predicts the most probable label for the
        input data points.

        .. versionadded:: 0.20

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        """
        X = validate_data(
            self, X, dtype=[np.float64, np.float32], ensure_min_samples=2
        )
        if X.shape[0] < self.n_components:
            raise ValueError(
                "Expected n_samples >= n_components "
                f"but got n_components = {self.n_components}, "
                f"n_samples = {X.shape[0]}"
            )
        self._check_parameters(X)

        # if we enable warm_start, we will have a unique initialisation
        do_init = not (self.warm_start and hasattr(self, "converged_"))
        n_init = self.n_init if do_init else 1

        max_lower_bound = -np.inf
        self.converged_ = False

        random_state = check_random_state(self.random_state)

        n_samples, _ = X.shape
        for init in range(n_init):
            self._print_verbose_msg_init_beg(init)

            if do_init:
                self._initialize_parameters(X, random_state)

            lower_bound = -np.inf if do_init else self.lower_bound_

            if self.max_iter == 0:
                best_params = self._get_parameters()
                best_n_iter = 0
            else:
                converged = False
                for n_iter in range(1, self.max_iter + 1):
                    prev_lower_bound = lower_bound

                    log_prob_norm, log_resp = self._e_step(X)
                    self._m_step(X, log_resp)
                    lower_bound = self._compute_lower_bound(log_resp, log_prob_norm)

                    change = lower_bound - prev_lower_bound
                    self._print_verbose_msg_iter_end(n_iter, change)

                    if abs(change) < self.tol:
                        converged = True
                        break

                self._print_verbose_msg_init_end(lower_bound, converged)

                if lower_bound > max_lower_bound or max_lower_bound == -np.inf:
                    max_lower_bound = lower_bound
                    best_params = self._get_parameters()
                    best_n_iter = n_iter
                    self.converged_ = converged

        # Only warn about non-convergence when at least one EM iteration was
        # requested; with max_iter == 0 the user only asked for initialization.
        if not self.converged_ and self.max_iter > 0:
            warnings.warn(
                (
                    "Best performing initialization did not converge. "
                    "Try different init parameters, or increase max_iter, "
                    "tol, or check for degenerate data."
                ),
                ConvergenceWarning,
            )

        self._set_parameters(best_params)
        self.n_iter_ = best_n_iter
        self.lower_bound_ = max_lower_bound

        # Always do a final e-step to guarantee that the labels returned by
        # fit_predict(X) are always consistent with fit(X).predict(X)
        # for any value of max_iter and tol (and any random_state).
        _, log_resp = self._e_step(X)

        return log_resp.argmax(axis=1)

    def _e_step(self, X):
        """E step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : float
            Mean of the logarithms of the probabilities of each sample in X

        log_responsibility : array, shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        """
        log_prob_norm, log_resp = self._estimate_log_prob_resp(X)
        return np.mean(log_prob_norm), log_resp

    @abstractmethod
    def _m_step(self, X, log_resp):
        """M step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        log_resp : array-like of shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        """
        pass

    @abstractmethod
    def _get_parameters(self):
        pass

    @abstractmethod
    def _set_parameters(self, params):
        pass

    def score_samples(self, X):
        """Compute the log-likelihood of each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        log_prob : array, shape (n_samples,)
            Log-likelihood of each sample in `X` under the current model.
        """
        check_is_fitted(self)
        X = validate_data(self, X, reset=False)

        return logsumexp(self._estimate_weighted_log_prob(X), axis=1)

    def score(self, X, y=None):
        """Compute the per-sample average log-likelihood of the given data X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_dimensions)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        log_likelihood : float
            Log-likelihood of `X` under the Gaussian mixture model.
        """
        return self.score_samples(X).mean()

    def predict(self, X):
        """Predict the labels for the data samples in X using trained model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        """
        check_is_fitted(self)
        X = validate_data(self, X, reset=False)
        return self._estimate_weighted_log_prob(X).argmax(axis=1)

    def predict_proba(self, X):
        """Evaluate the components' density for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        resp : array, shape (n_samples, n_components)
            Density of each Gaussian component for each sample in X.
        """
        check_is_fitted(self)
        X = validate_data(self, X, reset=False)
        _, log_resp = self._estimate_log_prob_resp(X)
        return np.exp(log_resp)

    def sample(self, n_samples=1):
        """Generate random samples from the fitted Gaussian distribution.

        Parameters
        ----------
        n_samples : int, default=1
            Number of samples to generate.

        Returns
        -------
        X : array, shape (n_samples, n_features)
            Randomly generated sample.

        y : array, shape (nsamples,)
            Component labels.
        """
        check_is_fitted(self)

        if n_samples < 1:
            raise ValueError(
                "Invalid value for 'n_samples': %d . The sampling requires at "
                "least one sample." % (n_samples)
            )

        _, n_features = self.means_.shape
        rng = check_random_state(self.random_state)
        n_samples_comp = rng.multinomial(n_samples, self.weights_)

        if self.covariance_type == "full":
            X = np.vstack(
                [
                    rng.multivariate_normal(mean, covariance, int(sample))
                    for (mean, covariance, sample) in zip(
                        self.means_, self.covariances_, n_samples_comp
                    )
                ]
            )
        elif self.covariance_type == "tied":
            X = np.vstack(
                [
                    rng.multivariate_normal(mean, self.covariances_, int(sample))
                    for (mean, sample) in zip(self.means_, n_samples_comp)
                ]
            )
        else:
            X = np.vstack(
                [
                    mean
                    + rng.standard_normal(size=(sample, n_features))
                    * np.sqrt(covariance)
                    for (mean, covariance, sample) in zip(
                        self.means_, self.covariances_, n_samples_comp
                    )
                ]
            )

        y = np.concatenate(
            [np.full(sample, j, dtype=int) for j, sample in enumerate(n_samples_comp)]
        )

        return (X, y)

    def _estimate_weighted_log_prob(self, X):
        """Estimate the weighted log-probabilities, log P(X | Z) + log weights.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        weighted_log_prob : array, shape (n_samples, n_component)
        """
        return self._estimate_log_prob(X) + self._estimate_log_weights()

    @abstractmethod
    def _estimate_log_weights(self):
        """Estimate log-weights in EM algorithm, E[ log pi ] in VB algorithm.

        Returns
        -------
        log_weight : array, shape (n_components, )
        """
        pass

    @abstractmethod
    def _estimate_log_prob(self, X):
        """Estimate the log-probabilities log P(X | Z).

        Compute the log-probabilities per each component for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob : array, shape (n_samples, n_component)
        """
        pass

    def _estimate_log_prob_resp(self, X):
        """Estimate log probabilities and responsibilities for each sample.

        Compute the log probabilities, weighted log probabilities per
        component and responsibilities for each sample in X with respect to
        the current state of the model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : array, shape (n_samples,)
            log p(X)

        log_responsibilities : array, shape (n_samples, n_components)
            logarithm of the responsibilities
        """
        weighted_log_prob = self._estimate_weighted_log_prob(X)
        log_prob_norm = logsumexp(weighted_log_prob, axis=1)
        with np.errstate(under="ignore"):
            # ignore underflow
            log_resp = weighted_log_prob - log_prob_norm[:, np.newaxis]
        return log_prob_norm, log_resp

    def _print_verbose_msg_init_beg(self, n_init):
        """Print verbose message on initialization."""
        if self.verbose == 1:
            print("Initialization %d" % n_init)
        elif self.verbose >= 2:
            print("Initialization %d" % n_init)
            self._init_prev_time = time()
            self._iter_prev_time = self._init_prev_time

    def _print_verbose_msg_iter_end(self, n_iter, diff_ll):
        """Print verbose message on initialization."""
        if n_iter % self.verbose_interval == 0:
            if self.verbose == 1:
                print("  Iteration %d" % n_iter)
            elif self.verbose >= 2:
                cur_time = time()
                print(
                    "  Iteration %d\t time lapse %.5fs\t ll change %.5f"
                    % (n_iter, cur_time - self._iter_prev_time, diff_ll)
                )
                self._iter_prev_time = cur_time

    def _print_verbose_msg_init_end(self, lb, init_has_converged):
        """Print verbose message on the end of iteration."""
        converged_msg = "converged" if init_has_converged else "did not converge"
        if self.verbose == 1:
            print(f"Initialization {converged_msg}.")
        elif self.verbose >= 2:
            t = time() - self._init_prev_time
            print(
                f"Initialization {converged_msg}. time lapse {t:.5f}s\t "
                f"lower bound {lb:.5f}."
            )
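

# ---------------------------------------------------------------------------
# Illustrative usage sketch (kept as a comment; it is not executed as part of
# this module).  It shows how the public API defined by ``BaseMixture``
# (``fit``, ``predict``, ``predict_proba``, ``score`` and ``sample``) is
# exercised through a concrete subclass such as
# :class:`~sklearn.mixture.GaussianMixture`.  The toy data and parameter
# values below are arbitrary choices made for this example only.
#
#   import numpy as np
#   from sklearn.mixture import GaussianMixture
#
#   rng = np.random.RandomState(0)
#   X = np.vstack(
#       [
#           rng.normal(loc=0.0, scale=1.0, size=(200, 2)),
#           rng.normal(loc=5.0, scale=1.0, size=(200, 2)),
#       ]
#   )
#
#   gm = GaussianMixture(n_components=2, n_init=3, random_state=0).fit(X)
#   labels = gm.predict(X)        # argmax of the per-sample responsibilities
#   proba = gm.predict_proba(X)   # soft responsibilities, i.e. exp(log_resp)
#   avg_ll = gm.score(X)          # mean per-sample log-likelihood
#   X_new, y_new = gm.sample(10)  # draw new points from the fitted mixture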