import warnings
from numbers import Real

import numpy as np
from scipy import sparse
from scipy.optimize import linprog

from ..base import BaseEstimator, RegressorMixin, _fit_context
from ..exceptions import ConvergenceWarning
from ..utils import _safe_indexing
from ..utils._param_validation import Interval, StrOptions
from ..utils.fixes import parse_version, sp_version
from ..utils.validation import _check_sample_weight, validate_data
from ._base import LinearModel


class QuantileRegressor(LinearModel, RegressorMixin, BaseEstimator):
    """Linear regression model that predicts conditional quantiles.

    The linear :class:`QuantileRegressor` optimizes the pinball loss for a
    desired `quantile` and is robust to outliers.

    This model uses an L1 regularization like
    :class:`~sklearn.linear_model.Lasso`.
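
    For a target quantile ``q`` and regularization strength ``alpha``, the
    estimator minimizes the objective::

        1 / n_samples * sum_i PB_q(y_i - X_i @ coef_ - intercept_)
        + alpha * ||coef_||_1

    where the pinball loss ``PB_q(t)`` equals ``q * t`` for ``t >= 0`` and
    ``(q - 1) * t`` otherwise; the intercept term is not penalized.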

    Read more in the :ref:`User Guide <quantile_regression>`.

    .. versionadded:: 1.0

    Parameters
    ----------
    quantile : float, default=0.5
        The quantile that the model tries to predict. It must be strictly
        between 0 and 1. If 0.5 (default), the model predicts the 50%
        quantile, i.e. the median.

    alpha : float, default=1.0
        Regularization constant that multiplies the L1 penalty term.

    fit_intercept : bool, default=True
        Whether or not to fit the intercept.

    solver : {'highs-ds', 'highs-ipm', 'highs', 'interior-point', \
            'revised simplex'}, default='highs'
        Method used by :func:`scipy.optimize.linprog` to solve the linear
        programming formulation.

        It is recommended to use the highs methods because
        they are the fastest ones. Solvers "highs-ds", "highs-ipm" and "highs"
        support sparse input data and, in fact, always convert to sparse csc.

        As of `scipy>=1.11.0`, "interior-point" is no longer available.

        .. versionchanged:: 1.4
           The default of `solver` changed to `"highs"` in version 1.4.

    solver_options : dict, default=None
        Additional parameters passed to :func:`scipy.optimize.linprog` as
        options. If `None` and if `solver='interior-point'`, then
        `{"lstsq": True}` is passed to :func:`scipy.optimize.linprog` for the
        sake of stability.
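
        For example, ``solver_options={"disp": True}`` makes
        :func:`scipy.optimize.linprog` print the solver's progress.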

    Attributes
    ----------
    coef_ : array of shape (n_features,)
        Estimated coefficients for the features.

    intercept_ : float
        The intercept of the model, aka bias term.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    feature_names_in_ : ndarray of shape (`n_features_in_`,)
        Names of features seen during :term:`fit`. Defined only when `X`
        has feature names that are all strings.

        .. versionadded:: 1.0

    n_iter_ : int
        The actual number of iterations performed by the solver.

    See Also
    --------
    Lasso : The Lasso is a linear model that estimates sparse coefficients
        with l1 regularization.
    HuberRegressor : Linear regression model that is robust to outliers.

    Examples
    --------
    >>> from sklearn.linear_model import QuantileRegressor
    >>> import numpy as np
    >>> n_samples, n_features = 10, 2
    >>> rng = np.random.RandomState(0)
    >>> y = rng.randn(n_samples)
    >>> X = rng.randn(n_samples, n_features)
    >>> # the following import is optional in practice
    >>> from sklearn.utils.fixes import sp_version, parse_version
    >>> reg = QuantileRegressor(quantile=0.8).fit(X, y)
    >>> np.mean(y <= reg.predict(X))
    np.float64(0.8)
    """

    _parameter_constraints: dict = {
        "quantile": [Interval(Real, 0, 1, closed="neither")],
        "alpha": [Interval(Real, 0, None, closed="left")],
        "fit_intercept": ["boolean"],
        "solver": [
            StrOptions(
                {"highs-ds", "highs-ipm", "highs", "interior-point", "revised simplex"}
            )
        ],
        "solver_options": [dict, None],
    }

    def __init__(
        self,
        *,
        quantile=0.5,
        alpha=1.0,
        fit_intercept=True,
        solver="highs",
        solver_options=None,
    ):
        self.quantile = quantile
        self.alpha = alpha
        self.fit_intercept = fit_intercept
        self.solver = solver
        self.solver_options = solver_options

    @_fit_context(prefer_skip_nested_validation=True)
    def fit(self, X, y, sample_weight=None):
        """Fit the model according to the given training data.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,)
            Target values.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights.

        Returns
        -------
        self : object
            Returns self.
        """
        X, y = validate_data(
            self,
            X,
            y,
            accept_sparse=["csc", "csr", "coo"],
            y_numeric=True,
            multi_output=False,
        )
        sample_weight = _check_sample_weight(sample_weight, X)

        n_features = X.shape[1]
        n_params = n_features
        if self.fit_intercept:
            n_params += 1

        # The objective is defined as 1/n * sum(pinball loss) + alpha * L1,
        # so rescale the penalty term accordingly, which is equivalent.
        alpha = np.sum(sample_weight) * self.alpha

        if self.solver == "interior-point" and sp_version >= parse_version("1.11.0"):
            raise ValueError(
                f"Solver {self.solver} is not anymore available in SciPy >= 1.11.0."
            )

        if sparse.issparse(X) and self.solver not in ["highs", "highs-ds", "highs-ipm"]:
            raise ValueError(
                f"Solver {self.solver} does not support sparse X. "
                "Use solver 'highs' for example."
            )

        # Make the default interior-point solver more stable.
        if self.solver_options is None and self.solver == "interior-point":
            solver_options = {"lstsq": True}
        else:
            solver_options = self.solver_options

        # Linear programming formulation of quantile regression:
        #     min_x c @ x   subject to   A_eq @ x == b_eq,  x >= 0
        # with x = (s0, s, t0, t, u, v) >= 0, intercept = s0 - t0,
        # coef = s - t and residual y - X @ coef - intercept = u - v.
        # Filtering out zero sample weights from the beginning makes life
        # easier for the linprog solver.
        indices = np.nonzero(sample_weight)[0]
        n_indices = len(indices)
        if n_indices < len(sample_weight):
            sample_weight = sample_weight[indices]
            X = _safe_indexing(X, indices)
            y = _safe_indexing(y, indices)

        c = np.concatenate(
            [
                np.full(2 * n_params, fill_value=alpha),
                sample_weight * self.quantile,
                sample_weight * (1 - self.quantile),
            ]
        )
        if self.fit_intercept:
            # do not penalize the intercept
            c[0] = 0
            c[n_params] = 0

        if self.solver in ["highs", "highs-ds", "highs-ipm"]:
            # The highs solvers work with a sparse CSC layout internally, so
            # build the constraint matrix as sparse CSC right away.
            eye = sparse.eye(n_indices, dtype=X.dtype, format="csc")
            if self.fit_intercept:
                ones = sparse.csc_matrix(np.ones(shape=(n_indices, 1), dtype=X.dtype))
                A_eq = sparse.hstack([ones, X, -ones, -X, eye, -eye], format="csc")
            else:
                A_eq = sparse.hstack([X, -X, eye, -eye], format="csc")
        else:
            eye = np.eye(n_indices)
            if self.fit_intercept:
                ones = np.ones((n_indices, 1))
                A_eq = np.concatenate([ones, X, -ones, -X, eye, -eye], axis=1)
            else:
                A_eq = np.concatenate([X, -X, eye, -eye], axis=1)

        b_eq = y

        result = linprog(
            c=c, A_eq=A_eq, b_eq=b_eq, method=self.solver, options=solver_options
        )
        solution = result.x
        if not result.success:
            failure = {
                1: "Iteration limit reached.",
                2: "Problem appears to be infeasible.",
                3: "Problem appears to be unbounded.",
                4: "Numerical difficulties encountered.",
            }
            warnings.warn(
                "Linear programming for QuantileRegressor did not succeed.\n"
                f"Status is {result.status}: "
                + failure.setdefault(result.status, "unknown reason")
                + "\n"
                + "Result message of linprog:\n"
                + result.message,
                ConvergenceWarning,
            )

        # positive slack - negative slack
        params = solution[:n_params] - solution[n_params : 2 * n_params]

        self.n_iter_ = result.nit

        if self.fit_intercept:
            self.coef_ = params[1:]
            self.intercept_ = params[0]
        else:
            self.coef_ = params
            self.intercept_ = 0.0
        return self

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.input_tags.sparse = True
        return tags
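

if __name__ == "__main__":
    # Minimal, self-contained sketch of the linear programming formulation
    # solved by ``QuantileRegressor.fit`` above, assuming dense X,
    # fit_intercept=True and unit sample weights. The variable layout follows
    # the comments in ``fit``: x = (s0, s, t0, t, u, v), intercept = s0 - t0,
    # coef = s - t. Kept self-contained so it can be copied into a standalone
    # script.
    import numpy as np
    from scipy.optimize import linprog

    rng = np.random.RandomState(42)
    n_samples, n_features = 50, 2
    X = rng.randn(n_samples, n_features)
    y = X @ np.array([1.0, -2.0]) + rng.randn(n_samples)
    quantile, alpha = 0.5, 0.0  # median regression, no L1 penalty
    n_params = n_features + 1  # coefficients plus intercept

    # Objective: L1 penalty on the coefficient slacks, pinball weights on the
    # positive (u) and negative (v) residual slacks.
    c = np.concatenate(
        [
            np.full(2 * n_params, fill_value=alpha * n_samples),
            np.full(n_samples, quantile),
            np.full(n_samples, 1 - quantile),
        ]
    )
    c[0] = c[n_params] = 0.0  # do not penalize the intercept
    ones = np.ones((n_samples, 1))
    eye = np.eye(n_samples)
    # Equality constraints: intercept + X @ coef + (u - v) = y, with x >= 0
    # enforced by linprog's default bounds.
    A_eq = np.concatenate([ones, X, -ones, -X, eye, -eye], axis=1)
    result = linprog(c=c, A_eq=A_eq, b_eq=y, method="highs")
    params = result.x[:n_params] - result.x[n_params : 2 * n_params]
    print("intercept:", params[0], "coef:", params[1:])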