
"""This module contains utility routines."""

from ...base import is_classifier
from .binning import _BinMapper


def get_equivalent_estimator(estimator, lib="lightgbm", n_classes=None):
    """Return an unfitted estimator from another lib with matching hyperparams.

    This utility function takes care of renaming the sklearn parameters into
    their LightGBM, XGBoost or CatBoost equivalent parameters.

    # unmapped XGB parameters:
    # - min_samples_leaf
    # - min_data_in_bin
    # - min_split_gain (there is min_split_loss though?)

    # unmapped CatBoost parameters:
    # max_leaves
    # min_*
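
    Examples
    --------
    A minimal sketch (skipped as a doctest because it requires the target
    library to be installed; the hyperparameter values are arbitrary):

    >>> from sklearn.ensemble import HistGradientBoostingRegressor
    >>> est = HistGradientBoostingRegressor(max_iter=10, early_stopping=False)
    >>> get_equivalent_estimator(est, lib="lightgbm")  # doctest: +SKIP
    LGBMRegressor(...)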
    """
    if lib not in ("lightgbm", "xgboost", "catboost"):
        raise ValueError(
            "accepted libs are lightgbm, xgboost, and catboost. got {}".format(lib)
        )

    sklearn_params = estimator.get_params()

    if sklearn_params["loss"] == "auto":
        raise ValueError(
            "auto loss is not accepted. We need to know if "
            "the problem is binary or multiclass classification."
        )
    if sklearn_params["early_stopping"]:
        raise NotImplementedError("Early stopping should be deactivated.")

    lightgbm_loss_mapping = {
        "squared_error": "regression_l2",
        "absolute_error": "regression_l1",
        "log_loss": "binary" if n_classes == 2 else "multiclass",
        "gamma": "gamma",
        "poisson": "poisson",
    }

    lightgbm_params = {
        "objective": lightgbm_loss_mapping[sklearn_params["loss"]],
        "learning_rate": sklearn_params["learning_rate"],
        "n_estimators": sklearn_params["max_iter"],
        "num_leaves": sklearn_params["max_leaf_nodes"],
        "max_depth": sklearn_params["max_depth"],
        "min_data_in_leaf": sklearn_params["min_samples_leaf"],
        "reg_lambda": sklearn_params["l2_regularization"],
        "max_bin": sklearn_params["max_bins"],
        "min_data_in_bin": 1,
        "min_sum_hessian_in_leaf": 1e-3,
        "min_split_gain": 0,
        "verbosity": 10 if sklearn_params["verbose"] else -10,
        "boost_from_average": True,
        "enable_bundle": False,  # also makes feature order consistent
        "subsample_for_bin": _BinMapper().subsample,
        "poisson_max_delta_step": 1e-12,
        "feature_fraction_bynode": sklearn_params["max_features"],
    }

    if sklearn_params["loss"] == "log_loss" and n_classes > 2:
        # LightGBM multiplies hessians by 2 in its multiclass loss.
        lightgbm_params["min_sum_hessian_in_leaf"] *= 2
        # Compensate for LightGBM's hessian rescaling in the multiclass case,
        # which is equivalent to rescaling the learning rate.
        lightgbm_params["learning_rate"] *= n_classes / (n_classes - 1)

    # XGB
    xgboost_loss_mapping = {
        "squared_error": "reg:linear",
        "absolute_error": "LEAST_ABSOLUTE_DEV_NOT_SUPPORTED",
        "log_loss": "reg:logistic" if n_classes == 2 else "multi:softmax",
        "gamma": "reg:gamma",
        "poisson": "count:poisson",
    }

    xgboost_params = {
        "tree_method": "hist",
        "grow_policy": "lossguide",  # so that we can set max_leaves
        "objective": xgboost_loss_mapping[sklearn_params["loss"]],
        "learning_rate": sklearn_params["learning_rate"],
        "n_estimators": sklearn_params["max_iter"],
        "max_leaves": sklearn_params["max_leaf_nodes"],
        "max_depth": sklearn_params["max_depth"] or 0,
        "lambda": sklearn_params["l2_regularization"],
        "max_bin": sklearn_params["max_bins"],
        "min_child_weight": 1e-3,
        "verbosity": 2 if sklearn_params["verbose"] else 0,
        "silent": sklearn_params["verbose"] == 0,
        "n_jobs": -1,
        "colsample_bynode": sklearn_params["max_features"],
    }

    # CatBoost
    catboost_loss_mapping = {
        "squared_error": "RMSE",
        # CatBoost does not support MAE when leaf_estimation_method is Newton
        "absolute_error": "LEAST_ABSOLUTE_DEV_NOT_SUPPORTED",
        "log_loss": "Logloss" if n_classes == 2 else "MultiClass",
        "poisson": "Poisson",
    }

    catboost_params = {
        "loss_function": catboost_loss_mapping[sklearn_params["loss"]],
        "learning_rate": sklearn_params["learning_rate"],
        "iterations": sklearn_params["max_iter"],
        "depth": sklearn_params["max_depth"],
        "reg_lambda": sklearn_params["l2_regularization"],
        "max_bin": sklearn_params["max_bins"],
        "feature_border_type": "Median",
        "leaf_estimation_method": "Newton",
        "verbose": bool(sklearn_params["verbose"]),
    }

    if lib == "lightgbm":
        from lightgbm import LGBMClassifier, LGBMRegressor

        if is_classifier(estimator):
            return LGBMClassifier(**lightgbm_params)
        else:
            return LGBMRegressor(**lightgbm_params)

    elif lib == "xgboost":
        from xgboost import XGBClassifier, XGBRegressor

        if is_classifier(estimator):
            return XGBClassifier(**xgboost_params)
        else:
            return XGBRegressor(**xgboost_params)

    else:
        from catboost import CatBoostClassifier, CatBoostRegressor

        if is_classifier(estimator):
            return CatBoostClassifier(**catboost_params)
        else:
            return CatBoostRegressor(**catboost_params)