Hands-on regression with xgb.XGBRegressor

from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
import numpy as np
import xgboost as xgb

# Note: load_boston was deprecated in scikit-learn 1.0 and removed in 1.2,
# so this example assumes an older scikit-learn version is installed.
boston = load_boston()
X = boston.data    # feature matrix
y = boston.target  # target values

# Split the data: 80% for training, 20% for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
print(X_train.shape)
print(X_train)
(404, 13)
[[2.59406e+01 0.00000e+00 1.81000e+01 ... 2.02000e+01 1.27360e+02
  2.66400e+01]
 [1.88360e-01 0.00000e+00 6.91000e+00 ... 1.79000e+01 3.96900e+02
  1.41500e+01]
 [8.87300e-02 2.10000e+01 5.64000e+00 ... 1.68000e+01 3.95560e+02
  1.34500e+01]
 ...
 [3.73800e-02 0.00000e+00 5.19000e+00 ... 2.02000e+01 3.89400e+02
  6.75000e+00]
 [1.40520e-01 0.00000e+00 1.05900e+01 ...]]

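The snippet above only prepares the data; mean_squared_error is imported but never used. A minimal sketch of the remaining steps might look like the following, assuming illustrative hyperparameter values (n_estimators, max_depth, and learning_rate here are not from the original post):

# Train an XGBoost regressor on the training split
model = xgb.XGBRegressor(
    objective="reg:squarederror",  # squared-error objective for regression
    n_estimators=100,              # number of boosting rounds (illustrative value)
    max_depth=4,                   # depth of each tree (illustrative value)
    learning_rate=0.1,             # shrinkage applied to each tree's contribution
)
model.fit(X_train, y_train)

# Evaluate on the held-out 20% with root-mean-squared error
y_pred = model.predict(X_test)
rmse = np.sqrt(mean_squared_error(y_test, y_pred))
print("RMSE on the test set:", rmse)

reg:squarederror is the default objective for XGBRegressor in recent xgboost releases, so passing it explicitly mainly documents intent.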
Reposted from blog.csdn.net/qq_38735017/article/details/111203258