
import sklearn
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
from sklearn.linear_model import BayesianRidge
from sklearn.svm import SVR
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from xgboost import XGBRegressor
from lightgbm import LGBMRegressor
import pandas as pd
import joblib

# csv_data = pd.read_csv('multiple_train.csv',header=None, index_col=None)
csv_data = pd.read_csv('add_train.csv',header=None, index_col=None)
datas = csv_data.values
X = datas[:, 4:(4+7)]
# y = datas[:, :4]
y = datas[:, (4+7+4):(4+7+4+4)]
y1 = y[:, 0]
y2 = y[:, 1]
y3 = y[:, 2]
y4 = y[:, 3]
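
# Column layout assumed here (inferred from the slicing, not documented in the
# original data file): columns 4-10 hold the 7 input features and columns
# 15-18 hold the 4 regression targets, plotted later as ua1-ua4.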

# # Bayesian ridge regression model
# BR1 = BayesianRidge()
# BR1.fit(X, y1)
# MSE1 = sklearn.metrics.mean_squared_error(y1, BR1.predict(X))
# r2_score1 = sklearn.metrics.r2_score(y1, BR1.predict(X))
# print("BayesianRidge MSE1", MSE1)
# print("BayesianRidge r2_score1", r2_score1)
# print('-------------------------------------')
#
# BR2 = BayesianRidge()
# BR2.fit(X, y2)
# MSE2 = sklearn.metrics.mean_squared_error(y2, BR2.predict(X))
# r2_score2 = sklearn.metrics.r2_score(y2, BR2.predict(X))
# print("BayesianRidge MSE2", MSE2)
# print("BayesianRidge r2_score2", r2_score2)
# print('-------------------------------------')
#
# BR3 = BayesianRidge()
# BR3.fit(X, y3)
# MSE3 = sklearn.metrics.mean_squared_error(y3, BR3.predict(X))
# r2_score3 = sklearn.metrics.r2_score(y3, BR3.predict(X))
# print("BayesianRidge MSE3", MSE3)
# print("BayesianRidge r2_score3", r2_score3)
# print('-------------------------------------')
#
# BR4 = BayesianRidge()
# BR4.fit(X, y4)
# MSE4 = sklearn.metrics.mean_squared_error(y4, BR4.predict(X))
# r2_score4 = sklearn.metrics.r2_score(y4, BR4.predict(X))
# print("BayesianRidge MSE4", MSE4)
# print("BayesianRidge r2_score4", r2_score4)
# print('-------------------------------------')

# # Support vector regression (SVR) model
# svr = SVR()
# svr.fit(X, np.squeeze(y1))
# MSE = sklearn.metrics.mean_squared_error(y1, svr.predict(X))
# r2_score = sklearn.metrics.r2_score(y1, svr.predict(X))
# print("svr MSE", MSE)
# print("svr r2_score", r2_score)
# print('-------------------------------------')
#
# # Decision tree regression model
# dtr = DecisionTreeRegressor()
# dtr.fit(X, np.squeeze(y1))
# MSE = sklearn.metrics.mean_squared_error(y1, dtr.predict(X))
# r2_score = sklearn.metrics.r2_score(y1, dtr.predict(X))
# print("DecisionTreeRegressor MSE", MSE)
# print("DecisionTreeRegressor r2_score", r2_score)
# print('-------------------------------------')
#
# # Random forest regression model
# RFR = RandomForestRegressor()
# RFR.fit(X, np.squeeze(y1))
# MSE = sklearn.metrics.mean_squared_error(y1, RFR.predict(X))
# r2_score = sklearn.metrics.r2_score(y1, RFR.predict(X))
# print("RandomForestRegressor MSE", MSE)
# print("RandomForestRegressor r2_score", r2_score)
# print('-------------------------------------')

# # XGBoost model
# XGB = XGBRegressor()
# XGB.fit(X, np.squeeze(y1))
# MSE = sklearn.metrics.mean_squared_error(y1, XGB.predict(X))
# r2_score = sklearn.metrics.r2_score(y1, XGB.predict(X))
# print("XGBRegressor MSE", MSE)
# print("XGBRegressor r2_score", r2_score)
# print('-------------------------------------')

# # LightGBM model
# LGB = LGBMRegressor()
# LGB.fit(X, np.squeeze(y1))
# MSE = sklearn.metrics.mean_squared_error(y1, LGB.predict(X))
# r2_score = sklearn.metrics.r2_score(y1, LGB.predict(X))
# print("LGBMRegressor MSE", MSE)
# print("LGBMRegressor r2_score", r2_score)
# print('-------------------------------------')
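
# The commented-out comparisons above all follow the same fit/score pattern, so
# a small helper (my own sketch, not part of the original comparison) can fit
# any candidate model on one target and report its training-set MSE and R^2.
def report_train_scores(model, X, y_target, name):
    model.fit(X, np.squeeze(y_target))
    y_pred = model.predict(X)
    print(name, "MSE", sklearn.metrics.mean_squared_error(y_target, y_pred))
    print(name, "r2_score", sklearn.metrics.r2_score(y_target, y_pred))
    print('-------------------------------------')
    return model

# Example usage (commented out so the script output below is unchanged):
# report_train_scores(SVR(), X, y1, "svr")
# report_train_scores(XGBRegressor(), X, y1, "XGBRegressor")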

#-----------------------------------------------------------------------------#
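# Fit one random forest per target (the four columns of y) and report the
# training-set MSE and R^2 for each.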

RandomForestRegressor1 = RandomForestRegressor()
RandomForestRegressor1.fit(X, y1)
MSE1 = sklearn.metrics.mean_squared_error(y1, RandomForestRegressor1.predict(X))
r2_score1 = sklearn.metrics.r2_score(y1, RandomForestRegressor1.predict(X))
print("RandomForestRegressor MSE1", MSE1)
print("RandomForestRegressor r2_score1", r2_score1)
print('-------------------------------------')

RandomForestRegressor2 = RandomForestRegressor()
RandomForestRegressor2.fit(X, y2)
MSE2 = sklearn.metrics.mean_squared_error(y2, RandomForestRegressor2.predict(X))
r2_score2 = sklearn.metrics.r2_score(y2, RandomForestRegressor2.predict(X))
print("RandomForestRegressor MSE2", MSE2)
print("RandomForestRegressor r2_score2", r2_score2)
print('-------------------------------------')

RandomForestRegressor3 = RandomForestRegressor()
RandomForestRegressor3.fit(X, y3)
MSE3 = sklearn.metrics.mean_squared_error(y3, RandomForestRegressor3.predict(X))
r2_score3 = sklearn.metrics.r2_score(y3, RandomForestRegressor3.predict(X))
print("RandomForestRegressor MSE3", MSE3)
print("RandomForestRegressor r2_score3", r2_score3)
print('-------------------------------------')

RandomForestRegressor4 = RandomForestRegressor()
RandomForestRegressor4.fit(X, y4)
MSE4 = sklearn.metrics.mean_squared_error(y4, RandomForestRegressor4.predict(X))
r2_score4 = sklearn.metrics.r2_score(y4, RandomForestRegressor4.predict(X))
print("RandomForestRegressor MSE4", MSE4)
print("RandomForestRegressor r2_score4", r2_score4)
print('-------------------------------------')

# joblib.dump(XGB1, 'XGB1.pkl')
# joblib.dump(XGB2, 'XGB2.pkl')
# joblib.dump(XGB3, 'XGB3.pkl')
# joblib.dump(XGB4, 'XGB4.pkl')

joblib.dump(RandomForestRegressor1, 'RandomForestRegressor1.pkl')
joblib.dump(RandomForestRegressor2, 'RandomForestRegressor2.pkl')
joblib.dump(RandomForestRegressor3, 'RandomForestRegressor3.pkl')
joblib.dump(RandomForestRegressor4, 'RandomForestRegressor4.pkl')
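
# Aside (my own sketch, left commented out so the saved artifacts stay exactly
# as above): RandomForestRegressor also accepts a 2-D target array, so a single
# model could be trained on all four outputs at once instead of four models.
# rf_multi = RandomForestRegressor()
# rf_multi.fit(X, y)   # y has shape (n_samples, 4)
# joblib.dump(rf_multi, 'RandomForestRegressor_multi.pkl')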


# from xgboost import plot_importance
# fig,ax = plt.subplots(figsize=(10,15))
# plot_importance(XGB1,height=0.5,max_num_features=7,ax=ax)
# plt.show()
# plot_importance(XGB2,height=0.5,max_num_features=7,ax=ax)
# plt.show()
# plot_importance(XGB3,height=0.5,max_num_features=7,ax=ax)
# plt.show()
# plot_importance(XGB4,height=0.5,max_num_features=7,ax=ax)
# plt.show()


from xgboost import XGBRegressor
import xgboost
import pandas as pd
from sklearn.metrics import mean_squared_error, r2_score
import joblib
import matplotlib.pyplot as plt
import numpy as np

# csv_data = pd.read_csv('multiple_train.csv',header=None, index_col=None)
csv_data = pd.read_csv('add_test.csv', header=None, index_col=None)
datas = csv_data.values
X = datas[:, 4:(4+7)]
# y = datas[:, :4]
y = datas[:, (4+7+4):(4+7+4+4)]
y1 = y[:, 0]
y2 = y[:, 1]
y3 = y[:, 2]
y4 = y[:, 3]

# Load the trained models
RandomForestRegressor1 = joblib.load('RandomForestRegressor1.pkl')
RandomForestRegressor2 = joblib.load('RandomForestRegressor2.pkl')
RandomForestRegressor3 = joblib.load('RandomForestRegressor3.pkl')
RandomForestRegressor4 = joblib.load('RandomForestRegressor4.pkl')

y1_hat = RandomForestRegressor1.predict(X)
y2_hat = RandomForestRegressor2.predict(X)
y3_hat = RandomForestRegressor3.predict(X)
y4_hat = RandomForestRegressor4.predict(X)

y_hat = np.concatenate([y1_hat[:,np.newaxis],y2_hat[:,np.newaxis],y3_hat[:,np.newaxis],y4_hat[:,np.newaxis]], axis=1)

mse1 = mean_squared_error(y1,y1_hat)
mse2 = mean_squared_error(y2,y2_hat)
mse3 = mean_squared_error(y3,y3_hat)
mse4 = mean_squared_error(y4,y4_hat)
r2_score1 = r2_score(y1,y1_hat)
r2_score2 = r2_score(y2,y2_hat)
r2_score3 = r2_score(y3,y3_hat)
r2_score4 = r2_score(y4,y4_hat)
print('RandomForestRegressor mse1:', mse1)
print('RandomForestRegressor mse2:', mse2)
print('RandomForestRegressor mse3:', mse3)
print('RandomForestRegressor mse4:', mse4)
print('RandomForestRegressor r2_score1:', r2_score1)
print('RandomForestRegressor r2_score2:', r2_score2)
print('RandomForestRegressor r2_score3:', r2_score3)
print('RandomForestRegressor r2_score4:', r2_score4)
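
# The same metrics can also be computed for all four targets at once, since y
# and y_hat are both (n_samples, 4) arrays at this point.
mse_all = mean_squared_error(y, y_hat, multioutput='raw_values')
r2_all = r2_score(y, y_hat, multioutput='raw_values')
print('RandomForestRegressor per-target MSE:', mse_all)
print('RandomForestRegressor per-target R2:', r2_all)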

for i in range(4):
    plt.figure('multiple test {} : ua and ua_hat'.format(i+1),dpi=500)
    plt.subplot(4,1,1)
    plt.plot(y1[100*i:100*(i+1)], color='r', linewidth=0.6)
    plt.plot(y1_hat[100*i:100*(i+1)], color='b', linewidth=0.6)
    plt.legend(['ua1','ua1_hat'])
    plt.grid(True)
    plt.title('ua1')

    plt.subplot(4,1,2)
    plt.plot(y2[100*i:100*(i+1)], color='r', linewidth=0.6)
    plt.plot(y2_hat[100*i:100*(i+1)], color='b', linewidth=0.6)
    plt.legend(['ua2','ua2_hat'])
    plt.grid(True)
    plt.title('ua2')

    plt.subplot(4,1,3)
    plt.plot(y3[100*i:100*(i+1)], color='r', linewidth=0.6)
    plt.plot(y3_hat[100*i:100*(i+1)], color='b', linewidth=0.6)
    plt.legend(['ua3','ua3_hat'])
    plt.grid(True)
    plt.title('ua3')

    plt.subplot(4,1,4)
    plt.plot(y4[100*i:100*(i+1)], color='r', linewidth=0.6)
    plt.plot(y4_hat[100*i:100*(i+1)], color='b', linewidth=0.6)
    plt.legend(['ua4','ua4_hat'])
    plt.grid(True)
    plt.title('ua4')

    plt.savefig('multiple test {} ua and ua_hat.png'.format(i+1))
    plt.savefig('multiple test {} ua and ua_hat.svg'.format(i+1))
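    # Optional addition: close the figure after saving so the loop does not
    # keep every figure open in memory.
    plt.close()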


names = ['ua1','ua2','ua3','ua4']

y_hat = pd.DataFrame(y_hat,columns=names)
y_hat.to_csv('RandomForestRegressor_predict_add.csv')
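
# pandas writes its integer index as the first CSV column by default, so the
# file can be read back with index_col=0 (or pass index=False to to_csv above).
y_hat_reloaded = pd.read_csv('RandomForestRegressor_predict_add.csv', index_col=0)
print(y_hat_reloaded.head())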