探究不同阶数对4种线性回归影响
【摘要】 import numpy as np
from sklearn.linear_model import LinearRegression, RidgeCV, LassoCV, ElasticNetCV
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pip...
import numpy as np
from sklearn.linear_model import LinearRegression, RidgeCV, LassoCV, ElasticNetCV
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pipeline
from sklearn.exceptions import ConvergenceWarning
import matplotlib as mpl
import matplotlib.pyplot as plt
import warnings
def xss(y, y_hat):
    """Compute goodness-of-fit statistics for predictions against ground truth.

    Flattens both arrays, computes the classic sum-of-squares decomposition
    (TSS / RSS / ESS), the coefficient of determination R^2, and the Pearson
    correlation between y and y_hat. Side effect: appends tss, rss, ess and
    rss + ess to the module-level lists ``tss_list``, ``rss_list``,
    ``ess_list`` and ``ess_rss_list`` (defined elsewhere in this script),
    so the plotting code can compare them across polynomial orders.

    Parameters
    ----------
    y : array-like
        Ground-truth target values; flattened with ``ravel()``.
    y_hat : array-like
        Predicted values, same number of elements as ``y``.

    Returns
    -------
    tuple[float, float]
        ``(r2, corr_coef)`` — R^2 score and Pearson correlation coefficient.
        NOTE(review): the scraped source is truncated at ``retur``; returning
        the two values computed but not stored in the global lists is the
        standard form of this snippet — confirm against the original post.
    """
    y = y.ravel()
    y_hat = y_hat.ravel()

    # Version 1: sum-of-squares decomposition.
    tss = ((y - np.average(y)) ** 2).sum()       # total sum of squares
    rss = ((y_hat - y) ** 2).sum()               # residual sum of squares
    ess = ((y_hat - np.average(y)) ** 2).sum()   # explained sum of squares
    r2 = 1 - rss / tss
    # print 'RSS:', rss, '\t ESS:', ess
    # print 'TSS:', tss, 'RSS + ESS = ', rss + ess

    # Version 2: same R^2 via per-sample averages (kept for reference).
    # tss = np.var(y)
    # rss = np.average((y_hat - y) ** 2)
    # r2 = 1 - rss / tss

    corr_coef = np.corrcoef(y, y_hat)[0, 1]

    # Accumulate into module-level lists for later comparison/plotting.
    # NOTE: rss + ess == tss only holds exactly for least-squares fits
    # with an intercept; regularized models (Ridge/Lasso/ElasticNet) can
    # break the identity, which is presumably what this script visualizes.
    tss_list.append(tss)
    rss_list.append(rss)
    ess_list.append(ess)
    ess_rss_list.append(rss + ess)
    return r2, corr_coef
- 1
- 2
- 3
- 4
- 5
- 6
- 7
- 8
- 9
- 10
- 11
- 12
- 13
- 14
- 15
- 16
- 17
- 18
- 19
- 20
- 21
- 22
- 23
- 24
- 25
- 26
- 27
- 28
文章来源: maoli.blog.csdn.net,作者:刘润森!,版权归原作者所有,如需转载,请联系作者。
原文链接:maoli.blog.csdn.net/article/details/89457256
【版权声明】本文为华为云社区用户转载文章,如果您发现本社区中有涉嫌抄袭的内容,欢迎发送邮件进行举报,并提供相关证据,一经查实,本社区将立刻删除涉嫌侵权内容,举报邮箱:
cloudbbs@huaweicloud.com
- 点赞
- 收藏
- 关注作者
评论(0)