本文参考了 B 站 UP 主「南方小鱼儿」的代码,参考视频:《SVR模型对连续量的预测(SVM)02 - 生成样本数据》(哔哩哔哩/bilibili)。
代码如下
"""SVR demo: fit RBF, linear, and polynomial kernels to noisy sine data and plot them.

Generates sorted random samples in [0, 5), targets y = sin(x) with noise added
to every 5th point, fits three sklearn SVR models, and draws one subplot per
model showing the fitted curve, the support vectors, and the remaining points.
"""
import numpy as np
from sklearn.svm import SVR
import matplotlib.pyplot as plt

# Number of training samples; every count below is derived from this so the
# script stays consistent if it is changed.
N_SAMPLES = 40

# Sorted random inputs in [0, 5); sorting keeps the plotted curve monotone in x.
X = np.sort(np.random.rand(N_SAMPLES, 1) * 5, axis=0)
y = np.sin(X).ravel()
# Perturb every 5th target with uniform noise in [-0.05, 0.05).
# len(y[::5]) == N_SAMPLES // 5, so the noise vector matches the slice.
y[::5] += (0.5 - np.random.rand(N_SAMPLES // 5)) * 0.1

# Larger C -> weaker regularization -> more complex model.
svr_rbf = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=0.01)
svr_rbf.fit(X, y)
svr_lin = SVR(kernel='linear', C=1, gamma=0.1, epsilon=0.01)
svr_lin.fit(X, y)
svr_poly = SVR(kernel='poly', C=1, gamma=0.1, epsilon=0.01, degree=3)
svr_poly.fit(X, y)

svrs = [svr_rbf, svr_lin, svr_poly]
model_labels = ['RBF', 'Linear', 'Polynomial']
model_colors = ['m', 'c', 'g']

fig, axes = plt.subplots(1, 3, figsize=(15, 10))
for ix, svr in enumerate(svrs):
    # Fitted regression curve.
    axes[ix].plot(
        X,
        svr.predict(X),
        lw=3,
        color=model_colors[ix],
        label=f'{model_labels[ix]}model'
    )
    # Support vectors, drawn as hollow circles in the model's color.
    axes[ix].scatter(
        X[svr.support_],
        y[svr.support_],
        s=30,
        edgecolor=model_colors[ix],
        facecolor='none',
        label=f'{model_labels[ix]}support vector'
    )
    # Remaining training points, i.e. those that are not support vectors.
    # Computed once and reused for both coordinate arrays.
    others = np.setdiff1d(np.arange(len(X)), svr.support_)
    axes[ix].scatter(
        X[others],
        y[others],
        s=30,
        edgecolor='k',
        facecolor='none',
        label=f'{model_labels[ix]}other data'
    )
    axes[ix].legend(
        loc='upper center',
        bbox_to_anchor=(0.5, 1.1)
    )
fig.suptitle('Support Vector Regression', fontsize=15)
plt.show()
最后结果图片如下
标签:ix,python,SVR,axes,np,model,svr,sklearn From: https://www.cnblogs.com/lpj1393822011/p/17019906.html