from sklearn.neighbors import KNeighborsClassifier as KNN
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier as RFC
import matplotlib.pyplot as plt
from sklearn.model_selection import cross_val_score
import pandas as pd
import numpy as np
# Load the Kaggle digit-recognizer data: column 0 is the label,
# the remaining 784 columns are 28x28 pixel intensities.
df = pd.read_csv("./digit recognizor.csv")
x = df.iloc[:, 1:]  # pixel features
y = df.iloc[:, 0]   # digit labels (0-9)

# Fit a full PCA (all components) so the explained-variance profile
# can be inspected before choosing a dimensionality.
pca = PCA().fit(x)

# Bind the cumulative explained-variance ratio instead of discarding it:
# a bare expression is a no-op in a script, and the original file had the
# notebook's `array([...])` echo pasted in, which would raise NameError
# at import time (`array` is never defined; only `np` is imported).
# The curve climbs from ~0.097 for one component to 1.0, crossing ~0.90
# around 87 components and ~0.95 around 150.
cumulative_variance = np.cumsum(pca.explained_variance_ratio_)
# Scree-style plot: cumulative explained variance vs. component count.
cum_ratio = np.cumsum(pca.explained_variance_ratio_)
n_features = x.shape[1]
plt.figure(figsize=(20, 5))
plt.plot(range(n_features), cum_ratio)
plt.show()
# Coarse search: cross-validated forest accuracy vs. number of
# retained components, sweeping 100..200 in steps of 10.
score = []
grid = range(100, 201, 10)
for n_comp in grid:
    x_dr = PCA(n_comp).fit_transform(x)
    cv_mean = cross_val_score(
        RFC(n_estimators=10, random_state=0), x_dr, y, cv=5
    ).mean()
    score.append(cv_mean)
plt.figure(figsize=(20, 5))
plt.plot(grid, score)
plt.show()
# Mid-range search: same curve over 1..91 components (step 10) to see
# how quickly accuracy saturates at very low dimensionality.
score = []
grid = range(1, 101, 10)
for n_comp in grid:
    x_dr = PCA(n_comp).fit_transform(x)
    cv_mean = cross_val_score(
        RFC(n_estimators=10, random_state=0), x_dr, y, cv=5
    ).mean()
    score.append(cv_mean)
plt.figure(figsize=(20, 5))
plt.plot(grid, score)
plt.xticks(grid)
plt.show()
# Fine-grained search: zoom in on 10..22 components to pick the best one.
score = []
grid = range(10, 23)
for n_comp in grid:
    x_dr = PCA(n_comp).fit_transform(x)
    cv_mean = cross_val_score(
        RFC(n_estimators=10, random_state=0), x_dr, y, cv=5
    ).mean()
    score.append(cv_mean)
plt.figure(figsize=(20, 5))
plt.plot(grid, score)
plt.xticks(grid)
plt.show()
# Final reduction: 21 components, chosen from the fine-grained accuracy
# search above, shrinks 784 features down to 21.
x_dr = PCA(21).fit_transform(x)

# Random-forest baseline on the reduced data (10 trees).
score = cross_val_score(RFC(n_estimators=10, random_state=0), x_dr, y, cv=5).mean()
# The pasted notebook outputs below were bare expression statements in the
# original file (no-op literals that read as code); kept here as comments.
# score -> 0.918452380952381

# More trees improve the forest noticeably:
cross_val_score(RFC(n_estimators=100, random_state=0), x_dr, y, cv=5).mean()
# -> 0.9436190476190477

# Plain KNN beats both forests on this 21-dimensional projection:
cross_val_score(KNN(), x_dr, y, cv=5).mean()
# -> 0.9675476190476191
# Tune KNN's n_neighbors (k = 1..10) on the 21-component projection.
score = []
# Hoisted out of the loop: the PCA projection does not depend on k, but
# the original recomputed fit_transform on every one of the 10 iterations.
x_dr = PCA(21).fit_transform(x)
for k in range(1, 11):
    once = cross_val_score(KNN(k), x_dr, y, cv=5).mean()
    score.append(once)
plt.figure()
plt.plot(range(1, 11), score)
plt.xticks(range(1, 11))
plt.show()
# Cross-validated accuracy of KNN with k=3 (the best k from the sweep
# above) on the 21-component projection.
cross_val_score(KNN(3), x_dr, y, cv=5).mean()
# NOTE(review): the next line is pasted notebook output, not code; as a
# bare float literal it is a harmless no-op statement.
0.968
# %%timeit — IPython cell magic; it is a syntax error in plain Python, so it
# is commented out here and only the timed expression is kept.
cross_val_score(KNN(3), x_dr, y, cv=5).mean()
# Notebook timing output:
# 2.45 s ± 49.8 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)
# Tags: __, 784, plt, score, 10, range, PCA, dr, mean
# From: https://www.cnblogs.com/thankcat/p/17304533.html