Not original; the code comes from 葁sir.
import numpy as np
import pandas as pd
from pandas import Series,DataFrame
import seaborn as sns
import matplotlib.pyplot as plt
%matplotlib inline
from sklearn.datasets import load_iris
X, y = load_iris(return_X_y=True)
X.shape
(150, 4)
y.shape
(150,)
from sklearn.linear_model import LogisticRegression
import warnings
warnings.filterwarnings('ignore')
lr = LogisticRegression()
lr.fit(X,y)
lr.score(X,y)
0.9733333333333334
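Note that lr.score(X, y) here is measured on the training data itself, so it is an optimistic estimate. A minimal held-out evaluation sketch (the 30% test split, random_state=0 and max_iter=1000 are my own choices, not part of the original):
from sklearn.model_selection import train_test_split
# hold out 30% of the samples and score on them instead of on the training set
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
lr_holdout = LogisticRegression(max_iter=1000)
lr_holdout.fit(X_train, y_train)
lr_holdout.score(X_test, y_test)  # accuracy on unseen samples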
# Feature selection: filter out the second column
X1 = X[:,[0,2,3]]
X1.shape
(150, 3)
lr1 = LogisticRegression(max_iter=70)
lr1.fit(X1,y)
lr1.score(X1,y)
0.9666666666666667
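Dropping a column by hand works, but scikit-learn also offers univariate filters for the same idea. A sketch with SelectKBest (keeping k=3 features via the ANOVA F-test is my own illustration, not from the original):
from sklearn.feature_selection import SelectKBest, f_classif
# keep the 3 features with the highest ANOVA F-score against the class labels
selector = SelectKBest(score_func=f_classif, k=3)
X_sel = selector.fit_transform(X, y)
selector.get_support()  # boolean mask showing which of the 4 columns were kept
LogisticRegression(max_iter=1000).fit(X_sel, y).score(X_sel, y)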
# First, do the dimensionality reduction with an unsupervised method
from sklearn.decomposition import PCA
# n_components=None by default
# integer: the number of components to keep after reduction (ranked by how much information each carries, kept from the top down)
# float: keep enough components to preserve that fraction of the original information (variance)
# Method 1: integer
pca = PCA(n_components=2)
X2 = pca.fit_transform(X)
X2
array([[-2.68412563, 0.31939725],
[-2.71414169, -0.17700123],
[-2.88899057, -0.14494943],
[-2.74534286, -0.31829898],
[-2.72871654, 0.32675451],
[-2.28085963, 0.74133045],
[-2.82053775, -0.08946138],
[-2.62614497, 0.16338496],
[-2.88638273, -0.57831175],
[-2.6727558 , -0.11377425],
[-2.50694709, 0.6450689 ],
[-2.61275523, 0.01472994],
[-2.78610927, -0.235112 ],
[-3.22380374, -0.51139459],
[-2.64475039, 1.17876464],
[-2.38603903, 1.33806233],
[-2.62352788, 0.81067951],
[-2.64829671, 0.31184914],
[-2.19982032, 0.87283904],
[-2.5879864 , 0.51356031],
[-2.31025622, 0.39134594],
[-2.54370523, 0.43299606],
[-3.21593942, 0.13346807],
[-2.30273318, 0.09870885],
[-2.35575405, -0.03728186],
[-2.50666891, -0.14601688],
[-2.46882007, 0.13095149],
[-2.56231991, 0.36771886],
[-2.63953472, 0.31203998],
[-2.63198939, -0.19696122],
[-2.58739848, -0.20431849],
[-2.4099325 , 0.41092426],
[-2.64886233, 0.81336382],
[-2.59873675, 1.09314576],
[-2.63692688, -0.12132235],
[-2.86624165, 0.06936447],
[-2.62523805, 0.59937002],
[-2.80068412, 0.26864374],
[-2.98050204, -0.48795834],
[-2.59000631, 0.22904384],
[-2.77010243, 0.26352753],
[-2.84936871, -0.94096057],
[-2.99740655, -0.34192606],
[-2.40561449, 0.18887143],
[-2.20948924, 0.43666314],
[-2.71445143, -0.2502082 ],
[-2.53814826, 0.50377114],
[-2.83946217, -0.22794557],
[-2.54308575, 0.57941002],
[-2.70335978, 0.10770608],
[ 1.28482569, 0.68516047],
[ 0.93248853, 0.31833364],
[ 1.46430232, 0.50426282],
[ 0.18331772, -0.82795901],
[ 1.08810326, 0.07459068],
[ 0.64166908, -0.41824687],
[ 1.09506066, 0.28346827],
[-0.74912267, -1.00489096],
[ 1.04413183, 0.2283619 ],
[-0.0087454 , -0.72308191],
[-0.50784088, -1.26597119],
[ 0.51169856, -0.10398124],
[ 0.26497651, -0.55003646],
[ 0.98493451, -0.12481785],
[-0.17392537, -0.25485421],
[ 0.92786078, 0.46717949],
[ 0.66028376, -0.35296967],
[ 0.23610499, -0.33361077],
[ 0.94473373, -0.54314555],
[ 0.04522698, -0.58383438],
[ 1.11628318, -0.08461685],
[ 0.35788842, -0.06892503],
[ 1.29818388, -0.32778731],
[ 0.92172892, -0.18273779],
[ 0.71485333, 0.14905594],
[ 0.90017437, 0.32850447],
[ 1.33202444, 0.24444088],
[ 1.55780216, 0.26749545],
[ 0.81329065, -0.1633503 ],
[-0.30558378, -0.36826219],
[-0.06812649, -0.70517213],
[-0.18962247, -0.68028676],
[ 0.13642871, -0.31403244],
[ 1.38002644, -0.42095429],
[ 0.58800644, -0.48428742],
[ 0.80685831, 0.19418231],
[ 1.22069088, 0.40761959],
[ 0.81509524, -0.37203706],
[ 0.24595768, -0.2685244 ],
[ 0.16641322, -0.68192672],
[ 0.46480029, -0.67071154],
[ 0.8908152 , -0.03446444],
[ 0.23054802, -0.40438585],
[-0.70453176, -1.01224823],
[ 0.35698149, -0.50491009],
[ 0.33193448, -0.21265468],
[ 0.37621565, -0.29321893],
[ 0.64257601, 0.01773819],
[-0.90646986, -0.75609337],
[ 0.29900084, -0.34889781],
[ 2.53119273, -0.00984911],
[ 1.41523588, -0.57491635],
[ 2.61667602, 0.34390315],
[ 1.97153105, -0.1797279 ],
[ 2.35000592, -0.04026095],
[ 3.39703874, 0.55083667],
[ 0.52123224, -1.19275873],
[ 2.93258707, 0.3555 ],
[ 2.32122882, -0.2438315 ],
[ 2.91675097, 0.78279195],
[ 1.66177415, 0.24222841],
[ 1.80340195, -0.21563762],
[ 2.1655918 , 0.21627559],
[ 1.34616358, -0.77681835],
[ 1.58592822, -0.53964071],
[ 1.90445637, 0.11925069],
[ 1.94968906, 0.04194326],
[ 3.48705536, 1.17573933],
[ 3.79564542, 0.25732297],
[ 1.30079171, -0.76114964],
[ 2.42781791, 0.37819601],
[ 1.19900111, -0.60609153],
[ 3.49992004, 0.4606741 ],
[ 1.38876613, -0.20439933],
[ 2.2754305 , 0.33499061],
[ 2.61409047, 0.56090136],
[ 1.25850816, -0.17970479],
[ 1.29113206, -0.11666865],
[ 2.12360872, -0.20972948],
[ 2.38800302, 0.4646398 ],
[ 2.84167278, 0.37526917],
[ 3.23067366, 1.37416509],
[ 2.15943764, -0.21727758],
[ 1.44416124, -0.14341341],
[ 1.78129481, -0.49990168],
[ 3.07649993, 0.68808568],
[ 2.14424331, 0.1400642 ],
[ 1.90509815, 0.04930053],
[ 1.16932634, -0.16499026],
[ 2.10761114, 0.37228787],
[ 2.31415471, 0.18365128],
[ 1.9222678 , 0.40920347],
[ 1.41523588, -0.57491635],
[ 2.56301338, 0.2778626 ],
[ 2.41874618, 0.3047982 ],
[ 1.94410979, 0.1875323 ],
[ 1.52716661, -0.37531698],
[ 1.76434572, 0.07885885],
[ 1.90094161, 0.11662796],
[ 1.39018886, -0.28266094]])
lr.fit(X2,y)
LogisticRegression()
lr.score(X2,y)
0.9666666666666667
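To see how the two retained components relate to the four original measurements, the fitted PCA object can be inspected directly; a short sketch:
# each row is one principal component, written as weights on the 4 original features
pca.components_
# fraction of the total variance captured by each retained component
pca.explained_variance_ratio_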
# Method 2: float
pca2 = PCA(n_components=0.99)
X3 = pca2.fit_transform(X)
lr.fit(X3,y)
LogisticRegression()
lr.score(X3,y)
0.9733333333333334
X3
array([[-2.68412563, 0.31939725, -0.02791483],
[-2.71414169, -0.17700123, -0.21046427],
[-2.88899057, -0.14494943, 0.01790026],
[-2.74534286, -0.31829898, 0.03155937],
[-2.72871654, 0.32675451, 0.09007924],
[-2.28085963, 0.74133045, 0.16867766],
[-2.82053775, -0.08946138, 0.25789216],
[-2.62614497, 0.16338496, -0.02187932],
[-2.88638273, -0.57831175, 0.02075957],
[-2.6727558 , -0.11377425, -0.19763272],
[-2.50694709, 0.6450689 , -0.07531801],
[-2.61275523, 0.01472994, 0.10215026],
[-2.78610927, -0.235112 , -0.20684443],
[-3.22380374, -0.51139459, 0.06129967],
[-2.64475039, 1.17876464, -0.15162752],
[-2.38603903, 1.33806233, 0.2777769 ],
[-2.62352788, 0.81067951, 0.13818323],
[-2.64829671, 0.31184914, 0.02666832],
[-2.19982032, 0.87283904, -0.12030552],
[-2.5879864 , 0.51356031, 0.21366517],
[-2.31025622, 0.39134594, -0.23944404],
[-2.54370523, 0.43299606, 0.20845723],
[-3.21593942, 0.13346807, 0.29239675],
[-2.30273318, 0.09870885, 0.03912326],
[-2.35575405, -0.03728186, 0.12502108],
[-2.50666891, -0.14601688, -0.25342004],
[-2.46882007, 0.13095149, 0.09491058],
[-2.56231991, 0.36771886, -0.07849421],
[-2.63953472, 0.31203998, -0.1459089 ],
[-2.63198939, -0.19696122, 0.04077108],
[-2.58739848, -0.20431849, -0.07722299],
[-2.4099325 , 0.41092426, -0.14552497],
[-2.64886233, 0.81336382, 0.22566915],
[-2.59873675, 1.09314576, 0.15781081],
[-2.63692688, -0.12132235, -0.14304958],
[-2.86624165, 0.06936447, -0.16433231],
[-2.62523805, 0.59937002, -0.26835038],
[-2.80068412, 0.26864374, 0.09369908],
[-2.98050204, -0.48795834, 0.07292705],
[-2.59000631, 0.22904384, -0.0800823 ],
[-2.77010243, 0.26352753, 0.07724769],
[-2.84936871, -0.94096057, -0.34923038],
[-2.99740655, -0.34192606, 0.19250921],
[-2.40561449, 0.18887143, 0.26386795],
[-2.20948924, 0.43666314, 0.29874275],
[-2.71445143, -0.2502082 , -0.09767814],
[-2.53814826, 0.50377114, 0.16670564],
[-2.83946217, -0.22794557, 0.08372685],
[-2.54308575, 0.57941002, -0.01711502],
[-2.70335978, 0.10770608, -0.08929401],
[ 1.28482569, 0.68516047, -0.40656803],
[ 0.93248853, 0.31833364, -0.01801419],
[ 1.46430232, 0.50426282, -0.33832576],
[ 0.18331772, -0.82795901, -0.17959139],
[ 1.08810326, 0.07459068, -0.3077579 ],
[ 0.64166908, -0.41824687, 0.04107609],
[ 1.09506066, 0.28346827, 0.16981024],
[-0.74912267, -1.00489096, 0.01230292],
[ 1.04413183, 0.2283619 , -0.41533608],
[-0.0087454 , -0.72308191, 0.28114143],
[-0.50784088, -1.26597119, -0.26981718],
[ 0.51169856, -0.10398124, 0.13054775],
[ 0.26497651, -0.55003646, -0.69414683],
[ 0.98493451, -0.12481785, -0.06211441],
[-0.17392537, -0.25485421, 0.09045769],
[ 0.92786078, 0.46717949, -0.31462098],
[ 0.66028376, -0.35296967, 0.32802753],
[ 0.23610499, -0.33361077, -0.27116184],
[ 0.94473373, -0.54314555, -0.49951905],
[ 0.04522698, -0.58383438, -0.2350021 ],
[ 1.11628318, -0.08461685, 0.45962099],
[ 0.35788842, -0.06892503, -0.22985389],
[ 1.29818388, -0.32778731, -0.34785435],
[ 0.92172892, -0.18273779, -0.23107178],
[ 0.71485333, 0.14905594, -0.32180094],
[ 0.90017437, 0.32850447, -0.31620907],
[ 1.33202444, 0.24444088, -0.52170278],
[ 1.55780216, 0.26749545, -0.16492098],
[ 0.81329065, -0.1633503 , 0.0354245 ],
[-0.30558378, -0.36826219, -0.31849158],
[-0.06812649, -0.70517213, -0.24421381],
[-0.18962247, -0.68028676, -0.30642056],
[ 0.13642871, -0.31403244, -0.17724277],
[ 1.38002644, -0.42095429, 0.01616713],
[ 0.58800644, -0.48428742, 0.4444335 ],
[ 0.80685831, 0.19418231, 0.38896306],
[ 1.22069088, 0.40761959, -0.23716701],
[ 0.81509524, -0.37203706, -0.61472084],
[ 0.24595768, -0.2685244 , 0.18836681],
[ 0.16641322, -0.68192672, -0.06000923],
[ 0.46480029, -0.67071154, -0.02430686],
[ 0.8908152 , -0.03446444, -0.00994693],
[ 0.23054802, -0.40438585, -0.22941024],
[-0.70453176, -1.01224823, -0.10569115],
[ 0.35698149, -0.50491009, 0.01661717],
[ 0.33193448, -0.21265468, 0.08320429],
[ 0.37621565, -0.29321893, 0.07799635],
[ 0.64257601, 0.01773819, -0.20539497],
[-0.90646986, -0.75609337, -0.01259965],
[ 0.29900084, -0.34889781, 0.01058166],
[ 2.53119273, -0.00984911, 0.76016543],
[ 1.41523588, -0.57491635, 0.29632253],
[ 2.61667602, 0.34390315, -0.11078788],
[ 1.97153105, -0.1797279 , 0.10842466],
[ 2.35000592, -0.04026095, 0.28538956],
[ 3.39703874, 0.55083667, -0.34843756],
[ 0.52123224, -1.19275873, 0.5456593 ],
[ 2.93258707, 0.3555 , -0.42023994],
[ 2.32122882, -0.2438315 , -0.34830439],
[ 2.91675097, 0.78279195, 0.42333542],
[ 1.66177415, 0.24222841, 0.24244019],
[ 1.80340195, -0.21563762, -0.03764817],
[ 2.1655918 , 0.21627559, 0.03332664],
[ 1.34616358, -0.77681835, 0.28190288],
[ 1.58592822, -0.53964071, 0.62902933],
[ 1.90445637, 0.11925069, 0.47963982],
[ 1.94968906, 0.04194326, 0.04418617],
[ 3.48705536, 1.17573933, 0.13389487],
[ 3.79564542, 0.25732297, -0.51376776],
[ 1.30079171, -0.76114964, -0.34499504],
[ 2.42781791, 0.37819601, 0.21911932],
[ 1.19900111, -0.60609153, 0.51185551],
[ 3.49992004, 0.4606741 , -0.57318224],
[ 1.38876613, -0.20439933, -0.06452276],
[ 2.2754305 , 0.33499061, 0.28615009],
[ 2.61409047, 0.56090136, -0.20553452],
[ 1.25850816, -0.17970479, 0.0458477 ],
[ 1.29113206, -0.11666865, 0.23125646],
[ 2.12360872, -0.20972948, 0.15418002],
[ 2.38800302, 0.4646398 , -0.44953019],
[ 2.84167278, 0.37526917, -0.49889808],
[ 3.23067366, 1.37416509, -0.11454821],
[ 2.15943764, -0.21727758, 0.20876317],
[ 1.44416124, -0.14341341, -0.15323389],
[ 1.78129481, -0.49990168, -0.17287519],
[ 3.07649993, 0.68808568, -0.33559229],
[ 2.14424331, 0.1400642 , 0.73487894],
[ 1.90509815, 0.04930053, 0.16218024],
[ 1.16932634, -0.16499026, 0.28183584],
[ 2.10761114, 0.37228787, 0.02729113],
[ 2.31415471, 0.18365128, 0.32269375],
[ 1.9222678 , 0.40920347, 0.1135866 ],
[ 1.41523588, -0.57491635, 0.29632253],
[ 2.56301338, 0.2778626 , 0.29256952],
[ 2.41874618, 0.3047982 , 0.50448266],
[ 1.94410979, 0.1875323 , 0.17782509],
[ 1.52716661, -0.37531698, -0.12189817],
[ 1.76434572, 0.07885885, 0.13048163],
[ 1.90094161, 0.11662796, 0.72325156],
[ 1.39018886, -0.28266094, 0.36290965]])
pca.explained_variance_ # the variance along each retained component after the projection
array([4.22824171, 0.24267075])
pca2.explained_variance_
array([4.22824171, 0.24267075, 0.0782095 ])
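The float form of n_components works from the normalized version of these variances. A sketch that reproduces why the 0.99 threshold ends up keeping 3 components (fitting an unrestricted PCA here just to read off all four ratios):
pca_full = PCA()  # keep all 4 components so every ratio is available
pca_full.fit(X)
np.cumsum(pca_full.explained_variance_ratio_)  # cumulative explained-variance ratio
# the cumulative ratio first reaches 0.99 at the 3rd component,
# which is why X3 above has 3 columns
np.argmax(np.cumsum(pca_full.explained_variance_ratio_) >= 0.99) + 1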
# Supervised dimensionality reduction: LDA
# Linear Discriminant Analysis, a supervised reduction method
# its projection favours linear separation of the classes
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
data = X[:,:2] # take the first two features
data
array([[5.1, 3.5],
[4.9, 3. ],
[4.7, 3.2],
[4.6, 3.1],
[5. , 3.6],
[5.4, 3.9],
[4.6, 3.4],
[5. , 3.4],
[4.4, 2.9],
[4.9, 3.1],
[5.4, 3.7],
[4.8, 3.4],
[4.8, 3. ],
[4.3, 3. ],
[5.8, 4. ],
[5.7, 4.4],
[5.4, 3.9],
[5.1, 3.5],
[5.7, 3.8],
[5.1, 3.8],
[5.4, 3.4],
[5.1, 3.7],
[4.6, 3.6],
[5.1, 3.3],
[4.8, 3.4],
[5. , 3. ],
[5. , 3.4],
[5.2, 3.5],
[5.2, 3.4],
[4.7, 3.2],
[4.8, 3.1],
[5.4, 3.4],
[5.2, 4.1],
[5.5, 4.2],
[4.9, 3.1],
[5. , 3.2],
[5.5, 3.5],
[4.9, 3.6],
[4.4, 3. ],
[5.1, 3.4],
[5. , 3.5],
[4.5, 2.3],
[4.4, 3.2],
[5. , 3.5],
[5.1, 3.8],
[4.8, 3. ],
[5.1, 3.8],
[4.6, 3.2],
[5.3, 3.7],
[5. , 3.3],
[7. , 3.2],
[6.4, 3.2],
[6.9, 3.1],
[5.5, 2.3],
[6.5, 2.8],
[5.7, 2.8],
[6.3, 3.3],
[4.9, 2.4],
[6.6, 2.9],
[5.2, 2.7],
[5. , 2. ],
[5.9, 3. ],
[6. , 2.2],
[6.1, 2.9],
[5.6, 2.9],
[6.7, 3.1],
[5.6, 3. ],
[5.8, 2.7],
[6.2, 2.2],
[5.6, 2.5],
[5.9, 3.2],
[6.1, 2.8],
[6.3, 2.5],
[6.1, 2.8],
[6.4, 2.9],
[6.6, 3. ],
[6.8, 2.8],
[6.7, 3. ],
[6. , 2.9],
[5.7, 2.6],
[5.5, 2.4],
[5.5, 2.4],
[5.8, 2.7],
[6. , 2.7],
[5.4, 3. ],
[6. , 3.4],
[6.7, 3.1],
[6.3, 2.3],
[5.6, 3. ],
[5.5, 2.5],
[5.5, 2.6],
[6.1, 3. ],
[5.8, 2.6],
[5. , 2.3],
[5.6, 2.7],
[5.7, 3. ],
[5.7, 2.9],
[6.2, 2.9],
[5.1, 2.5],
[5.7, 2.8],
[6.3, 3.3],
[5.8, 2.7],
[7.1, 3. ],
[6.3, 2.9],
[6.5, 3. ],
[7.6, 3. ],
[4.9, 2.5],
[7.3, 2.9],
[6.7, 2.5],
[7.2, 3.6],
[6.5, 3.2],
[6.4, 2.7],
[6.8, 3. ],
[5.7, 2.5],
[5.8, 2.8],
[6.4, 3.2],
[6.5, 3. ],
[7.7, 3.8],
[7.7, 2.6],
[6. , 2.2],
[6.9, 3.2],
[5.6, 2.8],
[7.7, 2.8],
[6.3, 2.7],
[6.7, 3.3],
[7.2, 3.2],
[6.2, 2.8],
[6.1, 3. ],
[6.4, 2.8],
[7.2, 3. ],
[7.4, 2.8],
[7.9, 3.8],
[6.4, 2.8],
[6.3, 2.8],
[6.1, 2.6],
[7.7, 3. ],
[6.3, 3.4],
[6.4, 3.1],
[6. , 3. ],
[6.9, 3.1],
[6.7, 3.1],
[6.9, 3.1],
[5.8, 2.7],
[6.8, 3.2],
[6.7, 3.3],
[6.7, 3. ],
[6.3, 2.5],
[6.5, 3. ],
[6.2, 3.4],
[5.9, 3. ]])
plt.scatter(data[:,0],data[:,1],c=y)
lr = LogisticRegression()
lr.fit(data,y)
LogisticRegression()
lr.score(data,y)
0.82
lda = LinearDiscriminantAnalysis(n_components=2)
lda.fit(X,y)
LinearDiscriminantAnalysis(n_components=2)
X.shape
(150, 4)
lda_data = lda.transform(X)
lda_data
array([[ 8.06179978e+00, 3.00420621e-01],
[ 7.12868772e+00, -7.86660426e-01],
[ 7.48982797e+00, -2.65384488e-01],
[ 6.81320057e+00, -6.70631068e-01],
[ 8.13230933e+00, 5.14462530e-01],
[ 7.70194674e+00, 1.46172097e+00],
[ 7.21261762e+00, 3.55836209e-01],
[ 7.60529355e+00, -1.16338380e-02],
[ 6.56055159e+00, -1.01516362e+00],
[ 7.34305989e+00, -9.47319209e-01],
[ 8.39738652e+00, 6.47363392e-01],
[ 7.21929685e+00, -1.09646389e-01],
[ 7.32679599e+00, -1.07298943e+00],
[ 7.57247066e+00, -8.05464137e-01],
[ 9.84984300e+00, 1.58593698e+00],
[ 9.15823890e+00, 2.73759647e+00],
[ 8.58243141e+00, 1.83448945e+00],
[ 7.78075375e+00, 5.84339407e-01],
[ 8.07835876e+00, 9.68580703e-01],
[ 8.02097451e+00, 1.14050366e+00],
[ 7.49680227e+00, -1.88377220e-01],
[ 7.58648117e+00, 1.20797032e+00],
[ 8.68104293e+00, 8.77590154e-01],
[ 6.25140358e+00, 4.39696367e-01],
[ 6.55893336e+00, -3.89222752e-01],
[ 6.77138315e+00, -9.70634453e-01],
[ 6.82308032e+00, 4.63011612e-01],
[ 7.92461638e+00, 2.09638715e-01],
[ 7.99129024e+00, 8.63787128e-02],
[ 6.82946447e+00, -5.44960851e-01],
[ 6.75895493e+00, -7.59002759e-01],
[ 7.37495254e+00, 5.65844592e-01],
[ 9.12634625e+00, 1.22443267e+00],
[ 9.46768199e+00, 1.82522635e+00],
[ 7.06201386e+00, -6.63400423e-01],
[ 7.95876243e+00, -1.64961722e-01],
[ 8.61367201e+00, 4.03253602e-01],
[ 8.33041759e+00, 2.28133530e-01],
[ 6.93412007e+00, -7.05519379e-01],
[ 7.68823131e+00, -9.22362309e-03],
[ 7.91793715e+00, 6.75121313e-01],
[ 5.66188065e+00, -1.93435524e+00],
[ 7.24101468e+00, -2.72615132e-01],
[ 6.41443556e+00, 1.24730131e+00],
[ 6.85944381e+00, 1.05165396e+00],
[ 6.76470393e+00, -5.05151855e-01],
[ 8.08189937e+00, 7.63392750e-01],
[ 7.18676904e+00, -3.60986823e-01],
[ 8.31444876e+00, 6.44953177e-01],
[ 7.67196741e+00, -1.34893840e-01],
[-1.45927545e+00, 2.85437643e-02],
[-1.79770574e+00, 4.84385502e-01],
[-2.41694888e+00, -9.27840307e-02],
[-2.26247349e+00, -1.58725251e+00],
[-2.54867836e+00, -4.72204898e-01],
[-2.42996725e+00, -9.66132066e-01],
[-2.44848456e+00, 7.95961954e-01],
[-2.22666513e-01, -1.58467318e+00],
[-1.75020123e+00, -8.21180130e-01],
[-1.95842242e+00, -3.51563753e-01],
[-1.19376031e+00, -2.63445570e+00],
[-1.85892567e+00, 3.19006544e-01],
[-1.15809388e+00, -2.64340991e+00],
[-2.66605725e+00, -6.42504540e-01],
[-3.78367218e-01, 8.66389312e-02],
[-1.20117255e+00, 8.44373592e-02],
[-2.76810246e+00, 3.21995363e-02],
[-7.76854039e-01, -1.65916185e+00],
[-3.49805433e+00, -1.68495616e+00],
[-1.09042788e+00, -1.62658350e+00],
[-3.71589615e+00, 1.04451442e+00],
[-9.97610366e-01, -4.90530602e-01],
[-3.83525931e+00, -1.40595806e+00],
[-2.25741249e+00, -1.42679423e+00],
[-1.25571326e+00, -5.46424197e-01],
[-1.43755762e+00, -1.34424979e-01],
[-2.45906137e+00, -9.35277280e-01],
[-3.51848495e+00, 1.60588866e-01],
[-2.58979871e+00, -1.74611728e-01],
[ 3.07487884e-01, -1.31887146e+00],
[-1.10669179e+00, -1.75225371e+00],
[-6.05524589e-01, -1.94298038e+00],
[-8.98703769e-01, -9.04940034e-01],
[-4.49846635e+00, -8.82749915e-01],
[-2.93397799e+00, 2.73791065e-02],
[-2.10360821e+00, 1.19156767e+00],
[-2.14258208e+00, 8.87797815e-02],
[-2.47945603e+00, -1.94073927e+00],
[-1.32552574e+00, -1.62869550e-01],
[-1.95557887e+00, -1.15434826e+00],
[-2.40157020e+00, -1.59458341e+00],
[-2.29248878e+00, -3.32860296e-01],
[-1.27227224e+00, -1.21458428e+00],
[-2.93176055e-01, -1.79871509e+00],
[-2.00598883e+00, -9.05418042e-01],
[-1.18166311e+00, -5.37570242e-01],
[-1.61615645e+00, -4.70103580e-01],
[-1.42158879e+00, -5.51244626e-01],
[ 4.75973788e-01, -7.99905482e-01],
[-1.54948259e+00, -5.93363582e-01],
[-7.83947399e+00, 2.13973345e+00],
[-5.50747997e+00, -3.58139892e-02],
[-6.29200850e+00, 4.67175777e-01],
[-5.60545633e+00, -3.40738058e-01],
[-6.85055995e+00, 8.29825394e-01],
[-7.41816784e+00, -1.73117995e-01],
[-4.67799541e+00, -4.99095015e-01],
[-6.31692685e+00, -9.68980756e-01],
[-6.32773684e+00, -1.38328993e+00],
[-6.85281335e+00, 2.71758963e+00],
[-4.44072512e+00, 1.34723692e+00],
[-5.45009572e+00, -2.07736942e-01],
[-5.66033713e+00, 8.32713617e-01],
[-5.95823722e+00, -9.40175447e-02],
[-6.75926282e+00, 1.60023206e+00],
[-5.80704331e+00, 2.01019882e+00],
[-5.06601233e+00, -2.62733839e-02],
[-6.60881882e+00, 1.75163587e+00],
[-9.17147486e+00, -7.48255067e-01],
[-4.76453569e+00, -2.15573720e+00],
[-6.27283915e+00, 1.64948141e+00],
[-5.36071189e+00, 6.46120732e-01],
[-7.58119982e+00, -9.80722934e-01],
[-4.37150279e+00, -1.21297458e-01],
[-5.72317531e+00, 1.29327553e+00],
[-5.27915920e+00, -4.24582377e-02],
[-4.08087208e+00, 1.85936572e-01],
[-4.07703640e+00, 5.23238483e-01],
[-6.51910397e+00, 2.96976389e-01],
[-4.58371942e+00, -8.56815813e-01],
[-6.22824009e+00, -7.12719638e-01],
[-5.22048773e+00, 1.46819509e+00],
[-6.80015000e+00, 5.80895175e-01],
[-3.81515972e+00, -9.42985932e-01],
[-5.10748966e+00, -2.13059000e+00],
[-6.79671631e+00, 8.63090395e-01],
[-6.52449599e+00, 2.44503527e+00],
[-4.99550279e+00, 1.87768525e-01],
[-3.93985300e+00, 6.14020389e-01],
[-5.20383090e+00, 1.14476808e+00],
[-6.65308685e+00, 1.80531976e+00],
[-5.10555946e+00, 1.99218201e+00],
[-5.50747997e+00, -3.58139892e-02],
[-6.79601924e+00, 1.46068695e+00],
[-6.84735943e+00, 2.42895067e+00],
[-5.64500346e+00, 1.67771734e+00],
[-5.17956460e+00, -3.63475041e-01],
[-4.96774090e+00, 8.21140550e-01],
[-5.88614539e+00, 2.34509051e+00],
[-4.68315426e+00, 3.32033811e-01]])
# Plot the two representations to see the difference
plt.figure(figsize=(12,5))
plt.subplot(1,2,1)
plt.scatter(lda_data[:,0],lda_data[:,1],c=y)
plt.title('LDA')
plt.subplot(1,2,2)
plt.scatter(data[:,0],data[:,1],c=y)
plt.title('Raw features')
Text(0.5, 1.0, 'Raw features')
lr = LogisticRegression()
lr.fit(X,y)
lr.score(X,y)
0.9733333333333334
lr.fit(lda_data,y)
lr.score(lda_data,y)
0.98
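All of the scores in this walkthrough are training-set accuracies, so the PCA-vs-LDA comparison is only suggestive. A hedged cross-validation sketch of the same comparison (5 folds and max_iter=1000 are my own choices; note that lda_data was fit using all labels, so this is still slightly optimistic for LDA, and a Pipeline would be the cleaner setup):
from sklearn.model_selection import cross_val_score
# 5-fold CV accuracy on the raw features, the 2-component PCA projection, and the LDA projection
for name, feats in [('raw', X), ('pca-2', X2), ('lda-2', lda_data)]:
    scores = cross_val_score(LogisticRegression(max_iter=1000), feats, y, cv=5)
    print(name, scores.mean())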