sklearn与机器学习知识点概括

数值型变量的标准化
axis=0 按列(即每个特征)计算均值、标准差进行标准化；axis=1 按行(即每个样本)计算均值、标准差。
(注:本文使用的 datasets.load_boston() 已在 scikit-learn 1.2 中移除,新版本可改用 fetch_california_housing 等数据集。)
In [1]:

1
from sklearn import datasets
2
import numpy as np
3
import pandas as pd
In [2]:

1
boston = datasets.load_boston()
2
boston_df = pd.DataFrame(boston.data, columns = boston.feature_names)
In [3]:

1
boston_df.head()
Out[3]:
CRIM ZN INDUS CHAS NOX RM AGE DIS RAD TAX PTRATIO B LSTAT
0 0.00632 18.0 2.31 0.0 0.538 6.575 65.2 4.0900 1.0 296.0 15.3 396.90 4.98
1 0.02731 0.0 7.07 0.0 0.469 6.421 78.9 4.9671 2.0 242.0 17.8 396.90 9.14
2 0.02729 0.0 7.07 0.0 0.469 7.185 61.1 4.9671 2.0 242.0 17.8 392.83 4.03
3 0.03237 0.0 2.18 0.0 0.458 6.998 45.8 6.0622 3.0 222.0 18.7 394.63 2.94
4 0.06905 0.0 2.18 0.0 0.458 7.147 54.2 6.0622 3.0 222.0 18.7 396.90 5.33
In [4]:

1
from sklearn import preprocessing
2
boston_scaled = preprocessing.scale(boston_df)
In [5]:

1
boston_scaled
Out[5]:
array([[-0.41978194, 0.28482986, -1.2879095 , …, -1.45900038,
0.44105193, -1.0755623 ],
[-0.41733926, -0.48772236, -0.59338101, …, -0.30309415,
0.44105193, -0.49243937],
[-0.41734159, -0.48772236, -0.59338101, …, -0.30309415,
0.39642699, -1.2087274 ],
…,
[-0.41344658, -0.48772236, 0.11573841, …, 1.17646583,
0.44105193, -0.98304761],
[-0.40776407, -0.48772236, 0.11573841, …, 1.17646583,
0.4032249 , -0.86530163],
[-0.41500016, -0.48772236, 0.11573841, …, 1.17646583,
0.44105193, -0.66905833]])
In [6]:

1
boston_scaled.mean(axis = 0)
Out[6]:
array([-8.78743718e-17, -6.34319123e-16, -2.68291099e-15, 4.70199198e-16,
2.49032240e-15, -1.14523016e-14, -1.40785495e-15, 9.21090169e-16,
5.44140929e-16, -8.86861950e-16, -9.20563581e-15, 8.16310129e-15,
-3.37016317e-16])
In [7]:

1
boston_scaled.std(axis = 0)
Out[7]:
array([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])
In [10]:

1
boston_scaled_all = preprocessing.scale(boston_df,axis=1)
In [11]:

1
boston_scaled_all.mean(axis = 0)
Out[11]:
array([-0.4780112 , -0.40553476, -0.42254267, -0.49773735, -0.49427398,
-0.45178493, -0.01134041, -0.46893722, -0.43835293, 2.28303321,
-0.36385904, 2.159172 , -0.40983071])
In [12]:

1
boston_scaled_all.std(axis = 0)
Out[12]:
array([0.06425559, 0.18342798, 0.0560845 , 0.03195557, 0.03193079,
0.03022877, 0.19077168, 0.03244151, 0.06559356, 0.60447227,
0.03530255, 0.83087432, 0.05416801])
In [13]:

1
boston_scaled_all.mean(),boston_scaled_all.std()
Out[13]:
(-7.392485326631476e-18, 1.0)
In [14]:

1
preprocessing.scale(boston.target)
Out[14]:
array([ 0.15968566, -0.10152429, 1.32424667, 1.18275795, 1.48750288,
0.6712218 , 0.03996443, 0.49708184, -0.65659542, -0.39538548,
-0.81985164, -0.39538548, -0.09064054, -0.23212926, -0.47157171,
-0.286548 , 0.06173193, -0.54775795, -0.25389676, -0.47157171,
-0.97222411, -0.31919924, -0.79808414, -0.87427038, -0.75454915,
-0.93957286, -0.64571167, -0.84161913, -0.44980422, -0.16682677,
-1.07017784, -0.87427038, -1.0157591 , -1.02664285, -0.98310786,
-0.39538548, -0.27566425, -0.16682677, 0.23587189, 0.89978051,
1.34601416, 0.4426631 , 0.30117438, 0.23587189, -0.14505928,
-0.35185049, -0.27566425, -0.64571167, -0.88515413, -0.34096674,
-0.30831549, -0.22124551, 0.26852314, 0.09438317, -0.39538548,
1.4004329 , 0.23587189, 0.98685049, 0.08349942, -0.31919924,
-0.41715297, -0.71101416, -0.0362218 , 0.26852314, 1.13922296,
0.10526692, -0.34096674, -0.0579893 , -0.55864169, -0.17771052,
0.18145315, -0.09064054, 0.02908069, 0.09438317, 0.17056941,
-0.12329178, -0.27566425, -0.18859427, -0.14505928, -0.24301301,
0.59503557, 0.14880191, 0.24675564, 0.03996443, 0.14880191,
0.4426631 , -0.00357056, -0.0362218 , 0.11615067, 0.6712218 ,
0.00731319, -0.0579893 , 0.03996443, 0.26852314, -0.21036176,
0.63857056, -0.12329178, 1.75959658, 2.31466771, 1.16099045,
0.54061683, 0.43177935, -0.42803672, -0.35185049, -0.2647805 ,
-0.33008299, -0.33008299, -0.23212926, -0.29743175, -0.34096674,
-0.09064054, 0.02908069, -0.40626922, -0.41715297, -0.43892047,
-0.46068796, -0.14505928, -0.36273423, -0.23212926, -0.35185049,
-0.0579893 , -0.24301301, -0.22124551, -0.56952544, -0.40626922,
-0.12329178, -0.7436654 , -0.68924667, -0.49333921, -0.89603787,
-0.36273423, -0.31919924, 0.05084818, -0.44980422, -0.75454915,
-0.48245546, -0.55864169, -0.59129294, -1.00487535, -0.5151067 ,
-0.92868912, -0.88515413, -0.9939916 , -0.75454915, -1.16813157,
-0.95045661, -0.75454915, -0.86338663, -0.5151067 , -0.77631665,
-0.11240804, -0.31919924, -0.7872004 , -0.34096674, -0.60217668,
-0.75454915, -1.02664285, 2.04257402, 0.1923369 , 0.08349942,
0.48619809, 2.98946007, 2.98946007, 2.98946007, 0.01819694,
0.26852314, 2.98946007, 0.13791816, 0.13791816, -0.02533805,
-0.55864169, -0.37361798, 0.06173193, 0.11615067, 0.00731319,
0.74740804, 0.07261568, 0.22498815, 0.80182678, 1.59634036,
1.8793178 , 1.48750288, 1.6725266 , 1.08480422, 0.4208956 ,
0.76917553, 2.98946007, 1.03038548, 0.79094303, 1.34601416,
1.57457287, 0.86712926, 1.50927038, 0.93243175, 0.71475679,
2.98946007, 1.1718742 , 0.84536177, 1.31336292, 1.34601416,
1.12833921, 0.17056941, 2.1514115 , 2.82620386, 2.98946007,
0.00731319, 0.20322065, -0.00357056, 0.20322065, -0.27566425,
-0.09064054, -0.35185049, -0.01445431, 0.60591932, 0.12703442,
0.26852314, 0.08349942, 0.6712218 , -0.11240804, 0.05084818,
0.45354685, -0.09064054, 0.54061683, 0.82359427, 2.42350519,
2.98946007, 1.63987535, 0.98685049, 2.6302964 , 0.97596674,
0.1923369 , 0.99773424, 2.08610901, 2.80443636, 0.70387305,
0.15968566, 0.27940688, 0.97596674, 0.12703442, 0.08349942,
-0.0579893 , -0.2647805 , -0.0362218 , 0.12703442, -0.5368742 ,
-0.43892047, 0.1923369 , -0.22124551, 0.2141044 , 0.39912811,
0.20322065, 0.24675564, 0.76917553, 2.20583024, -0.06887304,
-0.17771052, 2.33643521, 2.98946007, 1.46573539, 0.82359427,
1.22629294, 2.23848148, 2.8588551 , 0.921548 , 1.52015413,
0.02908069, 0.88889676, 2.98946007, 2.28201647, -0.19947802,
-0.15594303, 0.29029063, 0.20322065, 1.37866541, 1.07392047,
1.03038548, 1.16099045, 1.1501067 , 0.71475679, 1.36778166,
2.48880768, 1.4004329 , 2.55411016, 2.98946007, 1.05215297,
-0.0579893 , -0.2647805 , 0.07261568, -0.02533805, 0.24675564,
0.64945431, 1.60722411, 0.58415182, 0.14880191, -0.09064054,
0.66033806, 0.49708184, -0.24301301, -0.00357056, 0.70387305,
0.24675564, -0.0579893 , 0.4208956 , 1.1501067 , 1.47661914,
0.63857056, 1.18275795, 0.61680306, 0.02908069, -0.24301301,
-0.70013041, -0.04710555, -0.34096674, -0.10152429, 0.13791816,
-0.68924667, -0.5151067 , -0.29743175, 0.06173193, -0.16682677,
0.13791816, 0.06173193, -0.23212926, -0.43892047, 0.26852314,
0.22498815, 0.05084818, -0.0362218 , -0.35185049, 0.00731319,
-0.29743175, -0.59129294, -0.34096674, -0.0362218 , -0.19947802,
-0.15594303, -0.33008299, -0.43892047, -0.21036176, -0.38450173,
-0.41715297, 1.10657171, -0.65659542, 0.14880191, 0.9433155 ,
-0.54775795, -0.58040919, 0.06173193, 0.2141044 , 0.4426631 ,
0.03996443, 0.17056941, -0.42803672, 0.82359427, -0.47157171,
-0.21036176, -0.5151067 , -0.09064054, 0.01819694, 0.00731319,
0.26852314, -0.286548 , -0.18859427, -0.62394418, -0.06887304,
0.54061683, -0.06887304, 0.06173193, 2.98946007, 2.98946007,
2.98946007, 2.98946007, 2.98946007, -0.95045661, -0.95045661,
-0.81985164, -0.93957286, -1.00487535, -1.02664285, -1.34227153,
-1.32050404, -1.2660853 , -1.22255031, -1.11371283, -1.494644 ,
-1.66878396, -1.30962029, -1.64701647, -1.34227153, -1.20078281,
-0.80896789, 0.07261568, -1.39669027, -0.95045661, -1.07017784,
-1.02664285, -1.09194533, -1.52729524, -1.90822641, -1.76673769,
-1.84292393, -1.66878396, -1.13548032, -1.54906274, -1.52729524,
-1.90822641, -1.15724782, 0.58415182, -0.58040919, 0.54061683,
-0.81985164, -0.58040919, -0.50422295, -0.67836292, -1.69055146,
-1.66878396, -1.63613272, -1.32050404, -1.494644 , -1.53817899,
-0.63482793, -0.90692162, -0.18859427, -0.9939916 , -1.17901531,
-1.54906274, -1.34227153, -1.2660853 , -1.25520155, -1.41845777,
-0.87427038, -0.91780537, -0.70013041, -0.89603787, -1.17901531,
-0.9939916 , -1.40757402, -1.50552775, -1.53817899, -1.05929409,
-1.30962029, -0.59129294, -0.44980422, -0.77631665, -1.27696904,
-1.16813157, -0.83073539, -1.08106158, -0.91780537, -1.03752659,
-0.9939916 , -0.79808414, -0.70013041, -0.5151067 , -0.83073539,
-0.91780537, -1.07017784, -0.98310786, -0.83073539, -0.27566425,
-0.66747917, -0.52599045, -0.33008299, -0.25389676, -0.12329178,
-0.286548 , -0.38450173, -0.37361798, -0.37361798, -0.2647805 ,
-0.286548 , -0.31919924, 0.07261568, 0.79094303, -0.95045661,
-1.00487535, -0.63482793, -1.14636407, -0.86338663, -0.12329178,
0.05084818, 0.12703442, 0.26852314, -0.07975679, -0.21036176,
-0.14505928, -0.37361798, -0.21036176, -0.79808414, -1.69055146,
-1.57083023, -0.97222411, -0.2647805 , -0.07975679, 0.2141044 ,
0.06173193, -0.30831549, -0.46068796, -0.14505928, -0.54775795,
-0.62394418, -0.01445431, -0.21036176, 0.14880191, -0.0579893 ,
-1.15724782])
在多个数据集上使用相同的标准化变换
In [16]:

1

from sklearn import datasets

2

from sklearn import preprocessing

3

import pandas as pd

4

boston = datasets.load_boston()

5

boston_df = pd.DataFrame(boston.data, columns = boston.feature_names)

6
std = preprocessing.StandardScaler()
7
std.fit(boston_df)
8
std.mean_,std.scale_ #fit之后求出了原始数据每列的均值和标准差
Out[16]:
(array([3.61352356e+00, 1.13636364e+01, 1.11367787e+01, 6.91699605e-02,
5.54695059e-01, 6.28463439e+00, 6.85749012e+01, 3.79504269e+00,
9.54940711e+00, 4.08237154e+02, 1.84555336e+01, 3.56674032e+02,
1.26530632e+01]),
array([8.59304135e+00, 2.32993957e+01, 6.85357058e+00, 2.53742935e-01,
1.15763115e-01, 7.01922514e-01, 2.81210326e+01, 2.10362836e+00,
8.69865112e+00, 1.68370495e+02, 2.16280519e+00, 9.12046075e+01,
7.13400164e+00]))
In [18]:

1
std.transform(boston_df)
Out[18]:
array([[-0.41978194, 0.28482986, -1.2879095 , …, -1.45900038,
0.44105193, -1.0755623 ],
[-0.41733926, -0.48772236, -0.59338101, …, -0.30309415,
0.44105193, -0.49243937],
[-0.41734159, -0.48772236, -0.59338101, …, -0.30309415,
0.39642699, -1.2087274 ],
…,
[-0.41344658, -0.48772236, 0.11573841, …, 1.17646583,
0.44105193, -0.98304761],
[-0.40776407, -0.48772236, 0.11573841, …, 1.17646583,
0.4032249 , -0.86530163],
[-0.41500016, -0.48772236, 0.11573841, …, 1.17646583,
0.44105193, -0.66905833]])
In [20]:

1
std.transform(boston_df[:3])
Out[20]:
array([[-0.41978194, 0.28482986, -1.2879095 , -0.27259857, -0.14421743,
0.41367189, -0.12001342, 0.1402136 , -0.98284286, -0.66660821,
-1.45900038, 0.44105193, -1.0755623 ],
[-0.41733926, -0.48772236, -0.59338101, -0.27259857, -0.74026221,
0.19427445, 0.36716642, 0.55715988, -0.8678825 , -0.98732948,
-0.30309415, 0.44105193, -0.49243937],
[-0.41734159, -0.48772236, -0.59338101, -0.27259857, -0.74026221,
1.28271368, -0.26581176, 0.55715988, -0.8678825 , -0.98732948,
-0.30309415, 0.39642699, -1.2087274 ]])
将特征变量缩放至特定范围
In [21]:

1
scaler = preprocessing.MinMaxScaler((1,10))
2
scaler.fit_transform(boston_df)
Out[21]:
array([[ 1. , 2.62 , 1.61033724, …, 3.58510638,
10. , 1.80711921],
[ 1.0021233 , 1. , 3.18071848, …, 5.9787234 ,
10. , 2.84023179],
[ 1.00212128, 1. , 3.18071848, …, 5.9787234 ,
9.90763528, 1.57119205],
…,
[ 1.00550703, 1. , 4.78409091, …, 9.04255319,
10. , 1.97102649],
[ 1.01044657, 1. , 4.78409091, …, 9.04255319,
9.92170558, 2.17963576],
[ 1.00415658, 1. , 4.78409091, …, 9.04255319,
10. , 2.52731788]])
In [23]:

1
scaler_1 = preprocessing.MaxAbsScaler()
2
scaler_1.fit_transform(boston_df)
Out[23]:
array([[7.10302306e-05, 1.80000000e-01, 8.32732516e-02, …,
6.95454545e-01, 1.00000000e+00, 1.31156176e-01],
[3.06936012e-04, 0.00000000e+00, 2.54866619e-01, …,
8.09090909e-01, 1.00000000e+00, 2.40716355e-01],
[3.06711233e-04, 0.00000000e+00, 2.54866619e-01, …,
8.09090909e-01, 9.89745528e-01, 1.06136423e-01],
…,
[6.82879242e-04, 0.00000000e+00, 4.30064888e-01, …,
9.54545455e-01, 1.00000000e+00, 1.48538320e-01],
[1.23167768e-03, 0.00000000e+00, 4.30064888e-01, …,
9.54545455e-01, 9.91307634e-01, 1.70661048e-01],
[5.32839119e-04, 0.00000000e+00, 4.30064888e-01, …,
9.54545455e-01, 1.00000000e+00, 2.07532262e-01]])
In [24]:

1
help(preprocessing.MaxAbsScaler)
Help on class MaxAbsScaler in module sklearn.preprocessing.data:

class MaxAbsScaler(sklearn.base.BaseEstimator, sklearn.base.TransformerMixin)
| MaxAbsScaler(copy=True)
|
| Scale each feature by its maximum absolute value.
|
| This estimator scales and translates each feature individually such
| that the maximal absolute value of each feature in the
| training set will be 1.0. It does not shift/center the data, and
| thus does not destroy any sparsity.
|
| This scaler can also be applied to sparse CSR or CSC matrices.
|
| .. versionadded:: 0.17
|

Parameters
copy : boolean, optional, default is True
Set to False to perform inplace scaling and avoid a copy (if the input
is already a numpy array).
Attributes
----------
scale_ : ndarray, shape (n_features,)
Per feature relative scaling of the data.
.. versionadded:: 0.17
scale_ attribute.
max_abs_ : ndarray, shape (n_features,)
Per feature maximum absolute value.
n_samples_seen_ : int
The number of samples processed by the estimator. Will be reset on
new calls to fit, but increments across partial_fit calls.
Examples
--------
>>> from sklearn.preprocessing import MaxAbsScaler
>>> X = [[ 1., -1., 2.],
...      [ 2., 0., 0.],
...      [ 0., 1., -1.]]
>>> transformer = MaxAbsScaler().fit(X)
>>> transformer
MaxAbsScaler(copy=True)
>>> transformer.transform(X)
array([[ 0.5, -1. , 1. ],
[ 1. , 0. , 0. ],
[ 0. , 1. , -0.5]])
See also
--------
maxabs_scale: Equivalent function without the estimator API.
Notes
-----
NaNs are treated as missing values: disregarded in fit, and maintained in
transform.
For a comparison of the different scalers, transformers, and normalizers,
see :ref:`examples/preprocessing/plot_all_scaling.py
<sphx_glr_auto_examples_preprocessing_plot_all_scaling.py>`.
Method resolution order:
MaxAbsScaler
sklearn.base.BaseEstimator
sklearn.base.TransformerMixin
builtins.object
Methods defined here:
__init__(self, copy=True)
Initialize self. See help(type(self)) for accurate signature.
fit(self, X, y=None)
Compute the maximum absolute value to be used for later scaling.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data used to compute the per-feature minimum and maximum
used for later scaling along the features axis.
inverse_transform(self, X)
Scale back the data to the original representation
Parameters
----------
X : {array-like, sparse matrix}
The data that should be transformed back.
partial_fit(self, X, y=None)
Online computation of max absolute value of X for later scaling.
All of X is processed as a single batch. This is intended for cases
when fit is not feasible due to very large number of n_samples
or because X is read from a continuous stream.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data used to compute the mean and standard deviation
used for later scaling along the features axis.
y
Ignored
transform(self, X)
Scale the data
Parameters
----------
X : {array-like, sparse matrix}
The data that should be scaled.
----------------------------------------------------------------------
Methods inherited from sklearn.base.BaseEstimator:
__getstate__(self)
__repr__(self)
Return repr(self).
__setstate__(self, state)
get_params(self, deep=True)
Get parameters for this estimator.
Parameters
----------
deep : boolean, optional
If True, will return the parameters for this estimator and
contained subobjects that are estimators.
Returns
-------
params : mapping of string to any
Parameter names mapped to their values.
set_params(self, **params)
Set the parameters of this estimator.
The method works on simple estimators as well as on nested objects
(such as pipelines). The latter have parameters of the form
``<component>__<parameter>`` so that it's possible to update each
component of a nested object.
Returns
-------
self
----------------------------------------------------------------------
Data descriptors inherited from sklearn.base.BaseEstimator:
dict
dictionary for instance variables (if defined)
weakref
list of weak references to the object (if defined)
----------------------------------------------------------------------
Methods inherited from sklearn.base.TransformerMixin:
fit_transform(self, X, y=None, **fit_params)
Fit to data, then transform it.
Fits transformer to X and y with optional parameters fit_params
and returns a transformed version of X.
Parameters
----------
X : numpy array of shape [n_samples, n_features]
Training set.
y : numpy array of shape [n_samples]
Target values.
Returns
-------
X_new : numpy array of shape [n_samples, n_features_new]
Transformed array.

数据的正则化
In [31]:

1
x = [[-1,-1,2]]
2
x_normalized = preprocessing.normalize(x, norm='l2', return_norm=True)
3
x_normalized
Out[31]:
(array([[-0.40824829, -0.40824829, 0.81649658]]), array([2.44948974]))
In [32]:

1
-1/2.44948974
Out[32]:
-0.408248290927726
考虑异常值的标准化方法
In [33]:

1
preprocessing.robust_scale(boston_df)
Out[33]:
array([[-0.06959315, 1.44 , -0.57164988, …, -1.33928571,
0.26190191, -0.63768116],
[-0.06375455, 0. , -0.20294345, …, -0.44642857,
0.26190191, -0.22188906],
[-0.06376011, 0. , -0.20294345, …, -0.44642857,
0.06667466, -0.73263368],
…,
[-0.05445006, 0. , 0.17350891, …, 0.69642857,
0.26190191, -0.57171414],
[-0.04086745, 0. , 0.17350891, …, 0.69642857,
0.09641444, -0.48775612],
[-0.05816351, 0. , 0.17350891, …, 0.69642857,
0.26190191, -0.34782609]])
In [34]:

1
rscaler = preprocessing.RobustScaler()
2
rs = rscaler.fit_transform(boston_df)
3
rs
Out[34]:
array([[-0.06959315, 1.44 , -0.57164988, …, -1.33928571,
0.26190191, -0.63768116],
[-0.06375455, 0. , -0.20294345, …, -0.44642857,
0.26190191, -0.22188906],
[-0.06376011, 0. , -0.20294345, …, -0.44642857,
0.06667466, -0.73263368],
…,
[-0.05445006, 0. , 0.17350891, …, 0.69642857,
0.26190191, -0.57171414],
[-0.04086745, 0. , 0.17350891, …, 0.69642857,
0.09641444, -0.48775612],
[-0.05816351, 0. , 0.17350891, …, 0.69642857,
0.26190191, -0.34782609]])
In [35]:

1
rscaler_1 = preprocessing.RobustScaler()
2
rs_1 = rscaler_1.fit(boston_df)
3
rs_1_trans = rs_1.transform(boston_df)
4
rs_1_trans
Out[35]:
array([[-0.06959315, 1.44 , -0.57164988, …, -1.33928571,
0.26190191, -0.63768116],
[-0.06375455, 0. , -0.20294345, …, -0.44642857,
0.26190191, -0.22188906],
[-0.06376011, 0. , -0.20294345, …, -0.44642857,
0.06667466, -0.73263368],
…,
[-0.05445006, 0. , 0.17350891, …, 0.69642857,
0.26190191, -0.57171414],
[-0.04086745, 0. , 0.17350891, …, 0.69642857,
0.09641444, -0.48775612],
[-0.05816351, 0. , 0.17350891, …, 0.69642857,
0.26190191, -0.34782609]])
In [36]:

1
np.median(rs,axis = 0)
Out[36]:
array([-7.69783542e-18, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 7.19910243e-17,
0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
8.86877377e-17])
In [37]:

1
rs.mean(axis = 0)
Out[37]:
array([ 0.93379097, 0.90909091, 0.11206651, 0.06916996, 0.09540034,
0.10316313, -0.1819592 , 0.1902672 , 0.22747036, 0.20216319,
-0.21230943, -1.66763249, 0.1292417 ])
In [38]:

1
rs.std(axis = 1)
Out[38]:
array([0.6250073 , 0.27406231, 0.4942495 , 0.5359548 , 0.5391737 ,
0.39422312, 0.53151563, 0.57764139, 0.78034348, 0.59313259,
0.59963953, 0.55749395, 0.58147107, 0.29536587, 0.28148001,
0.3107368 , 0.39619017, 0.25453891, 1.35164698, 0.27894161,
0.4757345 , 0.23810586, 0.27397875, 0.35107135, 0.27853045,
1.19748192, 0.3610289 , 1.15548503, 0.24480698, 0.30982337,
0.60406476, 0.32999804, 2.13820423, 0.56432822, 1.90344892,
0.16264113, 0.21997263, 0.26594417, 0.28145386, 1.71599107,
1.74898854, 0.56623875, 0.48057288, 0.48389133, 0.34941025,
0.38079466, 0.38799294, 0.37623785, 0.70294873, 0.43227149,
0.67449372, 0.64772504, 0.74577359, 0.72434851, 1.67635852,
2.08707831, 1.9660246 , 2.29956181, 0.77072363, 0.69851182,
0.69583066, 0.68803271, 0.68226695, 0.7477414 , 0.79423182,
1.86485303, 1.848483 , 0.58110748, 0.57107742, 0.54795931,
0.49444086, 0.45033393, 0.478743 , 0.4983928 , 0.45695926,
0.30215596, 0.30494357, 0.26497459, 0.28810974, 0.34041125,
0.72050802, 0.64516003, 0.69138624, 0.65721231, 0.30128566,
0.31388083, 0.28329128, 0.20833072, 0.40748642, 0.42457997,
0.20023486, 0.20492822, 0.66853925, 0.72282276, 0.63657392,
0.4839031 , 0.19852923, 0.76849986, 0.72036445, 0.54291281,
0.27038376, 0.30566938, 4.12119195, 0.20617042, 0.212292 ,
0.3066183 , 0.33036182, 0.23736022, 0.23658002, 0.2285757 ,
0.2403253 , 0.24995715, 0.25893945, 0.25271526, 0.17636961,
0.63861823, 0.15696516, 0.17469946, 0.70328819, 0.24273257,
0.40938211, 0.43596626, 0.47290033, 0.63509994, 0.47541469,
0.41457988, 0.76747005, 0.44637043, 0.35401466, 0.45493103,
0.34009305, 0.32714014, 0.35926131, 0.39390445, 1.75486828,
0.34597674, 0.44900734, 0.35180572, 0.46417663, 0.38829161,
0.47983725, 0.85191357, 0.94257813, 0.91942654, 1.0285824 ,
2.98953922, 2.97997942, 1.02025522, 1.07098373, 0.99473943,
0.78787274, 1.03689994, 1.10243347, 1.85878022, 1.2298443 ,
4.02562449, 3.98143561, 0.72229372, 0.72226937, 0.84125768,
0.91606672, 0.79089379, 0.86890361, 1.00261278, 0.54766702,
2.01608867, 0.91186535, 2.15156213, 1.33113307, 0.9662058 ,
1.37603289, 0.78280078, 0.36636316, 0.2857016 , 0.25230178,
0.37420171, 0.28116713, 0.28581057, 0.37660792, 0.41636884,
0.64452272, 0.20866875, 0.47028858, 0.32348198, 0.30001692,
0.19993432, 0.69301347, 1.13578001, 1.15924713, 1.18166486,
1.22944421, 1.18665241, 1.23668073, 1.49873833, 1.49701335,
1.9706335 , 1.98795165, 2.04026818, 1.97580171, 2.17689783,
2.19513602, 1.91612325, 2.00250411, 2.24113904, 2.26395239,
0.33856042, 0.20915168, 0.28578157, 0.36863132, 0.55802477,
0.35206852, 0.54813367, 0.38221237, 0.28066879, 0.87427253,
0.2222928 , 0.434169 , 0.33214393, 0.45415755, 0.40507993,
0.43821954, 0.43447338, 0.41835348, 0.27771175, 0.82469151,
0.98625965, 0.75204072, 0.49581849, 0.72502306, 0.41693548,
0.2450949 , 0.55906694, 0.86100388, 0.8338093 , 0.57545554,
0.25050069, 0.38474369, 0.51575744, 0.88349938, 0.83596057,
0.82750012, 0.78840625, 0.86298705, 0.94659627, 0.75516843,
0.72291721, 0.71971982, 0.69945933, 0.72357286, 0.77652525,
0.76211323, 0.8103469 , 0.90436444, 1.12007407, 1.886782 ,
1.90133522, 2.10147499, 1.2288562 , 0.91303525, 0.83899133,
0.86887491, 0.93406886, 1.14148487, 0.89091219, 0.87965417,
0.81209909, 0.85134674, 1.10397787, 0.93385438, 0.56609773,
0.54332584, 0.61253962, 0.51427943, 0.78377935, 1.02491311,
0.99438303, 1.03673509, 1.03832884, 0.96486621, 0.75046008,
0.92277535, 0.78101668, 0.96408804, 2.20750402, 2.09506688,
1.3826988 , 1.99814086, 1.28879589, 1.26855499, 1.33201038,
1.81172345, 1.82176036, 1.80612719, 0.51558981, 0.45225391,
0.55148129, 0.48179379, 0.50840565, 1.74847798, 1.77474092,
1.6899325 , 0.88945194, 0.93315462, 0.9796187 , 0.88205576,
0.78457209, 0.88300766, 0.80071261, 0.26965284, 0.12889189,
0.7095418 , 0.21956544, 0.1423705 , 0.12393796, 0.17926198,
0.21907966, 0.24463754, 0.24173916, 0.12918314, 0.16748611,
0.26610283, 0.25874332, 0.26599394, 0.25266674, 0.32420382,
0.45524609, 0.38834719, 0.31040665, 0.42719201, 0.48651851,
0.50098723, 0.94427743, 1.01468071, 0.43469076, 0.4244949 ,
0.40783089, 0.33772011, 0.32006394, 0.33155758, 0.30743078,
0.27748622, 1.04536391, 0.48346418, 1.24717651, 1.3170771 ,
0.50906541, 0.60845741, 1.93550489, 1.85386447, 1.098498 ,
1.06033839, 1.54817362, 1.535506 , 2.15463652, 1.92502634,
1.93509629, 0.75938988, 0.46314744, 0.52607213, 0.5045664 ,
0.60947257, 0.78911698, 0.67272985, 0.83168021, 1.16314297,
1.27493504, 1.25629146, 3.68864703, 0.81816452, 0.68605351,
0.68599906, 0.85395546, 0.99593075, 1.14821323, 1.72158341,
1.40721649, 1.22554118, 0.73287797, 1.68319119, 1.28359063,
6.47441049, 1.12219801, 0.81372558, 0.76700643, 2.2977925 ,
1.35177371, 1.92140177, 1.77064477, 1.33025454, 0.77070638,
0.63350344, 0.57858122, 1.04804591, 0.6607969 , 0.98797796,
0.65904852, 0.50790774, 0.68153356, 2.80589763, 1.21100991,
1.80467401, 1.01613152, 0.7926916 , 1.83566752, 3.22446213,
4.96497849, 1.81681038, 1.29944623, 1.3287871 , 3.07919634,
6.51111135, 4.84827013, 5.08263124, 3.31345166, 5.45287635,
5.05966346, 4.98422007, 4.09778346, 7.54409925, 4.65580759,
1.35476973, 1.19990792, 1.68518822, 5.139426 , 5.11695169,
5.23118162, 4.90485247, 5.74958636, 3.95239094, 4.45283819,
4.11910474, 4.19281604, 3.89387781, 3.88573755, 4.01923697,
3.87818595, 4.95430547, 5.21573787, 4.46795049, 0.80887159,
1.60645779, 0.74786654, 0.51305739, 0.75672494, 2.32369441,
4.70692534, 1.20337528, 0.75206395, 0.68903768, 1.39241359,
5.18109035, 0.75736492, 0.49736756, 0.74993498, 5.13882241,
4.509815 , 5.01559827, 5.13680213, 1.76586469, 0.55125926,
1.92846635, 0.40825207, 0.52389712, 0.4945897 , 0.60413815,
0.94505459, 4.84620829, 0.99331253, 1.21443162, 1.00071036,
0.39117789, 0.38563394, 0.36593103, 0.57361057, 0.94048174,
1.38529272, 0.41749526, 1.37535177, 0.79393376, 1.05811618,
0.5018133 , 0.51573363, 0.5509982 , 0.50618106, 0.56645226,
0.44026666, 0.46128725, 0.52020229, 0.58608846, 0.93961439,
1.2862877 , 0.50246282, 0.47117786, 0.26479232, 0.26832973,
0.3926689 , 0.4267789 , 0.21234863, 0.17263614, 0.29227129,
0.1621952 , 0.27370941, 0.25401684, 0.40652786, 0.34383714,
0.26633386])
缺失值的填充
In [40]:

1
imp = preprocessing.Imputer()
2
imp.fit([[1,2],[np.nan,3],[7,6]])
3
imp.statistics_
C:\Users\admin\Anaconda3\lib\site-packages\sklearn\utils\deprecation.py:58: DeprecationWarning: Class Imputer is deprecated; Imputer was deprecated in version 0.20 and will be removed in 0.22. Import impute.SimpleImputer from sklearn instead.
warnings.warn(msg, category=DeprecationWarning)
Out[40]:
array([4. , 3.66666667])
In [41]:

1
imp.transform([[np.nan,2],[6,np.nan]])
Out[41]:
array([[4. , 2. ],
[6. , 3.66666667]])
In [43]:

1
from sklearn import impute
2
imp_1 = impute.SimpleImputer()
3
imp_1.fit([[1,2],[np.nan,3],[7,6]])
4
imp_1.statistics_
Out[43]:
array([4. , 3.66666667])
In [44]:

1
imp_1.transform([[np.nan,2],[6,np.nan]])
Out[44]:
array([[4. , 2. ],
[6. , 3.66666667]])
生成多项式特征(交互项)
In [45]:

1
poly = preprocessing.PolynomialFeatures(interaction_only=True)
2
poly_res = poly.fit_transform(boston_df.iloc[:,[0,1,2,3]])
3
poly_res[:1]
Out[45]:
array([[1.00000e+00, 6.32000e-03, 1.80000e+01, 2.31000e+00, 0.00000e+00,
1.13760e-01, 1.45992e-02, 0.00000e+00, 4.15800e+01, 0.00000e+00,
0.00000e+00]])
In [46]:

1
#包含cons,0,1,2,3,01,02,03,12,13,23共11列
In [47]:

1
poly = preprocessing.PolynomialFeatures(interaction_only=True)
2
poly_res = poly.fit_transform(boston_df)
3
poly_res[:1]
Out[47]:
array([[1.0000000e+00, 6.3200000e-03, 1.8000000e+01, 2.3100000e+00,
0.0000000e+00, 5.3800000e-01, 6.5750000e+00, 6.5200000e+01,
4.0900000e+00, 1.0000000e+00, 2.9600000e+02, 1.5300000e+01,
3.9690000e+02, 4.9800000e+00, 1.1376000e-01, 1.4599200e-02,
0.0000000e+00, 3.4001600e-03, 4.1554000e-02, 4.1206400e-01,
2.5848800e-02, 6.3200000e-03, 1.8707200e+00, 9.6696000e-02,
2.5084080e+00, 3.1473600e-02, 4.1580000e+01, 0.0000000e+00,
9.6840000e+00, 1.1835000e+02, 1.1736000e+03, 7.3620000e+01,
1.8000000e+01, 5.3280000e+03, 2.7540000e+02, 7.1442000e+03,
8.9640000e+01, 0.0000000e+00, 1.2427800e+00, 1.5188250e+01,
1.5061200e+02, 9.4479000e+00, 2.3100000e+00, 6.8376000e+02,
3.5343000e+01, 9.1683900e+02, 1.1503800e+01, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
3.5373500e+00, 3.5077600e+01, 2.2004200e+00, 5.3800000e-01,
1.5924800e+02, 8.2314000e+00, 2.1353220e+02, 2.6792400e+00,
4.2869000e+02, 2.6891750e+01, 6.5750000e+00, 1.9462000e+03,
1.0059750e+02, 2.6096175e+03, 3.2743500e+01, 2.6666800e+02,
6.5200000e+01, 1.9299200e+04, 9.9756000e+02, 2.5877880e+04,
3.2469600e+02, 4.0900000e+00, 1.2106400e+03, 6.2577000e+01,
1.6233210e+03, 2.0368200e+01, 2.9600000e+02, 1.5300000e+01,
3.9690000e+02, 4.9800000e+00, 4.5288000e+03, 1.1748240e+05,
1.4740800e+03, 6.0725700e+03, 7.6194000e+01, 1.9765620e+03]])
自定义转换器
In [48]:

1
trans = preprocessing.FunctionTransformer(np.sqrt)
2
x = np.array([[1,2],[3,4]])
3
trans.transform(x)
C:\Users\admin\Anaconda3\lib\site-packages\sklearn\preprocessing_function_transformer.py:98: FutureWarning: The default validate=True will be replaced by validate=False in 0.22.
"validate=False in 0.22.", FutureWarning)
Out[48]:
array([[1. , 1.41421356],
[1.73205081, 2. ]])
移除变异度明显过低的特征
In [52]:

1
from sklearn import feature_selection
2
x = [[0,2,0,3],
3
[0,1,4,3],
4
[0,1,1,3]]
5
selec = feature_selection.VarianceThreshold()
6
selec.fit(x)
7
selec.variances_
Out[52]:
array([0. , 0.22222222, 2.88888889, 0. ])
In [53]:

1
selec.transform(x)
Out[53]:
array([[2, 0],
[1, 4],
[1, 1]])
基于单变量检验进行筛选
In [54]:

1
from sklearn import feature_selection
2
sele = feature_selection.SelectKBest(feature_selection.f_regression,k = 2)
3
sele.fit_transform(boston.data,boston.target)
Out[54]:
array([[6.575, 4.98 ],
[6.421, 9.14 ],
[7.185, 4.03 ],
…,
[6.976, 5.64 ],
[6.794, 6.48 ],
[6.03 , 7.88 ]])
In [55]:

1
sele.pvalues_
Out[55]:
array([1.17398708e-19, 5.71358415e-17, 4.90025998e-31, 7.39062317e-05,
7.06504159e-24, 2.48722887e-74, 1.56998221e-18, 1.20661173e-08,
5.46593257e-19, 5.63773363e-29, 1.60950948e-34, 1.31811273e-14,
5.08110339e-88])
In [56]:

1
sele.scores_
Out[56]:
array([ 89.48611476, 75.2576423 , 153.95488314, 15.97151242,
112.59148028, 471.84673988, 83.47745922, 33.57957033,
85.91427767, 141.76135658, 175.10554288, 63.05422911,
601.61787111])
基于检验误差进行筛选
In [57]:

1
sel_fpr = feature_selection.SelectFpr(feature_selection.f_regression,alpha = 10e-10).fit(boston.data,boston.target)
2
sel_fpr.pvalues_
Out[57]:
array([1.17398708e-19, 5.71358415e-17, 4.90025998e-31, 7.39062317e-05,
7.06504159e-24, 2.48722887e-74, 1.56998221e-18, 1.20661173e-08,
5.46593257e-19, 5.63773363e-29, 1.60950948e-34, 1.31811273e-14,
5.08110339e-88])
In [58]:

1
sel_fpr.transform(boston.data)[:1]
Out[58]:
array([[6.320e-03, 1.800e+01, 2.310e+00, 5.380e-01, 6.575e+00, 6.520e+01,
1.000e+00, 2.960e+02, 1.530e+01, 3.969e+02, 4.980e+00]])
统计量用于特征筛选的通用框架
主成分分析 用PCA

你可能感兴趣的:(Python之旅)