主要参考了吴恩达老师的UFLDL教程。
推导的话,建议先看吴恩达老师的UFLDL和CS229,然后是周志华老师的西瓜书(感觉比较简略)。Goodfellow的深度学习是从另一个角度展开的,比较好玩。
实现部分的最后使用了sklearn库中的PCA做了简单的验证。效果图都在下面。
比较粗糙,欢迎指正!
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 26 15:49:23 2018
@author: Roc-Ng
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
K_first = 1  # number of principal components to keep

'''
Build toy samples: two strongly correlated features so the data lie
roughly along a line and PCA has an obvious first principal axis.
'''
x1 = np.random.rand(30, 1)
x2 = x1 + np.random.rand(30, 1) * 0.5
x_r = np.hstack((x1, x2))

'''
Centering: subtract the per-feature mean. PCA requires zero-mean data.
Whitening (dividing by the per-feature std) is optional and omitted here.
'''
mean_x = np.mean(x_r, 0)
x = x_r - mean_x
plt.plot(x[:, 0], x[:, 1], 'ro')
plt.show()

'''
Covariance matrix of the centered data. Note: this divides by n, while
sklearn's PCA uses the unbiased n-1 estimator — the eigenvectors are
identical either way, only the eigenvalues are scaled.
'''
sigma = np.dot(x.T, x) / np.shape(x)[0]

'''
Eigendecomposition via SVD: for a symmetric PSD matrix the left singular
vectors U are the eigenvectors and S the eigenvalues, with S already
sorted in descending order, so U's columns are ordered by variance.
'''
U, S, V = np.linalg.svd(sigma)

'''
Rotate the data into the eigenbasis (principal-component coordinates).
'''
x_rot = np.dot(x, U)
plt.plot(x_rot[:, 0], x_rot[:, 1], 'bo')
plt.show()

'''
Dimensionality reduction: keep only the first K_first components.
(Reconstruction back to the original space would be
np.dot(x_rot[:, :K_first], U[:, :K_first].T) + mean_x.)
'''
x_reduct = x_rot[:, :K_first]
plt.plot(x_reduct, 'yo')
plt.show()

'''
Validate against sklearn's PCA. fit_transform both fits and projects in
one call, so no separate fit() is needed. Each component's sign may be
flipped relative to our U — that is an inherent ambiguity of PCA.
'''
pca = PCA(n_components=K_first)
plt.plot(pca.fit_transform(x), 'go')
plt.show()