Collaborative filtering is probably the best-known technique in recommender systems; almost every production recommendation pipeline uses it, because it is simple and works reasonably well. It comes in two flavors, user-based and item-based, and the underlying idea is either "users whose behavior is similar to the current user's like an item, so the current user will probably like it too" or "items A and B are liked by the same group of users, so they are considered similar". Collaborative filtering is usually realized with one of two models: the nearest-neighbor model and the latent factor model. The first is the more common and better-known one, since it amounts to defining a weight (a similarity measure) and scoring items with it; this post mainly covers the second.
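For contrast with the latent factor approach discussed below, here is a minimal sketch of the item-based nearest-neighbor flavor; the toy matrix and the helper name item_sim are purely illustrative. The "weight" here is just the cosine similarity between two item columns of the rating matrix:

import numpy as np

R = np.array([[4, 0, 5, 0],    # rows = users, columns = items, 0 = not rated
              [5, 0, 4, 1],
              [0, 3, 0, 4],
              [1, 4, 0, 5]], dtype=float)

def item_sim(a, b):
    # cosine similarity between two item columns
    denom = np.linalg.norm(a) * np.linalg.norm(b)
    return float(a.dot(b) / denom) if denom else 0.0

# how similar every item is to item 0 (the first column)
sims = [item_sim(R[:, 0], R[:, j]) for j in range(R.shape[1])]
print(sims)   # item 2 comes out closest to item 0, apart from item 0 itself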
The most typical latent factor model is matrix factorization. A matrix factorization model tries to learn a set of latent vectors: for each user u a k-dimensional vector W_u, and for each item i a k-dimensional vector H_i, with the assumption that user u's interest in item i is the inner product of W_u and H_i. In plain terms, we factor the rating matrix, use the singular values to pull out the dominant features, and use those features to fill in the missing entries; recommendation is essentially the process of estimating the unknown entries of the matrix from the known ones.
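A minimal sketch of that idea, before the SVD-based code later in this post: every user and every item gets a k-dimensional vector, the predicted interest is their inner product, and fitting the vectors to the known entries fills in the unknown ones. The toy matrix R, the factor dimension k and the plain SGD loop here are only illustrative assumptions, not the method used below:

import numpy as np

R = np.array([[5, 3, 0, 1],   # toy rating matrix: rows = users, columns = items, 0 = unknown
              [4, 0, 0, 1],
              [1, 1, 0, 5],
              [0, 1, 5, 4]], dtype=float)
num_users, num_items = R.shape
k = 2                          # number of latent factors
np.random.seed(0)
W = 0.1 * np.random.randn(num_users, k)   # one k-dimensional vector per user
H = 0.1 * np.random.randn(num_items, k)   # one k-dimensional vector per item

lr, reg = 0.01, 0.02
for _ in range(2000):          # plain SGD over the observed (non-zero) entries
    for u, i in zip(*R.nonzero()):
        err = R[u, i] - W[u].dot(H[i])
        w_old = W[u].copy()
        W[u] += lr * (err * H[i] - reg * W[u])
        H[i] += lr * (err * w_old - reg * H[i])

print(np.round(W.dot(H.T), 2))   # every cell, known or not, is estimated as W[u] . H[i]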
For an m×n matrix M there always exists a factorization

    M = U Σ V^T

where U is an m×m orthogonal matrix, Σ is an m×n diagonal matrix with non-negative entries, and V^T is the transpose of an n×n orthogonal matrix V. If any of the singular values is zero, the decomposition is not unique. For any such singular value decomposition, the diagonal entries of Σ are the singular values of M, and the columns of U and V are the corresponding left and right singular vectors. In other words, any matrix M can be written as the product of an orthogonal basis, a diagonal matrix of its singular values, and the transpose of another orthogonal basis.
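As a quick numerical sanity check of the statement above (the small matrix here is just an example), numpy's la.svd, the same routine used by the recommendation code further down, returns the three factors, and their product reproduces M:

from numpy import mat, zeros, allclose
from numpy import linalg as la

M = mat([[1, 1, 1, 0, 0],
         [2, 2, 2, 0, 0],
         [0, 0, 0, 3, 3],
         [0, 0, 0, 1, 1]])
U, Sigma, VT = la.svd(M)           # Sigma comes back as a 1-d array of singular values
m, n = M.shape
SigMat = mat(zeros((m, n)))        # rebuild the full m x n diagonal matrix
for i in range(len(Sigma)):
    SigMat[i, i] = Sigma[i]
print(allclose(U * SigMat * VT, M))   # True: M equals U * Sigma * V^T up to rounding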
As for how the SVD itself is actually computed, I searched for quite a while and could not find a good explanation; if anyone understands the algorithm, please explain it in detail, or I will add it later if I find something. In any case there are ready-made library routines you can simply call.
Here is how matrix factorization is applied in recommendation: decompose the user-item rating matrix with SVD, keep only the largest singular values so that items are mapped into a low-dimensional latent feature space, compute item similarities in that space, and use those similarities to estimate the ratings that are still missing.
Below is an example that extracts the main features through matrix factorization (SVD) and then computes item similarities to recommend items to a given user:
#coding=UTF-8
from numpy import *
from numpy import linalg as la

def loadExData():
    # small user-item rating matrix: rows = users, columns = items, 0 = unrated
    return [[0, 0, 0, 2, 2],
            [0, 0, 0, 3, 3],
            [0, 0, 0, 1, 1],
            [1, 1, 1, 0, 0],
            [2, 2, 2, 0, 0],
            [5, 5, 5, 0, 0],
            [1, 1, 1, 0, 0]]

def loadExData2():
    # larger, sparser 11x11 rating matrix used by the demo in __main__
    return [[0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 5],
            [0, 0, 0, 3, 0, 4, 0, 0, 0, 0, 3],
            [0, 0, 0, 0, 4, 0, 0, 1, 0, 4, 0],
            [3, 3, 4, 0, 0, 0, 0, 2, 2, 0, 0],
            [5, 4, 5, 0, 0, 0, 0, 5, 5, 0, 0],
            [0, 0, 0, 0, 5, 0, 1, 0, 0, 5, 0],
            [4, 3, 4, 0, 0, 0, 0, 5, 5, 0, 1],
            [0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 4],
            [0, 0, 0, 2, 0, 2, 5, 0, 0, 1, 2],
            [0, 0, 0, 0, 5, 0, 0, 0, 0, 4, 0],
            [1, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0]]

def ecludSim(inA, inB):
    # similarity from Euclidean distance, mapped into (0, 1]
    return 1.0/(1.0 + la.norm(inA - inB))

def pearsSim(inA, inB):
    # Pearson correlation rescaled from [-1, 1] to [0, 1]
    if len(inA) < 3: return 1.0
    return 0.5 + 0.5*corrcoef(inA, inB, rowvar=0)[0][1]

def cosSim(inA, inB):
    # cosine similarity rescaled from [-1, 1] to [0, 1]
    num = float(inA.T*inB)
    denom = la.norm(inA)*la.norm(inB)
    return 0.5 + 0.5*(num/denom)

def standEst(dataMat, user, simMeas, item):
    # estimate the user's rating for item from the items the user has already rated,
    # weighting each known rating by the similarity between the two item columns
    n = shape(dataMat)[1]
    simTotal = 0.0; ratSimTotal = 0.0
    for j in range(n):
        userRating = dataMat[user, j]
        if userRating == 0: continue
        # users who rated both item and j
        overLap = nonzero(logical_and(dataMat[:, item].A > 0, \
                                      dataMat[:, j].A > 0))[0]
        if len(overLap) == 0: similarity = 0
        else: similarity = simMeas(dataMat[overLap, item], \
                                   dataMat[overLap, j])
        print 'the %d and %d similarity is: %f' % (item, j, similarity)
        simTotal += similarity
        ratSimTotal += similarity * userRating
    if simTotal == 0: return 0
    else: return ratSimTotal/simTotal

def svdEst(dataMat, user, simMeas, item):
    # same idea as standEst, but similarities are computed in the 4-dimensional
    # latent space obtained from the SVD instead of on the raw (sparse) columns
    n = shape(dataMat)[1]
    simTotal = 0.0; ratSimTotal = 0.0
    U, Sigma, VT = la.svd(dataMat)
    Sig4 = mat(eye(4)*Sigma[:4])  # arrange the top-4 singular values into a diagonal matrix
    xformedItems = dataMat.T * U[:, :4] * Sig4.I  # map items into the 4-d latent space
    Sig = mat(eye(n)*Sigma)  # full Sigma as a diagonal matrix, only needed by the debug prints below
    #print Sig
    #print U * Sig * VT  # reconstructs the original matrix
    #print xformedItems  # item features, used below to compute item similarity
    #print "user feature:"
    #xformedUsers = dataMat * VT.T[:, :4] * Sig4.I  # map users into the same 4-d space
    #print xformedUsers
    #print xformedUsers * xformedItems.T
    #print dataMat
    for j in range(n):
        userRating = dataMat[user, j]
        if userRating == 0 or j == item: continue
        similarity = simMeas(xformedItems[item, :].T, \
                             xformedItems[j, :].T)
        print 'the %d and %d similarity is: %f' % (item, j, similarity)
        simTotal += similarity
        ratSimTotal += similarity * userRating
    if simTotal == 0: return 0
    else: return ratSimTotal/simTotal

def recommend(dataMat, user, N=3, simMeas=cosSim, estMethod=standEst):
    # score every item the user has not rated yet and return the top N
    unratedItems = nonzero(dataMat[user, :].A == 0)[1]  # find unrated items
    if len(unratedItems) == 0: return 'you rated everything'
    itemScores = []
    for item in unratedItems:
        estimatedScore = estMethod(dataMat, user, simMeas, item)
        itemScores.append((item, estimatedScore))
    return sorted(itemScores, key=lambda jj: jj[1], reverse=True)[:N]

def printMat(inMat, thresh=0.8):
    # print a 32x32 matrix as 0/1 "pixels", using thresh as the cutoff
    for i in range(32):
        for k in range(32):
            if float(inMat[i, k]) > thresh:
                print 1,
            else: print 0,
        print ''

def imgCompress(numSV=3, thresh=0.8):
    # SVD-based compression demo on a 32x32 0/1 text image ('0_5.txt')
    myl = []
    for line in open('0_5.txt').readlines():
        newRow = []
        for i in range(32):
            newRow.append(int(line[i]))
        myl.append(newRow)
    myMat = mat(myl)
    print "****original matrix******"
    printMat(myMat, thresh)
    U, Sigma, VT = la.svd(myMat)
    SigRecon = mat(zeros((numSV, numSV)))
    for k in range(numSV):  # construct diagonal matrix from vector
        SigRecon[k, k] = Sigma[k]
    reconMat = U[:, :numSV]*SigRecon*VT[:numSV, :]
    print "****reconstructed matrix using %d singular values******" % numSV
    printMat(reconMat, thresh)

if __name__ == '__main__':
    print "begin"
    myData = loadExData2()
    myMat = mat(myData)
    #myMat = mat(loadExData())
    print recommend(myMat, 2, 3, cosSim, svdEst)  # top-3 recommendations for user 2
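One thing svdEst hardcodes is the number of singular values it keeps (4, via Sig4). A common heuristic is to keep just enough singular values to retain most of the "energy", i.e. the sum of squared singular values. The snippet below is a sketch of that check (the 90% threshold is a convention, not something the listing above prescribes) and assumes it runs in the same file as the listing so that loadExData2 is available:

from numpy import mat, cumsum
from numpy import linalg as la

myMat = mat(loadExData2())
U, Sigma, VT = la.svd(myMat)
energy = Sigma ** 2
ratio = cumsum(energy) / energy.sum()     # cumulative share of the total energy
print(ratio)
k = (ratio >= 0.9).argmax() + 1           # smallest k whose cumulative energy reaches 90%
print(k)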