"""Incremental scraper for 58.com Shanghai rental listings.

Downloads listing pages, decodes the site's custom digit-obfuscation font
with fontTools, and inserts listings not yet seen into the MySQL table
``TongCheng_58``.
"""
import base64
import os
import random
import re
import time
from io import BytesIO

import pymysql
import requests
from fontTools.ttLib import TTFont
from lxml import etree

print('MySQL数据库正在连接,请稍等。。。。。。')
# NOTE(review): PyMySQL 1.0 removed positional connect() arguments; keyword
# arguments work on every version.
db = pymysql.connect(host='localhost', user='root', password='root',
                     database='document', charset="utf8")
print('MySQL数据库已经连接!!!!!!')
cursor = db.cursor()
# One-off DDL for the target table; run once, then keep commented out.
# sql = """CREATE TABLE TongCheng_58(
# id INT(11) NOT NULL PRIMARY KEY AUTO_INCREMENT COMMENT '自增id' ,
# pic VARCHAR(200) NOT NULL DEFAULT '0' COMMENT '个人房源图片',
# title VARCHAR(50) NOT NULL DEFAULT '0' COMMENT '个人房源标题',
# area VARCHAR(50) NOT NULL DEFAULT '0' COMMENT '个人房源面积户型',
# site VARCHAR (50) NOT NULL DEFAULT '0' COMMENT '个人房源位置',
# owner VARCHAR (10)NOT NULL DEFAULT '0' COMMENT '个人房源户主',
# rent VARCHAR(10) NOT NULL DEFAULT '0' COMMENT '个人房源租金',
# NewTime INT(11) NOT NULL COMMENT '添加时间戳'
# )ENGINE=InnoDB AUTO_INCREMENT=1;"""
# cursor.execute(sql)
def readfile(path):
    """Read a UTF-8 text file and return its lines stripped of whitespace.

    Args:
        path: path to the file; undecodable bytes are ignored.

    Returns:
        list[str]: one stripped string per line.
    """
    # `with` closes the handle even on error; the original leaked it.
    with open(path, encoding='utf-8', errors='ignore') as f:
        return [line.strip() for line in f]
def savefile(savepath, content):
    """Write *content* plus a trailing newline to *savepath* as UTF-8.

    Overwrites any existing file. The ``with`` block guarantees the file is
    flushed and closed even if the write raises (the original used manual
    open/flush/close).
    """
    with open(savepath, 'w+', encoding='utf8', newline="", errors='ignore') as fp:
        fp.write(content + "\n")
def make_font_file(base64_string: str) -> bytes:
    """Decode a base64 font payload and return the raw font bytes.

    As a side effect the decoded bytes are cached to ``text.otf`` in the
    current directory (useful for offline inspection of the obfuscation
    font).

    Args:
        base64_string: the base64 text extracted from the page's
            ``data:application/font-ttf`` URI.

    Returns:
        bytes: the decoded binary font data.
    """
    bin_data = base64.decodebytes(base64_string.encode())
    with open('text.otf', 'wb') as f:
        f.write(bin_data)
    return bin_data
def get_num(string, cmap=None):
    """Translate obfuscated font characters into their real digits.

    58.com renders digits with a per-page custom font whose cmap maps each
    fake code point to a glyph name like ``glyph00007``; the true digit is
    the last two digits of that name minus one.

    Args:
        string: characters drawn from the obfuscated code-point range.
        cmap: optional code-point -> glyph-name mapping; defaults to the
            module-level ``c`` extracted from the downloaded font.

    Returns:
        list[int]: the decoded digit for each input character.
    """
    mapping = c if cmap is None else cmap
    return [int(mapping[ord(ch)][-2:]) - 1 for ch in string]
proxy_list = readfile("./guoneiip.txt")  # one proxy per line, host:port
# The ten obfuscated code points the site's font substitutes for digits.
type_list = ['驋', '龒', '龤', '閏', '麣', '鸺', '龥', '齤', '餼', '鑶']


def _decode_obfuscated(text):
    """Return *text* with every obfuscated font char replaced by its digit."""
    out = ''
    for ch in text:
        if ch in type_list:
            out += str(get_num(ch)[0])
        else:
            out += ch
    return out


for i in range(1, 71):  # listing pages 1..70
    # Incremental scrape: reload the pic URLs already stored so this run
    # inserts only listings it has not seen before.
    arrt_list = set()
    db = pymysql.connect(host='localhost', user='root', password='root',
                         database='document', charset="utf8")
    cursor = db.cursor()
    cursor.execute('''select pic FROM TongCheng_58;''')
    for detail in cursor.fetchall():
        arrt_list.add(detail[0])
    url = "https://sh.58.com/chuzu/0/pn%s/?" % i
    params = {
        "PGTID": "0d200001-0000-2b84-5b0c-434a2838407f",
        "ClickID": 1
    }
    headers = {
        'cookie': 'userid360_xml=6FB311888C84FD2806E06F5BFC134B6A; time_create=1548638789422; f=n; commontopbar_new_city_info=2%7C%E4%B8%8A%E6%B5%B7%7Csh; commontopbar_ipcity=zz%7C%E9%83%91%E5%B7%9E%7C0; userid360_xml=6FB311888C84FD2806E06F5BFC134B6A; time_create=1548638826978; id58=lXz2m1wSG4qZ4p2XrOu2bQ==; 58tj_uuid=9c1c17cd-7e6c-4834-9de5-2bc1e69f1a82; wmda_uuid=8738aebd7efa44a76ceff4415086bc68; wmda_new_uuid=1; wmda_visited_projects=%3B2385390625025; als=0; xxzl_deviceid=rUlkmVAAmdK%2BJbB52Ktcl0RRnXXp%2BSBF1%2BF6uJ6ZWImkxwz0RaYbG3wxaklbZLJi; Hm_lvt_dcee4f66df28844222ef0479976aabf1=1546047758; 58home=zz; Hm_lvt_ae019ebe194212c4486d09f377276a77=1546049643; __utma=253535702.374299874.1546049643.1546049643.1546052186.2; __utmz=253535702.1546052186.2.2.utmcsr=sh.58.com|utmccn=(referral)|utmcmd=referral|utmcct=/chuzu/0/; city=sh; XQH=%7B%22w%22%3A%5B%7B%22id%22%3A%221416%22%2C%22t%22%3A1546049642951%7D%2C%7B%22id%22%3A%22640900%22%2C%22t%22%3A1546052186058%7D%2C%7B%22id%22%3A%224457%22%2C%22t%22%3A1546053792454%7D%5D%7D; f=n; wmda_session_id_2385390625025=1546653837624-329993ac-3d85-bbb1; new_uv=9; utm_source=; spm=; init_refer=; new_session=0; ppStore_fingerprint=A535D7FFB241264E2CFD028A12FE5681C96D5C5942516A17%EF%BC%BF1546655183289; defraudName=defraud; xzfzqtoken=NrUwlbhCtDaY1eIbK9LBowg6t1qQyebl%2FwFCjnRhQV67gWcXgodkSuoS7M6hVFecin35brBb%2F%2FeSODvMgkQULA%3D%3D',
        'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
    }
    proxy = random.choice(proxy_list)
    response = requests.get(url, params=params, headers=headers,
                            proxies={"http": "http://" + proxy}, timeout=5)
    time.sleep(3)  # throttle so the site is less likely to block the proxy
    print(response.url)
    response.encoding = response.apparent_encoding
    print("第 %s 页正常采集!--%s--" % (i, response.encoding))
    print("*" * 100)
    if response.status_code == 200:
        html = response.content.decode("utf-8")
        # The digit-obfuscation font is embedded per page as a base64 data URI.
        pattern = re.compile(
            r"'data:application/font-ttf;charset=utf-8;base64,(.*?)'", re.I)
        base64_str = "".join(pattern.findall(html))
        bin_data = make_font_file(base64_str)
        font = TTFont(BytesIO(bin_data))
        font.saveXML("text.xml")  # debugging aid: human-readable font dump
        # Global cmap (code point -> glyph name) consumed by get_num().
        c = font['cmap'].tables[0].ttFont.tables['cmap'].tables[0].cmap
        comment = etree.HTML(html)
        page_count = len(comment.xpath('//ul[@class="listUl"]/li'))
        # XPath positions are 1-based, so iterate 1..page_count inclusive;
        # the original range(1, page_count) silently dropped the last item.
        # `li` avoids shadowing the outer page index `i`.
        for li in range(1, page_count + 1):
            xml = '//ul/li[%s]/' % li
            pic = "".join(comment.xpath(xml + 'div[1]/a/img/@lazy_src')).strip("/ ")
            if pic not in arrt_list:
                print(pic + "----------------数据没在数据库里!---------------")
                print("*" * 100)
                title = _decode_obfuscated(
                    "".join(comment.xpath(xml + "div[2]/h2/a[1]/text()")).strip())
                area = _decode_obfuscated(
                    "".join(comment.xpath(xml + "div[2]/p[1]/text()")).strip().replace(" ", ""))
                site = "".join(comment.xpath(xml + "div[2]/p[2]//text()")
                               ).strip().replace("\n", '').replace(" ", "")
                owner = "".join(comment.xpath(xml + "div[2]/p[3]/text()")).strip().lstrip(":")
                rent = _decode_obfuscated(
                    " ".join(comment.xpath(xml + "div[3]/div[2]/b/text()")).strip()) + "元/月"
                NewTime = int(time.time())
                print(pic)
                print(title)
                print(area, site)
                print(owner, rent)
                print("*" * 100)
                try:
                    insert_color = (
                        "INSERT INTO TongCheng_58(pic,title,area,site,owner,rent,NewTime)"
                        "VALUES (%s,%s,%s,%s,%s,%s,%s)")
                    data_color = (pic, title, area, site, owner, rent, NewTime)
                    cursor.execute(insert_color, data_color)
                    db.commit()
                except Exception as ex:
                    print(Exception, ":", ex)
                    db.rollback()  # keep the connection usable after a bad row
            else:
                print(pic + "================已经在数据库里==============")
                print("-" * 100)