Python: scraping subway station data from Amap (Gaode Maps)

#!/usr/bin/python
# coding=utf-8

import requests
import time
import json
import ast
import os
import utils
from lxml import etree

PAGE_URL = 'http://map.amap.com/subway/index.html?&1100'
DATA_URL = 'http://map.amap.com/service/subway?srhdata='
HEADER  = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"}

def fetchAllCity(url, header):
    """Scrape the Amap subway index page and return the list of cities.

    Args:
        url: The subway index page URL (PAGE_URL).
        header: Dict of HTTP headers to send (e.g. User-Agent).

    Returns:
        List of dicts with keys 'id' (Amap city code), 'name' (pinyin
        city name used by the data API), and 'text' (display name).
    """
    # BUG FIX: the original called requests.get(url, header), which binds
    # the dict to the `params` positional argument — the User-Agent header
    # was never sent. It must be passed as headers=.
    r = requests.get(url, headers=header)
    element = etree.HTML(r.content)
    options = element.xpath("//a[contains(@class, 'city')]")
    return [
        {
            'id': option.get('id'),
            'name': option.get('cityname'),
            'text': option.text,
        }
        for option in options
    ]

def saveData(citiesData):
    """Persist each city's data as ./data/<name>.json.

    Args:
        citiesData: Iterable of dicts, each containing at least a 'name'
            key (used as the file name); the whole dict is serialized.
    """
    path = './data/'
    if not os.path.exists(path):
        os.mkdir(path)
    for cityData in citiesData:
        # `with` guarantees the handle is closed (the original leaked it);
        # explicit UTF-8 + ensure_ascii=False keeps Chinese text readable.
        with open(path + cityData['name'] + '.json', 'w', encoding='utf-8') as f:
            f.write(json.dumps(cityData, ensure_ascii=False))

def parseCityData(citys):
    """Write WKT exports for every city's subway network.

    Creates ./lwkt.txt (LINESTRING per subway line) and ./pwkt.txt
    (POINT per station), each with a 'wkt' header row, then streams
    every city's data into them via parseCityPointFromApi.

    Args:
        citys: List of city dicts as returned by fetchAllCity.
    """
    # `with` closes both output files even if a request fails
    # (the original opened them and never closed them).
    with open('./lwkt.txt', 'w') as lw, open('./pwkt.txt', 'w') as pw:
        lw.write('wkt' + '\n')
        pw.write('wkt' + '\n')
        for city in citys:
            parseCityPointFromApi(city, lw, pw)

def parseCityPointFromApi(city, lw, pw):
    """Fetch one city's subway data from the Amap API and append WKT rows.

    For each subway line ('l' entry) writes one quoted LINESTRING to lw;
    for each station ('st' entry with an 'sl' "lng,lat" field) writes one
    quoted POINT to pw.

    Args:
        city: Dict with 'id' and 'name' keys (from fetchAllCity).
        lw: Open writable file for LINESTRING rows.
        pw: Open writable file for POINT rows.
    """
    url = DATA_URL + "{}_drw_{}.json".format(city['id'], city['name'])
    print(url)
    res = json.loads(requests.get(url).text)
    for line in res['l']:
        coords = []
        for station in line['st']:
            sl = station.get('sl', '')
            if not sl:
                # BUG FIX: the original split '' into [''] and then crashed
                # with IndexError on _coords[1]; skip coordinate-less stations.
                continue
            _coords = sl.split(',')
            coords.append(_coords)
            pwkt = '"POINT ({} {})"'.format(_coords[0], _coords[1])
            pw.write(pwkt + '\n')
        new_coords = ','.join(['%s %s' % (coord[0], coord[1]) for coord in coords])
        lwkt = '"LINESTRING(%s)"' % new_coords
        lw.write(lwkt + '\n')

def main():
    """Entry point: discover every city, then export WKT files for each."""
    city_list = fetchAllCity(PAGE_URL, HEADER)
    print(city_list)
    parseCityData(city_list)
    # saveData(citiesData)  # optional: dump raw per-city JSON instead

if __name__ == '__main__':
    main()

You may also be interested in: (Python)