根據搜尋內容爬取拉勾網和招聘網的職位招聘資訊
阿新 • • 發佈:2019-01-29
程式碼:
# -*- coding: utf-8 -*-
"""Scrape job postings from Lagou (lagou.com) by keyword and city.

Prompts for a job keyword and a city, then pages through Lagou's Ajax
search endpoint and appends one CSV row per posting to a local file.
"""
import os
import random
import time

# Candidate HTTP proxy IPs; one is picked at random per request to
# reduce the chance of the crawler being rate-limited/blocked.
ip_list = ['117.135.132.107', '121.8.98.196', '194.116.198.212']

# HTTP request headers. The Cookie/session values were copied from a
# logged-in browser session — Lagou's JSON endpoint rejects anonymous
# requests without them. NOTE(review): these tokens expire; refresh them
# from a live browser session before running.
headers = {
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'zh-CN,zh;q=0.8',
    'Connection': 'keep-alive',
    'Content-Length': '25',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Cookie': 'user_trace_token=20170214020222-9151732d-f216-11e6-acb5-525400f775ce; LGUID=20170214020222-91517b06-f216-11e6-acb5-525400f775ce; JSESSIONID=ABAAABAAAGFABEF53B117A40684BFB6190FCDFF136B2AE8; _putrc=ECA3D429446342E9; login=true; unick=yz; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=0; PRE_UTM=; PRE_HOST=; PRE_SITE=; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2F; TG-TRACK-CODE=index_navigation; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1494688520,1494690499,1496044502,1496048593; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1496061497; _gid=GA1.2.2090691601.1496061497; _gat=1; _ga=GA1.2.1759377285.1487008943; LGSID=20170529203716-8c254049-446b-11e7-947e-5254005c3644; LGRID=20170529203828-b6fc4c8e-446b-11e7-ba7f-525400f775ce; SEARCH_ID=13c3482b5ddc4bb7bfda721bbe6d71c7; index_location_city=%E6%9D%AD%E5%B7%9E',
    'Host': 'www.lagou.com',
    'Origin': 'https://www.lagou.com',
    'Referer': 'https://www.lagou.com/jobs/list_Python?',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
    'X-Anit-Forge-Code': '0',
    'X-Anit-Forge-Token': 'None',
    'X-Requested-With': 'XMLHttpRequest',
}


def get_json(url, page, lange_name):
    """POST the search form to Lagou's Ajax endpoint and return parsed JSON.

    url        -- positionAjax.json URL with the city already appended
    page       -- 1-based page number of search results
    lange_name -- job keyword to search for
    """
    # Deferred import: the parsing/CSV helpers below have no network
    # dependency, so the module stays importable without `requests`.
    import requests
    # BUGFIX: the original sent the misspelled key 'firts'; the API
    # parameter is 'first' (whether this is the first results page).
    fram_data = {'first': 'true', 'pn': page, 'kd': lange_name}
    proxy = {'http': 'http://' + random.choice(ip_list)}
    # .json() parses the response body into a dict.
    return requests.post(url, fram_data, headers=headers, proxies=proxy).json()


def parser_json(page, JsonDatas):
    """Extract one row of display fields per posting from the JSON payload.

    Returns a list of rows; each row is a list of strings in the order:
    [city, position, company, education, job type, perks, salary,
     experience, publish time] — matching the CSV header written by main().
    """
    companyInfos = []
    companyInfo = JsonDatas['content']['positionResult']['result']
    print("正在解析{0}頁招聘資訊".format(page))
    for company in companyInfo:
        comInfo = []
        # City, plus district when the API supplies one ("city-district").
        if company['district'] is not None:
            city = company['city'] + '-' + company['district']
        else:
            city = company['city']
        comInfo.append(city)
        comInfo.append(company['positionName'])
        # Full company name with the short name in parentheses.
        comInfo.append(company['companyFullName'] + '(' + company['companyShortName'] + ')')
        comInfo.append(company['education'])
        comInfo.append(company['jobNature'])
        # Replace both ASCII and fullwidth commas so the perks text
        # stays in a single CSV cell.
        positionAdvantage = company['positionAdvantage'].replace(',', ';').replace(',', ';')
        comInfo.append(positionAdvantage)
        comInfo.append(company['salary'])
        comInfo.append(company['workYear'])
        # Renamed local (was `time`): don't shadow the `time` module.
        create_time = company['createTime']
        comInfo.append(create_time)
        companyInfos.append(comInfo)
    print("第{0}頁解析完成".format(page))
    return companyInfos


def writeCSV(page, fw, companyInfos):
    """Write one parsed page of rows to the already-open CSV file object."""
    for companyInfo in companyInfos:
        fw.write(",".join(companyInfo) + '\n')
    print("第{0}頁資料寫入完畢".format(page))


def main():
    """Prompt for keyword/city, crawl pages 1..19 and append them to CSV."""
    path = 'F:\\'  # output directory
    start_page = 1
    end_page = 20  # exclusive bound: pages start_page .. end_page-1 are fetched
    lange_name = input("請輸入要所有的職位:")
    city = input("請輸入工作地點:")
    # Build the Ajax URL with the city embedded.
    start_url = 'https://www.lagou.com/jobs/positionAjax.json?px=default&city='
    end_url = '&needAddtionalResult=false&isSchoolJob=0'
    url = start_url + city + end_url
    row = ['工作地點', '職位名稱', '公司名稱', '要求學歷', '工作性質',
           '工作福利', '薪水', '工作經驗要求', '釋出時間']
    # `with` guarantees the handle is closed even if a request or parse
    # step raises (the original leaked it); explicit utf-8 keeps the
    # Chinese fields readable regardless of platform default encoding.
    with open(os.path.join(path, 'lagou_' + lange_name + '.csv'), 'a+', encoding='utf-8') as fw:
        fw.write(",".join(row) + '\n')
        page = start_page
        while page < end_page:
            time.sleep(12)  # throttle: Lagou rate-limits aggressive crawlers
            print("正在抓取第{0}頁招聘資料資訊".format(page))
            JsonDatas = get_json(url, page, lange_name)
            companyInfos = parser_json(page, JsonDatas)
            writeCSV(page, fw, companyInfos)
            page = page + 1
    print("所有資料寫入完畢")


if __name__ == '__main__':
    main()