scrapy----將資料儲存到MySQL資料庫中
阿新 • 發佈:2019-02-11
1.在pipelines.py中自定義自己的pipeline
import pymysql


class PymysqlPipeline(object):
    """Scrapy item pipeline that persists each crawled item into the
    `bole` table of a local MySQL database via PyMySQL.

    Expects items carrying the keys: title, datetime, category, content,
    dianzanshu, shoucanshu, pinglunshu.
    """

    def __init__(self):
        # Open the database connection once when the pipeline is created.
        # BUG FIX: the original used port=330 — MySQL's default port is
        # 3306, so the original could never connect.
        self.connect = pymysql.connect(
            host='localhost',
            db='bole',
            user='root',
            passwd='123456',
            charset='utf8',
            port=3306,
            use_unicode=True)
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        """Insert one item into the `bole` table and commit.

        Uses a parameterized query (%s placeholders), so field values are
        escaped by the driver rather than interpolated into the SQL string.
        Returns the item unchanged so later pipelines can still see it.
        """
        sql = 'insert into bole(title, datetime, category, content, dianzanshu, shoucanshu, pinglunshu) values (%s,%s,%s,%s,%s,%s,%s)'
        self.cursor.execute(sql, (
            item['title'],
            item['datetime'],
            item['category'],
            item['content'],
            item['dianzanshu'],
            item['shoucanshu'],
            item['pinglunshu']))
        self.connect.commit()
        return item

    def close_spider(self, spider):
        # FIX: the original never released its DB resources. Scrapy calls
        # this hook when the spider finishes; close cursor and connection.
        self.cursor.close()
        self.connect.close()
2.在settings中開啟自己的pipeline
# Register the pipeline with Scrapy; the integer is its execution order
# (lower runs earlier, valid range 0-1000).
ITEM_PIPELINES = {'Bole.pipelines.PymysqlPipeline': 1}