Scrapy连接MySQL存入数据 -《狗嗨默示录》-

settings.py

# Enable the MySQL item pipeline; 300 is its priority order
# (lower numbers run earlier, valid range 0-1000).
ITEM_PIPELINES = {
   'IAChina.pipelines.IachinaPipeline': 300,
}

 

# Keyword arguments passed verbatim to MySQLdb.connect() by the pipeline.
# NOTE(review): password is empty and the user is root — fine for a local
# dev database, but should be changed before any shared deployment.
# NOTE(review): charset 'utf8' in MySQL is a 3-byte subset; if 4-byte
# characters (e.g. emoji) can appear in scraped text, 'utf8mb4' is needed
# on both the connection and the table — confirm against the schema.
DBKWARGS = {'db':'iachina','user':'root','password':'',
            'host':'localhost','use_unicode':True,'charset':'utf8'}

 

pipelines.py

import MySQLdb

class IachinaPipeline(object):
    """Scrapy item pipeline that inserts each scraped item into MySQL.

    Connection parameters are read from the spider's project settings
    under the ``DBKWARGS`` key (see settings.py).

    NOTE(review): a fresh connection is opened and closed for every item.
    For large crawls, consider opening the connection once in
    ``open_spider`` and closing it in ``close_spider`` instead.
    """

    def process_item(self, item, spider):
        """Insert ``item`` into the ``info`` table and return it unchanged.

        Args:
            item: scraped item exposing the COMPANY, TYPE, PRODUCT,
                CLAUSE and CLAUSE_URL fields.
            spider: the running spider; used only to reach its settings.

        Returns:
            The item, unmodified, so later pipelines still receive it.
        """
        dbkwargs = spider.settings.get('DBKWARGS')
        con = MySQLdb.connect(**dbkwargs)
        try:
            cur = con.cursor()
            try:
                # Parameterized query: MySQLdb escapes the values itself,
                # so scraped text cannot inject SQL.
                sql = ("insert into info(COMPANY,TYPE,PRODUCT,CLAUSE,CLAUSE_URL)"
                       "values(%s,%s,%s,%s,%s)")
                # Renamed from 'list' — the original shadowed the builtin.
                values = (item['COMPANY'], item['TYPE'], item['PRODUCT'],
                          item['CLAUSE'], item['CLAUSE_URL'])
                try:
                    cur.execute(sql, values)
                except Exception as e:
                    print("Insert error:", e)
                    con.rollback()  # undo the failed statement
                else:
                    con.commit()  # changes take effect only after commit
            finally:
                cur.close()
        finally:
            # Always release the connection; the original leaked both the
            # cursor and the connection if an unexpected error escaped
            # before the close() calls.
            con.close()
        return item
相关文章
相关标签/搜索