scrapy爬数据存mysql报错
最近学习python,遇到一个奇葩问题,跪求高手解答
这是数据直接存的json文件
存mysql就会报如下错误
pipeline代码如下:
import logging

import MySQLdb
import MySQLdb.cursors
from twisted.enterprise import adbapi
class TutorialPipeline(object):
    """Scrapy item pipeline that writes items to MySQL via Twisted's adbapi.

    Inserts run on the adbapi thread pool so the reactor is never blocked.
    NOTE(review): credentials are hard-coded here; in a real project they
    should come from Scrapy settings (e.g. via ``from_crawler``).
    """

    def __init__(self):
        # Async connection pool; each interaction gets its own cursor.
        # host defaults to localhost; DictCursor returns rows as dicts.
        self.dbpool = adbapi.ConnectionPool(
            'MySQLdb',
            db='test',
            user='root',
            passwd='123456',
            cursorclass=MySQLdb.cursors.DictCursor,
            charset='utf8',
            use_unicode=False,
        )

    def process_item(self, item, spider):
        """Schedule an async insert for *item* and return it unchanged.

        The original code dropped the Deferred without an errback, so any
        database error (bad SQL, encoding, missing table) was silently
        swallowed — attaching ``_handle_error`` makes failures visible.
        """
        deferred = self.dbpool.runInteraction(self._conditional_insert, item)
        deferred.addErrback(self._handle_error, item, spider)
        return item

    def _conditional_insert(self, tx, item):
        """Run the INSERT inside a pool-managed transaction.

        :param tx: adbapi transaction/cursor-like object with ``execute``.
        :param item: scraped item; must expose ``item['link']`` and
            ``item['title']``.
        """
        # Parameterized query — values are escaped by the driver, never
        # formatted into the SQL string by hand.
        sql = "insert into test values (%s,%s)"
        tx.execute(sql, (item['link'], item['title']))

    def _handle_error(self, failure, item, spider):
        """Errback: log the database failure instead of losing it."""
        logging.getLogger(__name__).error(
            "MySQL insert failed for item %r: %s", item, failure)
tesaaa
9 years, 4 months ago