Scrapy spider raises 'YysItem' object is not callable when writing items to the database

Main spider code:

#coding:utf-8

import scrapy,json
from yys.items import YysItem
from scrapy.selector import Selector  # selector (unused here)
from .get_urls import get_url

class Yyspider(scrapy.Spider):
    name='yys'
    allowed_domains=['comp-sync.webapp.163.com']  # the attribute name is allowed_domains
    start_urls=get_url()

    def parse(self, response):
        jsonresponse = json.loads(response.body_as_unicode())  # response.text on newer Scrapy
        try:
            for j in jsonresponse['data']:
                item=YysItem()  # create a fresh item for each record
                item['id']=j['req_id']
                item['time']=j['get_time']
                item['whi']=j['prop_info']['from']
                item['level']=j['prop_info']['prop_name'].split("式神")[0]
                item['name']=j['prop_info']['prop_name'].split("式神")[-1]
                item['nick']=j['user_info']['nick']
                item['server']=j['user_info']['server']
                item['uid']=j['user_info']['uid']
                yield item
        except Exception as e:
            print (e)

pipelines.py:

# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
from yys import settings
from yys.items import YysItem  # item mirroring the database table structure
class YysPipeline(object):
    def __init__(self):
        self.connect = pymysql.connect(
            host=settings.MYSQL_HOST,
            db=settings.MYSQL_DBNAME,
            user=settings.MYSQL_USER,
            passwd=settings.MYSQL_PASSWD,
            port=settings.MYSQL_PORT,
            charset='utf8',
            use_unicode=True)
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        if item.__class__ == YysItem:
            try:
                print ('executing sql')
                # print (item)
                insert_sql= "insert into yys values(%s,%s,%s,%s,%s,%s,%s,%d)"
                print (insert_sql)
                print (item['id'])
                self.cursor.execute(insert_sql,(item['id'],item['time'],item['whi'],item['level'],item['name'],item['nick'],item('server'),item['uid']))
                self.connect.commit()
            except Exception as e:
                print (e)
            return item

items.py:

import scrapy


class YysItem(scrapy.Item):
    id=scrapy.Field()
    time=scrapy.Field()
    whi=scrapy.Field()
    level=scrapy.Field()
    name=scrapy.Field()
    nick=scrapy.Field()
    server=scrapy.Field()
    uid=scrapy.Field()
1 Answer

item('server'), that's the typo. How embarrassing.
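
For the record: square brackets read an Item field, while parentheses try to call the item as a function, which is exactly what raises TypeError: 'YysItem' object is not callable. Below is a corrected process_item sketch, assuming the yys table has the same eight columns as the INSERT; note that pymysql substitutes every parameter through %s, so the trailing %d in the original SQL would raise its own formatting error once the typo is fixed:

    def process_item(self, item, spider):
        if isinstance(item, YysItem):
            try:
                # pymysql's paramstyle uses %s for all columns, never %d
                insert_sql = "insert into yys values(%s,%s,%s,%s,%s,%s,%s,%s)"
                self.cursor.execute(insert_sql, (
                    item['id'], item['time'], item['whi'], item['level'],
                    item['name'], item['nick'], item['server'],  # ['server'], not ('server')
                    item['uid']))
                self.connect.commit()
            except Exception as e:
                self.connect.rollback()  # undo the failed statement
                print(e)
            return item

Using isinstance is also a bit more robust than comparing item.__class__ directly, since it accepts subclasses of YysItem as well.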
