scarpy 爬虫存入数据库报错 'YysItem' object is not callable 执行 pipelines 程序时报错。

2017-07-28 15:47:49 +08:00
 xiaoheijw

请大神帮忙解惑 爬虫主代码

#coding:utf-8

import scrapy,json
from yys.items import YysItem
from scrapy.selector import  Selector #选择器
from .get_urls import get_url

class Yyspider(scrapy.Spider):
    name='yys'
    allow_domain=['comp-sync.webapp.163.com']
    start_urls=get_url()

    def parse(self, response):
        items=[]
        item=YysItem()
        jsonresponse = json.loads(response.body_as_unicode())
        # print (jsonresponse)
        try:
            for j in jsonresponse['data']:
                item['id']=j['req_id']
                item['time']=j['get_time']
                item['whi']=j['prop_info']['from']
                item['level']=j['prop_info']['prop_name'].split("式神")[0]
                item['name']=j['prop_info']['prop_name'].split("式神")[-1]
                item['nick']=j['user_info']['nick']
                item['server']=j['user_info']['server']
                item['uid']=j['user_info']['uid']
                yield item
        except Exception as e:
            print (e)


pipelines 程序

# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
from yys import settings
from yys.items import YysItem #数据库结构
class YysPipeline(object):
    def __init__(self):
        self.connect = pymysql.connect(
            host=settings.MYSQL_HOST,
            db=settings.MYSQL_DBNAME,
            user=settings.MYSQL_USER,
            passwd=settings.MYSQL_PASSWD,
            port=settings.MYSQL_PORT,
            charset='utf8',
            use_unicode=True)
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        if item.__class__ == YysItem:
            try:
                print ('执行 sql')
                # print (item)
                insert_sql= "insert into yys values(%s,%s,%s,%s,%s,%s,%s,%d)"
                print (insert_sql)
                print (item['id'])
                self.cursor.execute(insert_sql,(item['id'],item['time'],item['whi'],item['level'],item['name'],item['nick'],item('server'),item['uid']))
                self.connect.commit()
            except Exception as e:
                print (e)
            return item

item.py

import scrapy


class YysItem(scrapy.Item):
    id=scrapy.Field()
    time=scrapy.Field()
    whi=scrapy.Field()
    level=scrapy.Field()
    name=scrapy.Field()
    nick=scrapy.Field()
    server=scrapy.Field()
    uid=scrapy.Field()

2373 次点击
所在节点    Python
3 条回复
knightdf
2017-07-28 16:04:32 +08:00
item('server')
mark06
2017-07-28 16:09:06 +08:00
楼上正解
xiaoheijw
2017-07-28 16:33:45 +08:00
@knightdf 智障了,丢人,感谢。

这是一个专为移动设备优化的页面(即为了让你能够在 Google 搜索结果里秒开这个页面),如果你希望参与 V2EX 社区的讨论,你可以继续到 V2EX 上打开本讨论主题的完整版本。

https://www.v2ex.com/t/378644

V2EX 是创意工作者们的社区,是一个分享自己正在做的有趣事物、交流想法,可以遇见新朋友甚至新机会的地方。

V2EX is a community of developers, designers and creative people.

© 2021 V2EX