微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

“选择器”对象不支持项目分配

如何解决“选择器”对象不支持项目分配

我在scrapy 的数据挖掘项目有问题。当我运行我的蜘蛛时,我有这个错误信息,我真的不知道我该如何解决这个问题,请帮帮我!

这是错误输出

2021-01-08 21:27:41 [scrapy.core.scraper] ERROR: Spider error processing <GET 
https://yokatlas.yok.gov.tr/content/lisans-dynamic/1000_1.PHP?y=100110027> (referer: None)
Traceback (most recent call last):
File "C:\Anaconda3\lib\site-packages\scrapy\utils\defer.py",line 120,in iter_errback
yield next(it)
File "C:\Anaconda3\lib\site-packages\scrapy\utils\python.py",line 347,in __next__
return next(self.data)
File "C:\Anaconda3\lib\site-packages\scrapy\utils\python.py",in __next__
return next(self.data)
File "C:\Anaconda3\lib\site-packages\scrapy\core\spidermw.py",line 64,in _evaluate_iterable
for r in iterable:
File "C:\Anaconda3\lib\site-packages\scrapy\spidermiddlewares\offsite.py",line 29,in 
process_spider_output
for x in result:
File "C:\Anaconda3\lib\site-packages\scrapy\core\spidermw.py",in _evaluate_iterable
for r in iterable:
File "C:\Anaconda3\lib\site-packages\scrapy\spidermiddlewares\referer.py",line 340,in <genexpr>
return (_set_referer(r) for r in result or ())
File "C:\Anaconda3\lib\site-packages\scrapy\core\spidermw.py",in _evaluate_iterable
for r in iterable:
File "C:\Anaconda3\lib\site-packages\scrapy\spidermiddlewares\urllength.py",line 37,in <genexpr>
return (r for r in result or () if _filter(r))
File "C:\Anaconda3\lib\site-packages\scrapy\core\spidermw.py",in _evaluate_iterable
for r in iterable:
File "C:\Anaconda3\lib\site-packages\scrapy\spidermiddlewares\depth.py",line 58,in _evaluate_iterable
for r in iterable:
File "C:\Users\yusuf\Desktop\Yök Atlas Projesi\atlasdatas\atlasdatas\spiders\atlasspider.py",line 
46,in parse
items["ÖSYM_KODU"] = ÖSYM_KODU
TypeError: 'Selector' object does not support item assignment
2021-01-08 21:27:41 [scrapy.core.engine] INFO: Closing spider (finished)
2021-01-08 21:27:41 [scrapy.statscollectors] INFO: Dumping Scrapy stats:
{'downloader/request_bytes': 712,'downloader/request_count': 3,'downloader/request_method_count/GET': 3,'downloader/response_bytes': 27358,'downloader/response_count': 3,'downloader/response_status_count/200': 2,'downloader/response_status_count/302': 1,'elapsed_time_seconds': 0.978011,'finish_reason': 'finished','finish_time': datetime.datetime(2021,1,8,18,27,41,343786),'log_count/DEBUG': 36,'log_count/ERROR': 1,'log_count/INFO': 10,'response_received_count': 2,'robotstxt/request_count': 1,'robotstxt/response_count': 1,'robotstxt/response_status_count/200': 1,'scheduler/dequeued': 1,'scheduler/dequeued/memory': 1,'scheduler/enqueued': 1,'scheduler/enqueued/memory': 1,'spider_exceptions/TypeError': 1,'start_time': datetime.datetime(2021,40,365775)}
2021-01-08 21:27:41 [scrapy.core.engine] INFO: Spider closed (finished)

我的 Spider.py 文件

import scrapy
from atlasdatas.items import AtlasdatasItem

class Atlasdatas(scrapy.Spider):

    name = "atlas"

    def start_requests(self):
        urls = ['https://yokatlas.yok.gov.tr/content/lisans-dynamic/1000_1.PHP?y=100110027']             
        for url in urls:
            yield scrapy.Request(url=url,callback=self.parse)


    def parse(self,response):
        items = AtlasdatasItem()
        parse_place = response.css("table.table-bordered")
        for items in parse_place:
           
            #PROGRAM_İSMİ = items.css("big::text")[0].getall()
           
            ÖSYM_KODU = items.css("td.text-center::text")[0].get()
            ÜNİVERSİTE_TÜRÜ = items.css("td.text-center::text")[1].get()
            ÜNİVERSİTE_İSMİ = items.css("td.text-center::text")[2].get()
            FAKÜLTE_YÜKSEKOKUL = items.css("td.text-center::text")[3].get()
            PUAN_TÜRÜ = items.css("td.text-center::text")[4].get()
            BURS_TÜRÜ = items.css("td.text-center::text")[5].get()
           
            #items["PROGRAM_İSMİ"] = PROGRAM_İSMİ

            items["ÖSYM_KODU"] = ÖSYM_KODU
            items["ÜNİVERSİTE_TÜRÜ"] = ÜNİVERSİTE_TÜRÜ
            items["ÜNİVERSİTE_İSMİ"] = ÜNİVERSİTE_İSMİ
            items["FAKÜLTE_YÜKSEKOKUL"] = FAKÜLTE_YÜKSEKOKUL
            items["PUAN_TÜRÜ"] = PUAN_TÜRÜ
            items["BURS_TÜRÜ"] = BURS_TÜRÜ
            yield items

和我的 items.py 文件,其中的数据将保存在 sqlite3 数据库

import scrapy

class AtlasdatasItem(scrapy.Item):

    #PROGRAM_İSMİ = scrapy.Field()

    ÖSYM_KODU = scrapy.Field()
    ÜNİVERSİTE_TÜRÜ = scrapy.Field()
    ÜNİVERSİTE_İSMİ = scrapy.Field()
    FAKÜLTE_YÜKSEKOKUL = scrapy.Field()
    PUAN_TÜRÜ = scrapy.Field()
    BURS_TÜRÜ = scrapy.Field()

还有我的 pipelines.py 文件

import sqlite3

class AtlasDB(object):

    def __init__(self):
       self.create_connection()
       self.create_table()
    
    def create_connection(self):
        self.conn = sqlite3.connect("atlasdb")
        self.curr = self.conn.cursor()
    
    def create_table(self):
        self.curr.execute(
            """CREATE TABLE GENERAL_INF(
            
            OSYM_KODU INT PRIMARY KEY NOT NULL,ÜNİVERSİTE_TÜRÜ TEXT,ÜNİVERSİTE_İSMİ TEXT,FAKÜLTE_YÜKSEKOKUL TEXT,PUAN_TÜRÜ TEXT,BURS_TÜRÜ TEXT)"""
        )
    def process_ıtems(self,item,spider):
        self.store_db(item)
        return item
    
    def store_db(self,item):
        self.curr.execute("""INSERT INTO VALUES
        
        GENERAL_INF values (?,?,?)""",(
             item["ÖSYM_KODU"][0],item["ÜNİVERSİTE_TÜRÜ"][0],item["ÜNİVERSİTE_İSMİ"][0],item["FAKÜLTE_YÜKSEKOKUL"][0],item["PUAN_TÜRÜ"][0],item["BURS_TÜRÜ"][0]
         ))
        self.conn.commit()
        return item   
    

这种情况我该怎么办,请帮帮我

解决方法

您在 pipelines.py 中有不同的拼写“OSYM_KODU”

你能把它和其他地方拼写的“ÖSYM_KODU”一样吗,然后再试一次?

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。