//-------------------------------
import scrapy


class quotes_spyder(scrapy.Spider):
    name = "quotes"

    def start_requests(self):
        urls = [
            "http://quotes.toscrape.com/page/1/",
            "http://quotes.toscrape.com/page/2/",
        ]
        for url in urls:
            yield scrapy.Request(url=url, callback=self.parse)

    def parse(self, response):
        # Save each downloaded page to a local HTML file named by its page number.
        pageid = response.url.split("/")[-2]
        filename = "quotes-page%s.html" % pageid
        with open(filename, 'wb') as f:
            f.write(response.body)
        self.log('saved file %s' % filename)
//-------------------------------------
Error message:
Traceback (most recent call last):
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\runpy.py", line 194, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\runpy.py", line 87, in _run_code
    exec(code, run_globals)
  File "C:\Users\H351295\AppData\Local\Programs\Python\Python38\Scripts\scrapy.exe\__main__.py", line 7, in <module>
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 143, in execute
    _run_print_help(parser, _run_command, cmd, args, opts)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 98, in _run_print_help
    func(*a, **kw)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 151, in _run_command
    cmd.run(args, opts)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\commands\crawl.py", line 22, in run
    crawl_defer = self.crawler_process.crawl(spname, **opts.spargs)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 191, in crawl
    crawler = self.create_crawler(crawler_or_spidercls)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 224, in create_crawler
    return self._create_crawler(crawler_or_spidercls)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 228, in _create_crawler
    spidercls = self.spider_loader.load(spidercls)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\spiderloader.py", line 78, in load
    raise KeyError("Spider not found: {}".format(spider_name))
KeyError: 'Spider not found: quotes'
PS C:\Users\H351295\Desktop\Personal\DSandML\web crawler scrapy\sProj1\sProj1> & C:/Users/H351295/AppData/Local/Programs/Python/Python38/python.exe "c:/Users/H351295/Desktop/Personal/DSandML/web crawler scrapy/sProj1/sProj1/quotes_spyder.py"
PS C:\Users\H351295\Desktop\Personal\DSandML\web crawler scrapy\sProj1\sProj1> scrapy crawl quotes
2020-07-15 11:02:14 [scrapy.utils.log] INFO: Scrapy 2.2.0 started (bot: sProj1)
2020-07-15 11:02:14 [scrapy.utils.log] INFO: Versions: lxml 4.5.2.0, libxml2 2.9.5, cssselect 1.1.0, parsel 1.6.0, w3lib 1.22.0, Twisted 20.3.0, Python 3.8.3 (tags/v3.8.3:6f8c832, May 13 2020, 22:37:02) [MSC v.1924 64 bit (AMD64)], pyOpenSSL 19.1.0 (OpenSSL 1.1.1g 21 Apr 2020), cryptography 2.9.2, Platform Windows-10-10.0.17134-SP0
2020-07-15 11:02:14 [scrapy.utils.log] DEBUG: Using reactor: twisted.internet.selectreactor.SelectReactor
Traceback (most recent call last):
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\spiderloader.py", line 76, in load
    return self._spiders[spider_name]
KeyError: 'quotes'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\runpy.py", line 194, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\runpy.py", line 87, in _run_code
    exec(code, run_globals)
  File "C:\Users\H351295\AppData\Local\Programs\Python\Python38\Scripts\scrapy.exe\__main__.py", line 7, in <module>
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 143, in execute
    _run_print_help(parser, _run_command, cmd, args, opts)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 98, in _run_print_help
    func(*a, **kw)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\cmdline.py", line 151, in _run_command
    cmd.run(args, opts)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\commands\crawl.py", line 22, in run
    crawl_defer = self.crawler_process.crawl(spname, **opts.spargs)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 191, in crawl
    crawler = self.create_crawler(crawler_or_spidercls)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 224, in create_crawler
    return self._create_crawler(crawler_or_spidercls)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\crawler.py", line 228, in _create_crawler
    spidercls = self.spider_loader.load(spidercls)
  File "c:\users\h351295\appdata\local\programs\python\python38\lib\site-packages\scrapy\spiderloader.py", line 78, in load
    raise KeyError("Spider not found: {}".format(spider_name))
KeyError: 'Spider not found: quotes'
//-----------------------------------
Please help me solve this.