import sys

import requests
import scrapy

# Make the Pony ORM entities (Outfit, db, db_session) from the conversion
# step importable, then bind them to the cleaned SQLite database.
sys.path.append("../../convert/31-2")
from db import *

db.bind(provider="sqlite", filename="../../clean/31-2/data.sqlite3")
db.generate_mapping()


class OutfitSpider(scrapy.Spider):
    """Scrapes outfit preview images from the Dead Cells wiki and stores
    them on the matching Outfit rows in the database."""

    name = "outfits"
    start_urls = [
        "https://deadcells.fandom.com/wiki/Outfits",
    ]

    def parse(self, response):
        for row in response.css(".wikitable tbody tr"):
            # Column 2 holds the English name, column 6 links to the preview image.
            name_en = row.css("td:nth-child(2) > span:last-child::text").get()
            preview = row.css("td:nth-child(6) a::attr(href)").get()
            if not name_en or not preview:
                continue

            # Download the preview image and attach it to the existing Outfit row.
            # requests already returns raw bytes, so r.content can be stored directly.
            r = requests.get(preview)
            with db_session:
                outfit = Outfit.select(lambda o: o.name_en == name_en).first()
                if outfit is not None:
                    outfit.preview = r.content
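
# A minimal way to run this spider from the command line, assuming the file
# is saved as outfits.py (the filename is illustrative, not part of the project):
#   scrapy runspider outfits.py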