You can write a custom middleware for duplicate removal and add it in settings:
import os
from scrapy.dupefilters import RFPDupeFilter  # 'scrapy.dupefilter' (no 's') in Scrapy < 1.0


class CustomFilter(RFPDupeFilter):
    """A dupe filter that considers specific ids in the URL."""

    def __getid(self, url):
        # Strip everything from "&refer" onwards so URLs that differ
        # only in that trailing part map to the same fingerprint
        mm = url.split("&refer")[0]  # or something like that
        return mm

    def request_seen(self, request):
        fp = self.__getid(request.url)
        if fp in self.fingerprints:
            return True
        self.fingerprints.add(fp)
        if self.file:
            self.file.write(fp + os.linesep)
        return False

Then you need to set the correct DUPEFILTER_CLASS in settings.py:
DUPEFILTER_CLASS = 'scraper.duplicate_filter.CustomFilter'
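
Note that the dotted path 'scraper.duplicate_filter.CustomFilter' assumes the class lives in a file named duplicate_filter.py inside a project package called scraper; adjust the path to match your own project layout.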
After that it should work.
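
To sanity-check the filter outside of a crawl, you can instantiate it directly and feed it requests. This is a minimal sketch; the example URLs are illustrative assumptions, and passing path=None keeps the fingerprints in memory only (nothing is persisted to disk):

from scrapy.http import Request

f = CustomFilter(path=None)
r1 = Request("http://example.com/item?id=1&refer=home")
r2 = Request("http://example.com/item?id=1&refer=search")

print(f.request_seen(r1))  # False -> not seen yet, fingerprint stored
print(f.request_seen(r2))  # True  -> same part before "&refer", treated as a duplicate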



