run isort

This commit is contained in:
Abel Luck 2024-04-18 11:58:45 +02:00
parent 6add19c288
commit 0c3a7fe7fe
7 changed files with 20 additions and 31 deletions

View file

@@ -1,7 +1,7 @@
import copy import copy
from colorlog import ColoredFormatter
import scrapy.utils.log import scrapy.utils.log
from colorlog import ColoredFormatter
color_formatter = ColoredFormatter( color_formatter = ColoredFormatter(
( (

View file

@@ -1,11 +1,9 @@
from scrapy.crawler import CrawlerProcess from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings from scrapy.utils.project import get_project_settings
from .spiders.rss_spider import RssFeedSpider
from .postprocessing import SortRssItems
from . import colorlog from . import colorlog
from .postprocessing import SortRssItems
from .spiders.rss_spider import RssFeedSpider
base_settings = get_project_settings() base_settings = get_project_settings()

View file

@@ -1,13 +1,11 @@
from scrapy.exporters import BaseItemExporter
from .items import ChannelElementItem
from .exceptions import *
from typing import Any
from io import BytesIO from io import BytesIO
from typing import Any
from repub import rss from repub import rss
from scrapy.exporters import BaseItemExporter
from .exceptions import *
from .items import ChannelElementItem
class RssExporter(BaseItemExporter): class RssExporter(BaseItemExporter):

View file

@@ -3,10 +3,9 @@
# See documentation in: # See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html # https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
# useful for handling different item types with a single interface # useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter from itemadapter import ItemAdapter, is_item
from scrapy import signals
class RepubSpiderMiddleware: class RepubSpiderMiddleware:

View file

@@ -8,12 +8,11 @@
# from itemadapter import ItemAdapter # from itemadapter import ItemAdapter
import six import six
from scrapy import signals from scrapy import signals
from scrapy.exceptions import NotConfigured, CloseSpider from scrapy.exceptions import CloseSpider, NotConfigured
from scrapy.utils.misc import load_object from scrapy.utils.misc import load_object
from .items import RssItem
from .exporters import RssItemExporter from .exporters import RssItemExporter
from .items import RssItem
from .signals import feed_channel_discovered from .signals import feed_channel_discovered

View file

@@ -1,8 +1,7 @@
from lxml.builder import ElementMaker
from lxml import etree
from lxml.etree import Element
import lxml.etree as ET import lxml.etree as ET
from lxml import etree
from lxml.builder import ElementMaker
from lxml.etree import Element
class SafeElementMaker: class SafeElementMaker:

View file

@@ -1,14 +1,10 @@
from scrapy.spiders import Spider
from scrapy.utils.spider import iterate_spider_output
from repub.items import (
ChannelElementItem,
ElementItem,
)
import feedparser
import logging import logging
from repub.rss import E, ITUNES, CONTENT, MEDIA, CDATA, normalize_date import feedparser
from repub.items import ChannelElementItem, ElementItem
from repub.rss import CDATA, CONTENT, ITUNES, MEDIA, E, normalize_date
from scrapy.spiders import Spider
from scrapy.utils.spider import iterate_spider_output
class BaseRssFeedSpider(Spider): class BaseRssFeedSpider(Spider):