Get feed fetching and parsing working; process raw and error URLs

This commit is contained in:
Luciano Gervasoni
2025-03-18 14:49:12 +01:00
parent 7d7bce1e72
commit fb4b30f05e
26 changed files with 270 additions and 364 deletions

View File

@@ -8,15 +8,15 @@ logger = get_logger()
class FetchFeeds():
def __init__(self) -> None:
logger.debug("Initializing News feed")
logger.debug("Initializing Fetcher Feeds")
def run(self):
try:
logger.debug("Starting NewsFeed.run()")
logger.debug("Starting FetchFeeds.run()")
# Get feeds
list_url_feeds = list(Feed.objects.values_list('rss_feed', flat=True))
logger.debug("Fetching news from feeds: {}".format(list_url_feeds))
logger.debug("Fetching from feeds: {}".format(list_url_feeds))
# Process via RSS feeds
for url_feed in list_url_feeds:
@@ -47,4 +47,4 @@ class FetchFeeds():
# Write to DB
DB_Handler().insert_raw_urls(urls_fetched, source)
except Exception as e:
logger.warning("Exception in NewsFeed.run(): {}\n{}".format(e, traceback.format_exc()))
logger.warning("Exception in FetchFeeds.run(): {}\n{}".format(e, traceback.format_exc()))