DuckDuckGo search update: replace the duckduckgo_search package with ddgs
@@ -9,7 +9,7 @@ logger = get_logger()
 
 from furl import furl
 from gnews import GNews
-from duckduckgo_search import DDGS
+from ddgs import DDGS
 from GoogleNews import GoogleNews
 from search_engines import Yahoo, Aol
 
@@ -113,7 +113,7 @@ class SearchDuckDuckGoGeneral(FetcherAbstract):
         return "ddg-general {} results={}".format(self.region, self.max_results).replace("results=None", "").strip()
 
     def _fetch_raw_urls(self, keyword_search):
         try:
             news = DDGS().text(keyword_search, region=self.region, timelimit=self.period, max_results=self.max_results)
             urls = [e.get("href") for e in news]
         except Exception as e:
@@ -13,7 +13,7 @@ lxml[html_clean]
 googlenewsdecoder
 gnews
 GoogleNews
-duckduckgo_search
+ddgs
 git+https://github.com/tasos-py/Search-Engines-Scraper.git
 furl
 langdetect
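
Context for the change above: the duckduckgo_search package was renamed to ddgs, and the DDGS class keeps a similar text-search interface, which is why only the import and the requirements entry change. Below is a minimal sketch of the call the fetcher relies on, assuming ddgs is installed (pip install ddgs) and that its text() method still accepts the region, timelimit, and max_results keywords as in the old package; the literal query and option values are illustrative, not from the commit.

# Minimal sketch of the DDGS text search used by _fetch_raw_urls above.
# Assumes DDGS().text() keeps the region / timelimit / max_results
# keywords from duckduckgo_search; verify against your ddgs version.
from ddgs import DDGS

results = DDGS().text(
    "open source search engines",   # keyword_search in the fetcher
    region="us-en",                 # self.region
    timelimit="w",                  # self.period, e.g. past week
    max_results=10,                 # self.max_results
)

# Each result is a dict; "href" holds the result URL, as in the fetcher.
urls = [r.get("href") for r in results]
print(urls)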