diff --git a/.gitignore b/.gitignore index b4cf9f1..051548c 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ __pycache__/ *.pyc **/credentials.py logs/ +postgres/ \ No newline at end of file diff --git a/A_Development.ipynb b/A_Development.ipynb deleted file mode 100644 index 2f88e52..0000000 --- a/A_Development.ipynb +++ /dev/null @@ -1,363 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# !pip install git+https://github.com/tasos-py/Search-Engines-Scraper.git\n", - "import search_engines\n", - "\n", - "engine = search_engines.Bing()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "results = engine.search('news: \"child abuse\"', pages=2)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "engine = search_engines.search_engines_dict[\"brave\"]()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "query = 'news: child abuse'\n", - "r = engine.search(query, pages=2)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "r.__dict__" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import newspaper\n", - "newspaper.ArticleBinaryDataException" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "'''\n", - "import newspaper\n", - "\n", - "url = 'https://www.missingkids.org/poster/USVA/VA25-0820/1'\n", - "art_1 = newspaper.article(url)\n", - "url = 'https://www.missingkids.org/poster/NCMC/2045193/1'\n", - "art_2 = newspaper.article(url)\n", - "'''" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import ollama\n", - "\n", - "#model = \"llama3.2:1b\"\n", - "client = ollama.Client(\n", - " host = 'https://ollamamodel.matitos.org',\n", - ")\n", - "l = client.list()\n", - "list_models = [m.get(\"model\") for m in l.model_dump().get(\"models\")]\n", - "\n", - "print(list_models)\n", - "\n", - "for m in list_models:\n", - " context_key = [ k for k in client.show(m).model_dump().get(\"modelinfo\").keys() if \"context_length\" in k]\n", - " if (len(context_key) != 1):\n", - " print(\"Problem!!!\")\n", - " print(m, client.show(m).model_dump().get(\"modelinfo\").get(context_key[0]))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "text = \"...\"\n", - "model = \"falcon3:1b\"\n", - "\n", - "msg_content = {\n", - " \"role\": \"user\", \n", - " \"content\": text,\n", - "}\n", - "response = client.chat(model=model, messages=[msg_content], stream=False)\n", - "print(response[\"message\"][\"content\"])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": 
{}, - "outputs": [], - "source": [ - "import requests\n", - "import cv2\n", - "import base64\n", - "import numpy as np\n", - "\n", - "endpoint = \"http://192.168.2.64:12343/image\"\n", - "\n", - "\n", - "\n", - "prompt = \"Majestic mountain landscape with snow-capped peaks, autumn foliage in vibrant reds and oranges, a turquoise river winding through a valley, crisp and serene atmosphere, ultra-realistic style.\"\n", - "prompt = \"A group of kids happily playing in a joy environment\"\n", - "#prompt = \"A bitcoin behaving like a king, surrounded by small alternative coins. Detailed, geometric style\"\n", - "\n", - "json = {\n", - " \"prompt\": prompt,\n", - " \"num_inference_steps\": 10,\n", - " \"size\": \"512x512\",\n", - " \"seed\": 123456,\n", - "}\n", - "\n", - "for inf_step in [1, 4, 10, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 90, 100]:\n", - " json[\"num_inference_steps\"] = inf_step\n", - "\n", - " %time r = requests.post(endpoint, json=json)\n", - " print(\"Status code\", r.status_code)\n", - "\n", - " # Image\n", - " png_as_np = np.frombuffer(base64.b64decode(r.text), dtype=np.uint8)\n", - " image_bgr = cv2.imdecode(png_as_np, cv2.IMREAD_COLOR)\n", - "\n", - " cv2.imwrite(\"sample_img_{}.png\".format(json[\"num_inference_steps\"]), image_bgr)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# !pip install trafilatura\n", - "import trafilatura\n", - "from pprint import pprint\n", - "\n", - "url = \"https://www.foxnews.com/us/utah-mommy-blogger-ruby-franke-power-public-image-allowed-child-abuse-go-unchecked-expert\"\n", - "# url = \"https://www.missingkids.org/poster/USVA/VA25-0820/1\"\n", - "url = \"https://www.bloomberg.com/news/articles/2025-03-12/eu-launches-metals-tariff-retaliation-on-26-billion-of-us-goods\"\n", - "\n", - "# Fetch\n", - "doc = trafilatura.fetch_url(url)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Content & metadata\n", - "metadata = trafilatura.extract_metadata(doc)\n", - "content = trafilatura.extract(doc)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pprint(metadata.as_dict())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(content)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# !pip install newspaper4k\n", - "# !pip install langdetect \n", - "import newspaper\n", - "import langdetect\n", - "langdetect.DetectorFactory.seed = 0\n", - "\n", - "\n", - "\n", - "# url = \"https://www.missingkids.org/poster/USVA/VA25-0820/1\"\n", - "#url = \"https://www.waff.com/2025/03/11/colbert-heights-high-school-employee-arrested-child-abuse/\"\n", - "\n", - "\n", - "\n", - "#url = \"https://www.bloomberg.com/news/articles/2025-03-12/eu-launches-metals-tariff-retaliation-on-26-billion-of-us-goods\"\n", - "\n", - "\n", - "url = \"https://apnews.com/article/canada-trump-us-tariffs-steel-2517a6a2baf0596cb1a43d3a7d1e7939\"\n", - "url = 
\"https://www.foxnews.com/us/utah-mommy-blogger-ruby-franke-power-public-image-allowed-child-abuse-go-unchecked-expert\"\n", - "#url = \"https://www.ft.com/content/6d7c6915-4ceb-43fc-9896-590036b12a87\"\n", - "#url = \"https://www.lanacion.com.ar/politica/milei-en-bahia-blanca-un-viaje-sorpresa-para-frenar-las-criticas-y-mostrar-cercania-nid12032025/\"\n", - "#url = \"https://www.missingkids.org/poster/NCMC/2043547/1\"\n", - "\n", - "try:\n", - " article = newspaper.article(url)\n", - "except newspaper.ArticleException as e:\n", - " print(\"ArticleException: {}\".format(str(e)))\n", - "except Exception as e:\n", - " print(\"Err: {}\".format(str(e)))\n", - "\n", - "# url_photo = set([i for i in article.images if \"api.missingkids.org/photographs\" in i])\n", - "# article.is_valid_url(), article.is_parsed, article.is_media_news(), article.is_valid_body()\n", - "article.meta_data\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# !pip install news-please\n", - "from newsplease import NewsPlease\n", - "\n", - "url = \"https://variety.com/2025/film/news/gene-hackman-death-suspicious-gas-leak-search-warrant-1236322610/\"\n", - "url = \"https://www.bbc.com/news/articles/cewkkkvkzn9o\"\n", - "url = \"https://www.foxnews.com/us/utah-mommy-blogger-ruby-franke-power-public-image-allowed-child-abuse-go-unchecked-expert\"\n", - "article = NewsPlease.from_url(url)\n", - "print(article.title)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(article.maintext)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "matitos", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/README.md b/README.md index a865a3e..ae32f84 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,14 @@ - Fetch parsing URL host - Fetch from RSS feed - Fetch searching (Google search & news, DuckDuckGo, ...) + ++ Sources -> Robustness to TooManyRequests block + - Selenium based + - Sites change their logic, request captcha, ... + - Brave Search API + - Free up to X requests per day. Need credit card association (no charges) + - Bing API + - Subscription required + - Yandex. No API? 
- Process URLs -> Updates raw URLs - Extracts title, description, content, image and video URLs, main image URL, language, keywords, authors, tags, published date - Determines if it is a valid article content diff --git a/app_selenium/logger.py b/app_selenium/logger.py index 28a3099..6905c20 100644 --- a/app_selenium/logger.py +++ b/app_selenium/logger.py @@ -2,30 +2,29 @@ import logging import os # Get env var -path_logs_parameterization = os.getenv("PATH_LOGS_PARAMETERIZATION", "logs/log_app_selenium_{}.log") +logs_directory = os.getenv("PATH_LOGS_DIRECTORY", "logs") # Directory of logs -directory = '/'.join(path_logs_parameterization.split("/")[:-1]) -os.makedirs(directory, exist_ok=True) +os.makedirs(logs_directory, exist_ok=True) logging.basicConfig(format='%(filename)s | %(levelname)s | %(asctime)s | %(message)s') -logger = logging.getLogger("news_fetcher") +logger = logging.getLogger("selenium") logger.setLevel(logging.DEBUG) # To file log: INFO / WARNING / ERROR / CRITICAL -fh = logging.handlers.RotatingFileHandler(filename=path_logs_parameterization.format("debug"), mode="a", maxBytes=10000000, backupCount=1) +fh = logging.handlers.RotatingFileHandler(filename=os.path.join(logs_directory, "debug.log"), mode="a", maxBytes=10000000, backupCount=1) fh.setFormatter(logging.Formatter('%(levelname)s | %(asctime)s | %(message)s')) fh.setLevel(logging.DEBUG) logger.addHandler(fh) # To file log: INFO / WARNING / ERROR -fh = logging.handlers.RotatingFileHandler(filename=path_logs_parameterization.format("info"), mode="a", maxBytes=10000000, backupCount=1) +fh = logging.handlers.RotatingFileHandler(filename=os.path.join(logs_directory, "info.log"), mode="a", maxBytes=10000000, backupCount=1) fh.setFormatter(logging.Formatter('%(levelname)s | %(asctime)s | %(message)s')) fh.setLevel(logging.INFO) logger.addHandler(fh) # To file log: WARNING / ERROR / CRITICAL -fh = logging.handlers.RotatingFileHandler(filename=path_logs_parameterization.format("warning"), mode="a", maxBytes=10000000, backupCount=1) +fh = logging.handlers.RotatingFileHandler(filename=os.path.join(logs_directory, "warning.log"), mode="a", maxBytes=10000000, backupCount=1) fh.setFormatter(logging.Formatter('%(levelname)s | %(asctime)s | %(message)s')) fh.setLevel(logging.WARNING) logger.addHandler(fh) diff --git a/app_selenium/missing_kids.py b/app_selenium/missing_kids.py index 11b38ba..4a97da7 100644 --- a/app_selenium/missing_kids.py +++ b/app_selenium/missing_kids.py @@ -28,7 +28,7 @@ class MissingKidsFetcher(): logger.debug("Processing page: {}...".format(i)) try: - time.sleep(os.getenv("SELENIUM_SLEEP_PER_PAGE", 4)); #driver.implicitly_wait(3) + time.sleep(int(os.getenv("SELENIUM_SLEEP_PER_PAGE", 4))) #driver.implicitly_wait(3) # Fetch poster URLs for element_type in ["a"]: # ["a", "p", "div"]: for elem in driver.find_elements(By.TAG_NAME, element_type): diff --git a/app_urls/1-DB.ipynb b/app_urls/1-DB.ipynb deleted file mode 100644 index 46757c3..0000000 --- a/app_urls/1-DB.ipynb +++ /dev/null @@ -1,341 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "# !pip install psycopg[binary]" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "db_postgres\n", - "db_redis\n", - "\u001b[1A\u001b[1B\u001b[0G\u001b[?25l[+] Running 2/0\n", - " ⠿ Container db_redis \u001b[39mStarting\u001b[0m \u001b[34m0.1s \u001b[0m\n", - " ⠿ Container db_postgres 
\u001b[39mStarting\u001b[0m \u001b[34m0.1s \u001b[0m\n", - " \u001b[32m✔\u001b[0m Container dozzle \u001b[32mRunning\u001b[0m \u001b[34m0.0s \u001b[0m\n", - " \u001b[32m✔\u001b[0m Container adminer \u001b[32mRunning\u001b[0m \u001b[34m0.0s \u001b[0m\n", - "\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l[+] Running 2/4\n", - " ⠿ Container db_redis \u001b[39mStarting\u001b[0m \u001b[34m0.2s \u001b[0m\n", - " ⠿ Container db_postgres \u001b[39mStarting\u001b[0m \u001b[34m0.2s \u001b[0m\n", - " \u001b[32m✔\u001b[0m Container dozzle \u001b[32mRunning\u001b[0m \u001b[34m0.0s \u001b[0m\n", - " \u001b[32m✔\u001b[0m Container adminer \u001b[32mRunning\u001b[0m \u001b[34m0.0s \u001b[0m\n", - "\u001b[?25h\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[1A\u001b[0G\u001b[?25l\u001b[34m[+] Running 4/4\u001b[0m\n", - " \u001b[32m✔\u001b[0m Container db_redis \u001b[32mStarted\u001b[0m \u001b[34m0.3s \u001b[0m\n", - " \u001b[32m✔\u001b[0m Container db_postgres \u001b[32mStarted\u001b[0m \u001b[34m0.3s \u001b[0m\n", - " \u001b[32m✔\u001b[0m Container dozzle \u001b[32mRunning\u001b[0m \u001b[34m0.0s \u001b[0m\n", - " \u001b[32m✔\u001b[0m Container adminer \u001b[32mRunning\u001b[0m \u001b[34m0.0s \u001b[0m\n", - "\u001b[?25h" - ] - } - ], - "source": [ - "!docker rm -f db_postgres db_redis; docker compose -f ../docker/docker-compose.yml up -d ; sleep 5\n", - "!rm logs/*" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "INSERT_TABLES = True\n", - "INSERT_SAMPLE_DATA = False\n", - "\n", - "import psycopg\n", - "connection_info = \"host={} port={} user={} password={} dbname={}\".format(\"localhost\", \"5432\", \"supermatitos\", \"supermatitos\", \"matitos\")\n", - "\n", - "from datetime import datetime, timezone\n", - "import re\n", - "from pprint import pprint" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "if INSERT_TABLES:\n", - " # Connect to an existing database\n", - " with psycopg.connect(connection_info) as conn:\n", - " # Open a cursor to perform database operations\n", - " with conn.cursor() as cur:\n", - " # Autocommit at end of transaction (Atomic insert of URLs and sources)\n", - " with conn.transaction() as tx:\n", - " # Create URLs table\n", - " c = cur.execute(\"\"\"\n", - " CREATE TYPE URL_STATUS AS ENUM ('raw', 'error', 'valid', 'unknown', 'invalid', 'duplicate');\n", - "\n", - " CREATE TABLE URLS (\n", - " id SERIAL PRIMARY KEY,\n", - " url TEXT NOT NULL UNIQUE,\n", - " ts_fetch TIMESTAMPTZ NOT NULL DEFAULT NOW(),\n", - " status URL_STATUS NOT NULL DEFAULT 'raw' -- ,\n", - " -- status_wendy WENDY_STATUS DEFAULT NULL,\n", - " -- ts_wendy TIMESTAMPTZ DEFAULT NULL\n", - " );\n", - " CREATE INDEX idx_urls_status ON urls(status);\n", - " CREATE INDEX idx_urls_ts_fetch ON urls(ts_fetch);\n", - "\n", - " CREATE TABLE URLS_DUPLICATE (\n", - " id_url_canonical INTEGER REFERENCES URLS(id),\n", - " id_url_duplicated INTEGER REFERENCES URLS(id),\n", - " PRIMARY KEY (id_url_canonical, id_url_duplicated)\n", - " );\n", - " \n", - " CREATE TYPE SEARCH_TYPE AS ENUM ('rss_feed', 'keyword_search', 'url_host');\n", - " CREATE TABLE SEARCH (\n", - " id SMALLSERIAL PRIMARY KEY,\n", - " search TEXT NOT NULL UNIQUE,\n", - " type SEARCH_TYPE NOT NULL\n", - " -- language_country CHAR(5), -- Language: ISO 639-1 Code. Country: ISO 3166 ALPHA-2. e.g.: en-us. 
Required for search\n", - " -- UNIQUE(search, language_country)\n", - " );\n", - " CREATE INDEX idx_search_type ON SEARCH(type);\n", - " \n", - " CREATE TABLE SOURCE (\n", - " id SMALLSERIAL PRIMARY KEY,\n", - " source TEXT NOT NULL UNIQUE\n", - " );\n", - " \n", - " -- CREATE TABLE SEARCH_LANGUAGE (\n", - " -- language CHAR(2) NOT NULL, -- ISO 639-1 Code, e.g. \"en\"\n", - " -- country CHAR(2) NOT NULL, -- ISO 3166 ALPHA-2, e.g. \"us\"\n", - " -- PRIMARY KEY (language, country)\n", - " -- );\n", - " \n", - " CREATE TABLE URLS_SOURCE_SEARCH (\n", - " id_url INTEGER REFERENCES URLS(id),\n", - " id_source SMALLINT REFERENCES SOURCE(id) ON UPDATE CASCADE ON DELETE RESTRICT,\n", - " id_search SMALLINT REFERENCES SEARCH(id) ON UPDATE CASCADE ON DELETE RESTRICT,\n", - " PRIMARY KEY(id_url, id_source, id_search)\n", - " );\n", - " CREATE INDEX idx_source ON URLS_SOURCE_SEARCH(id_source);\n", - " CREATE INDEX idx_search ON URLS_SOURCE_SEARCH(id_search);\n", - "\n", - " CREATE TABLE STATUS_PATTERN_MATCHING (\n", - " pattern TEXT PRIMARY KEY,\n", - " priority SMALLINT NOT NULL,\n", - " status URL_STATUS NOT NULL\n", - " );\n", - " \n", - " \n", - " CREATE TABLE URL_CONTENT (\n", - " id_url INTEGER PRIMARY KEY REFERENCES URLS(id),\n", - " date_published TIMESTAMPTZ DEFAULT NOW(),\n", - " title TEXT,\n", - " description TEXT,\n", - " content TEXT,\n", - " valid_content BOOLEAN,\n", - " language CHAR(2), -- ISO 639-1 Code\n", - " keywords TEXT[],\n", - " tags TEXT[],\n", - " authors TEXT[],\n", - " image_main_url TEXT,\n", - " images_url TEXT[],\n", - " videos_url TEXT[],\n", - " url_host TEXT, -- www.breitbart.com\n", - " site_name TEXT -- Breitbart News\n", - " );\n", - " CREATE INDEX idx_tags ON URL_CONTENT USING GIN(tags);\n", - " CREATE INDEX idx_authors ON URL_CONTENT USING GIN(authors);\n", - " CREATE INDEX idx_date_published ON URL_CONTENT (date_published);\n", - " CREATE INDEX idx_valid_content ON URL_CONTENT (valid_content);\n", - " CREATE INDEX idx_language ON URL_CONTENT (language);\n", - " CREATE INDEX idx_url_host ON URL_CONTENT (url_host);\n", - " \"\"\")\n", - "\n", - " ### Default insert values\n", - " \n", - " # Feeds\n", - " cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('https://api.missingkids.org/missingkids/servlet/XmlServlet?act=rss&LanguageCountry=en_US&orgPrefix=NCMC', 'rss_feed');\" )\n", - " # Websites of interest\n", - " cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('missingkids.org/poster', 'url_host');\" )\n", - " cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('missingkids.org/new-poster', 'url_host');\" )\n", - " cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('breitbart.com', 'url_host');\" )\n", - " # Search keywords\n", - " cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('child abuse', 'keyword_search');\" )\n", - " # cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('child abuse', 'keyword_search', 'en-us');\" )\n", - " # cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('child abuse', 'keyword_search', 'en-gb');\" )\n", - " \n", - " # Status update based on pattern matching (with priority to apply in order). 
Regex test https://regex101.com/\n", - " # cur.execute( \"INSERT INTO STATUS_PATTERN_MATCHING (pattern, priority, status) VALUES ('{}', 75, 'valid');\".format(\".*{}.*\".format(re.escape(\"missingkids.org/poster/\"))) )\n", - " cur.execute( \"INSERT INTO STATUS_PATTERN_MATCHING (pattern, priority, status) VALUES ('{}', 50, 'invalid');\".format(\".*{}.*\".format(re.escape(\"youtube.com/\"))) )\n", - " cur.execute( \"INSERT INTO STATUS_PATTERN_MATCHING (pattern, priority, status) VALUES ('{}', 50, 'invalid');\".format(\".*{}.*\".format(re.escape(\"tiktok.com/\"))) )\n", - " cur.execute( \"INSERT INTO STATUS_PATTERN_MATCHING (pattern, priority, status) VALUES ('{}', 50, 'invalid');\".format(\".*{}.*\".format(re.escape(\"twitter.com/\"))) )\n", - " cur.execute( \"INSERT INTO STATUS_PATTERN_MATCHING (pattern, priority, status) VALUES ('{}', 50, 'invalid');\".format(\".*{}.*\".format(re.escape(\"reddit.com/\"))) )\n", - " cur.execute( \"INSERT INTO STATUS_PATTERN_MATCHING (pattern, priority, status) VALUES ('{}', 50, 'invalid');\".format(\".*{}.*\".format(re.escape(\"libreddit.de/\"))) )\n", - " cur.execute( \"INSERT INTO STATUS_PATTERN_MATCHING (pattern, priority, status) VALUES ('{}', 50, 'invalid');\".format(\".*{}.*\".format(re.escape(\"radio.foxnews.com/\"))) )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\t urls\n", - "[]\n", - "\t urls_duplicate\n", - "[]\n", - "\t urls_source_search\n", - "[]\n", - "\t source\n", - "[]\n", - "\t search\n", - "[(1,\n", - " 'https://api.missingkids.org/missingkids/servlet/XmlServlet?act=rss&LanguageCountry=en_US&orgPrefix=NCMC',\n", - " 'rss_feed'),\n", - " (2, 'missingkids.org/poster', 'url_host'),\n", - " (3, 'missingkids.org/new-poster', 'url_host'),\n", - " (4, 'breitbart.com', 'url_host'),\n", - " (5, 'child abuse', 'keyword_search')]\n", - "\t status_pattern_matching\n", - "[('.*youtube\\\\.com/.*', 50, 'invalid'),\n", - " ('.*tiktok\\\\.com/.*', 50, 'invalid'),\n", - " ('.*twitter\\\\.com/.*', 50, 'invalid'),\n", - " ('.*reddit\\\\.com/.*', 50, 'invalid'),\n", - " ('.*libreddit\\\\.de/.*', 50, 'invalid'),\n", - " ('.*radio\\\\.foxnews\\\\.com/.*', 50, 'invalid')]\n", - "\t url_content\n", - "[]\n" - ] - } - ], - "source": [ - "# Connect to an existing database\n", - "with psycopg.connect(connection_info) as conn:\n", - " # Open a cursor to perform database operations\n", - " with conn.cursor() as cur:\n", - " # Get tables\n", - " cur.execute(\"SELECT table_name FROM information_schema.tables WHERE table_schema='public';\")\n", - " tables = [t[0] for t in cur.fetchall()]\n", - "\n", - " for t in tables:\n", - " print(\"\\t\", t)\n", - " pprint( cur.execute(\"SELECT * FROM {} LIMIT 50;\".format(t)).fetchall() )" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[(1,\n", - " 'https://api.missingkids.org/missingkids/servlet/XmlServlet?act=rss&LanguageCountry=en_US&orgPrefix=NCMC',\n", - " 'rss_feed'),\n", - " (2, 'missingkids.org/poster', 'url_host'),\n", - " (3, 'missingkids.org/new-poster', 'url_host'),\n", - " (4, 'breitbart.com', 'url_host'),\n", - " (5, 'child abuse', 'keyword_search')]\n" - ] - } - ], - "source": [ - "# Connect to an existing database\n", - "with psycopg.connect(connection_info) as conn:\n", - " # Open a 
cursor to perform database operations\n", - " with conn.cursor() as cur:\n", - " pprint( cur.execute(\"SELECT * FROM SEARCH;\").fetchall() )" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[]\n" - ] - } - ], - "source": [ - "# Connect to an existing database\n", - "with psycopg.connect(connection_info) as conn:\n", - " # Open a cursor to perform database operations\n", - " with conn.cursor() as cur:\n", - " pprint( cur.execute(\"SELECT * FROM URLS LIMIT 50;\").fetchall() )\n", - " #pprint( cur.execute(\"SELECT id_url, title, valid_content FROM URL_CONTENT LIMIT 10;\").fetchall() )" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\n!docker rm -f db_redis; docker compose -f ../docker/docker-compose.yml up -d\\n\\n# Connect to an existing database\\nwith psycopg.connect(connection_info) as conn:\\n # Open a cursor to perform database operations\\n with conn.cursor() as cur:\\n pprint( cur.execute(\"TRUNCATE URLS, URL_CONTENT, URLS_SOURCE_SEARCH, URLS_DUPLICATE;\") )\\n # cur.execute( \"INSERT INTO SEARCH (search, type) VALUES (\\'missingkids.org\\', \\'url_host\\');\" )\\n'" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "!docker rm -f db_redis; docker compose -f ../docker/docker-compose.yml up -d\n", - "\n", - "# Connect to an existing database\n", - "with psycopg.connect(connection_info) as conn:\n", - " # Open a cursor to perform database operations\n", - " with conn.cursor() as cur:\n", - " pprint( cur.execute(\"TRUNCATE URLS, URL_CONTENT, URLS_SOURCE_SEARCH, URLS_DUPLICATE;\") )\n", - " # cur.execute( \"INSERT INTO SEARCH (search, type) VALUES ('missingkids.org', 'url_host');\" )\n", - "'''" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "matitos", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/app_urls/Dockerfile b/app_urls/Dockerfile new file mode 100644 index 0000000..1a1ddd0 --- /dev/null +++ b/app_urls/Dockerfile @@ -0,0 +1,48 @@ +FROM python:3.12 + +# Prevents Python from writing pyc files to disk +ENV PYTHONDONTWRITEBYTECODE=1 +#Prevents Python from buffering stdout and stderr +ENV PYTHONUNBUFFERED=1 + +# User +RUN useradd -m -r appuser && \ + mkdir /opt/app && \ + chown -R appuser /opt/app + +WORKDIR /opt/app + +# Copy the Django project and install dependencies +COPY requirements.txt /opt/app/ +# run this command to install all dependencies +RUN pip install --no-cache-dir -r requirements.txt + +COPY --chown=appuser:appuser . 
/opt/app/ + +RUN chmod -R 755 /opt/app +RUN chown -R appuser:appuser /opt/app +USER appuser + +# Initialization script +RUN echo '#!/bin/bash' > /opt/app/initialize.sh && \ + echo 'if [ "${INITIALIZE_DB}" = false ]; then' >> /opt/app/initialize.sh && \ + echo 'echo "Initialization not required"' >> /opt/app/initialize.sh && \ + echo 'else' >> /opt/app/initialize.sh && \ + echo 'echo "Initializating database"' >> /opt/app/initialize.sh && \ + echo 'sleep 5' >> /opt/app/initialize.sh && \ + echo 'python db.py --initialize_tables --initialize_data' >> /opt/app/initialize.sh && \ + echo 'python manage.py makemigrations fetcher; python manage.py migrate --fake-initial' >> /opt/app/initialize.sh && \ + echo 'python manage.py createsuperuser --noinput' >> /opt/app/initialize.sh && \ + echo 'python manage.py collectstatic --no-input' >> /opt/app/initialize.sh && \ + echo 'python manage.py import --filename scheduled_tasks.json' >> /opt/app/initialize.sh && \ + echo 'fi' >> /opt/app/initialize.sh && \ + chmod +x /opt/app/initialize.sh + +# Serving script +RUN echo '#!/bin/bash' > /opt/app/run.sh && \ + echo 'gunicorn core.wsgi:application --bind 0.0.0.0:8000 & python manage.py rqworker high default low' >> /opt/app/run.sh && \ + #echo 'python manage.py runserver & python manage.py rqworker high default low' >> /opt/app/run.sh && \ + chmod +x /opt/app/run.sh + +# Run Django’s server & workers +CMD ["sh", "-c", "/opt/app/initialize.sh && /opt/app/run.sh"] diff --git a/app_urls/README.md b/app_urls/README.md index c9d6b39..70f5486 100644 --- a/app_urls/README.md +++ b/app_urls/README.md @@ -2,18 +2,9 @@ ``` conda create -n matitos_urls python=3.12 conda activate matitos_urls -# Core -pip install django==5.1 psycopg[binary] django-redis django-tasks-scheduler -# Fetcher -pip install feedparser python-dateutil newspaper4k[all] lxml[html_clean] googlenewsdecoder gnews duckduckgo_search GoogleNews langdetect -# News visualization -pip install ollama +pip install -r requirements.txt ``` -* Database - * Database initialization -> 1-DB.ipynb - - * From automated inspectdb ``` # 1) Inspect DB, generate models.py @@ -74,60 +65,19 @@ class Meta: db_table = 'urls' # db_table = '{}_urls'.format(project_name) ``` +* Database & initialization + * Check initialize.sh on Dockerfile + * Environment variables -``` -# Database -DB_NAME=${DB_NAME:-matitos} -DB_USER=${DB_NAME:-supermatitos} -DB_PASSWORD=${DB_NAME:-supermatitos} -DB_HOST=${DB_NAME:-localhost} -DB_PORT=${DB_NAME:-5432} -REDIS_HOST=${REDIS_HOST:-localhost} -REDIS_PORT=${REDIS_PORT:-6379} - -# Job timeout: 30 min -JOB_DEFAULT_TIMEOUT=${RQ_DEFAULT_TIMEOUT:-1800} - -# Logs path -PATH_LOGS_PARAMETERIZATION="logs/log_app_fetcher_{}.log" - -# Fetcher -FETCHER_GNEWS_DECODE_SLEEP=2 -FETCHER_GOOGLE_GENERAL_PAGE_ITER_SLEEP=4 -FETCHER_BETWEEN_SEARCHES_SLEEP=5 -FETCHER_URL_HOST_SLEEP=5 -FETCHER_LANGUAGE_DETECTION_MIN_CHAR=100 - -SELENIUM_ENDPOINT="http://selenium_app:80" -``` + * In docker-compose.yml * Deploy ``` -# Migrations -python manage.py makemigrations api; python manage.py migrate --fake-initial -# Create user -python manage.py createsuperuser +# Check environments variables on docker-compose.yml -# 1) Server -python manage.py runserver +# Remove previous instances +docker compose down -v -# 2) Workers -python manage.py rqworker high default low - -# Visualize DB -http://localhost:8080/?pgsql=matitos_db&username=supermatitos&db=matitos&ns=public&select=urls&order%5B0%5D=id +# Build & up +docker compose up -d --build ``` - -* Scheduled tasks -``` -# Import tasks 
-python manage.py import --filename scheduled_tasks.json - -# Modify using the admin panel, then save -# python manage.py export > scheduled_tasks.json -``` - -* Utils. TODO: To endpoint... -``` -python manage.py rqstats -``` \ No newline at end of file diff --git a/app_urls/api/templates/charts.html b/app_urls/api/templates/charts.html deleted file mode 100644 index 9cffde6..0000000 --- a/app_urls/api/templates/charts.html +++ /dev/null @@ -1,295 +0,0 @@ - - -
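As background for the schema now initialized via `python db.py --initialize_tables` in the Dockerfile (and previously sketched in the removed `app_urls/1-DB.ipynb`), the `STATUS_PATTERN_MATCHING` table stores a regex `pattern`, a `priority`, and a target `status` used to triage raw URLs. The snippet below is a minimal, assumed sketch of how those rows could be applied with psycopg, using the connection parameters from the removed notebook; the real update logic lives in the fetcher app and is not shown in this diff, and the priority ordering (lower value applied first) is an assumption.

```python
# Minimal, assumed sketch: apply STATUS_PATTERN_MATCHING rules to 'raw' URLs.
# Table and column names come from the schema in the removed 1-DB.ipynb;
# the actual application logic in the fetcher app may differ.
import re
import psycopg

# Connection parameters mirror the removed notebook's local development setup.
CONNECTION_INFO = "host=localhost port=5432 user=supermatitos password=supermatitos dbname=matitos"

with psycopg.connect(CONNECTION_INFO) as conn:
    with conn.cursor() as cur:
        # Assumption: lower priority values are applied first.
        rules = cur.execute(
            "SELECT pattern, status FROM STATUS_PATTERN_MATCHING ORDER BY priority;"
        ).fetchall()
        raw_urls = cur.execute(
            "SELECT id, url FROM URLS WHERE status = 'raw';"
        ).fetchall()

        for url_id, url in raw_urls:
            for pattern, status in rules:
                if re.match(pattern, url):
                    cur.execute(
                        "UPDATE URLS SET status = %s WHERE id = %s;", (status, url_id)
                    )
                    break  # first matching rule wins
```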