Wait db connection, login required, dev mode enable
app_urls/fetcher/middleware/login_required.py | 24 lines added (normal file)
@@ -0,0 +1,24 @@
from django.shortcuts import redirect
from django.conf import settings
from django.urls import reverse

EXEMPT_URLS = [
    # reverse('login'), # or the name of your login view
    reverse('admin:login'),
    reverse('admin:index'),
    # reverse('logout'), # optional
    '/admin/', # allow full access to admin
    settings.STATIC_URL, # allow static files
    # path('scheduler/', include('scheduler.urls')),
]

class LoginRequiredMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if not request.user.is_authenticated:
            path = request.path
            if not any(path.startswith(url) for url in EXEMPT_URLS):
                return redirect(settings.LOGIN_URL)
        return self.get_response(request)
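For the middleware above to take effect it has to be listed in the project's MIDDLEWARE setting after AuthenticationMiddleware (which populates request.user), and LOGIN_URL must point at a reachable login view. A minimal sketch, assuming the new file is importable as fetcher.middleware.login_required; the settings change itself is not shown in this commit:

# settings.py -- sketch only; the dotted path and login URL are assumptions, not from this commit
MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",   # must run before the custom middleware so request.user exists
    "fetcher.middleware.login_required.LoginRequiredMiddleware", # assumed dotted path to the new file
    # ... remaining default middleware ...
]

LOGIN_URL = "/admin/login/"  # destination of redirect(settings.LOGIN_URL); adjust to the project's login view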
@@ -4,7 +4,7 @@ from django.shortcuts import render, get_object_or_404
from django.http import StreamingHttpResponse, JsonResponse, HttpResponse
from django.contrib.auth.decorators import login_required
import ollama
from .models import Urls, Source, Search, UrlContent, UrlsSourceSearch
from .models import Urls, Source, Search, UrlContent, UrlsSourceSearch, UrlsDuplicate
import os

####################################################################################################
@@ -37,7 +37,6 @@ def link_list(request):
    return JsonResponse({"links": list_links })

####################################################################################################
# @login_required(login_url='/admin')
def logs(request, log_type):
    # Capture output: python manage.py rqstats
    try:
@@ -71,25 +70,20 @@ class OllamaClient():
        # return "Imagine you are a journalist, TLDR in a paragraph. Only answer with the summary:"
        #return "Below you will find the whole content of a news article:\n{}\nProvide a concise summary of one paragraph maximum of the content.".format(content)

# TODO: move to ollamajs...

def fetch_details(request, id):
    url_item = get_object_or_404(Urls, id=id)
    url_param = request.GET.get("url", "") # Get URL
    model = request.GET.get("model", "") # Get LLM model
    # TODO: post with body
    text = request.GET.get("text", "") # Get LLM prompt

    # print(request)
    # print(text)

    # LLM
    ollama = OllamaClient()

    def stream_response():
        msg_content = {
            "role": "user",
            "content": text,
        }
        response = ollama.client.chat(model=model, messages=[msg_content], stream=True)
        response = OllamaClient().client.chat(model=model, messages=[msg_content], stream=True)
        for chunk in response:
            yield chunk["message"]["content"] # Stream each chunk of text

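The hunk ends before the view's return statement, so it may help to see how a generator like stream_response() is typically wired up: it gets wrapped in a StreamingHttpResponse so chunks reach the client as Ollama produces them. A self-contained sketch under that assumption; the view name and default model are illustrative, not part of the commit:

from django.http import StreamingHttpResponse
import ollama

def stream_chat(request):
    # Illustrative standalone view: stream an Ollama chat completion chunk by chunk.
    model = request.GET.get("model", "llama3")   # assumed default model name
    text = request.GET.get("text", "")

    def generate():
        client = ollama.Client()                 # plain ollama client, which OllamaClient presumably wraps
        stream = client.chat(model=model, messages=[{"role": "user", "content": text}], stream=True)
        for chunk in stream:
            yield chunk["message"]["content"]    # emit each partial message as it arrives

    return StreamingHttpResponse(generate(), content_type="text/plain")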
@@ -102,6 +96,12 @@ def url_detail_view(request, id):
    url_searches = list(Search.objects.filter(urlssourcesearch__id_url=url_item).distinct())
    # url_source_search = UrlsSourceSearch.objects.filter(id_url=url_item)

    url_duplicate = UrlsDuplicate.objects.get(id_url_duplicated=url_item)
    #id_url_canonical = models.OneToOneField(Urls, models.DO_NOTHING, db_column='id_url_canonical', primary_key=True) # The composite primary key (id_url_canonical, id_url_duplicated) found, that is not supported. The first column is selected.
    #id_url_duplicated = models.ForeignKey(Urls, models.DO_NOTHING, db_column='id_url_duplicated', related_name='urlsduplicate_id_url_duplicated_set')

    url_duplicate.id_url_duplicated

    try:
        url_content = UrlContent.objects.get(pk=id)
    except UrlContent.DoesNotExist:
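One caveat about the lookup added in this hunk: UrlsDuplicate.objects.get() raises UrlsDuplicate.DoesNotExist for any URL that has no duplicate row, unlike the UrlContent lookup just below, which is wrapped in try/except. A guarded variant as a sketch; the helper name is hypothetical:

from .models import UrlsDuplicate  # same models module as the imports above

def get_duplicate_or_none(url_item):
    # Return the UrlsDuplicate row pointing at this URL, or None if the URL is not a duplicate.
    return UrlsDuplicate.objects.filter(id_url_duplicated=url_item).first()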
@@ -222,9 +222,7 @@ def filtered_urls(request):
    statuses = Urls.STATUS_ENUM.choices
    searches = Search.objects.all()
    sources = Source.objects.all()
    # TODO: Cache languages, update once every N
    languages = list(UrlContent.objects.distinct('language').values_list('language', flat=True))
    # Null for visualization
    languages = list(UrlContent.objects.distinct('language').values_list('language', flat=True)) # TODO: Cache languages
    languages = ["Unknown"] + [l for l in languages if l is not None]
    valid_contents = ["True", "False", "Unknown"]

@@ -237,15 +235,7 @@ def filtered_urls(request):
    selected_days = request.GET.get("days", 30)
    per_page = request.GET.get('per_page', 100) # Default is X URLs per page
    page_number = request.GET.get('page') # Get the current page number


    all_status = [str(status[0]) for status in statuses]
    all_search = [str(search.id) for search in searches]
    all_source = [str(source.id) for source in sources]
    all_languages = languages
    all_valid_contents = valid_contents


    # Override with default filters? [Case: no params update on URL] -> Only on "Home" click, or "Next page"
    if (len(request.GET.keys()) == 0) or ((len(request.GET.keys()) == 1) and ("page" in request.GET.keys())):
        selected_status = ["all"]
@@ -254,20 +244,22 @@ def filtered_urls(request):
        selected_language = ["all"]
        selected_valid_contents = ["all"]
    else:
        # All elements
        all_status = [str(status[0]) for status in statuses]
        all_search = [str(search.id) for search in searches]
        all_source = [str(source.id) for source in sources]
        all_languages = languages
        all_valid_contents = valid_contents

        # Non-default parameters: if a list contains all elements, replace it with "all" and avoid a heavy query
        if (set(selected_status) == set(all_status)):
            selected_status = ["all"]
        if (set(selected_search) == set(all_search)):
            selected_search = ["all"]
        if (set(selected_source) == set(all_source)):
            selected_source = ["all"]
        if (set(selected_language) == set(all_languages)):
            selected_language = ["all"]
        if (set(selected_valid_contents) == set(all_valid_contents)):
            selected_valid_contents = ["all"]
        selected_status = ["all"] if (set(selected_status) == set(all_status)) else selected_status
        selected_search = ["all"] if (set(selected_search) == set(all_search)) else selected_search
        selected_source = ["all"] if (set(selected_source) == set(all_source)) else selected_source
        selected_language = ["all"] if (set(selected_language) == set(all_languages)) else selected_language
        selected_valid_contents = ["all"] if (set(selected_valid_contents) == set(all_valid_contents)) else selected_valid_contents

    # Filter URLs based on selected filters
    if ('null' in selected_status) or ('null' in selected_search) or ('null' in selected_source) or ('null' in selected_language) or ('null' in selected_valid_contents):
    if any( 'null' in l for l in [selected_status, selected_search, selected_source, selected_language, selected_valid_contents] ):
        urls = []
    else:
        # Filter by date
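The five ternaries added here repeat one normalisation rule: when every possible value is selected, the selection collapses to ["all"] so the later query can skip building a large OR filter. A sketch of the same idea as a helper, not part of the commit:

def collapse_to_all(selected, universe):
    # Replace a selection that covers every possible value with the sentinel ["all"].
    return ["all"] if set(selected) == set(universe) else list(selected)

# e.g. selected_status = collapse_to_all(selected_status, all_status)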
@@ -308,7 +300,6 @@ def filtered_urls(request):

        # Run query
        urls = Urls.objects.filter(query).distinct() # .order_by('-ts_fetch')
        # print(urls.query)

    # Pagination
    paginator = Paginator(urls, per_page) # Paginate the filtered URLs
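For reference, the page object handed to the template usually comes from Paginator.get_page(), which clamps non-integer and out-of-range page numbers instead of raising, a good fit for values read straight from request.GET. A small sketch of that step; the helper is illustrative and the actual call sits outside the lines shown here:

from django.core.paginator import Paginator

def paginate(queryset, per_page, page_number):
    # Clamp bad input: non-integer -> page 1, out-of-range -> last page.
    paginator = Paginator(queryset, per_page)
    return paginator.get_page(page_number)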