Better URL visualization

This commit is contained in:
Luciano Gervasoni
2025-03-26 17:35:09 +01:00
parent e1f4787119
commit 8dce5206af
3 changed files with 237 additions and 79 deletions

View File

@@ -109,7 +109,7 @@ class OllamaClient():
self.client = ollama.Client(host=os.getenv("ENDPOINT_OLLAMA", "https://ollamamodel.matitos.org"))
def _get_default_model(self):
return "gemma3:1b"
return "llama3.2:3b"
def get_models(self):
models = sorted([m.model for m in self.client.list().models])
@@ -271,6 +271,8 @@ def logs(request):
####################################################################################################
from django.shortcuts import render
from .models import Urls, Search, Source
from django.db.models import Q
from django.utils.timezone import now, timedelta
def filtered_urls(request):
    """Render a paginated, filterable list of fetched URLs.

    GET parameters:
        status / search / source -- multi-valued filters; when a parameter is
            absent, every choice is selected so the unfiltered view shows all.
        selected_days -- only include URLs fetched within this many days
            (default 30; non-numeric input falls back to the default).
        per_page -- page size (default 25).
        page -- page number for the paginator.
    """
    statuses = Urls.STATUS_ENUM.choices
    # NOTE(review): this line falls between the visible diff hunks; inferred
    # from the four later uses of `searches` -- confirm against the full file.
    searches = Search.objects.all()
    sources = Source.objects.all()

    # Default each filter to "all selected" when its parameter is absent,
    # so the initial page load shows everything.
    selected_status = request.GET.getlist('status', [str(status[0]) for status in statuses])
    selected_search = request.GET.getlist('search', [str(search.id) for search in searches])
    selected_source = request.GET.getlist('source', [str(source.id) for source in sources])

    # Guard against non-numeric input instead of letting int() raise a
    # ValueError (which would surface as an HTTP 500).
    try:
        selected_days = int(request.GET.get("selected_days", 30))
    except (TypeError, ValueError):
        selected_days = 30

    # Single combined filter; distinct() collapses the duplicate rows the
    # urlssourcesearch join would otherwise introduce.
    urls = Urls.objects.filter(
        Q(urlssourcesearch__id_source__in=selected_source)
        & Q(urlssourcesearch__id_search__in=selected_search)
        & Q(status__in=selected_status)
        & Q(ts_fetch__gte=now() - timedelta(days=selected_days))
    ).distinct()  # .order_by('-ts_fetch')

    # Shorten search-type labels for display only (never saved back).
    for s in searches:
        s.type = s.type.replace("rss_feed", "rss").replace("url_host", "url").replace("keyword_search", "keyword")

    # Pagination
    per_page = request.GET.get('per_page', 25)
    paginator = Paginator(urls, per_page)  # Paginator coerces per_page via int()
    page_number = request.GET.get('page')
    page_obj = paginator.get_page(page_number)

    # Map URL ids to their sources & searches, but only for the URLs on the
    # current page to keep the query count bounded by the page size.
    sources_map = {
        url.id: list(Source.objects.filter(urlssourcesearch__id_url=url).distinct())
        for url in page_obj.object_list
    }
    searches_map = {
        url.id: list(Search.objects.filter(urlssourcesearch__id_url=url).distinct())
        for url in page_obj.object_list
    }

    context = {
        'urls': page_obj,          # paginated URLs for the template
        'per_page': per_page,      # echoed back for the page-size selector
        'statuses': statuses,
        'searches': searches,
        'sources': sources,
        'selected_status': selected_status,
        'selected_search': selected_search,
        'selected_source': selected_source,
        "selected_days": selected_days,
        "sources_map": sources_map,
        "searches_map": searches_map,
    }
    return render(request, 'filtered_urls.html', context)