LLM refactor, NPU ollama based, publisher update json query to llm

This commit is contained in:
Luciano Gervasoni
2025-04-23 16:35:50 +02:00
parent 8ea3ec1bda
commit e5c574ba33
7 changed files with 123 additions and 34 deletions


@@ -14,16 +14,6 @@ import json
 ####################################################################################################
 def llm(request):
-    def stream_response(model, text):
-        msg_content = {
-            "role": "user",
-            "content": text,
-        }
-        response = OllamaClient().client.chat(model=model, messages=[msg_content], stream=True)
-        for chunk in response:
-            yield chunk["message"]["content"]  # Stream each chunk of text
     if request.method == 'POST':
         try:
             body_data = json.loads(request.body)
@@ -33,7 +23,7 @@ def llm(request):
             if message is None:
                 return JsonResponse({'error': 'No message found in request'}, status=400)
-            return StreamingHttpResponse(stream_response(model, message), content_type="text/plain")
+            return StreamingHttpResponse(OllamaClient().generate_stream(model, message), content_type="text/plain")
         except json.JSONDecodeError:
            return JsonResponse({'error': 'Invalid JSON'}, status=400)
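
The view now delegates streaming to OllamaClient.generate_stream, whose body is not part of this commit's visible hunks. Below is a minimal sketch of what such a wrapper could look like, assuming the official `ollama` Python client and the same chat call the removed stream_response helper used; everything other than the method names visible in the diff (generate_stream, get_models, get_prompt) is an assumption for illustration.

    # Hypothetical sketch of the OllamaClient wrapper the view now calls.
    # Host handling and defaults are assumed; only the method names appear in the diff.
    import ollama

    class OllamaClient:
        def __init__(self, host="http://localhost:11434"):  # assumed default host
            self.client = ollama.Client(host=host)

        def generate_stream(self, model, text):
            """Yield response text chunk by chunk, mirroring the removed stream_response helper."""
            messages = [{"role": "user", "content": text}]
            for chunk in self.client.chat(model=model, messages=messages, stream=True):
                yield chunk["message"]["content"]

Because the view wraps the generator in StreamingHttpResponse with content_type="text/plain", a caller can consume the reply incrementally, for example with requests.post(..., stream=True) and iter_content, rather than waiting for the full completion.
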
@@ -55,13 +45,15 @@ def url_detail_view(request, id):
     url_content = {}
     ollama = OllamaClient()
+    # prompt_content = "{}\n{}\n{}".format(url_content.title, url_content.description, url_content.content)
+    prompt_content = "{}".format(url_content.content)
     context = {
         'url_item': url_item,
         'sources': url_sources,
         'searches': url_searches,
         'models': ollama.get_models(),
-        'prompt': ollama.get_prompt(),
+        'prompt': ollama.get_prompt(prompt_content),
         'url_content': url_content,
         'url_canonical': url_canonical,
     }
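
get_prompt now receives the page content instead of being called with no arguments. The helper itself is not shown in this commit; the following is a hedged sketch of one plausible implementation, with invented prompt wording purely for illustration.

    # Hypothetical OllamaClient.get_prompt; the real prompt template is not in this diff.
    def get_prompt(self, content=""):
        # Assumed instruction text; replace with the project's actual prompt.
        base_prompt = "Summarize the following page content:"
        if not content:
            return base_prompt
        return "{}\n\n{}".format(base_prompt, content)

With this change the template receives a fully formatted prompt in context['prompt'], so the page can submit it to the llm view together with one of the models returned by get_models().
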