diff --git a/app_urls/fetcher/src/llm.py b/app_urls/fetcher/src/llm.py
index 33b4689..8107b31 100644
--- a/app_urls/fetcher/src/llm.py
+++ b/app_urls/fetcher/src/llm.py
@@ -29,7 +29,7 @@ class OllamaClient():
         return []
 
     def get_prompt(self, content):
-        return "Provide, in one sentence each, the who, what, when, where, why, and a detailed summary of the content below:\n\n{}".format(content)
+        return "Provide, in one sentence each, the what, why, who, when, where, and a detailed summary of the content below:\n\n{}".format(content)
         return "First, provide a detailed summary of the content below in one paragraph. Second, specify in one sentence each the who, what, when, where and why of the story. Do not mention or reference the original text, its source, or any phrases like 'According to' or 'The text states':\n\n{}".format(content)
         return "First, provide a summary of the content below in one paragraph. Second, specify the Who, What, When, Where and Why of the story:\n\n{}".format(content)
         # First, provide a summary of the content below in one paragraph. Second, specify the who, what, when, where and why of the story in one sentence each. Do not mention or reference the original text, its source, or any phrases like 'According to' or 'The text states':
diff --git a/app_urls/fetcher/src/publisher.py b/app_urls/fetcher/src/publisher.py
index d64923c..7934f02 100644
--- a/app_urls/fetcher/src/publisher.py
+++ b/app_urls/fetcher/src/publisher.py
@@ -140,9 +140,9 @@ class Publisher():
         summary, five_w = "", ""
         for k, v in generated_content_dict.items():
             if ("summary" in k.lower()):
-                summary = v
+                summary = v if type(v) is str else "\n".join(summary)
             else:
-                five_w += "{}: {}\n".format(k.capitalize(), v)
+                five_w += "{}: {}\n".format(k.capitalize(), v if type(v) is str else ". ".join(v) )
 
         # Aggregate generated content
         generated_content = "{}\n\n{}".format(summary, five_w)
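
For reference, a minimal, self-contained sketch of the aggregation the publisher.py hunk appears to be aiming for: values in generated_content_dict may come back from the model as either a string or a list of strings, so each value is normalized before being folded into the summary and the five-W block. The helper name build_generated_content and the sample dict are hypothetical, isinstance stands in for the type(...) is str checks, and the sketch assumes a list-valued summary is meant to be joined from v itself (the hunk as written joins summary, which is still empty at that point).

def build_generated_content(generated_content_dict):
    """Fold model output (str or list-of-str values) into one text block."""
    summary, five_w = "", ""
    for k, v in generated_content_dict.items():
        if "summary" in k.lower():
            # A list of paragraphs is joined with newlines; a plain string passes through.
            summary = v if isinstance(v, str) else "\n".join(v)
        else:
            # Five-W answers returned as lists are flattened into a single sentence run.
            five_w += "{}: {}\n".format(
                k.capitalize(), v if isinstance(v, str) else ". ".join(v)
            )
    # Aggregate generated content
    return "{}\n\n{}".format(summary, five_w)

print(build_generated_content({
    "summary": ["First paragraph of the summary.", "Second paragraph."],
    "who": "A city council",
    "what": ["Approved a new transit budget"],
}))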