Update 'main.py'

Branch: main
Francesco Minnocci, 10 months ago
parent d03a97ab58, commit 51100ae0f0

@@ -18,19 +18,12 @@ LLM_EXAMPLE = (
     "OUTPUT (JSON): \n"
     "{"
     '"title": "Statistical and Computational Aspects of Dynamics",'
-    '"link": "http://www.crm.sns.it/event/507/", '
+    '"url": "http://www.crm.sns.it/event/507/", '
     '"description": "Organized by Buddhima Kasun Fernando Akurugodage (Centro di ricerca matematica'
     " Ennio De Giorgi SNS), Paolo Giulietti, and Tanja Isabelle Schindler (Universität Wien,"
-    ' Austria). Centro De Giorgi", '
-    '"organizers": ['
-    '"Buddhima Kasun Fernando Akurugodage", '
-    '"Paolo Giulietti", '
-    '"Tanja Isabelle Schindler"'
-    "]"
-    '"location": "SNS, Pisa", '
-    '"date": "December 13 16, 2022.", '
-    '"start_date": "2022-12-13", '
-    '"end_date": "2022-12-16"'
+    ' Austria). Centro De Giorgi - SNS, Pisa.", '
+    '"startDate": "2022-12-13", '
+    '"endDate": "2022-12-16"'
     "}\n"
     "\n"
     "INPUT:\n"
@@ -93,8 +86,6 @@ conference_html_snippets = [snippet for link in page_urls for snippet in crawl_p
 print("LLM Example Context:")
 print(LLM_EXAMPLE)
-exit(1)
 # Load the model and, set the chat format and use the default model context length
 llm = Llama(model_path="./mistral-7b-instruct-v0.2.Q4_K_M.gguf", chat_format="llama-2", n_ctx=0)
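
With the exit(1) debugging stop removed, execution continues into the Llama constructor shown above (llama-cpp-python; n_ctx=0 falls back to the context length stored in the model file). A rough sketch of how the prompt and a crawled snippet might then be sent through create_chat_completion; the message wording, the snippet variable, and the stubbed LLM_EXAMPLE are assumptions, only the constructor call appears in the diff:

from llama_cpp import Llama

LLM_EXAMPLE = "..."  # stand-in for the few-shot prompt built earlier in main.py
snippet = "<div>Example conference announcement HTML</div>"  # assumed crawled input

llm = Llama(
    model_path="./mistral-7b-instruct-v0.2.Q4_K_M.gguf",
    chat_format="llama-2",  # same arguments as the constructor call in the diff
    n_ctx=0,                # 0 = use the context length stored in the GGUF model
)

# Ask the model to emit one JSON object for the snippet, following the few-shot example.
completion = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "Extract the conference data as JSON."},
        {"role": "user", "content": LLM_EXAMPLE + "INPUT:\n" + snippet},
    ],
    temperature=0.0,  # deterministic output for extraction
)
print(completion["choices"][0]["message"]["content"])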
