-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsingleLLM.py
More file actions
65 lines (33 loc) · 1.1 KB
/
singleLLM.py
File metadata and controls
65 lines (33 loc) · 1.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
# Environment setup (one-time):
#   python3 -m venv chatbotTest          # create the virtual environment
#   source chatbotTest/bin/activate      # activate it -> prompt shows (chatbotTest)
import json
from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama import ChatOllama
from langchain_core.output_parsers import StrOutputParser
# Chat model: local Ollama llama3.1 8B.
## Notice - OllamaLLM is !NOT! ChatOllama (used in the start)
modelLlama3_8_Normal = ChatOllama(model="llama3.1:8b")

# Prompt asking the model to restate a (possibly ungrammatical) Hebrew
# sentence with correct syntax. `{text_no_nikud}` is filled at invoke time.
templateChat = """
You are given a sentence in Hebrew.
The text may contain incorrect syntax.
Return the correct way to say the same sentence.
Here is the text:
{text_no_nikud}
Answer:
"""
promptChat = ChatPromptTemplate.from_template(templateChat)

# LCEL pipeline: prompt -> chat model -> plain string output.
chatAI = promptChat | modelLlama3_8_Normal | StrOutputParser()
def handle_conv():
    """Interactive console loop: read Hebrew text, print the model's correction.

    Each turn is sent through the module-level ``chatAI`` chain as
    ``{"text_no_nikud": <user text>}``. Loops until the user types ``exit``
    (surrounding whitespace and case are ignored) or stdin is exhausted.

    NOTE(review): ``history`` records the transcript but is never fed back
    to the model — the chain is stateless; kept for potential future use.
    """
    history = ""
    print("Welcome to Chat, type exit to quit.")
    while True:
        try:
            userInput = input("Input: ")
        except EOFError:
            # Piped/closed stdin (or Ctrl-D) would otherwise crash the loop.
            break
        if userInput.strip().lower() == "exit":
            break
        result = chatAI.invoke({"text_no_nikud": userInput})
        print("AI: ", result)
        history = history + f"\nUser: {userInput}\nAI: {result}"
handle_conv()