-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
42 lines (32 loc) · 950 Bytes
/
main.py
File metadata and controls
42 lines (32 loc) · 950 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
#{
# Using a virtual environment named chatbotTest:
#   python3 -m venv chatbotTest
#   source chatbotTest/bin/activate
# The shell prompt then shows: (chatbotTest) (venv)
#}
from langchain_ollama import OllamaLLM
from langchain_core.prompts import ChatPromptTemplate
# Prompt template: the model is shown the running conversation history plus
# the new question, and asked to keep answers under 200 words.
template="""
Answer the question below, use less than 200 words.
Here is the conversation history: {history}
Question: {question}
Answer:
"""
# "ollamaPirate" is a custom local Ollama model — presumably created via
# `ollama create ollamaPirate`; verify it exists on this machine.
model = OllamaLLM(model="ollamaPirate")
prompt = ChatPromptTemplate.from_template(template)
# LCEL pipe: render the template with the given variables, then pass the
# resulting prompt to the model. Used by handle_conv() below.
chain = prompt | model
def handle_conv():
    """Run an interactive chat loop against the module-level `chain`.

    Reads user questions from stdin, invokes the prompt|model chain with the
    accumulated conversation history, prints each reply, and appends the
    exchange to the history. Type "exit" (case-insensitive) to quit.

    Returns:
        None.
    """
    history = ""
    print("Welcome to pirateChat, type exit to quit.")
    while True:
        user_input = input("Input: ")
        # Fix: strip whitespace so inputs like "exit " or " EXIT" also quit;
        # the original only matched an exact lowercase "exit".
        if user_input.strip().lower() == "exit":
            break
        result = chain.invoke({
            "history": history,
            "question": user_input,
        })
        print("Pirate: ", result)
        # NOTE(review): history grows without bound; long sessions may exceed
        # the model's context window — consider truncating old turns.
        history += f"\nUser: {user_input}\nAI: {result}"
# Entry point: start the chat loop only when run as a script, not on import.
if __name__ == "__main__":
    handle_conv()