"""Minimal interactive Q&A script backed by a local Ollama model.

Prompts the user for a single question on stdin, sends it to the
``llama3.2`` model via LangChain's Ollama wrapper, and prints the
model's reply to stdout.

Requires a local Ollama server to be running and the ``llama3.2``
model to be pulled.
"""

from langchain.llms import Ollama

# Use a distinct name for the user's text: naming this `input` (as the
# original did) shadows the builtin `input()` and would break any later
# prompt in the same scope.
question = input("What is your question?\n> ")

# NOTE(review): `langchain.llms.Ollama` is the legacy import path; newer
# LangChain releases move it to `langchain_community.llms` / `langchain_ollama`
# — confirm against the pinned LangChain version before upgrading.
llm = Ollama(model="llama3.2")

# invoke() is the current Runnable API; the older predict() (seen on the
# removed side of the diff) is deprecated.
res = llm.invoke(question)

print(res)