3. LLM Structured Output
In [1]:
Copied!
from langchain_ollama import ChatOllama
from StructuredOutputClass import JawabanTerstruktur
import json
from langchain_ollama import ChatOllama
from StructuredOutputClass import JawabanTerstruktur
import json
In [2]:
Copied!
# Build a chat model backed by a local Ollama server and wrap it so that
# responses are parsed into the JawabanTerstruktur schema instead of raw text.
# temperature=0.3 keeps answers mostly deterministic while allowing some variety.
# (The duplicate assignment from the notebook export is removed.)
model = ChatOllama(model='llama3.2:latest', temperature=0.3).with_structured_output(JawabanTerstruktur)
In [3]:
Copied!
# Ask the model a question; `invoke` returns an instance of JawabanTerstruktur
# (not a plain string) because the model was wrapped with structured output.
jawaban = model.invoke("Which animal has a longest neck")

# Pretty-print the structured answer as JSON. `__dict__` exposes the model's
# field values; indent=5 matches the recorded output below.
# NOTE(review): if JawabanTerstruktur is a Pydantic model, `jawaban.model_dump()`
# would be the canonical serialization — confirm before switching.
# (The duplicated statements from the notebook export are removed.)
print(json.dumps(jawaban.__dict__, indent=5))
{
"answer": "Giraffe",
"justification": "The giraffe is known for its exceptionally long neck, which can reach up to 6 feet (1.8 meters) in length."
}