Lab for Ilmu dasar LLM dengan Langchain
In [5]:
Copied!
import json
import json
In [6]:
Copied!
from langchain_ollama.llms import OllamaLLM
ollamaModel = "llama3.2:latest"
model = OllamaLLM(model=ollamaModel)
from langchain_ollama.llms import OllamaLLM
ollamaModel = "llama3.2:latest"
model = OllamaLLM(model=ollamaModel)
In [7]:
Copied!
return_value = model.invoke("Assalamualaikum")
print(return_value)
return_value = model.invoke("Assalamualaikum")
print(return_value)
Wa alaykums salam. How can I assist you today?
Konversasi menggunakan chat model¶
In [8]:
Copied!
from langchain_core.messages import HumanMessage, AIMessage
from langchain_ollama.chat_models import ChatOllama
chat_model = ChatOllama(model=ollamaModel)
from langchain_core.messages import HumanMessage, AIMessage
from langchain_ollama.chat_models import ChatOllama
chat_model = ChatOllama(model=ollamaModel)
In [9]:
Copied!
human_message = HumanMessage("Nama saya farras")
return_value_chage_model = chat_model.invoke([human_message])
print(type(return_value_chage_model))
print(json.dumps(return_value_chage_model.model_dump(), indent=4))
human_message = HumanMessage("Nama saya farras")
return_value_chage_model = chat_model.invoke([human_message])
print(type(return_value_chage_model))
print(json.dumps(return_value_chage_model.model_dump(), indent=4))
<class 'langchain_core.messages.ai.AIMessage'>
{
"content": "Halo Farras! Senang bertemu denganmu. Bagaimana hari ini?",
"additional_kwargs": {},
"response_metadata": {
"model": "llama3.2:latest",
"created_at": "2025-10-13T14:52:18.3048132Z",
"done": true,
"done_reason": "stop",
"total_duration": 575399700,
"load_duration": 270851100,
"prompt_eval_count": 30,
"prompt_eval_duration": 35991400,
"eval_count": 19,
"eval_duration": 230276900,
"model_name": "llama3.2:latest"
},
"type": "ai",
"name": null,
"id": "run--e6d1b0df-7b89-4721-a51a-353a45bacfd3-0",
"example": false,
"tool_calls": [],
"invalid_tool_calls": [],
"usage_metadata": {
"input_tokens": 30,
"output_tokens": 19,
"total_tokens": 49
}
}
Build simple class to using chat models¶
In [10]:
Copied!
from ChatModelsSimplifier import MyLLMChatModel
my_simple_model = MyLLMChatModel("llama3.2:latest")
from ChatModelsSimplifier import MyLLMChatModel
my_simple_model = MyLLMChatModel("llama3.2:latest")
In [11]:
Copied!
my_simple_model.chat_models("Halo nama saya muhammad farras ma'ruf")
my_simple_model.chat_models("Halo nama saya muhammad farras ma'ruf")
Selamat datang Muhammad Farras Ma'ruf XXX
In [12]:
Copied!
my_simple_model.chat_models("Saya punya tiga anak, 2 laki-laku, dan satu perempuan")
my_simple_model.chat_models("Saya punya tiga anak, 2 laki-laku, dan satu perempuan")
Terima kasih atas informasinya, Muhammad Farras Ma'ruf XXX
In [13]:
Copied!
my_simple_model.chat_models("Siapa nama saya ?")
my_simple_model.chat_models("Siapa nama saya ?")
Nama Anda adalah Muhammad Farras Ma'ruf XXX
In [14]:
Copied!
print(my_simple_model.list_of_chats)
print(my_simple_model.list_of_chats)
[SystemMessage(content='You are assitant named NoSys, you always answert the question with sufix three XXX.', additional_kwargs={}, response_metadata={}), HumanMessage(content="Halo nama saya muhammad farras ma'ruf", additional_kwargs={}, response_metadata={}), AIMessage(content="Selamat datang Muhammad Farras Ma'ruf XXX", additional_kwargs={}, response_metadata={'model': 'llama3.2:latest', 'created_at': '2025-10-13T14:52:19.1976237Z', 'done': True, 'done_reason': 'stop', 'total_duration': 490199400, 'load_duration': 238550500, 'prompt_eval_count': 58, 'prompt_eval_duration': 21505000, 'eval_count': 13, 'eval_duration': 207168400, 'model_name': 'llama3.2:latest'}, id='run--caa341c6-ae8f-4bcf-89d0-fdf501b1cf46-0', usage_metadata={'input_tokens': 58, 'output_tokens': 13, 'total_tokens': 71}), HumanMessage(content='Saya punya tiga anak, 2 laki-laku, dan satu perempuan', additional_kwargs={}, response_metadata={}), AIMessage(content="Terima kasih atas informasinya, Muhammad Farras Ma'ruf XXX", additional_kwargs={}, response_metadata={'model': 'llama3.2:latest', 'created_at': '2025-10-13T14:52:19.9664717Z', 'done': True, 'done_reason': 'stop', 'total_duration': 760525300, 'load_duration': 410881300, 'prompt_eval_count': 99, 'prompt_eval_duration': 27383700, 'eval_count': 17, 'eval_duration': 277198600, 'model_name': 'llama3.2:latest'}, id='run--9655ac52-329b-4c99-919c-979544610714-0', usage_metadata={'input_tokens': 99, 'output_tokens': 17, 'total_tokens': 116}), HumanMessage(content='Siapa nama saya ?', additional_kwargs={}, response_metadata={}), AIMessage(content="Nama Anda adalah Muhammad Farras Ma'ruf XXX", additional_kwargs={}, response_metadata={'model': 'llama3.2:latest', 'created_at': '2025-10-13T14:52:20.4393638Z', 'done': True, 'done_reason': 'stop', 'total_duration': 466768400, 'load_duration': 223380100, 'prompt_eval_count': 130, 'prompt_eval_duration': 33110500, 'eval_count': 12, 'eval_duration': 183154500, 'model_name': 'llama3.2:latest'}, id='run--c1551cac-c285-4e99-ab2d-7e94ff8600d0-0', usage_metadata={'input_tokens': 130, 'output_tokens': 12, 'total_tokens': 142})]
Membuat MML yang dapat digunakan kembalim¶
In [ ]:
Copied!