LangChain example with conversational context (chat memory) using a local Ollama model
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.output_parsers import StrOutputParser
from langchain_ollama import ChatOllama
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory
import uuid


def new_session_id() -> str:
    """Generate a unique session identifier."""
    return str(uuid.uuid4())


# Initialize the Ollama LLM.
# Make sure Ollama is running locally with: ollama serve
# And that you have a model downloaded, e.g.: ollama pull llama3.2
def get_llm() -> ChatOllama:
    llm = ChatOllama(
        model="llama3.2",  # or "llama2", "mistral", "phi3", etc.
        temperature=0.1,
        base_url="http://localhost:11434",
    )
    return llm


# Store for conversation histories (in-memory)
conversation_store = {}


def get_session_history(session_id: str) -> InMemoryChatMessageHistory:
    """Retrieve or create a chat history for a given session."""
    if session_id not in conversation_store:
        conversation_store[session_id] = InMemoryChatMessageHistory()
    return conversation_store[session_id]


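# Sketch of a disk-backed alternative to the in-memory store above.
# FileChatMessageHistory lives in the separate langchain_community package
# (an extra dependency, not used elsewhere in this gist), so the import is
# kept local to the function; the file-name scheme is illustrative only.
def get_persistent_history(session_id: str):
    """Persist a session's chat history to a JSON file on disk."""
    from langchain_community.chat_message_histories import FileChatMessageHistory

    return FileChatMessageHistory(f"history_{session_id}.json")

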
def chat_with_memory(user_input: str, session_id: str = "default") -> str:
    """Send one user message to the model, threading in the session's history."""
    # Create a prompt template with a placeholder for prior messages
    prompt = ChatPromptTemplate.from_messages([
        ("system", "You are a helpful AI assistant. You help technologists on their coding challenges with simple and concise examples."),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}"),
    ])
    # Create the chain
    chain = prompt | get_llm() | StrOutputParser()
    # Wrap the chain with message history
    chain_with_history = RunnableWithMessageHistory(
        chain,
        get_session_history,
        input_messages_key="input",
        history_messages_key="history",
    )
    response = chain_with_history.invoke(
        {"input": user_input},
        config={"configurable": {"session_id": session_id}},
    )
    return response


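# chat_with_memory rebuilds the prompt, model, and chain on every call, which
# keeps the example self-contained. A sketch of the same wiring built once and
# reused (build_chain_with_history is my name, not part of the original gist):
def build_chain_with_history() -> RunnableWithMessageHistory:
    prompt = ChatPromptTemplate.from_messages([
        ("system", "You are a helpful AI assistant."),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}"),
    ])
    return RunnableWithMessageHistory(
        prompt | get_llm() | StrOutputParser(),
        get_session_history,
        input_messages_key="input",
        history_messages_key="history",
    )

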
def clear_history(session_id: str):
    """Clear conversation history for a session."""
    if session_id in conversation_store:
        conversation_store[session_id].clear()
        print(f"History cleared for session: {session_id}")


def view_history(session_id: str):
    """Print the conversation history for a session."""
    if session_id in conversation_store:
        history = conversation_store[session_id]
        print(f"\n--- Conversation History for {session_id} ---")
        for i, msg in enumerate(history.messages, 1):
            role = "User" if isinstance(msg, HumanMessage) else "AI"
            print(f"{i}. {role}: {msg.content[:100]}...")
    else:
        print(f"No history found for session: {session_id}")


def main():
    print("Ollama Chat with Memory - Example")
    print("=" * 50)

    session_id = new_session_id()

    response1 = chat_with_memory(
        "What's the best programming language to write iOS applications?",
        session_id,
    )
    print(f"AI: {response1}\n")
    print("+" * 50)

    response2 = chat_with_memory(
        "Can I use the same programming language to build Android applications?",
        session_id,
    )
    print(f"AI: {response2}\n")
    print("+" * 50)

    response3 = chat_with_memory(
        "What if I want to add web development to this mix?",
        session_id,
    )
    print(f"AI: {response3}\n")
    print("+" * 50)

    print("=" * 50)
    print("Chat History")
    print("<>" * 25)
    view_history(session_id)
    print("<>" * 25)

    print("\n" + "-" * 50)
    print("Clearing history and starting fresh...")
    clear_history(session_id)


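# Hypothetical extension (not in the original gist): histories are keyed by
# session_id, so separate sessions never share context. A quick check:
def demo_session_isolation():
    """Show that two session ids keep independent conversation histories."""
    session_a, session_b = new_session_id(), new_session_id()
    chat_with_memory("My name is Ada and I write Swift.", session_a)
    # Session B never saw session A's message, so the model has no basis
    # to answer with "Ada" here.
    print(chat_with_memory("What's my name?", session_b))
    view_history(session_a)
    view_history(session_b)

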
# Example usage
if __name__ == "__main__":
    main()