@makkes · Created October 16, 2025 13:57
LlamaIndex
from llama_index.core import (
    Settings,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
from llama_index.embeddings.ollama import OllamaEmbedding
from llama_index.llms.ollama import Ollama
from llama_index.vector_stores.chroma import ChromaVectorStore
import chromadb
import os

DOC_DIR = "docs"
PERSIST_DIR = ".li_storage"

# Both models run locally via Ollama.
Settings.embed_model = OllamaEmbedding(model_name="nomic-embed-text")
Settings.llm = Ollama(model="llama3.1:8b", request_timeout=120.0)


def build_or_load():
    # Chroma holds the embeddings and chunk text; LlamaIndex persists the
    # remaining index metadata (docstore, index store) under PERSIST_DIR.
    client = chromadb.PersistentClient(path=".chroma_li")
    collection = client.get_or_create_collection("md_rag_li")
    vs = ChromaVectorStore(chroma_collection=collection)
    if os.path.exists(PERSIST_DIR):
        # Reattach the Chroma store on load; it is not part of the persist dir.
        storage = StorageContext.from_defaults(vector_store=vs, persist_dir=PERSIST_DIR)
        return load_index_from_storage(storage)
    docs = SimpleDirectoryReader(DOC_DIR, recursive=True, required_exts=[".md"]).load_data()
    # Wire the Chroma store in through a StorageContext so from_documents()
    # writes the embeddings into Chroma rather than the default in-memory store.
    index = VectorStoreIndex.from_documents(
        docs, storage_context=StorageContext.from_defaults(vector_store=vs)
    )
    index.storage_context.persist(persist_dir=PERSIST_DIR)
    return index


if __name__ == "__main__":
    index = build_or_load()
    engine = index.as_query_engine(similarity_top_k=4)
    print("Ready. Type your question (or 'exit').")
    while True:
        q = input("> ")
        if q.strip().lower() in {"exit", "quit"}:
            break
        print("\n" + engine.query(q).response + "\n")