Cognee uses AI to transform unstructured data into searchable AI memory. Perfect for building intelligent applications with memory.

Installation

To install Cognee, run the following command in your terminal:
# Standard installation
pip install cognee

Basic Usage

Initialize Cognee

Ensure Python 3.9+ is installed before initializing Cognee.

Add Data

import cognee
import asyncio

async def main():
    """Ingest a block of raw text into Cognee's memory store.

    cognee.add() only stages the data; a later cognee.cognify() call
    turns it into a knowledge graph.
    """
    # Add some text data
    text = """
    Artificial Intelligence is transforming healthcare through machine learning 
    algorithms that analyze medical images and predict patient outcomes. 
    Deep learning models excel at pattern recognition in radiology.
    """
    
    result = await cognee.add(text)
    print("Data added:", result)

# Guard the entry point so importing this script does not trigger ingestion
# (consistent with the Complete Example below).
if __name__ == "__main__":
    asyncio.run(main())

Process into Knowledge Graph

Transform your raw data into a structured knowledge graph:
import cognee
import asyncio

async def main():
    """Process everything previously added into a knowledge graph."""
    # Process all added data
    result = await cognee.cognify()
    print("Knowledge graph created:", result)

# Guard the entry point so importing this script has no side effects
# (consistent with the Complete Example below).
if __name__ == "__main__":
    asyncio.run(main())

Search Your Knowledge Graph

import cognee
import asyncio
from cognee import SearchType

async def main():
    """Query the knowledge graph with a natural-language question."""
    # Search with natural language; GRAPH_COMPLETION returns an
    # AI-generated answer grounded in the graph.
    results = await cognee.search(
        query_text="What is artificial intelligence?",
        query_type=SearchType.GRAPH_COMPLETION
    )

    # Guard against an empty result set so we don't raise IndexError.
    if results:
        print(results[0])
    else:
        print("No results found.")

# Guard the entry point so importing this script has no side effects
# (consistent with the Complete Example below).
if __name__ == "__main__":
    asyncio.run(main())

Memory Management

import cognee
import asyncio

async def main():
    """Extend existing memory, then incrementally process the new data."""
    # Add new information
    new_info = "AI also helps in drug discovery and personalized medicine."
    result = await cognee.add(new_info)
    print("New data added:", result)

    # Incremental processing (only new data)
    cognify_result = await cognee.cognify()
    print("Processing result:", cognify_result)

    print("Memory updated successfully!")

# Guard the entry point so importing this script has no side effects
# (consistent with the Complete Example below).
if __name__ == "__main__":
    asyncio.run(main())

Delete Memory

import cognee
import asyncio

async def main():
    """Remove data from Cognee memory — one item by ID, or everything."""
    # Delete specific data by ID
    delete_result = await cognee.delete(data_id="abc123-def456-ghi789")
    print("Delete result:", delete_result)

    # Or clear all memory (destructive — uncomment deliberately)
    # prune_result = await cognee.prune()
    # print("Prune result:", prune_result)

    print("Data deleted successfully!")

# Guard the entry point so importing this script has no side effects
# (consistent with the Complete Example below).
if __name__ == "__main__":
    asyncio.run(main())

Configuration Options

Cognee offers extensive configuration options to customize its behavior:
import os

# NOTE: the provider sections below are mutually exclusive ALTERNATIVES —
# configure exactly one. Each assignment overwrites the previous one, so
# running this file top-to-bottom would leave only the Ollama settings
# in effect.

# OpenAI (default) — only the API key is needed
os.environ["LLM_API_KEY"] = "your-openai-key"

# Azure OpenAI — requires provider, model, and endpoint
os.environ["LLM_PROVIDER"] = "azure_openai"
os.environ["LLM_MODEL"] = "azure/gpt-4o"
os.environ["AZURE_OPENAI_ENDPOINT"] = "your-endpoint"

# Anthropic Claude
os.environ["LLM_PROVIDER"] = "anthropic" 
os.environ["LLM_API_KEY"] = "your-anthropic-key"

# Local Ollama — runs the model on your own machine, no API key
os.environ["LLM_PROVIDER"] = "ollama"
os.environ["LLM_MODEL"] = "llama3.2"

Complete Example

Here’s a full working example that demonstrates the entire workflow:
import cognee
import asyncio
import os
from cognee import SearchType

async def main():
    """Run the full Cognee workflow: configure, ingest, cognify, and search."""
    # 1. Setup (add your API key)
    os.environ["LLM_API_KEY"] = "your-openai-api-key"

    # 2. Add knowledge
    corpus = """
    Cognee is an AI memory layer that transforms unstructured data into 
    knowledge graphs. It uses large language models to extract entities 
    and relationships, creating searchable semantic networks.
    
    Key features include:
    - Automatic entity extraction
    - Relationship mapping  
    - Multi-modal data support
    - Natural language querying
    - Incremental learning
    """

    print("📚 Adding knowledge...")
    await cognee.add(corpus)

    # 3. Process into knowledge graph
    print("🧠 Building knowledge graph...")
    await cognee.cognify()

    # 4. Query the knowledge
    print("🔍 Searching knowledge...")

    # Natural language question
    answers = await cognee.search(
        query_text="What are the key features of Cognee?",
        query_type=SearchType.GRAPH_COMPLETION,
    )

    print("\n✨ Answer:")
    print(answers[0])

    # Get related concepts
    related = await cognee.search(
        query_text="Cognee features",
        query_type=SearchType.INSIGHTS,
    )

    print("\n🔗 Related Concepts:")
    top_three = related[:3]
    for item in top_three:
        print(f"- {item}")

if __name__ == "__main__":
    asyncio.run(main())

Advanced Memory Organization

Cognee supports organizing memories with context parameters:
import cognee
import asyncio

async def main():
    """Store, process, and query memories scoped by user and node set."""
    # Add memories with context
    first = await cognee.add(
        "User prefers technical documentation",
        user_id="alice",
        node_set="onboarding-001",
    )
    print("Added memory 1:", first)

    second = await cognee.add(
        "User is interested in machine learning",
        user_id="alice",
        node_set="onboarding-001",
    )
    print("Added memory 2:", second)

    # Process with context
    processed = await cognee.cognify()
    print("Processing result:", processed)

    # Search with different scopes: all of alice's memories, then only
    # those from a single node set (session).
    by_user = await cognee.search(
        query_text="What do you know about the user?",
        user_id="alice",
    )

    by_node_set = await cognee.search(
        query_text="What happened in this session?",
        user_id="alice",
        node_set="onboarding-001",
    )

    print("User memories:", len(by_user))
    print("Node Set memories:", len(by_node_set))
    print("Sample user memory:", by_user[0][:100] + "...")

asyncio.run(main())

Search Types Explained

GRAPH_COMPLETION

AI-Powered Responses — returns natural language answers using your knowledge graph as context. Best for Q&A applications.

CHUNKS

Raw Text Segments — returns relevant text chunks from your data. Perfect for finding specific information or citations.

INSIGHTS

Structured Relationships — returns entity relationships and connections. Ideal for understanding knowledge structure.

SUMMARIES

Hierarchical Summaries — returns pre-generated summaries at different levels of detail. Great for overviews.

Run Cognee Locally

For complete local setup without external dependencies:
import cognee
import os
import asyncio

async def main():
    """Run Cognee fully offline: local LLM via Ollama, local embeddings."""
    # Fully local configuration — no external API calls required
    local_settings = {
        "LLM_PROVIDER": "ollama",
        "LLM_MODEL": "llama3.2",
        "EMBEDDING_PROVIDER": "fastembed",
        "EMBEDDING_MODEL": "sentence-transformers/all-MiniLM-L6-v2",
    }
    os.environ.update(local_settings)

    print("Setting up local processing...")

    # Your code here
    added = await cognee.add("Local knowledge processing with Cognee")
    print("Data added:", added)

    built = await cognee.cognify()
    print("Processing complete:", built)

    hits = await cognee.search("Tell me about local processing")
    print("Search result:", hits[0])

asyncio.run(main())

Next Steps

Ready to build something amazing? Check out our examples for more advanced tutorials, or join our Discord community to get help and share your projects!