Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions mirix/database/redis_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ class RedisMemoryClient:
PROCEDURAL_PREFIX = "procedural:"
RESOURCE_PREFIX = "resource:"
KNOWLEDGE_PREFIX = "knowledge:"
RAW_MEMORY_PREFIX = "raw_memory:"
ORGANIZATION_PREFIX = "org:"
USER_PREFIX = "user:"
CLIENT_PREFIX = "client:"
Expand Down
106 changes: 106 additions & 0 deletions mirix/jobs/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
"""
Cleanup job for raw memories with 14-day TTL.

This job should be run nightly via cron or Celery beat to delete
raw memories older than 14 days (based on updated_at timestamp).
"""
import logging
from datetime import UTC, datetime, timedelta
from typing import Dict

from mirix.schemas.client import Client as PydanticClient
from mirix.services.raw_memory_manager import RawMemoryManager

logger = logging.getLogger(__name__)


def delete_stale_raw_memories(days_threshold: int = 14) -> Dict:
    """
    Hard delete raw memories older than the specified threshold (based on updated_at).

    This job should be run nightly via cron or Celery beat.

    Args:
        days_threshold: Number of days after which memories are considered stale (default: 14)

    Returns:
        Dict with deletion statistics: ``success`` (True only when every stale
        memory was deleted without error), ``deleted_count``, ``error_count``,
        ``cutoff_date`` (ISO-8601), and ``days_threshold``.
    """
    cutoff = datetime.now(UTC) - timedelta(days=days_threshold)

    logger.info(
        "Starting cleanup of raw memories older than %d days (cutoff: %s)",
        days_threshold,
        cutoff.isoformat(),
    )

    manager = RawMemoryManager()
    deleted_count = 0
    error_count = 0

    # Collect only the stale IDs inside the read session, then release it
    # before looping: delete_raw_memory manages its own session, so holding
    # the read session open for the whole loop would pin a connection and
    # leave detached ORM instances around for no benefit.
    with manager.session_maker() as session:
        from sqlalchemy import select

        from mirix.orm.raw_memory import RawMemory

        # NOTE(review): RawMemory.updated_at is mapped as a naive DateTime
        # column while `cutoff` is timezone-aware UTC — confirm the database
        # stores UTC timestamps so this comparison is meaningful.
        stmt = select(RawMemory.id).where(RawMemory.updated_at < cutoff)
        stale_ids = session.execute(stmt).scalars().all()

    logger.info("Found %d stale raw memories to delete", len(stale_ids))

    # Synthetic actor recorded in the audit trail for these deletions.
    # Note: This bypasses organization-level access control for cleanup.
    system_actor = PydanticClient(
        id="system-cleanup-job",
        organization_id="system",
        name="Cleanup Job",
    )

    for memory_id in stale_ids:
        try:
            manager.delete_raw_memory(memory_id, system_actor)
            deleted_count += 1
        except Exception:
            # logger.exception captures the traceback; keep going so one bad
            # row does not abort the whole cleanup run.
            logger.exception("Failed to delete raw memory %s", memory_id)
            error_count += 1

    stats = {
        # Report success only when nothing failed, so schedulers/monitors can
        # alert on partial cleanup runs.
        "success": error_count == 0,
        "deleted_count": deleted_count,
        "error_count": error_count,
        "cutoff_date": cutoff.isoformat(),
        "days_threshold": days_threshold,
    }

    logger.info(
        "Cleanup completed: deleted %d memories, %d errors",
        deleted_count,
        error_count,
    )

    return stats


if __name__ == "__main__":
    # Allow running the cleanup directly from the command line.
    import sys

    # Basic console logging so the job's progress is visible when run by hand.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    )

    # An optional first positional argument overrides the 14-day default.
    if len(sys.argv) > 1:
        threshold = int(sys.argv[1])
    else:
        threshold = 14

    print(f"Running raw memory cleanup with {threshold}-day threshold...")
    outcome = delete_stale_raw_memories(threshold)
    print(f"Cleanup result: {outcome}")
106 changes: 106 additions & 0 deletions mirix/jobs/cleanup_raw_memories.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
"""
Cleanup job for raw memories with 14-day TTL.

This job should be run nightly via cron or Celery beat to delete
raw memories older than 14 days (based on updated_at timestamp).
"""
import logging
from datetime import UTC, datetime, timedelta
from typing import Dict

from mirix.schemas.client import Client as PydanticClient
from mirix.services.raw_memory_manager import RawMemoryManager

logger = logging.getLogger(__name__)


def delete_stale_raw_memories(days_threshold: int = 14) -> Dict:
    """
    Hard delete raw memories older than the specified threshold (based on updated_at).

    This job should be run nightly via cron or Celery beat.

    Args:
        days_threshold: Number of days after which memories are considered stale (default: 14)

    Returns:
        Dict with deletion statistics: ``success`` (True only when every stale
        memory was deleted without error), ``deleted_count``, ``error_count``,
        ``cutoff_date`` (ISO-8601), and ``days_threshold``.
    """
    cutoff = datetime.now(UTC) - timedelta(days=days_threshold)

    logger.info(
        "Starting cleanup of raw memories older than %d days (cutoff: %s)",
        days_threshold,
        cutoff.isoformat(),
    )

    manager = RawMemoryManager()
    deleted_count = 0
    error_count = 0

    # Collect only the stale IDs inside the read session, then release it
    # before looping: delete_raw_memory manages its own session, so holding
    # the read session open for the whole loop would pin a connection and
    # leave detached ORM instances around for no benefit.
    with manager.session_maker() as session:
        from sqlalchemy import select

        from mirix.orm.raw_memory import RawMemory

        # NOTE(review): RawMemory.updated_at is mapped as a naive DateTime
        # column while `cutoff` is timezone-aware UTC — confirm the database
        # stores UTC timestamps so this comparison is meaningful.
        stmt = select(RawMemory.id).where(RawMemory.updated_at < cutoff)
        stale_ids = session.execute(stmt).scalars().all()

    logger.info("Found %d stale raw memories to delete", len(stale_ids))

    # Synthetic actor recorded in the audit trail for these deletions.
    # Note: This bypasses organization-level access control for cleanup.
    system_actor = PydanticClient(
        id="system-cleanup-job",
        organization_id="system",
        name="Cleanup Job",
    )

    for memory_id in stale_ids:
        try:
            manager.delete_raw_memory(memory_id, system_actor)
            deleted_count += 1
        except Exception:
            # logger.exception captures the traceback; keep going so one bad
            # row does not abort the whole cleanup run.
            logger.exception("Failed to delete raw memory %s", memory_id)
            error_count += 1

    stats = {
        # Report success only when nothing failed, so schedulers/monitors can
        # alert on partial cleanup runs.
        "success": error_count == 0,
        "deleted_count": deleted_count,
        "error_count": error_count,
        "cutoff_date": cutoff.isoformat(),
        "days_threshold": days_threshold,
    }

    logger.info(
        "Cleanup completed: deleted %d memories, %d errors",
        deleted_count,
        error_count,
    )

    return stats


if __name__ == "__main__":
    # Allow running the cleanup directly from the command line.
    import sys

    # Basic console logging so the job's progress is visible when run by hand.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    )

    # An optional first positional argument overrides the 14-day default.
    if len(sys.argv) > 1:
        threshold = int(sys.argv[1])
    else:
        threshold = 14

    print(f"Running raw memory cleanup with {threshold}-day threshold...")
    outcome = delete_stale_raw_memories(threshold)
    print(f"Cleanup result: {outcome}")
151 changes: 151 additions & 0 deletions mirix/orm/raw_memory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
"""
ORM model for raw (unprocessed) task memories.

Raw memories store task context without LLM extraction, intended for
task sharing use cases with a 14-day TTL.
"""
import datetime as dt
from datetime import datetime
from typing import TYPE_CHECKING, Optional

from sqlalchemy import JSON, DateTime, Index, String, Text, text
from sqlalchemy.orm import Mapped, declared_attr, mapped_column, relationship

from mirix.orm.mixins import OrganizationMixin, UserMixin
from mirix.orm.sqlalchemy_base import SqlalchemyBase
from mirix.schemas.raw_memory import RawMemoryItem as PydanticRawMemoryItem
from mirix.settings import settings

if TYPE_CHECKING:
from mirix.orm.organization import Organization
from mirix.orm.user import User


class RawMemory(SqlalchemyBase, OrganizationMixin, UserMixin):
    """
    ORM model for raw (unprocessed) task memories.

    Raw memories store task context without LLM extraction, intended for
    task sharing use cases with a 14-day TTL (enforced externally by the
    nightly cleanup job, not by the database).
    """

    __tablename__ = "raw_memory"
    # Pydantic schema used by SqlalchemyBase when converting rows to/from
    # API-facing models.
    __pydantic_model__ = PydanticRawMemoryItem

    # Primary key
    id: Mapped[str] = mapped_column(
        String,
        primary_key=True,
        doc="Unique ID for this raw memory entry",
    )

    # Note: user_id is provided by UserMixin with ForeignKey to users table
    # Note: organization_id is provided by OrganizationMixin with ForeignKey to organizations table

    # Content field
    context: Mapped[str] = mapped_column(
        Text,
        nullable=False,
        doc="Raw task context string (unprocessed)",
    )

    # filter_tags stores scope and other metadata (matching episodic_memory pattern)
    filter_tags: Mapped[Optional[dict]] = mapped_column(
        JSON,
        nullable=True,
        default=None,
        doc="Custom filter tags including scope for access control",
    )

    # Last modification tracking (standard MIRIX pattern).
    # The lambda default ensures each row gets a fresh timestamp dict rather
    # than one shared mutable value evaluated at import time.
    last_modify: Mapped[dict] = mapped_column(
        JSON,
        nullable=False,
        default=lambda: {
            "timestamp": datetime.now(dt.timezone.utc).isoformat(),
            "operation": "created",
        },
        doc="Last modification info including timestamp and operation type",
    )

    # Timestamps.
    # NOTE(review): these are naive DateTime columns (no timezone=True); the
    # cleanup job compares updated_at against an aware UTC cutoff — confirm
    # all writers store UTC so the comparison is consistent.
    occurred_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        doc="When the event occurred or was recorded",
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        doc="When record was created",
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        doc="When record was last updated",
    )

    # Audit fields (track which client created/updated the record)
    _created_by_id: Mapped[Optional[str]] = mapped_column(
        String,
        nullable=True,
        doc="Client ID that created this memory",
    )
    _last_update_by_id: Mapped[Optional[str]] = mapped_column(
        String,
        nullable=True,
        doc="Client ID that last updated this memory",
    )

    # Indexes following standard MIRIX memory table pattern.
    # Each candidate Index is gated on settings.mirix_pg_uri_no_default at
    # import time (truthy => PostgreSQL backend, else SQLite fallback); the
    # inactive branches evaluate to None and filter(None, ...) drops them so
    # __table_args__ only contains indexes valid for the active backend.
    __table_args__ = tuple(
        filter(
            None,
            [
                # PostgreSQL indexes
                Index("ix_raw_memory_organization_id", "organization_id")
                if settings.mirix_pg_uri_no_default
                else None,
                # Composite index supporting the cleanup job's
                # org + updated_at range scans.
                Index(
                    "ix_raw_memory_org_updated_at",
                    "organization_id",
                    "updated_at",
                    postgresql_using="btree",
                )
                if settings.mirix_pg_uri_no_default
                else None,
                # GIN index over the JSON tags for containment queries
                # (requires the cast to jsonb).
                Index(
                    "ix_raw_memory_filter_tags_gin",
                    text("(filter_tags::jsonb)"),
                    postgresql_using="gin",
                )
                if settings.mirix_pg_uri_no_default
                else None,
                # Expression index on the extracted 'scope' tag for
                # org-scoped access-control filtering.
                Index(
                    "ix_raw_memory_org_filter_scope",
                    "organization_id",
                    text("((filter_tags->>'scope')::text)"),
                    postgresql_using="btree",
                )
                if settings.mirix_pg_uri_no_default
                else None,
                # SQLite fallback indexes
                Index(
                    "ix_raw_memory_organization_id_sqlite",
                    "organization_id",
                )
                if not settings.mirix_pg_uri_no_default
                else None,
            ],
        )
    )

    @declared_attr
    def organization(cls) -> Mapped["Organization"]:
        """Relationship to the Organization."""
        return relationship("Organization", lazy="selectin")

    @declared_attr
    def user(cls) -> Mapped["User"]:
        """Relationship to the User."""
        return relationship("User", lazy="selectin")
1 change: 1 addition & 0 deletions mirix/orm/sqlalchemy_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -1007,6 +1007,7 @@ def _update_redis_cache(
"procedural_memory": redis_client.PROCEDURAL_PREFIX,
"resource_memory": redis_client.RESOURCE_PREFIX,
"knowledge_vault": redis_client.KNOWLEDGE_PREFIX,
"raw_memory": redis_client.RAW_MEMORY_PREFIX,
}

if table_name in memory_tables:
Expand Down
Loading
Loading