Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 7 additions & 3 deletions api/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
from fastapi_users import FastAPIUsers
from beanie import PydanticObjectId
from pydantic import BaseModel
from kernelci.api.models import (

Check failure on line 44 in api/main.py

View workflow job for this annotation

GitHub Actions / Lint

Unable to import 'kernelci.api.models'
Node,
Hierarchy,
PublishEvent,
Expand Down Expand Up @@ -1040,14 +1040,18 @@


@app.get('/maintenance/purge-old-nodes')
async def purge_handler(current_user: User = Depends(get_current_superuser),
                        days: int = 180,
                        batch_size: int = 1000):
    """Purge old nodes from the database.

    This is a maintenance operation and should be performed
    only by superusers (enforced via the `get_current_superuser`
    dependency).

    Accepts GET parameters:
    - days: number of days of nodes to keep, default is 180;
      nodes older than this are deleted.
    - batch_size: number of nodes to delete in one batch, default is 1000.

    Returns the summary dict produced by `purge_old_nodes()`
    (response status, number of deleted nodes, age threshold).
    """
    metrics.add('http_requests_total', 1)
    # Guard a destructive operation: a non-positive `days` would place the
    # cutoff date in the future and purge (almost) every node, and a
    # non-positive `batch_size` defeats the chunked-delete behaviour.
    if days < 1 or batch_size < 1:
        return {
            'response': 'error',
            'message': 'days and batch_size must be positive integers',
        }
    return await purge_old_nodes(age_days=days, batch_size=batch_size)


versioned_app = VersionedFastAPI(
Expand Down
15 changes: 12 additions & 3 deletions api/maintenance.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def connect_to_db():
return db


async def purge_old_nodes(age_days=180):
async def purge_old_nodes(age_days=180, batch_size=1000):
"""
Purge nodes from the 'nodes' collection that are older than the
specified number of days.
Expand All @@ -63,13 +63,22 @@ async def purge_old_nodes(age_days=180):
nodes = db["nodes"].find({
"created": {"$lt": date_end}
})
# We need to delete node in chunks of 1000,
# We need to delete node in chunks of {batch_size}
# to not block the main thread for too long
deleted = 0
del_batch = []
for node in nodes:
del_batch.append(node["_id"])
if len(del_batch) == 1000:
if len(del_batch) == batch_size:
deleted += len(del_batch)
purge_ids(db, "nodes", del_batch)
del_batch = []
if del_batch:
deleted += len(del_batch)
purge_ids(db, "nodes", del_batch)
db = {
'response': 'ok',
'deleted': deleted,
'age_days': age_days
}
return db