4 changes: 2 additions & 2 deletions folding/registries/miner_registry.py
@@ -25,7 +25,7 @@ class MinerData(BaseModel):

overall_credibility: float = c.STARTING_CREDIBILITY
tasks: Dict[str, TaskMetrics] = field(default_factory=dict)
logs: Dict[str, Any] = {}
logs: Dict[str, Any] = field(default_factory=dict)


class MinerRegistry:
@@ -145,7 +145,7 @@ def get_all_miner_logs(self) -> Dict[int, Dict[str, Any]]:
all_miner_logs = {}

for miner_uid, miner_data in self.registry.items():
if not miner_data.logs:
if not hasattr(miner_data, "logs") or miner_data.logs is None:
self._initialize_miner_logs(miner_uid)
all_miner_logs[miner_uid] = miner_data.logs

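The logs change above replaces a bare mutable {} default with a per-instance factory. A minimal sketch of the idiom using a plain dataclass (the Example class here is hypothetical; MinerData itself is a Pydantic model, but the default_factory pattern is the same):

from dataclasses import dataclass, field
from typing import Any, Dict

@dataclass
class Example:
    # default_factory builds a fresh dict for every instance, so one
    # instance's logs can never leak into another's.
    logs: Dict[str, Any] = field(default_factory=dict)

a, b = Example(), Example()
a.logs["step"] = 1
assert b.logs == {}  # each instance gets its own, independent dict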
2 changes: 1 addition & 1 deletion folding/utils/config.py
@@ -437,7 +437,7 @@ def add_validator_args(cls, parser):
nargs="+", # Accepts one or more values as a list
help="The validator will only accept organic queries from a list of whitelisted hotkeys.",
default=[
"5Cg5QgjMfRqBC6bh8X4PDbQi7UzVRn9eyWXsB8gkyfppFPPy",
"5FYwKoHYfiuy9m2yM4wbwM5pQyaEP38MBTTZS8RwNKRuQ8mi",
],
)
parser.add_argument(
109 changes: 109 additions & 0 deletions folding_api/database.py
@@ -0,0 +1,109 @@
import sqlite3
import uuid
from datetime import datetime
from typing import Optional, List, Dict, Any
from contextlib import asynccontextmanager
import aiosqlite
from loguru import logger


class DatabaseManager:
def __init__(self, db_path: str = "protein_jobs.db"):
self.db_path = db_path

async def init_database(self):
"""Initialize the database and create tables if they don't exist"""
async with aiosqlite.connect(self.db_path) as db:
await db.execute("""
CREATE TABLE IF NOT EXISTS protein_jobs (
id TEXT NOT NULL PRIMARY KEY,
job_id TEXT NOT NULL,
pdb_id TEXT NOT NULL,
user_id TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
)
""")
await db.commit()
logger.info("Database initialized successfully")

async def insert_protein_job(
self,
job_id: str,
pdb_id: str,
user_id: str
) -> str:
"""Insert a new protein job record and return the generated ID"""
record_id = str(uuid.uuid4())

async with aiosqlite.connect(self.db_path) as db:
await db.execute("""
INSERT INTO protein_jobs (id, job_id, pdb_id, user_id, created_at)
VALUES (?, ?, ?, ?, ?)
""", (record_id, job_id, pdb_id, user_id, datetime.utcnow().isoformat()))
await db.commit()

logger.info(f"Inserted protein job: id={record_id}, job_id={job_id}, pdb_id={pdb_id}, user_id={user_id}")
return record_id

async def get_protein_jobs(
self,
user_id: Optional[str] = None,
limit: int = 100
) -> List[Dict[str, Any]]:
"""Get protein jobs, optionally filtered by user_id"""
async with aiosqlite.connect(self.db_path) as db:
if user_id:
cursor = await db.execute("""
SELECT id, job_id, pdb_id, user_id, created_at
FROM protein_jobs
WHERE user_id = ?
ORDER BY created_at DESC
LIMIT ?
""", (user_id, limit))
else:
cursor = await db.execute("""
SELECT id, job_id, pdb_id, user_id, created_at
FROM protein_jobs
ORDER BY created_at DESC
LIMIT ?
""", (limit,))

rows = await cursor.fetchall()
await cursor.close()

return [
{
"id": row[0],
"job_id": row[1],
"pdb_id": row[2],
"user_id": row[3],
"created_at": row[4]
}
for row in rows
]

async def get_protein_job_by_id(self, record_id: str) -> Optional[Dict[str, Any]]:
"""Get a specific protein job by its record ID"""
async with aiosqlite.connect(self.db_path) as db:
cursor = await db.execute("""
SELECT id, job_id, pdb_id, user_id, created_at
FROM protein_jobs
WHERE id = ?
""", (record_id,))

row = await cursor.fetchone()
await cursor.close()

if row:
return {
"id": row[0],
"job_id": row[1],
"pdb_id": row[2],
"user_id": row[3],
"created_at": row[4]
}
return None


# Global database manager instance
db_manager = DatabaseManager()
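A short usage sketch of the manager defined above (the job, PDB, and user values are made up for illustration):

import asyncio

from folding_api.database import db_manager

async def demo():
    await db_manager.init_database()

    # Record a job and read it back (illustrative IDs only).
    record_id = await db_manager.insert_protein_job(
        job_id="job-123", pdb_id="1UBQ", user_id="user-42"
    )
    print(await db_manager.get_protein_job_by_id(record_id))
    print(await db_manager.get_protein_jobs(user_id="user-42", limit=10))

asyncio.run(demo())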
5 changes: 5 additions & 0 deletions folding_api/main.py
@@ -12,6 +12,7 @@
from folding_api.utility_endpoints import router as utility_router
from folding_api.validator_registry import ValidatorRegistry
from folding_api.auth import APIKeyManager, get_api_key, api_key_router
from folding_api.database import db_manager
from folding_api.vars import (
bt_config,
limiter,
@@ -50,6 +51,10 @@ async def lifespan(app: FastAPI):
api_key_manager = APIKeyManager()
app.state.api_key_manager = api_key_manager

# Initialize database
await db_manager.init_database()
app.state.db_manager = db_manager

# Start background sync task
app.state.sync_task = asyncio.create_task(
sync_metagraph_periodic(subtensor_service, validator_registry)
16 changes: 15 additions & 1 deletion folding_api/queries.py
@@ -5,6 +5,7 @@
from folding_api.schemas import FoldingSchema, FoldingReturn
from folding_api.utils import make_request
from folding_api.validator_registry import ValidatorRegistry
from folding_api.database import db_manager


async def query_validators(
@@ -48,7 +49,20 @@ async def query_validators(
if resp is not None:
response_information["status_codes"].append(resp.status_code)
if resp.status_code == 200:
response_information["job_id"].append(resp.json()["job_id"])
job_id = resp.json()["job_id"]
response_information["job_id"].append(job_id)

# Insert successful job into database
try:
await db_manager.insert_protein_job(
job_id=job_id,
pdb_id=schema.pdb_id,
user_id=schema.user_id
)
except Exception as e:
# Log the error but don't fail the request
from loguru import logger
logger.error(f"Failed to insert protein job to database: {e}")
else:
response_information["job_id"].append(None)
else:
23 changes: 23 additions & 0 deletions folding_api/schemas.py
@@ -54,6 +54,9 @@ class FoldingSchema(BaseModel):
...,
description="The base epsilon that should be used for the challenge. Represented in %/100",
)
user_id: str = Field(
..., description="The user identifier for tracking job ownership."
)

validator_uids: list[int] = Field(
..., description="The validator identifier for the selected response source."
@@ -192,6 +195,26 @@ class PDBSearchResponse(BaseModel):
total: int = Field(..., description="Total number of matches found")


class UserPDBEntry(BaseModel):
"""
Represents a PDB job entry with creation timestamp for a user.
"""

job_id: str = Field(..., description="The job identifier")
pdb_id: str = Field(..., description="The PDB identifier")
created_at: str = Field(..., description="When this job was created")


class UserPDBResponse(BaseModel):
"""
Represents a response containing protein folding jobs for a specific user.
"""

user_id: str = Field(..., description="The user identifier")
pdb_entries: List[UserPDBEntry] = Field(..., description="List of protein folding jobs with job ID, PDB ID and creation timestamps")
total: int = Field(..., description="Total number of jobs found")


class PDBInfoResponse(BaseModel):
"""
Represents detailed information about a PDB structure from RCSB.
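For reference, the new response models can be constructed directly; the field values below are illustrative only:

from folding_api.schemas import UserPDBEntry, UserPDBResponse

entry = UserPDBEntry(
    job_id="job-123",  # illustrative values
    pdb_id="1UBQ",
    created_at="2024-01-01T12:00:00",
)
response = UserPDBResponse(user_id="user-42", pdb_entries=[entry], total=1)
print(response.json())  # .model_dump_json() on Pydantic v2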
53 changes: 52 additions & 1 deletion folding_api/utility_endpoints.py
@@ -1,6 +1,6 @@
import subprocess
from typing import Optional, Literal
from fastapi import APIRouter, HTTPException, Query, Depends, Request
from fastapi import APIRouter, HTTPException, Query, Depends, Request, Path
from http import HTTPStatus
import pickle
import os
@@ -16,6 +16,8 @@
Job,
JobResponse,
Miner,
UserPDBResponse,
UserPDBEntry,
)
from folding_api.auth import APIKey, get_api_key
from folding_api.utils import query_gjp
@@ -622,3 +624,52 @@ async def get_job(
created_at=job.get("created_at", ""),
updated_at=job.get("updated_at", ""),
)


@router.get("/user/{user_id}/pdb-ids", response_model=UserPDBResponse)
async def get_user_pdb_ids(
request: Request,
user_id: str = Path(..., description="The user identifier"),
api_key: APIKey = Depends(get_api_key),
) -> UserPDBResponse:
"""
Get all protein folding jobs for a specific user.

This endpoint returns a list of all protein folding jobs submitted by the specified user,
including job ID, PDB ID, and creation timestamp for each job.
"""
try:
# Get the database manager from app state
db_manager = request.app.state.db_manager

# Query jobs for the specific user
jobs = await db_manager.get_protein_jobs(user_id=user_id)

# Create PDB entries with creation timestamps (one per job)
pdb_entries = [
UserPDBEntry(
job_id=job["job_id"],
pdb_id=job["pdb_id"],
created_at=job["created_at"]
)
for job in jobs
]

        # Sort by creation date, newest first
pdb_entries.sort(key=lambda x: x.created_at, reverse=True)

return UserPDBResponse(
user_id=user_id,
pdb_entries=pdb_entries,
total=len(pdb_entries)
)

except HTTPException:
raise
except Exception as e:
logger.exception(f"Unexpected error getting PDB IDs for user {user_id}: {e}")

raise HTTPException(
status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
detail="An unexpected error occurred. Please have an admin check the logs and try again later.",
)
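A hypothetical client call against the new endpoint; the X-API-Key header name is an assumption, so check folding_api.auth for the actual scheme:

import httpx

async def fetch_user_jobs(base_url: str, user_id: str, api_key: str) -> dict:
    # Returns a UserPDBResponse-shaped payload: user_id, pdb_entries, total.
    async with httpx.AsyncClient(base_url=base_url) as client:
        resp = await client.get(
            f"/user/{user_id}/pdb-ids",
            headers={"X-API-Key": api_key},  # assumed header name
        )
        resp.raise_for_status()
        return resp.json()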
2 changes: 1 addition & 1 deletion folding_api/vars.py
@@ -18,7 +18,7 @@
"--api-key-file", type=str, help="API key file", default="api_keys.json"
)
parser.add_argument(
"--gjp-address", type=str, help="GJP API address", default="174.138.3.61:4001"
"--gjp-address", type=str, help="GJP API address", default="167.99.209.27:4001"
)

bt_config = bt.config(parser)
8 changes: 8 additions & 0 deletions pm2_configs/folding_api.config.js
@@ -0,0 +1,8 @@
module.exports = {
apps: [{
name: "folding-api",
script: "scripts/run_folding_api.sh", // Use the wrapper script
autorestart: true,
watch: false
}]
};
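Assuming pm2 is installed and scripts/run_folding_api.sh is executable, the API can then be launched with "pm2 start pm2_configs/folding_api.config.js".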