From 8320e936028166f8f4c3033564b77f98d8c06668 Mon Sep 17 00:00:00 2001
From: Ruge Li
Date: Thu, 19 Feb 2026 14:14:17 -0800
Subject: [PATCH] handle both recipe_path and json body requests

---
 docker/server.py | 30 ++++++++++++++++++------------
 1 file changed, 18 insertions(+), 12 deletions(-)

diff --git a/docker/server.py b/docker/server.py
index 74bb20f3..7b0a209d 100644
--- a/docker/server.py
+++ b/docker/server.py
@@ -1,4 +1,5 @@
 import asyncio
+import uuid
 from aiohttp import web
 from cellpack.autopack.DBRecipeHandler import DataDoc, DBUploader
 from cellpack.autopack.interface_objects.database_ids import DATABASE_IDS
@@ -25,19 +26,19 @@ def job_exists(self, dedup_hash):
         job_status, _ = db.get_doc_by_id("job_status", dedup_hash)
         return job_status is not None
 
-    async def run_packing(self, dedup_hash, recipe=None, config=None, body=None):
-        self.update_job_status(dedup_hash, "RUNNING")
+    async def run_packing(self, job_id, recipe=None, config=None, body=None):
+        self.update_job_status(job_id, "RUNNING")
         try:
             # Pack JSON recipe in body if provided, otherwise use recipe path
-            pack(recipe=(body if body else recipe), config_path=config, docker=True, hash=dedup_hash)
+            pack(recipe=(body if body else recipe), config_path=config, docker=True, hash=job_id)
         except Exception as e:
-            self.update_job_status(dedup_hash, "FAILED", error_message=str(e))
+            self.update_job_status(job_id, "FAILED", error_message=str(e))
 
-    def update_job_status(self, dedup_hash, status, result_path=None, error_message=None):
+    def update_job_status(self, job_id, status, result_path=None, error_message=None):
         db = self._get_firebase_handler()
         if db:
             db_uploader = DBUploader(db)
-            db_uploader.upload_job_status(dedup_hash, status, result_path, error_message)
+            db_uploader.upload_job_status(job_id, status, result_path, error_message)
 
     async def hello_world(self, request: web.Request) -> web.Response:
         return web.Response(text="Hello from the cellPACK server")
@@ -58,13 +59,18 @@ async def pack_handler(self, request: web.Request) -> web.Response:
             )
         config = request.rel_url.query.get("config")
 
-        dedup_hash = DataDoc.generate_hash(body)
-
-        if self.job_exists(dedup_hash):
-            return web.json_response({"jobId": dedup_hash})
+        if body:
+            dedup_hash = DataDoc.generate_hash(body)
+            if self.job_exists(dedup_hash):
+                return web.json_response({"jobId": dedup_hash})
+            job_id = dedup_hash
+        else:
+            job_id = str(uuid.uuid4())
 
         # Initiate packing task to run in background
-        packing_task = asyncio.create_task(self.run_packing(dedup_hash, recipe, config, body))
+        packing_task = asyncio.create_task(
+            self.run_packing(job_id, recipe, config, body)
+        )
 
         # Keep track of task references to prevent them from being garbage
         # collected, then discard after task completion
@@ -73,7 +79,7 @@ async def pack_handler(self, request: web.Request) -> web.Response:
 
         # return job id immediately, rather than wait for task to complete,
         # to avoid timeout issues with API gateway
-        return web.json_response({"jobId": dedup_hash})
+        return web.json_response({"jobId": job_id})
 
 
 async def init_app() -> web.Application: