use background task to avoid timeout

Laura Abro
2025-04-29 11:09:29 -03:00
parent 6cf7e0db0e
commit f4b0fc4e2e
9 changed files with 198 additions and 107 deletions

View File

@@ -20,7 +20,7 @@ RUN git config --global --add safe.directory /app
# Copy the rest of your application code into the container
COPY . .
ENV MIDDLE_SERVER_URL=https://builder247.api.koii.network
ENV MIDDLE_SERVER_URL=https://ik8kcow8ksw8gwgoo0ggosko.dev.koii.network
# Configure logging and output
ENV PYTHONUNBUFFERED=1
@@ -40,9 +40,9 @@ CMD ["gunicorn", \
"--capture-output", \
"--enable-stdio-inheritance", \
"--logger-class=gunicorn.glogging.Logger", \
"--timeout", "600", \
"--graceful-timeout", "600", \
"--keep-alive", "5", \
"--timeout", "1800", \
"--graceful-timeout", "1800", \
"--keep-alive", "15", \
"-w", "1", \
"-b", "0.0.0.0:8080", \
"main:app"]

View File

@@ -15,6 +15,5 @@ base58>=2.1.0
tenacity>=9.0.0
sqlmodel>=0.0.22
openai>=0.28.0
colorama>=0.4.6
prometheus-swarm>=0.1.5
colorama>=0.4.6
prometheus-swarm>=0.1.7
prometheus-test>=0.1.2

View File

@@ -1,8 +0,0 @@
from setuptools import setup, find_packages

setup(
    name="task-flow",
    version="0.1",
    packages=find_packages(include=["src", "src.*"]),
    python_requires=">=3.6",
)

View File

@@ -1,7 +1,41 @@
import os
import requests
from flask import Blueprint, jsonify, request
from src.server.services import repo_summary_service
from concurrent.futures import ThreadPoolExecutor
from prometheus_swarm.database import get_db
from src.server.services.repo_summary_service import logger
bp = Blueprint("task", __name__)
executor = ThreadPoolExecutor(max_workers=2)
def post_pr_url(agent_result, task_id, signature, round_number):
    try:
        result = agent_result.result()  # Get the result from the future
        logger.info(f"Result: {result}")
        result_data = result.get("result", {})
        logger.info(f"Result data: {result_data}")
        # Make a POST request with the result
        response = requests.post(
            f"http://host.docker.internal:30017/task/{task_id}/add-todo-pr",
            json={
                "prUrl": result_data.get("data", {}).get("pr_url"),
                "signature": signature,
                "roundNumber": round_number,
                "success": result.get("success", False),
                "message": result_data.get("error", ""),
            },
        )
        response.raise_for_status()  # Raise an error for bad responses
    except Exception as e:
        # Handle exceptions (e.g., log the error)
        logger.error(f"Failed to send result: {e}")
        logger.error(f"Exception type: {type(e)}")
        if hasattr(e, "__traceback__"):
            import traceback

            logger.error(f"Traceback: {''.join(traceback.format_tb(e.__traceback__))}")
@bp.post("/worker-task/<round_number>")
@@ -10,18 +44,37 @@ def start_task(round_number):
    logger.info(f"Task started for round: {round_number}")
    data = request.get_json()
    task_id = data["task_id"]
    podcall_signature = data["podcall_signature"]
    repo_url = data["repo_url"]
    logger.info(f"Task data: {data}")
    required_fields = ["taskId", "round_number", "repo_url"]
    required_fields = ["task_id", "round_number", "repo_url", "podcall_signature"]
    if any(data.get(field) is None for field in required_fields):
        return jsonify({"error": "Missing data"}), 401
    result = repo_summary_service.handle_task_creation(
        task_id=data["taskId"],
        round_number=int(round_number),
        repo_url=data["repo_url"],
    )
    # Get db instance in the main thread where we have app context
    db = get_db()
    return result
    if os.getenv("TEST_MODE") == "true":
        result = repo_summary_service.handle_task_creation(
            task_id=task_id,
            round_number=int(round_number),
            repo_url=repo_url,
            db=db,  # Pass db instance
        )
        return jsonify(result)
    else:
        agent_result = executor.submit(
            repo_summary_service.handle_task_creation,
            task_id=task_id,
            round_number=round_number,
            repo_url=repo_url,
            db=db,  # Pass db instance
        )
        agent_result.add_done_callback(
            lambda future: post_pr_url(future, task_id, podcall_signature, round_number)
        )
        return jsonify({"status": "Task is being processed"}), 200
if __name__ == "__main__":
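
The core of this change is the hand-off above: the route validates the payload, grabs a db handle while still inside the Flask app context, submits handle_task_creation to a ThreadPoolExecutor, and returns 200 right away; post_pr_url then reports the PR URL back from a done-callback. A minimal, self-contained sketch of that fire-and-forget pattern, with illustrative names (do_work, report_result) that are not from this repo:

# Minimal sketch of the submit + add_done_callback pattern used above (illustrative names).
from concurrent.futures import ThreadPoolExecutor
import requests

executor = ThreadPoolExecutor(max_workers=2)

def do_work(task_id):
    # stand-in for the long-running repo_summary_service.handle_task_creation call
    return {"success": True, "pr_url": f"https://example.com/pr/{task_id}"}

def report_result(future, task_id):
    # runs in the executor thread once do_work finishes, success or not
    try:
        result = future.result()
        requests.post(
            f"http://host.docker.internal:30017/task/{task_id}/add-todo-pr",
            json={"prUrl": result.get("pr_url"), "success": result.get("success", False)},
        ).raise_for_status()
    except Exception as exc:
        print(f"Failed to send result: {exc}")

future = executor.submit(do_work, task_id="123")
future.add_done_callback(lambda f: report_result(f, task_id="123"))
# The HTTP handler can return 200 immediately; report_result fires whenever the work completes.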

View File

@@ -1,6 +1,5 @@
"""Task service module."""
from flask import jsonify
from prometheus_swarm.database import get_db
from prometheus_swarm.clients import setup_client
from src.workflows.repoSummarizer.workflow import RepoSummarizerWorkflow
@@ -12,10 +11,11 @@ from src.database.models import Submission
load_dotenv()
def handle_task_creation(task_id, round_number, repo_url):
def handle_task_creation(task_id, round_number, repo_url, db=None):
    """Handle task creation request."""
    try:
        db = get_db()
        if db is None:
            db = get_db()  # Fallback for direct calls
        client = setup_client("anthropic")
        workflow = RepoSummarizerWorkflow(
@@ -35,11 +35,9 @@ def handle_task_creation(task_id, round_number, repo_url):
            )
            db.add(submission)
            db.commit()
            return jsonify({"success": True, "result": result})
            return {"success": True, "result": result}
        else:
            return jsonify(
                {"success": False, "result": result.get("error", "No result")}
            )
            return {"success": False, "result": result.get("error", "No result")}
    except Exception as e:
        logger.error(f"Repo summarizer failed: {str(e)}")
        raise
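
handle_task_creation now takes an optional db argument because the background thread runs outside the Flask request/application context: the route fetches the session on the main thread and injects it, and the plain-dict returns replace jsonify, likely because jsonify requires an application context that the worker thread does not have. A hedged sketch of that injection pattern, using a plain SQLModel Session in place of prometheus_swarm's get_db() (the engine URL and returned fields are illustrative):

# Sketch of the optional-db injection pattern, assuming a SQLModel/SQLAlchemy session.
from typing import Optional
from sqlmodel import Session, create_engine

engine = create_engine("sqlite:///app.db")  # illustrative database URL

def handle_task_creation(task_id, round_number, repo_url, db: Optional[Session] = None):
    if db is None:
        db = Session(engine)  # fallback for direct calls, mirroring get_db()
    # ... run the workflow, persist the submission with the injected session ...
    db.commit()
    # return plain dicts so the caller can jsonify (or not) inside its own request context
    return {"success": True, "result": {"task_id": task_id, "repo_url": repo_url}}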