transfer from monorepo

Laura Abro
2025-04-24 10:24:42 -03:00
parent a2175785d5
commit 1c6fc5540b
95 changed files with 7110 additions and 0 deletions

View File

@@ -0,0 +1,70 @@
"""Flask application initialization."""
from flask import Flask, request
from .routes import repo_summary, star, audit, healthz, submission
from prometheus_swarm.utils.logging import configure_logging, log_section, log_key_value, log_value
from prometheus_swarm.database import initialize_database
from colorama import Fore, Style
import uuid
import os
import time
def create_app():
"""Create and configure the Flask application."""
app = Flask(__name__)
# Add request ID middleware
@app.before_request
def before_request():
request.id = str(uuid.uuid4())
# Store request start time for duration calculation
request.start_time = time.time()
@app.after_request
def after_request(response):
# Calculate request duration in milliseconds
duration = (time.time() - request.start_time) * 1000
# Get error message if this is an error response
error_msg = ""
if response.status_code >= 400:
try:
json_data = response.get_json()
if isinstance(json_data, dict):
error_msg = json_data.get("error") or json_data.get("message", "")
except Exception:
# If we can't get JSON data, try to get the message from the response
error_msg = getattr(response, "description", "")
# Log the request with appropriate color
color = Fore.GREEN if response.status_code < 400 else Fore.RED
log_value(
f"[{color}REQ{Style.RESET_ALL}] {request.method} {request.path} "
f"{color}{response.status_code}{Style.RESET_ALL} {error_msg} {duration}ms"
)
return response
# Register blueprints
app.register_blueprint(healthz.bp)
app.register_blueprint(repo_summary.bp)
app.register_blueprint(star.bp)
app.register_blueprint(audit.bp)
app.register_blueprint(submission.bp)
# Configure logging within app context
with app.app_context():
# Set up logging (includes both console and database logging)
configure_logging()
# Initialize database
initialize_database()
# Disable Flask's default logging
app.logger.disabled = True
# Log startup information
log_section("SERVER STARTUP")
log_key_value("Workers", 1)
log_key_value("Host", "0.0.0.0:8080")
log_key_value("Database", os.getenv("DATABASE_PATH", "Not configured"))
return app

View File

@@ -0,0 +1,65 @@
"""Database model for logging."""
from datetime import datetime
from prometheus_swarm.database import get_db
def init_logs_table():
"""Initialize the logs table if it doesn't exist."""
# Not needed - handled by SQLModel
pass
def save_log(
level: str,
message: str,
module: str = None,
function: str = None,
path: str = None,
line_no: int = None,
exception: str = None,
stack_trace: str = None,
request_id: str = None,
additional_data: str = None,
) -> bool:
"""
Save a log entry to the database.
Args:
level: Log level (ERROR, WARNING, INFO, etc)
message: Log message
module: Module name where log was generated
function: Function name where log was generated
path: File path where log was generated
line_no: Line number where log was generated
exception: Exception type if any
stack_trace: Stack trace if any
request_id: Request ID if available
additional_data: Any additional JSON-serializable data
Returns:
bool: True if log was saved successfully
"""
try:
db = get_db()
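# The Log model is imported here rather than at module level, presumably to avoid a circular import at startup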
from prometheus_swarm.database import Log
log = Log(
timestamp=datetime.utcnow(),
level=level,
message=message,
module=module,
function=function,
path=path,
line_no=line_no,
exception=exception,
stack_trace=stack_trace,
request_id=request_id,
additional_data=additional_data,
)
db.add(log)
db.commit()
return True
except Exception as e:
print(f"Failed to save log to database: {e}") # Fallback logging
return False

View File

@@ -0,0 +1,62 @@
from flask import Blueprint, jsonify, request
from src.server.services.github_service import verify_pr_ownership
from src.server.services.audit_service import audit_repo
import logging
logger = logging.getLogger(__name__)
bp = Blueprint("audit", __name__)
@bp.post("/audit/<round_number>")
def audit_submission(round_number: int):
logger.info("Auditing submission")
data = request.get_json()
submission = data.get("submission")
if not submission:
return jsonify({"error": "Missing submission"}), 400
# submission_round_number = submission.get("roundNumber")
task_id = submission.get("taskId")
pr_url = submission.get("prUrl")
github_username = submission.get("githubUsername")
# Extract repo owner and name from PR URL
try:
pr_url_parts = pr_url.split('github.com/')[1].split('/')
repo_owner = pr_url_parts[0]
repo_name = pr_url_parts[1]
except (IndexError, AttributeError):
return jsonify({"error": "Invalid PR URL format"}), 400
print(f"Repo owner: {repo_owner}, Repo name: {repo_name}")
# This is commented out because the round number might be different due to we put the audit logic in the distribution part
# if int(round_number) != submission_round_number:
# return jsonify({"error": "Round number mismatch"}), 400
if (
not task_id
or not pr_url
or not github_username
or not repo_owner
or not repo_name
):
return jsonify({"error": "Missing submission data"}), 400
is_valid = verify_pr_ownership(
pr_url=pr_url,
expected_username=github_username,
expected_owner=repo_owner,
expected_repo=repo_name,
)
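# Ownership could not be verified: report the submission as invalid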
if not is_valid:
return jsonify(False)
try:
is_approved = audit_repo(pr_url)
return jsonify(is_approved), 200
except Exception as e:
logger.error(f"Error auditing PR: {str(e)}")
return jsonify(True), 200

View File

@@ -0,0 +1,14 @@
from flask import Blueprint, jsonify
from prometheus_swarm.database import get_db
import logging
logger = logging.getLogger(__name__)
bp = Blueprint("healthz", __name__)
@bp.post("/healthz")
def healthz():
# Test database connection
_ = get_db()
return jsonify({"status": "ok"})

View File

@@ -0,0 +1,54 @@
from flask import Blueprint, jsonify, request
from src.server.services import repo_summary_service
bp = Blueprint("repo_summary", __name__)
@bp.post("/repo_summary/<round_number>")
def start_task(round_number):
logger = repo_summary_service.logger
logger.info(f"Task started for round: {round_number}")
data = request.get_json()
logger.info(f"Task data: {data}")
required_fields = [
"taskId",
"round_number",
"repo_url"
]
if any(data.get(field) is None for field in required_fields):
return jsonify({"error": "Missing data"}), 401
result = repo_summary_service.handle_task_creation(
task_id=data["taskId"],
round_number=int(round_number),
repo_url=data["repo_url"],
)
return result
if __name__ == "__main__":
from flask import Flask
# Create a Flask app instance
app = Flask(__name__)
app.register_blueprint(bp)
# Test data
test_data = {
"taskId": "fake",
"round_number": "1",
"repo_url": "https://github.com/koii-network/docs"
}
# Set up test context
with app.test_client() as client:
# Make a POST request to the endpoint
response = client.post(
"/repo_summary/1",
json=test_data
)
# Print the response
print(f"Status Code: {response.status_code}")
print(f"Response: {response.get_json()}")

View File

@@ -0,0 +1,39 @@
from flask import Blueprint, jsonify, request
from src.server.services import star_service
bp = Blueprint("star", __name__)
@bp.post("/star/<round_number>")
def start_task(round_number):
logger = star_service.logger
logger.info(f"Task started for round: {round_number}")
data = request.get_json()
logger.info(f"Task data: {data}")
required_fields = [
"taskId",
"round_number",
"github_urls",
]
if any(data.get(field) is None for field in required_fields):
return jsonify({"error": "Missing data"}), 401
try:
# Log incoming data
print("Received data:", data)
print("Round number:", round_number)
result = star_service.handle_star_task(
task_id=data["taskId"],
round_number=int(round_number),
github_urls=data["github_urls"],
)
return result
except Exception as e:
print(f"Error in star endpoint: {str(e)}")
print(f"Error type: {type(e)}")
import traceback
print(f"Traceback: {traceback.format_exc()}")
return jsonify({'error': str(e)}), 500

View File

@@ -0,0 +1,38 @@
from flask import Blueprint, jsonify
from prometheus_swarm.database import get_db
from src.dababase.models import Submission
import logging
import os
logger = logging.getLogger(__name__)
bp = Blueprint("submission", __name__)
@bp.get("/submission/<roundNumber>")
def fetch_submission(roundNumber):
logger.info(f"Fetching submission for round: {roundNumber}")
db = get_db()
submission = (
db.query(Submission)
.filter(
Submission.round_number == int(roundNumber),
)
.first()
)
logger.info(f"Submission: {submission}")
logger.info(f"Submission: {submission}")
if submission:
github_username = os.getenv("GITHUB_USERNAME")
return jsonify(
{
"taskId": submission.task_id,
"roundNumber": submission.round_number,
"status": submission.status,
"prUrl": submission.pr_url,
"githubUsername": github_username,
}
)
else:
return jsonify({"error": "Submission not found"}), 409

View File

@@ -0,0 +1,47 @@
"""Audit service module."""
import logging
from prometheus_swarm.clients import setup_client
from src.workflows.repoSummarizerAudit.workflow import repoSummarizerAuditWorkflow
from src.workflows.repoSummarizerAudit.prompts import (
PROMPTS as REPO_SUMMARIZER_AUDIT_PROMPTS,
)
logger = logging.getLogger(__name__)
def audit_repo(pr_url):
# def review_pr(repo_urls, pr_url, github_username, star_only=True):
"""Review PR and decide if it should be accepted, revised, or rejected."""
try:
# Set up client and workflow
client = setup_client("anthropic")
# Below commented out because we won't need to distribute starring repo nodes
# star_repo_workflow = StarRepoAuditWorkflow(
# client=client,
# prompts=STAR_REPO_AUDIT_PROMPTS,
# repo_url=repo_urls[0],
# github_username=github_username,
# )
# star_repo_workflow.run()
repo_summarizer_audit_workflow = repoSummarizerAuditWorkflow(
client=client,
prompts=REPO_SUMMARIZER_AUDIT_PROMPTS,
pr_url=pr_url,
)
# Run workflow and get result
result = repo_summarizer_audit_workflow.run()
recommendation = result["data"]["recommendation"]
return recommendation
except Exception as e:
logger.error(f"PR review failed: {str(e)}")
raise Exception("PR review failed") from e
if __name__ == "__main__":
# review_pr(["https://github.com/alexander-morris/koii-dumper-reveal"], "https://github.com/koii-network/namespace-wrapper/pull/1", "HermanL02")
audit_repo("https://github.com/koii-network/namespace-wrapper/pull/1")

View File

@@ -0,0 +1,44 @@
import re
from github import Github
import os
import logging
logger = logging.getLogger(__name__)
def verify_pr_ownership(
pr_url,
expected_username,
expected_owner,
expected_repo,
):
try:
gh = Github(os.environ.get("GITHUB_TOKEN"))
match = re.match(r"https://github.com/([^/]+)/([^/]+)/pull/(\d+)", pr_url)
if not match:
logger.error(f"Invalid PR URL: {pr_url}")
return False
owner, repo_name, pr_number = match.groups()
if owner != expected_owner or repo_name != expected_repo:
logger.error(
f"PR URL mismatch: {pr_url} != {expected_owner}/{expected_repo}"
)
return False
repo = gh.get_repo(f"{owner}/{repo_name}")
pr = repo.get_pull(int(pr_number))
if pr.user.login != expected_username:
logger.error(
f"PR username mismatch: {pr.user.login} != {expected_username}"
)
return False
return True
except Exception as e:
logger.error(f"Error verifying PR ownership: {str(e)}")
return True

View File

@@ -0,0 +1,60 @@
"""Task service module."""
from flask import jsonify
from prometheus_swarm.database import get_db
from prometheus_swarm.clients import setup_client
from src.workflows.repoSummarizer.workflow import RepoSummarizerWorkflow
from prometheus_swarm.utils.logging import logger, log_error
from dotenv import load_dotenv
from src.workflows.repoSummarizer.prompts import PROMPTS
from src.dababase.models import Submission
load_dotenv()
def handle_task_creation(task_id, round_number, repo_url):
"""Handle task creation request."""
try:
db = get_db()
client = setup_client("anthropic")
workflow = RepoSummarizerWorkflow(
client=client,
prompts=PROMPTS,
repo_url=repo_url,
)
result = workflow.run()
if result.get("success"):
submission = Submission(
task_id=task_id,
round_number=round_number,
status="summarized",
repo_url=repo_url,
pr_url=result["data"]["pr_url"],
)
db.add(submission)
db.commit()
return jsonify({"success": True, "result": result})
else:
return jsonify(
{"success": False, "result": result.get("error", "No result")}
)
except Exception as e:
logger.error(f"Repo summarizer failed: {str(e)}")
raise
if __name__ == "__main__":
from flask import Flask
app = Flask(__name__)
with app.app_context():
result = handle_task_creation(
task_id="1",
round_number=6,
repo_url="https://github.com/koii-network/builder-test",
)
print(result)

View File

@@ -0,0 +1,50 @@
"""Task service module."""
from flask import jsonify
from prometheus_swarm.database import get_db
from prometheus_swarm.clients import setup_client
from src.workflows.repoSummarizer.workflow import RepoSummarizerWorkflow
from prometheus_swarm.utils.logging import logger, log_error
from src.workflows.starRepo.workflow import StarRepoWorkflow
from dotenv import load_dotenv
from src.workflows.repoSummarizer.prompts import PROMPTS
load_dotenv()
def handle_star_task(task_id, round_number, github_urls):
"""Handle task creation request."""
try:
db = get_db()
client = setup_client("anthropic")
for url in github_urls:
star_workflow = StarRepoWorkflow(
client=client,
prompts=PROMPTS,
repo_url=url,
)
star_result = star_workflow.run()
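# Log a failure for this repo but keep processing the remaining URLs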
if not star_result or not star_result.get("success"):
log_error(
Exception(star_result.get("error", "No result")),
"Repository star failed",
)
return jsonify({"success": True, "result": "Repository starred"})
except Exception as e:
logger.error(f"Repo summarizer failed: {str(e)}")
raise
if __name__ == "__main__":
from flask import Flask
app = Flask(__name__)
with app.app_context():
result = handle_star_task(
task_id="1",
round_number=6,
github_urls=["https://github.com/koii-network/builder-test"],
)
print(result)