import from monorepo

This commit is contained in:
Laura Abro
2025-04-24 10:37:07 -03:00
parent 1c6fc5540b
commit 640528c746
45 changed files with 842 additions and 1613 deletions

View File

@ -1,8 +1,13 @@
"""Flask application initialization."""
from flask import Flask, request
from .routes import repo_summary, star, audit, healthz, submission
from prometheus_swarm.utils.logging import configure_logging, log_section, log_key_value, log_value
from .routes import task, audit, healthz, submission
from prometheus_swarm.utils.logging import (
configure_logging,
log_section,
log_key_value,
log_value,
)
from prometheus_swarm.database import initialize_database
from colorama import Fore, Style
import uuid
@ -47,11 +52,10 @@ def create_app():
# Register blueprints
app.register_blueprint(healthz.bp)
app.register_blueprint(repo_summary.bp)
app.register_blueprint(star.bp)
app.register_blueprint(task.bp)
app.register_blueprint(audit.bp)
app.register_blueprint(submission.bp)
# Configure logging within app context
with app.app_context():
# Set up logging (includes both console and database logging)

View File

@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
bp = Blueprint("audit", __name__)
@bp.post("/audit/<round_number>")
@bp.post("/worker-audit/<round_number>")
def audit_submission(round_number: int):
logger.info("Auditing submission")
@ -18,22 +18,21 @@ def audit_submission(round_number: int):
if not submission:
return jsonify({"error": "Missing submission"}), 400
# submission_round_number = submission.get("roundNumber")
submission_round_number = submission.get("roundNumber")
task_id = submission.get("taskId")
pr_url = submission.get("prUrl")
github_username = submission.get("githubUsername")
# Extract repo owner and name from PR URL
try:
pr_url_parts = pr_url.split('github.com/')[1].split('/')
pr_url_parts = pr_url.split("github.com/")[1].split("/")
repo_owner = pr_url_parts[0]
repo_name = pr_url_parts[1]
except (IndexError, AttributeError):
return jsonify({"error": "Invalid PR URL format"}), 400
print(f"Repo owner: {repo_owner}, Repo name: {repo_name}")
# This is commented out because the round number might be different due to we put the audit logic in the distribution part
# if int(round_number) != submission_round_number:
# return jsonify({"error": "Round number mismatch"}), 400
if int(round_number) != submission_round_number:
return jsonify({"error": "Round number mismatch"}), 400
if (
not task_id

View File

@ -1,39 +0,0 @@
from prometheus_swarm.utils.logging import log_key_value
from flask import Blueprint, jsonify, request
from src.server.services import star_service
bp = Blueprint("star", __name__)
@bp.post("/star/<round_number>")
def start_task(round_number):
    """Handle a star-task request for the given round.

    Expects a JSON body with ``taskId``, ``round_number`` and
    ``github_urls``, and delegates the work to
    ``star_service.handle_star_task``.

    Returns the service result on success, 401 when required fields are
    missing, or a 500 JSON error payload on failure.
    """
    logger = star_service.logger
    logger.info(f"Task started for round: {round_number}")
    data = request.get_json()
    logger.info(f"Task data: {data}")
    required_fields = [
        "taskId",
        "round_number",
        "github_urls",
    ]
    # NOTE(review): 401 is kept for backward compatibility, though 400
    # (Bad Request) is the conventional status for missing body fields.
    if any(data.get(field) is None for field in required_fields):
        return jsonify({"error": "Missing data"}), 401

    try:
        result = star_service.handle_star_task(
            task_id=data["taskId"],
            round_number=int(round_number),
            github_urls=data["github_urls"],
        )
        return result
    except Exception as e:
        # Use the module logger instead of print() so errors reach the
        # configured logging pipeline; logger.exception also records the
        # full traceback, replacing the manual traceback.format_exc() call.
        logger.exception(f"Error in star endpoint: {e}")
        return jsonify({"error": str(e)}), 500

View File

@ -1,21 +1,17 @@
from flask import Blueprint, jsonify, request
from src.server.services import repo_summary_service
bp = Blueprint("repo_summary", __name__)
bp = Blueprint("task", __name__)
@bp.post("/repo_summary/<round_number>")
@bp.post("/worker-task/<round_number>")
def start_task(round_number):
logger = repo_summary_service.logger
logger.info(f"Task started for round: {round_number}")
data = request.get_json()
logger.info(f"Task data: {data}")
required_fields = [
"taskId",
"round_number",
"repo_url"
]
required_fields = ["taskId", "round_number", "repo_url"]
if any(data.get(field) is None for field in required_fields):
return jsonify({"error": "Missing data"}), 401
@ -27,28 +23,26 @@ def start_task(round_number):
return result
if __name__ == "__main__":
    from flask import Flask

    # Build a throwaway app hosting only this blueprint so the endpoint can
    # be exercised manually without the full server.
    app = Flask(__name__)
    app.register_blueprint(bp)

    # Test data
    test_data = {
        "taskId": "fake",
        "round_number": "1",
        "repo_url": "https://github.com/koii-network/docs",
    }

    # Set up test context
    with app.test_client() as client:
        # The route was renamed from /repo_summary to /worker-task, so the
        # harness must POST the new path (the old one now returns 404).
        response = client.post("/worker-task/1", json=test_data)

        # Print the response
        print(f"Status Code: {response.status_code}")
        print(f"Response: {response.get_json()}")

View File

@ -1,15 +1,13 @@
"""Task service module."""
import requests
import os
from flask import jsonify
from prometheus_swarm.database import get_db
from prometheus_swarm.clients import setup_client
from src.workflows.repoSummarizer.workflow import RepoSummarizerWorkflow
from prometheus_swarm.utils.logging import logger, log_error
from prometheus_swarm.utils.logging import logger
from dotenv import load_dotenv
from src.workflows.repoSummarizer.prompts import PROMPTS
from src.dababase.models import Submission
from src.database.models import Submission
load_dotenv()

View File

@ -0,0 +1,381 @@
# Per-repository-type README section templates. Each key is a repository
# classification; each value is a list of {"name", "description"} dicts that
# are combined with INITIAL_SECTIONS / FINAL_SECTIONS to build the full
# README outline. All values are lists (the "template" entry was previously a
# tuple, which broke list concatenation/mutation) and "23 example commands"
# was a garbled "2-3".
DOCS_SECTIONS = {
    "library": [
        {
            "name": "API Reference",
            "description": "Generate a complete list of all publicly exported functions, classes, and constants "
            "from the library\n"
            "For each item, include:\n"
            "- Its name\n"
            "- Description of what it does\n"
            "- Function signature with types and descriptions of parameters and return values\n"
            "- Example usage\n"
            "Do not omit any significant exports — include everything that would be relevant to a developer using "
            "this library\n"
            "Group related items (e.g., utility functions, configuration, components) under subsections if helpful\n",
        },
    ],
    "web_app": [
        {
            "name": "Deployment",
            "description": "Describe how to build and deploy the application to production\n"
            "Include relevant deployment commands and target platforms (e.g., Vercel, Netlify, Docker)\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List the main frameworks, libraries, and tools (e.g., React, Vue, Vite, Tailwind)\n\n",
        },
        {
            "name": "Feature Highlights",
            "description": "Describe core user-facing features or flows "
            "(e.g., authentication, dashboards, routing)\n\n",
        },
        {
            "name": "Configuration",
            "description": "Mention any configurable options, build settings, or plugins used\n\n",
        },
    ],
    "api_service": [
        {
            "name": "API Documentation",
            "description": "List the available endpoints or routes\n"
            "For each endpoint, include:\n"
            "Method (GET, POST, etc.)\n"
            "Path and parameters\n"
            "Example request and response\n"
            "Authentication requirements (if any)\n"
            "If an OpenAPI/Swagger spec or GraphQL schema exists, link to it\n\n",
        },
        {
            "name": "Authentication",
            "description": "Describe how authentication works (e.g., API keys, OAuth, JWT)\n"
            "Include example headers or auth flow steps if needed\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List major frameworks, libraries, or tools (e.g., Express, FastAPI, Prisma)\n\n",
        },
        {
            "name": "Deployment",
            "description": "Describe how to deploy the service (e.g., Docker, CI/CD, cloud platforms)\n"
            "Include environment config or scaling considerations if relevant\n\n",
        },
    ],
    "mobile_app": [
        {
            "name": "Supported Platforms",
            "description": "Indicate whether the app runs on Android, iOS, or both\n"
            "Mention any platform-specific dependencies or limitations\n\n",
        },
        {
            "name": "Running the App",
            "description": "Show commands to run the app on a simulator/emulator or real device\n"
            "Include platform-specific commands if needed (e.g., `npx react-native run-ios`, `flutter run`)\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List the frameworks, SDKs, and libraries used (e.g., React Native, Flutter, Firebase)\n\n",
        },
        {
            "name": "Key Screens and Features",
            "description": "Highlight core screens or flows within the app (e.g., login, profile, dashboard)\n"
            "Optionally include screenshots or descriptions of user interactions\n\n",
        },
        {
            "name": "Build and Deployment",
            "description": "Provide steps for creating production builds\n"
            "Mention any tools or services used for distribution (e.g., TestFlight, Play Store, Expo)\n\n",
        },
    ],
    "tutorial": [
        {
            "name": "Tutorial Structure",
            "description": "Break down the tutorial into sections, stages, or lessons\n"
            "Briefly describe what each section teaches or builds\n"
            "Link to key files or folders associated with each part\n\n",
        },
        {
            "name": "Learning Outcomes",
            "description": "Clearly list the skills or concepts users will have mastered by the end\n\n",
        },
        {
            "name": "Code Examples and Exercises",
            "description": "Mention inline code snippets, checkpoints, or interactive examples\n"
            "If exercises are included, describe how users should complete or test them\n\n",
        },
        {
            "name": "Next Steps / Further Reading",
            "description": "Suggest where users can go after completing the tutorial\n"
            "Include links to additional docs, libraries, or related tutorials\n\n",
        },
    ],
    # Was a tuple; now a list for consistency with every other entry.
    "template": [
        {
            "name": "Customization Guide",
            "description": "Explain which parts of the codebase are intended to be modified by users\n"
            "Offer guidance on how to rename, rebrand, or restructure parts of the template\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List the frameworks, libraries, and tools integrated into the template "
            "(e.g., ESLint, Prettier, Tailwind, Express)\n\n",
        },
        {
            "name": "Use Cases",
            "description": "Provide example scenarios where this template is useful "
            "(e.g., 'Use this for building a REST API with authentication')\n"
            "Link to live demos or projects built from this template if available\n\n",
        },
    ],
    "cli_tool": [
        {
            "name": "Usage",
            "description": "Show how to use the tool from the command line\n"
            "Include at least 2-3 example commands with explanations of the output\n"
            "Demonstrate the most common and useful flags or options\n"
            "If the tool supports subcommands, show examples of each\n\n",
        },
        {
            "name": "Command Reference",
            "description": "List all available commands, flags, and options in a table or list format\n"
            "Explain each option clearly, including defaults and accepted values\n\n",
        },
        {
            "name": "Configuration",
            "description": "Describe any optional or required configuration files (e.g., `.clirc`, `config.json`)\n"
            "Show example configurations and where to place them\n\n",
        },
    ],
    "framework": [
        {
            "name": "Core Concepts",
            "description": "Explain the main components or building blocks "
            "(e.g., modules, services, lifecycle, routing, etc.)\n"
            "Include diagrams or conceptual overviews if helpful\n\n",
        },
        {
            "name": "Extension Points",
            "description": "Describe how developers can extend the framework "
            "(e.g., plugins, middleware, custom components)\n"
            "Include examples of common extension use cases\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List core dependencies, supported environments, or language-level features leveraged\n\n",
        },
        {
            "name": "Best Practices",
            "description": "Offer guidance for structuring large projects, writing maintainable code, or "
            "following framework conventions\n\n",
        },
    ],
    "data_science": [
        {
            "name": "Dataset",
            "description": "Describe the dataset used (source, size, structure)\n"
            "Include schema information or link to external data sources\n\n",
        },
        {
            "name": "Model Architecture and Training",
            "description": "Briefly describe the model(s) used and why they were chosen\n"
            "Include training scripts and command-line instructions\n\n",
        },
        {
            "name": "Evaluation and Results",
            "description": "Summarize how the model was evaluated and key performance metrics\n"
            " - Include training scripts and command-line instructions\n"
            " - Mention metrics used for evaluation\n\n",
        },
        {
            "name": "Inference / How to Use the Model",
            "description": "Explain how to run inference or apply the model to new data\n"
            "Include input/output formats and example commands or code\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List key tools, libraries, and frameworks (e.g., scikit-learn, TensorFlow, pandas)\n\n",
        },
    ],
    "plugin": [
        {
            "name": "Usage",
            "description": "Show how to enable and configure the plugin\n"
            "Include code snippets or configuration steps\n\n",
        },
        {
            "name": "Integration Points",
            "description": "Describe hooks, lifecycle methods, or extension APIs the plugin interacts with\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List frameworks, languages, or tooling\n\n",
        },
    ],
    "chrome_extension": [
        {
            "name": "Usage",
            "description": "Explain how users interact with the extension "
            "(e.g., popup UI, context menu, background scripts)\n"
            "Include example scenarios or screenshots if applicable\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List libraries or frameworks (e.g., vanilla JS, React, Tailwind)\n\n",
        },
    ],
    "jupyter_notebook": [
        {
            "name": "Notebook Summary",
            "description": "List and briefly describe each notebook in the repo\n"
            "Mention whether they build on each other or are standalone\n\n",
        },
        {
            "name": "Dataset (if applicable)",
            "description": "Describe any datasets used and where they come from\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List libraries (e.g., pandas, matplotlib, scikit-learn)\n\n",
        },
    ],
    "infrastructure": [
        {
            "name": "Configuration Files",
            "description": "Explain the structure and purpose of major files (e.g., `main.tf`, `docker-compose.yml`, "
            "`playbooks/`)\n\n",
        },
        {
            "name": "Deployment Workflow",
            "description": "Describe how deployments are triggered and verified\n"
            "Mention any CI/CD pipelines, remote state management, or secrets handling\n\n",
        },
        {
            "name": "Environments",
            "description": "Clarify how to deploy to multiple environments (dev, staging, prod)\n\n",
        },
    ],
    "smart_contract": [
        {
            "name": "Contracts",
            "description": "Describe the main contract(s) and what each one does\n"
            "Include deployment steps and how to interact with them\n\n",
        },
        {
            "name": "Testing",
            "description": "Explain how to run tests and what framework is used\n\n",
        },
    ],
    "dapp": [
        {
            "name": "Architecture",
            "description": "Provide a high-level diagram or explanation of how the frontend "
            "interacts with smart contracts\n"
            "Mention key technologies used on both sides (e.g., React, Ethers.js, Anchor, Web3.js)\n\n",
        },
        {
            "name": "Smart Contracts",
            "description": "Describe the deployed contracts and how to interact with them\n"
            "Include deployment instructions and test commands\n\n",
        },
        {
            "name": "Frontend",
            "description": "Describe key UI components and user flows (e.g., connect wallet, mint token, submit vote)\n"
            "Mention any integrations with IPFS, oracles, or off-chain data\n\n",
        },
    ],
    "game": [
        {
            "name": "Controls and Gameplay",
            "description": "Explain player controls and core mechanics\n"
            "Optionally include screenshots, video, or demo links\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List engines, frameworks, or libraries used to build the game\n\n",
        },
    ],
    "desktop_app": [
        {
            "name": "Usage",
            "description": "Describe the app's main features and user workflows\n"
            "Include screenshots if applicable\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List major libraries, frameworks, and build tools\n\n",
        },
    ],
    "dataset": [
        {
            "name": "Dataset Details",
            "description": "Describe the structure and format (e.g., CSV, JSON, images, text)\n"
            "Include column definitions, schema, or data dictionaries\n"
            "Mention the number of records, size, and any notable characteristics\n\n",
        },
        {
            "name": "Usage Instructions",
            "description": "Provide example code snippets for loading and using the dataset (e.g., pandas, SQL, etc.)\n"
            "Mention any preprocessing steps if needed\n\n",
        },
        {
            "name": "Related Work / Source Links",
            "description": "Link to original data sources, research papers, or related projects (if applicable)\n\n",
        },
    ],
    "other": [
        {
            "name": "Features / Capabilities",
            "description": "List the core features or components of the project\n"
            "Include relevant examples, demos, or configurations if applicable\n\n",
        },
        {
            "name": "Technologies Used",
            "description": "List any major frameworks, libraries, or languages identified in the project\n\n",
        },
        {
            "name": "Usage Examples",
            "description": "Include example commands or steps showing how to use the project\n\n",
        },
    ],
}
# Sections that open every generated README, regardless of repository type.
# Same {"name", "description"} shape as the DOCS_SECTIONS entries.
INITIAL_SECTIONS = [
    {
        "name": "Project Overview",
        "description": "A concise description of what the codebase does\n"
        "- Its main purpose and the problems it solves\n"
        "- Key features and benefits\n\n",
    },
    {
        "name": "Getting Started, Installation, and Setup",
        "description": "Include a quick start guide with usage instructions. Leave detailed installation instructions "
        "to the Installation and Setup section.\n\n"
        "Provide all necessary instruction to install and use the project, including dependencies and "
        "platform-specific instructions (if applicable)\n"
        "Include steps for both running the app in development and building a production release (if applicable)\n\n",
    },
]
# Sections that close every generated README, regardless of repository type.
# Same {"name", "description"} shape as the DOCS_SECTIONS entries.
FINAL_SECTIONS = [
    {
        "name": "Project Structure",
        "description": "Briefly explain the purpose of key directories and files\n\n",
    },
    {
        "name": "Additional Notes",
        "description": "Focus on making the README useful and descriptive, "
        "even if the project type is ambiguous\n"
        "- Use best judgment to tailor the content to the actual "
        "functionality and audience of the project\n"
        "- Avoid placeholder text and strive to extract real, useful information from the codebase",
    },
    {
        "name": "Contributing",
        "description": "Include basic instructions for how others can contribute\n"
        "- Mention any specific guidelines or requirements for contributions (e.g. code style, testing, etc.)\n\n",
    },
    {
        "name": "License",
        "description": "State the type of license and include a link to the license file\n\n"
        "If no license is mentioned, state that the code is unlicensed and explain the implications.",
    },
]

View File

@ -3,9 +3,6 @@
from prometheus_swarm.workflows.base import WorkflowPhase, Workflow
class BranchCreationPhase(WorkflowPhase):
def __init__(self, workflow: Workflow, conversation_id: str = None):
super().__init__(
@ -24,24 +21,33 @@ class RepoClassificationPhase(WorkflowPhase):
prompt_name="classify_repository",
available_tools=["read_file", "list_files", "classify_repository"],
conversation_id=conversation_id,
name="Repository Classificati on",
name="Repository Classification",
)
class ReadmeGenerationPhase(WorkflowPhase):
def __init__(
self, workflow: Workflow, conversation_id: str = None, prompt_name: str = None
):
class ReadmeSectionGenerationPhase(WorkflowPhase):
    """Workflow phase that drafts a single README section."""

    def __init__(self, workflow: Workflow, conversation_id: str = None):
        # Tools the agent may use while inspecting the repo and writing
        # the section content.
        section_tools = [
            "read_file",
            "list_files",
            "write_file",
            "create_readme_section",
        ]
        super().__init__(
            workflow=workflow,
            prompt_name="generate_readme_section",
            available_tools=section_tools,
            conversation_id=conversation_id,
            name="Readme Section Generation",
        )
class ReadmeFileCreationPhase(WorkflowPhase):
    """Workflow phase that titles and writes the final README file."""

    def __init__(self, workflow: Workflow, conversation_id: str = None):
        # This phase must end by invoking create_readme_file, so it is
        # declared as the required tool rather than an optional one.
        super().__init__(
            workflow=workflow,
            prompt_name="generate_readme",
            required_tool="create_readme_file",
            conversation_id=conversation_id,
            name="Readme File Creation",
        )

View File

@ -33,6 +33,34 @@ PROMPTS = {
"- Desktop App: GUI application for desktop environments (e.g., Electron, Qt, Tauri)\n"
"- Dataset: Repository containing structured data for analysis or training\n"
"- Other: If it doesn't fit into any of the above categories\n"
"IMPORTANT: Do not assume that the README is correct. "
"Classify the repository based on the codebase.\n"
"If files are mentioned in the README but are not present in the codebase, "
"do NOT use them as a source of information.\n"
),
"generate_readme_section": (
"You are writing the {section_name} section of a README file for a repository.\n"
"The repository has been cloned to the current directory and the files are available for inspection.\n"
"The readme will contain the following sections:\n"
"{all_sections}\n"
"Restrict your documentation to the section you are writing.\n"
"Read all files relevant to your task and generate comprehensive, clear documentation.\n"
"The section should include the following information:\n"
"{section_description}\n"
"Write the section in markdown format.\n"
"The section name will be automatically added as a second level heading.\n"
"Do not include the section name in your documentation.\n"
"Any sub-sections should be added as third level headings.\n"
"IMPORTANT: DO NOT assume that any existing documentation is correct. It may be inaccurate or outdated.\n"
"Create the documentation based SOLELY on the files actually present in the codebase.\n"
"EXTREMELY IMPORTANT: If files are mentioned in the README but are not present in the codebase, "
"do NOT mention them in your documentation. They do not exist and are not relevant.\n"
"If this section is not relevant to the repository, return an empty string.\n"
),
"generate_readme": (
"Create a descriptive title for the following README contents and create the README file:\n"
"{readme_content}\n"
"The content will be added automatically, your job is just to create a good title."
),
"create_pr": (
"You are creating a pull request for the documentation you have generated:\n"
@ -44,550 +72,20 @@ PROMPTS = {
" - Description of all changes made\n"
" - The main features and value of the documentation\n"
),
"library": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a software library intended"
" for use by developers.\n\n"
"Your README should be formatted in Markdown and include clearly defined section headers.\n\n"
"Please include the following sections:\n"
"1. **Project Overview**\n"
" - A concise description of what the library does\n"
" - Its main purpose and the problems it solves\n"
" - Key features and benefits\n\n"
"2. **Installation**\n"
" - Instructions for installing the library using relevant package managers (e.g., npm, pip, etc.)\n"
" - Mention any prerequisites if applicable\n\n"
"3. **API Reference**\n"
" - Generate a complete list of all publicly exported functions, classes, and constants from the library\n"
" - For each item, include:\n"
" - Its name\n"
" - Description of what it does\n"
" - Function signature with types and descriptions of parameters and return values\n"
" - Example usage\n"
" - Do not omit any significant exports — include everything that would be relevant to a developer using "
"this library\n"
" - Group related items (e.g., utility functions, configuration, components) under subsections if helpful\n"
"4. **Repository Structure**\n"
" - Briefly explain the purpose of key directories and files\n\n"
"5. **Contributing**\n"
" - Include basic instructions for how others can contribute\n"
" - Mention where to find or how to run tests (if available)\n\n"
"6. **License**\n"
" - State the type of license and include a link to the license file\n\n"
"Additional notes:\n"
"- Use bullet points and code blocks to improve readability\n"
"- Keep language friendly but technical and precise\n"
"- If configuration or extension points exist, explain them clearly\n\n"
),
"web_app": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a web application "
"project.\n\n"
"Format the output using Markdown with clear section headers and proper formatting.\n\n"
"Include the following sections:\n"
"1. **Project Overview**\n"
" - Describe the purpose and core functionality of the application\n"
" - Highlight key features and typical use cases\n\n"
"2. **Getting Started**\n"
" - Provide setup instructions to run the app locally\n"
" - Include steps for installing dependencies and starting the development server\n"
" - Mention any required environment variables and how to configure them (e.g., `.env` file)\n\n"
"3. **Deployment**\n"
" - Describe how to build and deploy the application to production\n"
" - Include relevant deployment commands and target platforms (e.g., Vercel, Netlify, Docker)\n\n"
"4. **Project Structure**\n"
" - Briefly explain the purpose of major folders and files (e.g., `src/`, `public/`, `components/`)\n\n"
"5. **Technologies Used**\n"
" - List the main frameworks, libraries, and tools (e.g., React, Vue, Vite, Tailwind)\n\n"
"6. **Feature Highlights**\n"
" - Describe core user-facing features or flows (e.g., authentication, dashboards, routing)\n\n"
"7. **Configuration**\n"
" - Mention any configurable options, build settings, or plugins used\n\n"
"8. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Use bullet points, code blocks, and links where appropriate\n"
"- Make sure commands are copy-pasteable\n"
"- Keep language clear and helpful for developers new to the project"
),
"api_service": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a backend service that "
"exposes an API (e.g., REST, GraphQL, or similar).\n\n"
"Format the output using Markdown with clear section headers and developer-friendly formatting.\n\n"
"Include the following sections:\n"
"1. **Project Overview**\n"
" - Describe the purpose of the API and its core functionality\n"
" - Highlight key features and typical use cases\n\n"
"2. **Getting Started**\n"
" - Provide setup instructions to run the service locally\n"
" - Include dependency installation and environment variable setup\n"
" - Describe how to start the server in development mode\n\n"
"3. **API Documentation**\n"
" - List the available endpoints or routes\n"
" - For each endpoint, include:\n"
" - Method (GET, POST, etc.)\n"
" - Path and parameters\n"
" - Example request and response\n"
" - Authentication requirements (if any)\n"
" - If an OpenAPI/Swagger spec or GraphQL schema exists, link to it\n\n"
"4. **Authentication**\n"
" - Describe how authentication works (e.g., API keys, OAuth, JWT)\n"
" - Include example headers or auth flow steps if needed\n\n"
"5. **Project Structure**\n"
" - Explain key folders and files, such as `routes/`, `controllers/`, `models/`\n\n"
"6. **Technologies Used**\n"
" - List major frameworks, libraries, or tools (e.g., Express, FastAPI, Prisma)\n\n"
"7. **Deployment**\n"
" - Describe how to deploy the service (e.g., Docker, CI/CD, cloud platforms)\n"
" - Include environment config or scaling considerations if relevant\n\n"
"8. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Use bullet points, code blocks, and sample payloads for clarity\n"
"- Focus on making the API easy to understand and consume\n"
"- Keep the tone clear and helpful for developers using the API"
),
"mobile_app": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a mobile application "
"project.\n\n"
"Format the output using Markdown with clear section headers and mobile developerfriendly formatting.\n\n"
"Include the following sections:\n"
"1. **Project Overview**\n"
" - Describe the purpose and core functionality of the app\n"
" - List key features and intended user experience\n\n"
"2. **Supported Platforms**\n"
" - Indicate whether the app runs on Android, iOS, or both\n"
" - Mention any platform-specific dependencies or limitations\n\n"
"3. **Getting Started**\n"
" - Provide setup instructions for running the app locally\n"
" - Include steps for installing dependencies and required SDKs (e.g., Android Studio, Xcode)\n"
" - Describe how to configure environment variables or API keys\n\n"
"4. **Running the App**\n"
" - Show commands to run the app on a simulator/emulator or real device\n"
" - Include platform-specific commands if needed (e.g., `npx react-native run-ios`, `flutter run`)\n\n"
"5. **Project Structure**\n"
" - Briefly explain the layout of important folders and files (e.g., `src/`, `ios/`, `android/`, `lib/`)\n\n"
"6. **Technologies Used**\n"
" - List the frameworks, SDKs, and libraries used (e.g., React Native, Flutter, Firebase)\n\n"
"7. **Key Screens and Features**\n"
" - Highlight core screens or flows within the app (e.g., login, profile, dashboard)\n"
" - Optionally include screenshots or descriptions of user interactions\n\n"
"8. **Build and Deployment**\n"
" - Provide steps for creating production builds\n"
" - Mention any tools or services used for distribution (e.g., TestFlight, Play Store, Expo)\n\n"
"9. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Use bullet points, code blocks, and platform-specific sections where needed\n"
"- Make sure setup steps work for both Android and iOS where applicable\n"
"- Keep the tone clear and helpful for mobile developers"
),
"tutorial": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is designed as an educational "
"tutorial or learning resource.\n\n"
"Format the output using Markdown with clear section headers and a logical, beginner-friendly structure.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Summarize the goal of the tutorial and what users will learn\n"
" - List key topics or technologies covered\n"
" - Mention any prerequisites (e.g., knowledge of a language, tools to install)\n\n"
"2. **Getting Started**\n"
" - Provide step-by-step setup instructions\n"
" - Include installation of dependencies, toolchain setup, and environment config\n"
" - Ensure instructions work on major operating systems\n\n"
"3. **Tutorial Structure**\n"
" - Break down the tutorial into sections, stages, or lessons\n"
" - Briefly describe what each section teaches or builds\n"
" - Link to key files or folders associated with each part\n\n"
"4. **Learning Outcomes**\n"
" - Clearly list the skills or concepts users will have mastered by the end\n\n"
"5. **Code Examples and Exercises**\n"
" - Mention inline code snippets, checkpoints, or interactive examples\n"
" - If exercises are included, describe how users should complete or test them\n\n"
"6. **Project Structure**\n"
" - Describe the layout of the repository and which files correspond to different tutorial stages\n\n"
"7. **Next Steps / Further Reading**\n"
" - Suggest where users can go after completing the tutorial\n"
" - Include links to additional docs, libraries, or related tutorials\n\n"
"8. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Use beginner-friendly language without dumbing things down\n"
"- Include code blocks, links, and visual structure to aid readability\n"
"- Help users stay oriented by reminding them what they've done and what's next"
),
"template": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which serves as a project starter or "
"boilerplate template.\n\n"
"Format the output using Markdown with clear section headers and developer-friendly formatting.\n\n"
"Include the following sections:\n"
"1. **Project Overview**\n"
" - Describe the purpose of this template and the type of projects it's meant for\n"
" - List key features, tools, or configurations included by default\n\n"
"2. **Getting Started**\n"
" - Provide instructions for cloning or copying the template\n"
" - Include setup steps: installing dependencies, environment config, and running locally\n\n"
"3. **Customization Guide**\n"
" - Explain which parts of the codebase are intended to be modified by users\n"
" - Offer guidance on how to rename, rebrand, or restructure parts of the template\n\n"
"4. **Project Structure**\n"
" - Describe the layout of important directories and files\n"
" - Highlight which files are meant for customization vs. boilerplate\n\n"
"5. **Technologies Used**\n"
" - List the frameworks, libraries, and tools integrated into the template (e.g., ESLint, Prettier, "
"Tailwind, Express)\n\n"
"6. **Use Cases**\n"
" - Provide example scenarios where this template is useful (e.g., 'Use this for building a REST API with "
"authentication')\n"
" - Link to live demos or projects built from this template if available\n\n"
"7. **Contributing**\n"
" - If the template is open to contributions, provide basic instructions for submitting improvements\n\n"
"8. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Focus on helping users get started quickly and confidently\n"
"- Use code blocks and examples to show how things work\n"
"- Encourage best practices and provide defaults users can trust or extend"
),
"cli_tool": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a command-line "
"interface (CLI) tool.\n\n"
"Format the output using Markdown with clear section headers and include clear command-line examples.\n\n"
"Include the following sections:\n"
"1. **Project Overview**\n"
" - Explain what the CLI tool does and why it's useful\n"
" - Mention common use cases or problems it solves\n\n"
"2. **Installation**\n"
" - Provide steps to install the tool (e.g., npm, pip, Homebrew, binary download)\n"
" - Mention any required dependencies or environment setup\n\n"
"3. **Usage**\n"
" - Show how to use the tool from the command line\n"
" - Include at least 23 example commands with explanations of the output\n"
" - Demonstrate the most common and useful flags or options\n"
" - If the tool supports subcommands, show examples of each\n\n"
"4. **Command Reference**\n"
" - List all available commands, flags, and options in a table or list format\n"
" - Explain each option clearly, including defaults and accepted values\n\n"
"5. **Configuration (if applicable)**\n"
" - Describe any optional or required configuration files (e.g., `.clirc`, `config.json`)\n"
" - Show example configurations and where to place them\n\n"
"6. **Project Structure**\n"
" - Briefly describe key files or folders related to the CLI's source code\n\n"
"7. **Contributing**\n"
" - Outline how to contribute, test changes, or add new commands\n\n"
"8. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Use code blocks for command examples and outputs\n"
"- Keep tone practical and clear, suitable for developers or power users\n"
"- Focus on usability and real-world examples of the tool in action"
),
"framework": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a software framework "
"designed to be extended or used as a foundation for building applications.\n\n"
"Format the output using Markdown with clear section headers and structured, developer-friendly formatting.\n\n"
"Include the following sections:\n"
"1. **Project Overview**\n"
" - Describe what the framework does and the type of projects it's built for\n"
" - Highlight key concepts and design philosophy (e.g., convention over configuration, modularity)\n\n"
"2. **Getting Started**\n"
" - Include steps for installing and initializing a new project using the framework\n"
" - Provide a minimal working example with code blocks\n\n"
"3. **Core Concepts**\n"
" - Explain the main components or building blocks (e.g., modules, services, lifecycle, routing, etc.)\n"
" - Include diagrams or conceptual overviews if helpful\n\n"
"4. **Extension Points**\n"
" - Describe how developers can extend the framework (e.g., plugins, middleware, custom components)\n"
" - Include examples of common extension use cases\n\n"
"5. **Project Structure**\n"
" - Explain the directory layout of a typical project using the framework\n"
" - Highlight where user code should live and where internal framework logic resides\n\n"
"6. **Technologies Used**\n"
" - List core dependencies, supported environments, or language-level features leveraged\n\n"
"7. **Best Practices**\n"
" - Offer guidance for structuring large projects, writing maintainable code, or following framework "
"conventions\n\n"
"8. **Contributing**\n"
" - Outline how contributors can report issues, add features, or build plugins for the framework\n\n"
"9. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Use clear examples and code snippets to explain key abstractions\n"
"- Keep the tone empowering and oriented toward other developers building on top of the framework\n"
"- Emphasize extensibility and conceptual clarity"
),
"data_science": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a data science or "
"machine learning project.\n\n"
"Format the output using Markdown with clear section headers and helpful formatting for technical readers.\n\n"
"Include the following sections:\n"
"1. **Project Overview**\n"
" - Explain the goal of the project (e.g., prediction, classification, analysis)\n"
" - Summarize key findings or outcomes (if applicable)\n\n"
"2. **Dataset**\n"
" - Describe the dataset used (source, size, structure)\n"
" - Include schema information or link to external data sources\n"
" - Mention whether the data is included in the repo or needs to be downloaded\n\n"
"3. **Installation and Setup**\n"
" - List dependencies and setup instructions (e.g., `requirements.txt`, `environment.yml`)\n"
" - Mention any additional setup (e.g., downloading data, creating folders)\n\n"
"4. **Project Structure**\n"
" - Explain the layout of scripts, notebooks, data folders, and model outputs\n"
" - Highlight the main entry points for running the pipeline\n\n"
"5. **Model Architecture and Training**\n"
" - Briefly describe the model(s) used and why they were chosen\n"
" - Include training scripts and command-line instructions\n"
" - Mention metrics used for evaluation\n\n"
"6. **Evaluation and Results**\n"
" - Summarize how the model was evaluated and key performance metrics\n"
" - Optionally include plots, confusion matrices, or sample outputs\n\n"
"7. **Inference / How to Use the Model**\n"
" - Explain how to run inference or apply the model to new data\n"
" - Include input/output formats and example commands or code\n\n"
"8. **Technologies Used**\n"
" - List key tools, libraries, and frameworks (e.g., scikit-learn, TensorFlow, pandas)\n\n"
"9. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Use code blocks and examples where appropriate\n"
"- Ensure reproducibility by including all necessary setup instructions\n"
"- Keep the tone professional and geared toward data scientists or ML engineers"
),
"plugin": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a plugin or extension "
"designed to integrate with a larger platform (such as a CMS, IDE, or framework).\n\n"
"Format the output using Markdown with clear section headers.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Describe what this plugin does and the host system it's built for\n"
" - List key features and benefits\n\n"
"2. **Installation**\n"
" - Provide installation instructions specific to the host platform\n"
" - Mention compatible versions and any prerequisites\n\n"
"3. **Usage**\n"
" - Show how to enable and configure the plugin\n"
" - Include code snippets or configuration steps\n\n"
"4. **Integration Points**\n"
" - Describe hooks, lifecycle methods, or extension APIs the plugin interacts with\n\n"
"5. **Project Structure**\n"
" - Briefly explain key files and folders\n\n"
"6. **Technologies Used**\n"
" - List frameworks, languages, or tooling\n\n"
"7. **License**\n"
" - State the license type and link to the license file"
),
"chrome_extension": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a Chrome extension "
"project.\n\n"
"Format the output using Markdown with clear section headers.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Describe the purpose and features of the extension\n\n"
"2. **Installation**\n"
" - Include instructions for loading the extension in Chrome (via the Extensions page or Chrome Web Store)\n"
" - Mention required permissions and how to review the manifest\n\n"
"3. **Usage**\n"
" - Explain how users interact with the extension (e.g., popup UI, context menu, background scripts)\n"
" - Include example scenarios or screenshots if applicable\n\n"
"4. **Project Structure**\n"
" - Briefly describe key files like `manifest.json`, `background.js`, and popup components\n\n"
"5. **Technologies Used**\n"
" - List libraries or frameworks (e.g., vanilla JS, React, Tailwind)\n\n"
"6. **License**\n"
" - State the license type and link to the license file"
),
"jupyter_notebook": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which consists of one or more "
"Jupyter notebooks.\n\n"
"Format the output using Markdown with clear section headers.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Describe the purpose of the notebooks and what they demonstrate or analyze\n\n"
"2. **Getting Started**\n"
" - Provide instructions for setting up the environment (e.g., installing Jupyter, dependencies, "
"virtualenv)\n"
" - Mention how to launch the notebooks (e.g., `jupyter notebook` or `jupyter lab`)\n\n"
"3. **Notebook Summary**\n"
" - List and briefly describe each notebook in the repo\n"
" - Mention whether they build on each other or are standalone\n\n"
"4. **Dataset (if applicable)**\n"
" - Describe any datasets used and where they come from\n\n"
"5. **Technologies Used**\n"
" - List libraries (e.g., pandas, matplotlib, scikit-learn)\n\n"
"6. **License**\n"
" - State the license type and link to the license file"
),
"infrastructure": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which contains "
"infrastructure-as-code or deployment configuration (e.g., Docker, Terraform, Ansible).\n\n"
"Format the output using Markdown with clear section headers.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Explain what infrastructure is being managed and its intended use\n\n"
"2. **Setup**\n"
" - Describe any prerequisites (e.g., installing Docker, Terraform CLI, cloud access credentials)\n"
" - Include instructions for initializing and applying the configuration\n\n"
"3. **Configuration Files**\n"
" - Explain the structure and purpose of major files (e.g., `main.tf`, `docker-compose.yml`, "
"`playbooks/`)\n\n"
"4. **Deployment Workflow**\n"
" - Describe how deployments are triggered and verified\n"
" - Mention any CI/CD pipelines, remote state management, or secrets handling\n\n"
"5. **Environments**\n"
" - Clarify how to deploy to multiple environments (dev, staging, prod)\n\n"
"6. **License**\n"
" - State the license type and link to the license file"
),
"smart_contract": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which contains smart contracts "
"written for a blockchain platform (e.g., Ethereum, Solana).\n\n"
"Format the output using Markdown with clear section headers.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Explain the purpose and functionality of the smart contracts\n"
" - Mention the target blockchain platform\n\n"
"2. **Installation and Setup**\n"
" - List dependencies and setup instructions (e.g., hardhat, anchor, solana-cli)\n"
" - Include local devnet instructions if applicable\n\n"
"3. **Contracts**\n"
" - Describe the main contract(s) and what each one does\n"
" - Include deployment steps and how to interact with them\n\n"
"4. **Testing**\n"
" - Explain how to run tests and what framework is used\n\n"
"5. **Project Structure**\n"
" - Describe layout of contracts, migrations, and test files\n\n"
"6. **License**\n"
" - State the license type and link to the license file"
),
"dapp": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a decentralized application "
"(dApp) that includes both smart contract(s) and a web-based frontend.\n\n"
"Format the output using Markdown with clear section headers and examples for both on-chain and off-chain "
"components.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Describe what the dApp does and the blockchain ecosystem it runs on\n"
" - Mention the smart contract platform (e.g., Ethereum, Solana, NEAR) and wallet compatibility\n\n"
"2. **Architecture**\n"
" - Provide a high-level diagram or explanation of how the frontend interacts with smart contracts\n"
" - Mention key technologies used on both sides (e.g., React, Ethers.js, Anchor, Web3.js)\n\n"
"3. **Getting Started**\n"
" - Provide setup instructions for both frontend and backend\n"
" - Include how to install dependencies, configure environment variables, and run locally\n\n"
"4. **Smart Contracts**\n"
" - Describe the deployed contracts and how to interact with them\n"
" - Include deployment instructions and test commands\n\n"
"5. **Frontend**\n"
" - Describe key UI components and user flows (e.g., connect wallet, mint token, submit vote)\n"
" - Mention any integrations with IPFS, oracles, or off-chain data\n\n"
"6. **License**\n"
" - State the license type and link to the license file"
),
"game": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a game or game engine "
"project.\n\n"
"Format the output using Markdown with clear section headers and provide clear instructions for playing and "
"modifying the game.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Describe the game concept, genre, and platform (e.g., browser, desktop, mobile)\n"
" - Mention gameplay goals or mechanics\n\n"
"2. **Installation and Setup**\n"
" - Provide instructions for installing dependencies and running the game\n"
" - Include setup for game engines or SDKs (e.g., Unity, Godot, Phaser, Unreal)\n\n"
"3. **Controls and Gameplay**\n"
" - Explain player controls and core mechanics\n"
" - Optionally include screenshots, video, or demo links\n\n"
"4. **Project Structure**\n"
" - Describe key files and folders (e.g., assets, levels, scripts)\n\n"
"5. **Technologies Used**\n"
" - List engines, frameworks, or libraries used to build the game\n\n"
"6. **License**\n"
" - State the license type and link to the license file"
),
"desktop_app": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which is a desktop application "
"project built with technologies like Electron, Tauri, Qt, or native frameworks.\n\n"
"Format the output using Markdown with clear section headers and platform-aware instructions.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Describe what the desktop app does and who it's for\n"
" - Mention platforms supported (e.g., Windows, macOS, Linux)\n\n"
"2. **Installation and Setup**\n"
" - Provide platform-specific install/build instructions\n"
" - Include steps for running the app in development and building a production release\n\n"
"3. **Usage**\n"
" - Describe the app's main features and user workflows\n"
" - Include screenshots if applicable\n\n"
"4. **Project Structure**\n"
" - Describe key files and folders (e.g., main process, renderer process, assets)\n\n"
"5. **Technologies Used**\n"
" - List major libraries, frameworks, and build tools\n\n"
"6. **License**\n"
" - State the license type and link to the license file"
),
"dataset": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository, which contains a dataset for "
"analysis, training, or research purposes.\n\n"
"Format the output using Markdown with clear section headers and data-focused structure.\n\n"
"Include the following sections:\n"
"1. **Overview**\n"
" - Describe what the dataset contains and its intended purpose\n"
" - Mention the source and whether it was collected, generated, or aggregated\n\n"
"2. **Dataset Details**\n"
" - Describe the structure and format (e.g., CSV, JSON, images, text)\n"
" - Include column definitions, schema, or data dictionaries\n"
" - Mention the number of records, size, and any notable characteristics\n\n"
"3. **Usage Instructions**\n"
" - Provide example code snippets for loading and using the dataset (e.g., pandas, SQL, etc.)\n"
" - Mention any preprocessing steps if needed\n\n"
"4. **Licensing and Terms of Use**\n"
" - State the license and any restrictions on usage or distribution\n"
" - Include citation or attribution instructions if required\n\n"
"5. **Related Work / Source Links**\n"
" - Link to original data sources, research papers, or related projects (if applicable)"
),
"other": (
"Please scan the repository and generate or update a complete and professional readme_prometheus.md file for this repository.\n\n"
"{previous_review_comments_section}\n\n"
"Analyze the contents of the repository to infer its intent, and format the README using Markdown with "
"clear section headers.\n\n"
"Include the following general sections, customizing them as needed based on the repository type:\n"
"1. **Project Overview**\n"
" - Describe the purpose of the project and its main functionality\n"
" - Summarize what the project does and who it's for\n\n"
"2. **Getting Started**\n"
" - Include setup or usage instructions based on the repo's structure\n"
" - Mention installation steps, dependencies, and commands to run or use the project\n\n"
"3. **Features / Capabilities**\n"
" - List the core features or components of the project\n"
" - Include relevant examples, demos, or configurations if applicable\n\n"
"4. **Project Structure**\n"
" - Describe the layout of files and folders, especially any key scripts, configs, or assets\n\n"
"5. **Technologies Used**\n"
" - List any major frameworks, libraries, or languages identified in the project\n\n"
"6. **Usage Examples** (if applicable)\n"
" - Include example commands or steps showing how to use the project\n\n"
"7. **License**\n"
" - State the license type and link to the license file\n\n"
"Additional Notes:\n"
"- Focus on making the README useful and descriptive, even if the project type is ambiguous\n"
"- Use best judgment to tailor the content to the actual functionality and audience of the project\n"
"- Avoid placeholder text and strive to extract real, useful information from the codebase"
),
"review_readme_file": (
"Review the readme_prometheus.md file in the repository and evaluate its quality and relevance to the repository.\n\n"
"Review the README_Prometheus.md file in the repository and evaluate its quality and "
"relevance to the repository.\n\n"
"Please analyze:\n"
"1. Is the readme_prometheus.md file related to this specific repository? (Does it describe the actual code and purpose of this repo?)\n"
"1. Is the README_Prometheus.md file related to this specific repository? (Does it describe the actual code "
"and purpose of this repo?)\n"
"2. Does it correctly explain the repository's purpose, features, and functionality?\n"
"3. Is it comprehensive enough to help users understand and use the repository?\n"
"4. Does it follow best practices for README documentation?\n\n"
"Use the validate_implementation tool to submit your findings.\n"
"Use the validate_implementation tool to submit your findings.\n"
"IMPORTANT: Do not assume that an existing README is correct. "
"Evaluate README_Prometheus.md against the codebase.\n"
"DO NOT consider the filename in your analysis, only the content.\n"
"STOP after submitting the review report."
),
"previous_review_comments": (
"Here are the comments from the previous review:\n"
),
"previous_review_comments": ("Here are the comments from the previous review:\n"),
}

View File

@ -9,9 +9,14 @@ from prometheus_swarm.workflows.utils import (
check_required_env_vars,
cleanup_repository,
validate_github_auth,
setup_repository
setup_repository,
)
from src.workflows.repoSummarizer.prompts import PROMPTS
from src.workflows.repoSummarizer.docs_sections import (
DOCS_SECTIONS,
INITIAL_SECTIONS,
FINAL_SECTIONS,
)
class Task:
@ -81,7 +86,11 @@ class RepoSummarizerWorkflow(Workflow):
self.context["base"] = "main"
# Set up repository directory
setup_result = setup_repository(self.context["repo_url"], github_token=os.getenv("GITHUB_TOKEN"), github_username=os.getenv("GITHUB_USERNAME"))
setup_result = setup_repository(
self.context["repo_url"],
github_token=os.getenv("GITHUB_TOKEN"),
github_username=os.getenv("GITHUB_USERNAME"),
)
if not setup_result["success"]:
raise Exception(f"Failed to set up repository: {setup_result['message']}")
self.context["github_token"] = os.getenv("GITHUB_TOKEN")
@ -145,7 +154,7 @@ class RepoSummarizerWorkflow(Workflow):
}
# Get prompt name for README generation
prompt_name = repo_classification_result["data"].get("prompt_name")
prompt_name = repo_classification_result["data"].get("repo_type")
if not prompt_name:
log_error(
Exception("No prompt name returned from repository classification"),
@ -163,7 +172,9 @@ class RepoSummarizerWorkflow(Workflow):
prompt_name = "other"
readme_result = self.generate_readme_file(prompt_name)
if not readme_result or not readme_result.get("success"):
log_error(Exception("README generation failed"), "README generation failed")
log_error(
Exception("README generation failed"), "README generation failed"
)
return {
"success": False,
"message": "README generation failed",
@ -179,15 +190,17 @@ class RepoSummarizerWorkflow(Workflow):
"data": None,
}
log_key_value("README review result", review_result.get("data"))
if review_result.get("success") and review_result.get("data").get("recommendation") == "APPROVE":
if (
review_result.get("success")
and review_result.get("data").get("recommendation") == "APPROVE"
):
result = self.create_pull_request()
return result
else:
self.context["previous_review_comments_section"] = PROMPTS["previous_review_comments"] + review_result.get("data").get("comment")
self.context["previous_review_comments_section"] = PROMPTS[
"previous_review_comments"
] + review_result.get("data").get("comment")
return {
"success": False,
"message": "README Review Exceed Max Attempts",
@ -206,6 +219,7 @@ class RepoSummarizerWorkflow(Workflow):
"message": f"Repository classification workflow failed: {str(e)}",
"data": None,
}
def review_readme_file(self, readme_result):
"""Execute the issue generation workflow."""
try:
@ -220,16 +234,20 @@ class RepoSummarizerWorkflow(Workflow):
"data": None,
}
def generate_readme_file(self, prompt_name):
"""Execute the issue generation workflow."""
def generate_readme_section(self, section):
"""Create the subsections of the README file."""
self.context["section_name"] = section["name"]
self.context["section_description"] = section["description"]
try:
# ==================== Generate README file ====================
log_section("GENERATING README FILE")
generate_readme_file_phase = phases.ReadmeGenerationPhase(
workflow=self, prompt_name=prompt_name
log_section("GENERATING README SECTION")
generate_readme_section_phase = phases.ReadmeSectionGenerationPhase(
workflow=self
)
readme_result = generate_readme_file_phase.execute()
readme_result = generate_readme_section_phase.execute()
# Check README Generation Result
if not readme_result or not readme_result.get("success"):
@ -249,13 +267,66 @@ class RepoSummarizerWorkflow(Workflow):
"data": None,
}
def generate_readme_file(self, repo_type):
"""Generate the README file."""
readme_sections_spec = (
list(INITIAL_SECTIONS)
+ list(DOCS_SECTIONS[repo_type])
+ list(FINAL_SECTIONS)
)
self.context["repo_type"] = repo_type
self.context["all_sections"] = ", ".join(
[section["name"] for section in readme_sections_spec]
)
try:
readme_sections = []
for section in readme_sections_spec:
readme_result = self.generate_readme_section(section)
print("README RESULT", readme_result)
if not readme_result or not readme_result.get("success"):
log_error(
Exception(readme_result.get("error", "No result")),
"Readme file generation failed",
)
return None
readme_section_content = readme_result.get("data", {}).get(
"section_content"
)
if readme_section_content:
readme_section_title = readme_result.get("data", {}).get(
"section_name"
)
readme_section = (
f"## {readme_section_title}\n\n" f"{readme_section_content}"
)
readme_sections.append(readme_section)
self.context["readme_content"] = "\n\n".join(readme_sections)
generate_readme_file_phase = phases.ReadmeFileCreationPhase(workflow=self)
return generate_readme_file_phase.execute()
except Exception as e:
log_error(e, "Readme file generation workflow failed")
return {
"success": False,
"message": f"Readme file generation workflow failed: {str(e)}",
"data": None,
}
def create_pull_request(self):
"""Create a pull request for the README file."""
try:
log_section("CREATING PULL REQUEST")
# Add required PR title and description parameters to context
self.context["title"] = f"Prometheus: Add README for {self.context['repo_name']}"
self.context["title"] = (
f"Prometheus: Add README for {self.context['repo_name']}"
)
self.context["description"] = (
f"This PR adds a README file for the {self.context['repo_name']} repository."
)

View File

@ -1,57 +0,0 @@
"""Entry point for the todo creator workflow."""
import sys
import os
import argparse
from dotenv import load_dotenv
from src.workflows.starRepo.workflow import StarRepoWorkflow
from src.workflows.starRepo.prompts import PROMPTS
from prometheus_swarm.clients import setup_client
# Load environment variables
load_dotenv()
def main():
    """Run the star-repo workflow.

    Parses command-line arguments, initializes the model client, and stars
    the target GitHub repository via StarRepoWorkflow. Exits with status 1
    if the workflow reports failure.
    """
    # NOTE: the original docstring/description/error text said "todo creator";
    # that was copy-pasted from another entry point — this script stars a repo.
    parser = argparse.ArgumentParser(
        description="Star a GitHub repository using the star-repo workflow"
    )
    parser.add_argument(
        "--repo",
        type=str,
        required=True,
        help="GitHub repository URL (e.g., https://github.com/owner/repo)",
    )
    parser.add_argument(
        "--model",
        type=str,
        default="anthropic",
        choices=["anthropic", "openai", "xai"],
        help="Model provider to use (default: anthropic)",
    )
    args = parser.parse_args()

    # Initialize the LLM client for the chosen provider.
    client = setup_client(args.model)

    # Run the star-repo workflow against the requested repository.
    workflow = StarRepoWorkflow(
        client=client,
        prompts=PROMPTS,
        repo_url=args.repo,
    )
    result = workflow.run()
    if not result or not result.get("success"):
        print("Star repo workflow failed")
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@ -1,15 +0,0 @@
"""Task decomposition workflow phases implementation."""
from prometheus_swarm.workflows.base import WorkflowPhase, Workflow
class ReadmeGenerationPhase(WorkflowPhase):
    """Workflow phase that drives README generation for a repository.

    Thin configuration wrapper: it binds the ``generate_readme_file`` prompt
    and the file/VCS tools the agent needs to write and push the README.
    """

    def __init__(self, workflow: Workflow, conversation_id: str = None):
        # Tools the agent may use: inspect the repo, write the README,
        # and commit/push the result.
        tools = ["read_file", "write_file", "list_files", "commit_and_push"]
        super().__init__(
            workflow=workflow,
            name="Readme Generation",
            prompt_name="generate_readme_file",
            available_tools=tools,
            conversation_id=conversation_id,
        )

View File

@ -1,29 +0,0 @@
"""Prompts for the repository summarization workflow."""
PROMPTS = {
"system_prompt": (
"You are an expert software architect and technical lead specializing in summarizing "
"repositories into comprehensive documentation. You excel at analyzing codebases "
"and creating clear, structured documentation."
),
"generate_readme_file": (
"Generate a comprehensive README file for the following repository:\n"
"Repository: {repo_url}\n\n"
"Please include:\n"
"1. Project Overview\n"
" - Purpose and main functionality\n"
" - Key features\n"
"2. Repository Structure\n"
" - Detailed breakdown of directories and their purposes\n"
" - Key files and their roles\n"
"3. Technical Details\n"
" - Technologies used\n"
" - Architecture overview\n"
"4. File Contents\n"
" - Specific description of each significant file\n\n"
"Format the output in markdown, ensuring clear section headers and proper formatting."
"Please commit and push the changes to the repository after generating the README file."
),
}

View File

@ -1,141 +0,0 @@
"""Task decomposition workflow implementation."""
import os
from github import Github
from prometheus_swarm.workflows.base import Workflow
from prometheus_swarm.tools.github_operations.implementations import star_repository
from prometheus_swarm.utils.logging import log_section, log_key_value, log_error
from src.workflows.repoSummarizer import phases
from prometheus_swarm.workflows.utils import (
check_required_env_vars,
validate_github_auth,
)
class Task:
    """A unit of work with a title, description, and acceptance criteria."""

    # Canonical field order shared by serialization and deserialization.
    _FIELDS = ("title", "description", "acceptance_criteria")

    def __init__(self, title: str, description: str, acceptance_criteria: list[str]):
        self.title = title
        self.description = description
        self.acceptance_criteria = acceptance_criteria

    def to_dict(self) -> dict:
        """Convert task to dictionary format."""
        return {field: getattr(self, field) for field in self._FIELDS}

    @classmethod
    def from_dict(cls, data: dict) -> "Task":
        """Create task from dictionary."""
        return cls(*(data[field] for field in cls._FIELDS))
class StarRepoWorkflow(Workflow):
def __init__(
self,
client,
prompts,
repo_url,
):
# Extract owner and repo name from URL
# URL format: https://github.com/owner/repo
parts = repo_url.strip("/").split("/")
repo_owner = parts[-2]
repo_name = parts[-1]
super().__init__(
client=client,
prompts=prompts,
repo_url=repo_url,
repo_owner=repo_owner,
repo_name=repo_name,
)
self.context["repo_owner"] = repo_owner
self.context["repo_name"] = repo_name
self.context["github_token"] = os.getenv("GITHUB_TOKEN")
def setup(self):
"""Set up repository and workspace."""
check_required_env_vars(["GITHUB_TOKEN", "GITHUB_USERNAME"])
validate_github_auth(os.getenv("GITHUB_TOKEN"), os.getenv("GITHUB_USERNAME"))
# # Get the default branch from GitHub
# try:
# gh = Github(os.getenv("GITHUB_TOKEN"))
# repo = gh.get_repo(
# f"{self.context['repo_owner']}/{self.context['repo_name']}"
# )
# self.context["base_branch"] = repo.default_branch
# log_key_value("Default branch", self.context["base_branch"])
# except Exception as e:
# log_error(e, "Failed to get default branch, using 'main'")
# self.context["base_branch"] = "main"
# Set up repository directory
# repo_path, original_dir = setup_repo_directory()
# self.context["repo_path"] = repo_path
# self.original_dir = original_dir
# # Fork and clone repository
# log_section("FORKING AND CLONING REPOSITORY")
# fork_result = fork_repository(
# f"{self.context['repo_owner']}/{self.context['repo_name']}",
# self.context["repo_path"],
# )
# if not fork_result["success"]:
# error = fork_result.get("error", "Unknown error")
# log_error(Exception(error), "Fork failed")
# raise Exception(error)
# # Enter repo directory
# os.chdir(self.context["repo_path"])
# # Configure Git user info
# setup_git_user_config(self.context["repo_path"])
# Get current files for context
def cleanup(self):
"""Cleanup workspace."""
# cleanup_repository(self.original_dir, self.context.get("repo_path", ""))
# Make sure we're not in the repo directory before cleaning up
# if os.getcwd() == self.context.get("repo_path", ""):
# os.chdir(self.original_dir)
# # Clean up the repository directory
# cleanup_repo_directory(self.original_dir, self.context.get("repo_path", ""))
# Clean up the MongoDB
def run(self):
star_repo_result = self.start_star_repo()
return star_repo_result
def start_star_repo(self):
    """Execute the repository-starring workflow.

    Validates credentials via setup(), then stars the configured
    repository through the GitHub API.

    Returns:
        dict | None: the star_repository result dict on success,
        None when the star call reports failure, or an error dict
        (success=False) when an exception is raised.
    """
    try:
        self.setup()
        # ==================== Star repository ====================
        star_repo_result = star_repository(
            self.context["repo_owner"],
            self.context["repo_name"],
            self.context["github_token"],
        )
        if not star_repo_result or not star_repo_result.get("success"):
            # Guard: star_repository may return None, so don't call
            # .get() on the result before checking it (previously an
            # AttributeError on the failure path).
            error = (
                star_repo_result.get("error", "No result")
                if star_repo_result
                else "No result"
            )
            log_error(Exception(error), "Repository star failed")
            return None
        return star_repo_result
    except Exception as e:
        # Message fixed: previously referred to README generation,
        # copied from a different workflow.
        log_error(e, "Repository star workflow failed")
        print(e)
        return {
            "success": False,
            "message": f"Repository star workflow failed: {str(e)}",
            "data": None,
        }

View File

@ -1,58 +0,0 @@
"""Entry point for the todo creator workflow."""
import sys
import os
import argparse
from dotenv import load_dotenv
from src.workflows.starRepoAudit.workflow import StarRepoAuditWorkflow
from src.workflows.starRepoAudit.prompts import PROMPTS
from prometheus_swarm.clients import setup_client
# Load environment variables
load_dotenv()
def main():
    """Run the star-repo audit workflow from the command line.

    Parses CLI arguments, builds the LLM client, runs the
    StarRepoAuditWorkflow, and exits non-zero on failure.
    """
    parser = argparse.ArgumentParser(
        # Fixed: description previously copied from the todo-creator script.
        description="Audit whether a GitHub user has starred a repository"
    )
    parser.add_argument(
        "--repo",
        type=str,
        required=True,
        help="GitHub repository URL (e.g., https://github.com/owner/repo)",
    )
    parser.add_argument(
        # Generalized: the username was previously hard-coded; the old
        # value is kept as the default for backward compatibility.
        "--username",
        type=str,
        default="HermanL02",
        help="GitHub username to audit (default: HermanL02)",
    )
    parser.add_argument(
        "--model",
        type=str,
        default="anthropic",
        choices=["anthropic", "openai", "xai"],
        help="Model provider to use (default: anthropic)",
    )
    args = parser.parse_args()

    # Initialize the LLM client for the selected provider
    client = setup_client(args.model)

    # Run the star-repo audit workflow
    workflow = StarRepoAuditWorkflow(
        client=client,
        prompts=PROMPTS,
        repo_url=args.repo,
        github_username=args.username,
    )
    result = workflow.run()
    if not result or not result.get("success"):
        print("Star repo audit workflow failed")
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@ -1,15 +0,0 @@
"""Task decomposition workflow phases implementation."""
from src.workflows.base import WorkflowPhase, Workflow
class ReadmeGenerationPhase(WorkflowPhase):
    """Workflow phase that drives README generation via the LLM tool loop."""

    def __init__(self, workflow: Workflow, conversation_id: str = None):
        # Tools the model may invoke while producing and publishing the README
        tools = [
            "read_file",
            "write_file",
            "list_files",
            "commit_and_push",
        ]
        super().__init__(
            workflow=workflow,
            prompt_name="generate_readme_file",
            available_tools=tools,
            conversation_id=conversation_id,
            name="Readme Generation",
        )

View File

@ -1,29 +0,0 @@
"""Prompts for the repository summarization workflow."""
PROMPTS = {
"system_prompt": (
"You are an expert software architect and technical lead specializing in summarizing "
"repositories into comprehensive documentation. You excel at analyzing codebases "
"and creating clear, structured documentation."
),
"generate_readme_file": (
"Generate a comprehensive README file for the following repository:\n"
"Repository: {repo_url}\n\n"
"Please include:\n"
"1. Project Overview\n"
" - Purpose and main functionality\n"
" - Key features\n"
"2. Repository Structure\n"
" - Detailed breakdown of directories and their purposes\n"
" - Key files and their roles\n"
"3. Technical Details\n"
" - Technologies used\n"
" - Architecture overview\n"
"4. File Contents\n"
" - Specific description of each significant file\n\n"
"Format the output in markdown, ensuring clear section headers and proper formatting."
"Please commit and push the changes to the repository after generating the README file."
),
}

View File

@ -1,151 +0,0 @@
"""Task decomposition workflow implementation."""
import os
from github import Github
from prometheus_swarm.workflows.base import Workflow
from prometheus_swarm.tools.github_operations.implementations import (
get_user_starred_repos,
)
from prometheus_swarm.utils.logging import log_section, log_key_value, log_error
from src.workflows.repoSummarizer import phases
from prometheus_swarm.workflows.utils import (
check_required_env_vars,
validate_github_auth,
)
class Task:
    """A unit of work that round-trips to and from a plain dictionary."""

    def __init__(self, title: str, description: str, acceptance_criteria: list[str]):
        self.title = title
        self.description = description
        self.acceptance_criteria = acceptance_criteria

    def to_dict(self) -> dict:
        """Serialize this task to its canonical dictionary form."""
        return {
            "title": self.title,
            "description": self.description,
            "acceptance_criteria": self.acceptance_criteria,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "Task":
        """Build a Task from its dictionary form (inverse of to_dict)."""
        return cls(
            data["title"],
            data["description"],
            data["acceptance_criteria"],
        )
class StarRepoAuditWorkflow(Workflow):
    """Workflow that checks whether a given GitHub user has starred a repo.

    The audit is read-only: it fetches the user's starred repositories and
    looks for the configured owner/name pair. Fork/clone setup is disabled.
    """

    def __init__(
        self,
        client,
        prompts,
        repo_url,
        github_username,
    ):
        # Extract owner and repo name from URL
        # URL format: https://github.com/owner/repo
        parts = repo_url.strip("/").split("/")
        repo_owner = parts[-2]
        repo_name = parts[-1]
        super().__init__(
            client=client,
            prompts=prompts,
            repo_url=repo_url,
            repo_owner=repo_owner,
            repo_name=repo_name,
            github_username=github_username,
        )
        # Mirror key fields into the shared workflow context so phases
        # and helper methods can read them without touching the kwargs.
        self.context["repo_owner"] = repo_owner
        self.context["repo_name"] = repo_name
        self.context["github_username"] = github_username

    def setup(self):
        """Set up repository and workspace.

        Only credential validation remains active; repository fork/clone
        and default-branch detection are disabled (see git history).
        """
        check_required_env_vars(["GITHUB_TOKEN", "GITHUB_USERNAME"])
        validate_github_auth(os.getenv("GITHUB_TOKEN"), os.getenv("GITHUB_USERNAME"))
        # Disabled: default-branch lookup, repo directory setup, fork/clone,
        # and git user configuration were removed from this audit workflow.

    def cleanup(self):
        """Cleanup workspace.

        No-op: nothing is checked out on disk for this workflow.
        """
        # Disabled: repo directory cleanup was removed along with setup's
        # fork/clone steps.
        # Clean up the MongoDB

    def run(self):
        # NOTE(review): check_star_repo returns a dict even on the
        # "not starred" path ({"success": False, ...}), which is truthy,
        # so this guard only catches the `return False` error paths.
        star_repo_result = self.check_star_repo()
        if not star_repo_result:
            log_error(
                Exception("Repository is not starred"), "Repository is not starred"
            )
            return False
        return star_repo_result

    def check_star_repo(self):
        """Check if the repository is starred.

        Returns a {"success": bool, "result": str} dict, or False when the
        starred-repo lookup itself fails or raises.
        """
        try:
            print(self.context["github_username"])
            starred_repos = get_user_starred_repos(self.context["github_username"])
            print(starred_repos)
            if not starred_repos or not starred_repos.get("success"):
                log_error(
                    Exception(starred_repos.get("error", "No result")),
                    "Failed to get starred repositories",
                )
                return False
            # check if the repository is in the starred_repos
            # (result payload shape: data.starred_repos[*].full_name)
            if f"{self.context['repo_owner']}/{self.context['repo_name']}" in [
                repo["full_name"] for repo in starred_repos["data"]["starred_repos"]
            ]:
                print("Repository is starred")
                return {"success": True, "result": "Repository is starred"}
            else:
                print("Repository is not starred")
                return {"success": False, "result": "Repository is not starred"}
        except Exception as e:
            log_error(e, "Failed to check if repository is starred")
            return False

View File

@ -0,0 +1,26 @@
# Test Configuration
task_id: "62n2aAVVV42rtt53wxieotTdnKpTRjiChsHYdSxHDhAZ" # Task ID from config-task.yml
middle_server_url: "http://localhost:3000"
# base_port: 5000 # Base port for worker servers
# max_rounds: 1 # Maximum number of test rounds
# Paths
# relative to the test directory
data_dir: data/minimal # Directory containing test data
# workers_config: workers.json # Worker configuration file
# MongoDB Configuration
mongodb:
database: summarizer
collections:
docs:
data_file: docs.json # Relative to data_dir
required_count: 1 # Minimum number of documents required
summaries:
data_file: summaries.json
required_count: 1
systemprompts:
data_file: prompts.json
required_count: 1
audits:
required_count: 0 # No data file, just needs to exist

View File

@ -0,0 +1,10 @@
[
{
"taskId": "62n2aAVVV42rtt53wxieotTdnKpTRjiChsHYdSxHDhAZ",
"content": "This is a test document that needs to be summarized. It contains multiple sentences and paragraphs to test the summarization functionality. The document discusses various topics and should be processed by the workers to generate a concise summary.",
"metadata": {
"source": "test",
"type": "text"
}
}
]

View File

@ -0,0 +1,7 @@
[
{
"taskId": "62n2aAVVV42rtt53wxieotTdnKpTRjiChsHYdSxHDhAZ",
"prompt": "Please provide a concise summary of the following document, focusing on the main points and key information.",
"type": "summarization"
}
]

View File

@ -0,0 +1,10 @@
[
{
"taskId": "62n2aAVVV42rtt53wxieotTdnKpTRjiChsHYdSxHDhAZ",
"summary": "Test document containing multiple sentences for summarization testing.",
"metadata": {
"source": "test",
"type": "summary"
}
}
]

View File

@ -6,7 +6,6 @@ import dotenv
import argparse
import uuid
from .steps import steps
dotenv.load_dotenv()
@ -21,21 +20,24 @@ def parse_args():
return parser.parse_args()
def post_load_callback(db):
def add_uuids(db):
    """Post-load callback to process MongoDB data after JSON import"""
    # NOTE(review): depends on the module-level `runner` global being
    # assigned before the callback fires — confirm main() sets it first.
    # NOTE(review): indentation of the replace_one calls was reconstructed;
    # verify each document is only rewritten when it was modified.
    # Process todos collection
    todos = list(db.todos.find({"taskId": runner.config.task_id}))
    for todo in todos:
        if "uuid" not in todo:
            todo["uuid"] = str(uuid.uuid4())
            db.todos.replace_one({"_id": todo["_id"]}, todo)
    # Process docs collection
    docs = list(db.docs.find({"taskId": runner.config.task_id}))
    for doc in docs:
        if "uuid" not in doc:
            doc["uuid"] = str(uuid.uuid4())
            db.docs.replace_one({"_id": doc["_id"]}, doc)
    # Process issues collection
    issues = list(db.issues.find({"taskId": runner.config.task_id}))
    for issue in issues:
        if "uuid" not in issue:
            issue["uuid"] = str(uuid.uuid4())
            db.issues.replace_one({"_id": issue["_id"]}, issue)
    # Process summaries collection: summaries also get linked to a doc,
    # so the write happens after both fields are (possibly) filled in.
    summaries = list(db.summaries.find({"taskId": runner.config.task_id}))
    for summary in summaries:
        if "uuid" not in summary:
            summary["uuid"] = str(uuid.uuid4())
        if "docUuid" not in summary and docs:
            # Link to first doc for simplicity
            summary["docUuid"] = docs[0]["uuid"]
        db.summaries.replace_one({"_id": summary["_id"]}, summary)
# Global reference to the test runner
@ -46,12 +48,15 @@ def main():
global runner
args = parse_args()
# Import steps here to avoid circular imports
from .steps import steps
# Create test runner with config from YAML
base_dir = Path(__file__).parent
runner = TestRunner(
steps=steps,
config_file=base_dir / "config.yaml",
config_overrides={"post_load_callback": post_load_callback},
config_overrides={"post_load_callback": add_uuids},
)
# Run test sequence

View File

@ -0,0 +1,72 @@
"""Stage for executing worker tasks."""
import requests
from prometheus_test.utils import create_signature
def prepare(runner, worker):
    """Build the signed request body for the worker-task call.

    The same fetch-todo payload is signed twice: once with the staking
    key and once with the public key.
    """
    round_number = runner.current_round
    task_id = runner.config.task_id

    payload = {
        "taskId": task_id,
        "roundNumber": round_number,
        "action": "fetch-todo",
        "githubUsername": worker.env.get("GITHUB_USERNAME"),
        "stakingKey": worker.staking_public_key,
        "pubKey": worker.public_key,
    }

    return {
        "taskId": task_id,
        "roundNumber": round_number,
        "stakingKey": worker.staking_public_key,
        "pubKey": worker.public_key,
        "stakingSignature": create_signature(worker.staking_signing_key, payload),
        "publicSignature": create_signature(worker.public_signing_key, payload),
    }
def execute(runner, worker, data):
    """POST the worker task and record any returned PR URL in runner state.

    A 409 response means no eligible todos — an expected outcome that is
    reported as success so the test sequence can continue.
    """
    response = requests.post(
        f"{worker.url}/worker-task/{data['roundNumber']}",
        json={"signature": data["stakingSignature"], "stakingKey": data["stakingKey"]},
    )
    result = response.json()

    if response.status_code == 409:
        print(
            f"{result.get('message', 'No eligible todos')} for {worker.name} - continuing"
        )
        return {"success": True, "message": result.get("message")}

    if result.get("success") and "pr_url" in result:
        round_state = runner.state["rounds"].setdefault(str(runner.current_round), {})

        # Record the PR URL and full submission payload for this worker
        round_state.setdefault("pr_urls", {})[worker.name] = result["pr_url"]
        round_state.setdefault("submission_data", {})[worker.name] = {
            "githubUsername": worker.env.get("GITHUB_USERNAME"),
            "nodeType": "worker",
            "prUrl": result["pr_url"],
            "repoName": result.get("repoName"),
            "repoOwner": result.get("repoOwner"),
            "roundNumber": runner.current_round,
            "taskId": runner.config.task_id,
            "uuid": result.get("uuid"),  # Should be provided by the worker
            "stakingKey": worker.staking_public_key,
            "pubKey": worker.public_key,
        }

    return result

View File

@ -0,0 +1,42 @@
"""Test step definitions."""
from prometheus_test import TestStep
from functools import partial
from .stages import (
worker_task,
worker_submission,
worker_audit,
audit_results,
)
# Ordered end-to-end sequence: task execution -> submission -> audit ->
# audit-result update. Each TestStep names the worker entry (from
# workers.json) that performs it.
# NOTE(review): the first step runs on "worker" while subsequent steps use
# "worker1"/"worker2" — confirm "worker" is a real config entry and not a
# typo for "worker1".
steps = [
    TestStep(
        name="worker_task",
        description="Execute worker task",
        prepare=worker_task.prepare,
        execute=worker_task.execute,
        worker="worker",
    ),
    TestStep(
        name="worker_submission",
        description="Submit worker task",
        prepare=worker_submission.prepare,
        execute=worker_submission.execute,
        worker="worker1",
    ),
    TestStep(
        name="worker_audit",
        description="Worker2 audits Worker1",
        prepare=partial(worker_audit.prepare, target_name="worker1"),
        execute=worker_audit.execute,
        worker="worker2",
    ),
    TestStep(
        name="audit_results",
        description="Update audit results",
        prepare=partial(audit_results.prepare, role="worker"),
        execute=audit_results.execute,
        worker="worker1",
    ),
]

View File

@ -0,0 +1,35 @@
{
"leader": {
"env_vars": {
"GITHUB_TOKEN": "LEADER_GITHUB_TOKEN",
"GITHUB_USERNAME": "LEADER_GITHUB_USERNAME",
"ANTHROPIC_API_KEY": "LEADER_ANTHROPIC_API_KEY"
},
"keypairs": {
"staking": "LEADER_STAKING_KEYPAIR",
"public": "LEADER_PUBLIC_KEYPAIR"
}
},
"worker1": {
"env_vars": {
"GITHUB_TOKEN": "WORKER1_GITHUB_TOKEN",
"GITHUB_USERNAME": "WORKER1_GITHUB_USERNAME",
"ANTHROPIC_API_KEY": "WORKER1_ANTHROPIC_API_KEY"
},
"keypairs": {
"staking": "WORKER1_STAKING_KEYPAIR",
"public": "WORKER1_PUBLIC_KEYPAIR"
}
},
"worker2": {
"env_vars": {
"GITHUB_TOKEN": "WORKER2_GITHUB_TOKEN",
"GITHUB_USERNAME": "WORKER2_GITHUB_USERNAME",
"ANTHROPIC_API_KEY": "WORKER2_ANTHROPIC_API_KEY"
},
"keypairs": {
"staking": "WORKER2_STAKING_KEYPAIR",
"public": "WORKER2_PUBLIC_KEYPAIR"
}
}
}

View File

@ -1,7 +1,6 @@
import { getOrcaClient } from "@_koii/task-manager/extensions";
import { namespaceWrapper, TASK_ID } from "@_koii/namespace-wrapper";
import "dotenv/config";
import { getRandomNodes } from "../utils/leader";
import { getExistingIssues } from "../utils/existingIssues";
import { status, middleServerUrl } from "../utils/constant";
import dotenv from "dotenv";
@ -12,7 +11,6 @@ import { actionMessage } from "../utils/constant";
import { errorMessage } from "../utils/constant";
dotenv.config();
export async function task(roundNumber: number): Promise<void> {
/**
* Run your task and store the proofs to be submitted for auditing
@ -20,46 +18,68 @@ export async function task(roundNumber: number): Promise<void> {
* The submission of the proofs is done in the submission function
*/
// FORCE TO PAUSE 30 SECONDS
// No submission on Round 0 so no need to trigger fetch audit result before round 3
// Changed from 3 to 4 to have more time
// No submission on Round 0 so no need to trigger fetch audit result before round 3
// Changed from 3 to 4 to have more time
if (roundNumber >= 4) {
const triggerFetchAuditResult = await fetch(`${middleServerUrl}/api/builder/summarizer/trigger-fetch-audit-result`, {
const triggerFetchAuditResult = await fetch(`${middleServerUrl}/summarizer/worker/update-audit-result`, {
method: "POST",
headers: {
"Content-Type": "application/json"
"Content-Type": "application/json",
},
body: JSON.stringify({ taskId: TASK_ID, round: roundNumber - 4 })
body: JSON.stringify({ taskId: TASK_ID, round: roundNumber - 4 }),
});
console.log(`[TASK] Trigger fetch audit result for round ${roundNumber - 3}. Result is ${triggerFetchAuditResult.status}.`);
console.log(
`[TASK] Trigger fetch audit result for round ${roundNumber - 3}. Result is ${triggerFetchAuditResult.status}.`,
);
}
console.log(`[TASK] EXECUTE TASK FOR ROUND ${roundNumber}`);
try {
const orcaClient = await getOrcaClient();
// check if the env variable is valid
if (!process.env.ANTHROPIC_API_KEY) {
await namespaceWrapper.logMessage(LogLevel.Error, errorMessage.ANTHROPIC_API_KEY_INVALID, actionMessage.ANTHROPIC_API_KEY_INVALID);
await namespaceWrapper.logMessage(
LogLevel.Error,
errorMessage.ANTHROPIC_API_KEY_INVALID,
actionMessage.ANTHROPIC_API_KEY_INVALID,
);
await namespaceWrapper.storeSet(`result-${roundNumber}`, status.ANTHROPIC_API_KEY_INVALID);
return;
}
if (!isValidAnthropicApiKey(process.env.ANTHROPIC_API_KEY!)) {
await namespaceWrapper.logMessage(LogLevel.Error, errorMessage.ANTHROPIC_API_KEY_INVALID, actionMessage.ANTHROPIC_API_KEY_INVALID);
await namespaceWrapper.logMessage(
LogLevel.Error,
errorMessage.ANTHROPIC_API_KEY_INVALID,
actionMessage.ANTHROPIC_API_KEY_INVALID,
);
await namespaceWrapper.storeSet(`result-${roundNumber}`, status.ANTHROPIC_API_KEY_INVALID);
return;
}
const isAnthropicAPIKeyValid = await checkAnthropicAPIKey(process.env.ANTHROPIC_API_KEY!);
if (!isAnthropicAPIKeyValid) {
await namespaceWrapper.logMessage(LogLevel.Error, errorMessage.ANTHROPIC_API_KEY_NO_CREDIT, actionMessage.ANTHROPIC_API_KEY_NO_CREDIT);
await namespaceWrapper.logMessage(
LogLevel.Error,
errorMessage.ANTHROPIC_API_KEY_NO_CREDIT,
actionMessage.ANTHROPIC_API_KEY_NO_CREDIT,
);
await namespaceWrapper.storeSet(`result-${roundNumber}`, status.ANTHROPIC_API_KEY_NO_CREDIT);
return;
}
if (!process.env.GITHUB_USERNAME || !process.env.GITHUB_TOKEN) {
await namespaceWrapper.logMessage(LogLevel.Error, errorMessage.GITHUB_CHECK_FAILED, actionMessage.GITHUB_CHECK_FAILED);
await namespaceWrapper.logMessage(
LogLevel.Error,
errorMessage.GITHUB_CHECK_FAILED,
actionMessage.GITHUB_CHECK_FAILED,
);
await namespaceWrapper.storeSet(`result-${roundNumber}`, status.GITHUB_CHECK_FAILED);
return;
}
const isGitHubValid = await checkGitHub(process.env.GITHUB_USERNAME!, process.env.GITHUB_TOKEN!);
if (!isGitHubValid) {
await namespaceWrapper.logMessage(LogLevel.Error, errorMessage.GITHUB_CHECK_FAILED, actionMessage.GITHUB_CHECK_FAILED);
await namespaceWrapper.logMessage(
LogLevel.Error,
errorMessage.GITHUB_CHECK_FAILED,
actionMessage.GITHUB_CHECK_FAILED,
);
await namespaceWrapper.storeSet(`result-${roundNumber}`, status.GITHUB_CHECK_FAILED);
return;
}
@ -78,31 +98,16 @@ export async function task(roundNumber: number): Promise<void> {
if (!pubKey) {
throw new Error("No public key found");
}
/****************** All issues need to be starred ******************/
const existingIssues = await getExistingIssues();
const githubUrls = existingIssues.map((issue) => issue.githubUrl);
try {
await orcaClient.podCall(`star/${roundNumber}`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ taskId: TASK_ID, round_number: String(roundNumber), github_urls: githubUrls }),
});
} catch (error) {
await namespaceWrapper.storeSet(`result-${roundNumber}`, status.STAR_ISSUE_FAILED);
console.error("Error starring issues:", error);
}
/****************** All these issues need to be generate a markdown file ******************/
const signature = await namespaceWrapper.payloadSigning(
{
taskId: TASK_ID,
roundNumber: roundNumber,
action: "fetch",
action: "fetch-todo",
githubUsername: stakingKey,
stakingKey: stakingKey
stakingKey: stakingKey,
},
stakingKeypair.secretKey,
);
@ -110,10 +115,10 @@ export async function task(roundNumber: number): Promise<void> {
// const initializedDocumentSummarizeIssues = await getInitializedDocumentSummarizeIssues(existingIssues);
console.log(`[TASK] Making Request to Middle Server with taskId: ${TASK_ID} and round: ${roundNumber}`);
const requiredWorkResponse = await fetch(`${middleServerUrl}/api/builder/summarizer/fetch-summarizer-todo`, {
const requiredWorkResponse = await fetch(`${middleServerUrl}/summarizer/worker/fetch-todo`, {
method: "POST",
headers: {
"Content-Type": "application/json"
"Content-Type": "application/json",
},
body: JSON.stringify({ signature: signature, stakingKey: stakingKey }),
});
@ -132,7 +137,7 @@ export async function task(roundNumber: number): Promise<void> {
};
console.log("[TASK] jsonBody: ", jsonBody);
try {
const repoSummaryResponse = await orcaClient.podCall(`repo_summary/${roundNumber}`, {
const repoSummaryResponse = await orcaClient.podCall(`worker-task/${roundNumber}`, {
method: "POST",
headers: {
"Content-Type": "application/json",
@ -143,20 +148,17 @@ export async function task(roundNumber: number): Promise<void> {
console.log("[TASK] repoSummaryResponse.data.result.data ", repoSummaryResponse.data.result.data);
const payload = {
taskId: TASK_ID,
action: "add",
action: "add-todo-pr",
roundNumber: roundNumber,
prUrl: repoSummaryResponse.data.result.data.pr_url,
stakingKey: stakingKey
}
stakingKey: stakingKey,
};
console.log("[TASK] Signing payload: ", payload);
if (repoSummaryResponse.status === 200) {
try{
const signature = await namespaceWrapper.payloadSigning(
payload,
stakingKeypair.secretKey,
);
try {
const signature = await namespaceWrapper.payloadSigning(payload, stakingKeypair.secretKey);
console.log("[TASK] signature: ", signature);
const addPrToSummarizerTodoResponse = await fetch(`${middleServerUrl}/api/builder/summarizer/add-pr-to-summarizer-todo`, {
const addPrToSummarizerTodoResponse = await fetch(`${middleServerUrl}/summarizer/worker/add-todo-pr`, {
method: "POST",
headers: {
"Content-Type": "application/json",
@ -164,7 +166,7 @@ export async function task(roundNumber: number): Promise<void> {
body: JSON.stringify({ signature: signature, stakingKey: stakingKey }),
});
console.log("[TASK] addPrToSummarizerTodoResponse: ", addPrToSummarizerTodoResponse);
}catch(error){
} catch (error) {
await namespaceWrapper.storeSet(`result-${roundNumber}`, status.ISSUE_FAILED_TO_ADD_PR_TO_SUMMARIZER_TODO);
console.error("[TASK] Error adding PR to summarizer todo:", error);
}

View File

@ -21,24 +21,28 @@ export async function audit(cid: string, roundNumber: number, submitterKey: stri
// This returns a dummy true
return true;
}
const decodeResult = await submissionJSONSignatureDecode({submission_value: cid, submitterPublicKey: submitterKey, roundNumber: roundNumber});
const decodeResult = await submissionJSONSignatureDecode({
submission_value: cid,
submitterPublicKey: submitterKey,
roundNumber: roundNumber,
});
if (!decodeResult) {
console.log("[AUDIT] DECODE RESULT FAILED.")
console.log("[AUDIT] DECODE RESULT FAILED.");
return false;
}
console.log(`[AUDIT] ✅ Signature decoded successfully`);
console.log(`[AUDIT] Checking summarizer status for submitter ${submitterKey}`);
const checkSummarizerResponse = await fetch(`${middleServerUrl}/api/builder/summarizer/check-summarizer`, {
const checkSummarizerResponse = await fetch(`${middleServerUrl}/summarizer/worker/check-todo`, {
method: "POST",
headers: {
"Content-Type": "application/json"
"Content-Type": "application/json",
},
body: JSON.stringify({
stakingKey: submitterKey,
roundNumber,
githubUsername: decodeResult.githubUsername,
prUrl: decodeResult.prUrl
prUrl: decodeResult.prUrl,
}),
});
const checkSummarizerJSON = await checkSummarizerResponse.json();
@ -53,7 +57,7 @@ export async function audit(cid: string, roundNumber: number, submitterKey: stri
console.log(`[AUDIT] Sending audit request for submitter: ${submitterKey}`);
console.log(`[AUDIT] Submission data being sent to audit:`, decodeResult);
const result = await orcaClient.podCall(`audit/${roundNumber}`, {
const result = await orcaClient.podCall(`worker-audit/${roundNumber}`, {
method: "POST",
headers: {
"Content-Type": "application/json",

View File

@ -2,23 +2,17 @@ import { Submitter, DistributionList } from "@_koii/task-manager";
import { namespaceWrapper, TASK_ID } from "@_koii/namespace-wrapper";
import { customReward, status } from "../utils/constant";
import { Submission } from "@_koii/namespace-wrapper/dist/types";
import { middleServerUrl } from "../utils/constant";
import { getOrcaClient } from "@_koii/task-manager/extensions";
import { submissionJSONSignatureDecode } from "../utils/submissionJSONSignatureDecode";
import { getRandomNodes } from "../utils/leader";
const getSubmissionList = async (roundNumber: number): Promise<Record<string, Submission>> => {
const submissionInfo = await namespaceWrapper.getTaskSubmissionInfo(roundNumber);
return submissionInfo?.submissions[roundNumber] || {};
}
export const getEmptyDistributionList = async (
submitters: Submitter[],
): Promise<DistributionList> => {
};
export const getEmptyDistributionList = async (submitters: Submitter[]): Promise<DistributionList> => {
const distributionList: DistributionList = {};
for (const submitter of submitters) {
distributionList[submitter.publicKey] = 0;
}
return distributionList;
}
};
export const distribution = async (
submitters: Submitter[],
bounty: number,
@ -29,11 +23,11 @@ export const distribution = async (
for (const submitter of submitters) {
console.log(`\n[DISTRIBUTION] Processing submitter: ${submitter.publicKey}`);
console.log(`[DISTRIBUTION] Getting submission list for round ${roundNumber}`);
const submitterSubmissions = await getSubmissionList(roundNumber);
console.log(`[DISTRIBUTION] Total submissions found: ${Object.keys(submitterSubmissions).length}`);
const submitterSubmission = submitterSubmissions[submitter.publicKey];
if (!submitterSubmission || submitterSubmission.submission_value === "") {
console.log(`[DISTRIBUTION] ❌ No valid submission found for submitter ${submitter.publicKey}`);
@ -41,13 +35,13 @@ export const distribution = async (
continue;
}
if (Object.values(status).includes(submitterSubmission.submission_value)) {
distributionList[submitter.publicKey] = 0;
continue;
}else{
// TODO: Check if I should include = 0 here
distributionList[submitter.publicKey] = 0;
continue;
} else {
// TODO: Check if I should include = 0 here
if (submitter.votes >= 0) {
distributionList[submitter.publicKey] = customReward;
}else{
} else {
distributionList[submitter.publicKey] = 0;
}
}

View File

@ -1,12 +1,9 @@
import { namespaceWrapper, app } from "@_koii/task-manager/namespace-wrapper";
import { getLeaderNode, getRandomNodes } from "../utils/leader";
import { getLeaderNode } from "../utils/leader";
import { task } from "./1-task";
import { submission } from "./2-submission";
import { audit } from "./3-audit";
import { distribution } from "./4-distribution";
import { submissionJSONSignatureDecode } from "../utils/submissionJSONSignatureDecode";
import { Submission } from "@_koii/namespace-wrapper/dist/types";
import { taskRunner } from "@_koii/task-manager"
import { taskRunner } from "@_koii/task-manager";
/**
*
@ -25,7 +22,10 @@ export async function routes() {
app.get("/leader/:roundNumber/:submitterPublicKey", async (req, res) => {
const roundNumber = req.params.roundNumber;
const submitterPublicKey = req.params.submitterPublicKey;
const {isLeader, leaderNode} = await getLeaderNode({roundNumber: Number(roundNumber), submitterPublicKey: submitterPublicKey});
const { isLeader, leaderNode } = await getLeaderNode({
roundNumber: Number(roundNumber),
submitterPublicKey: submitterPublicKey,
});
res.status(200).json({ isLeader: isLeader, leaderNode: leaderNode });
});
@ -53,5 +53,4 @@ export async function routes() {
const submitDistributionResult = await taskRunner.submitDistributionList(Number(roundNumber));
res.status(200).json({ result: submitDistributionResult });
});
}

View File

@ -1,16 +0,0 @@
task_id: "summarizer"
base_port: 5000
max_rounds: 3
data_dir: data
workers_config: workers.json
mongodb:
database: summarizer_test
collections:
todos:
data_file: todos.json
required_count: 1
issues:
data_file: issues.json
required_count: 1

View File

@ -1,16 +0,0 @@
[
{
"taskId": "summarizer",
"githubUrl": "https://github.com/test_owner/test_repo/issues/1",
"title": "Test Issue 1",
"body": "This is a test issue for summarization",
"status": "open"
},
{
"taskId": "summarizer",
"githubUrl": "https://github.com/test_owner/test_repo/issues/2",
"title": "Test Issue 2",
"body": "This is another test issue for summarization",
"status": "open"
}
]

View File

@ -1,20 +0,0 @@
[
{
"taskId": "summarizer",
"roundNumber": 1,
"repo_owner": "test_owner",
"repo_name": "test_repo",
"prUrl": "https://github.com/test_owner/test_repo/pull/1",
"status": "pending",
"stakingKey": "test_key_1"
},
{
"taskId": "summarizer",
"roundNumber": 1,
"repo_owner": "test_owner",
"repo_name": "test_repo",
"prUrl": "https://github.com/test_owner/test_repo/pull/2",
"status": "pending",
"stakingKey": "test_key_2"
}
]

View File

@ -1,51 +0,0 @@
"""Test stage for auditing summary."""
import requests
from prometheus_test import Context
async def prepare(context: Context, target_name: str):
    """Collect the data needed to audit another worker's summary.

    Reads the target worker's stored submission (written by its submit
    step) and bundles it with this worker's identity and round number.
    """
    worker_id = context.env.get("WORKER_ID")
    # Submission saved earlier under "submission-<target_name>"
    target_submission = await context.storeGet(f"submission-{target_name}")
    return {
        "staking_key": worker_id,
        "round_number": context.round_number,
        "target_submission": target_submission,
        "target_name": target_name,
    }
async def execute(context: Context, prepare_data: dict):
    """Execute summary audit test.

    POSTs the target submission to the audit endpoint, raises on any
    failure, and stores the audit result for later steps.
    """
    worker_key = prepare_data["staking_key"]
    target = prepare_data["target_name"]
    submission = prepare_data["target_submission"]

    response = requests.post(
        "http://localhost:5000/api/builder/summarizer/audit",
        json={
            "taskId": context.config.task_id,
            "roundNumber": prepare_data["round_number"],
            "stakingKey": worker_key,
            "submitterKey": target,
            "cid": submission.get("cid"),
            "prUrl": submission.get("pr_url"),
            "githubUsername": submission.get("github_username"),
        },
    )
    if response.status_code != 200:
        raise Exception(f"Failed to audit summary: {response.text}")

    result = response.json()
    if not result.get("success"):
        raise Exception("Failed to audit summary")

    # Store audit result keyed by auditor and target
    await context.storeSet(f"audit-{worker_key}-{target}", result.get("data"))
    return True

View File

@ -1,39 +0,0 @@
"""Test stage for fetching summarizer todo."""
import requests
from prometheus_test import Context
async def prepare(context: Context):
    """Gather worker identity and round number for the fetch-todo call."""
    worker_id = context.env.get("WORKER_ID")
    return {
        "staking_key": worker_id,
        "round_number": context.round_number,
    }
async def execute(context: Context, prepare_data: dict):
    """Execute fetch summarizer todo test.

    Requests a todo for this worker and stores it so the summary
    generation stage can pick it up.
    """
    worker_key = prepare_data["staking_key"]

    response = requests.post(
        "http://localhost:5000/api/builder/summarizer/fetch-summarizer-todo",
        json={
            "stakingKey": worker_key,
            "roundNumber": prepare_data["round_number"],
        },
    )
    if response.status_code != 200:
        raise Exception(f"Failed to fetch summarizer todo: {response.text}")

    result = response.json()
    if not result.get("success"):
        raise Exception("Failed to fetch summarizer todo")

    # Store todo data for next steps
    await context.storeSet(f"todo-{worker_key}", result.get("data"))
    return True

View File

@ -1,47 +0,0 @@
"""Test stage for generating repository summary."""
import requests
from prometheus_test import Context
async def prepare(context: Context):
    """Load the previously fetched todo and expose its repository coordinates."""
    worker_key = context.env.get("WORKER_ID")
    todo = await context.storeGet(f"todo-{worker_key}")
    data = {"staking_key": worker_key, "round_number": context.round_number}
    data["repo_owner"] = todo.get("repo_owner")
    data["repo_name"] = todo.get("repo_name")
    return data
async def execute(context: Context, prepare_data: dict):
    """Hit the generate-summary endpoint and remember the resulting PR URL."""
    staking_key = prepare_data["staking_key"]
    repo_url = (
        f"https://github.com/{prepare_data['repo_owner']}/{prepare_data['repo_name']}"
    )
    response = requests.post(
        "http://localhost:5000/api/builder/summarizer/generate-summary",
        json={
            "taskId": context.config.task_id,
            "round_number": str(prepare_data["round_number"]),
            "repo_url": repo_url,
        },
    )
    if response.status_code != 200:
        raise Exception(f"Failed to generate summary: {response.text}")
    result = response.json()
    if not result.get("success"):
        raise Exception("Failed to generate summary")
    # Save the PR URL so the submission stage can reference it.
    pr_url = result.get("data", {}).get("pr_url")
    await context.storeSet(f"pr-{staking_key}", pr_url)
    return True

View File

@ -1,56 +0,0 @@
"""Test stage for submitting summary."""
import requests
from prometheus_test import Context
async def prepare(context: Context):
    """Gather the stored PR URL plus worker identity for the submission step."""
    worker_key = context.env.get("WORKER_ID")
    return {
        "staking_key": worker_key,
        "round_number": context.round_number,
        "pr_url": await context.storeGet(f"pr-{worker_key}"),
        "github_username": context.env.get("GITHUB_USERNAME"),
    }
async def execute(context: Context, prepare_data: dict):
    """Submit the summary PR and record the submission details for auditing."""
    staking_key = prepare_data["staking_key"]
    pr_url = prepare_data["pr_url"]
    github_username = prepare_data["github_username"]
    payload = {
        "taskId": context.config.task_id,
        "roundNumber": prepare_data["round_number"],
        "prUrl": pr_url,
        "stakingKey": staking_key,
        "githubUsername": github_username,
    }
    response = requests.post(
        "http://localhost:5000/api/builder/summarizer/submit",
        json=payload,
    )
    if response.status_code != 200:
        raise Exception(f"Failed to submit summary: {response.text}")
    result = response.json()
    if not result.get("success"):
        raise Exception("Failed to submit summary")
    # Keep the submission record around so the audit stages can verify it.
    record = {
        "cid": result.get("data", {}).get("cid"),
        "pr_url": pr_url,
        "github_username": github_username,
    }
    await context.storeSet(f"submission-{staking_key}", record)
    return True

View File

@ -1,31 +0,0 @@
"""Test stage for validating API keys."""
import requests
from prometheus_test import Context
async def prepare(context: Context):
    """Read the Anthropic API key from the test environment."""
    return {"api_key": context.env.get("ANTHROPIC_API_KEY")}
async def execute(context: Context, prepare_data: dict):
    """Ask the server to validate the configured Anthropic API key."""
    response = requests.post(
        "http://localhost:5000/api/builder/summarizer/validate-api-key",
        json={"api_key": prepare_data["api_key"]},
    )
    if response.status_code != 200:
        raise Exception(f"API key validation failed: {response.text}")
    if not response.json().get("valid"):
        raise Exception("API key is not valid")
    return True

View File

@ -1,33 +0,0 @@
"""Test stage for validating GitHub credentials."""
import requests
from prometheus_test import Context
async def prepare(context: Context):
    """Pull the GitHub username/token pair from the test environment."""
    env = context.env
    return {
        "github_username": env.get("GITHUB_USERNAME"),
        "github_token": env.get("GITHUB_TOKEN"),
    }
async def execute(context: Context, prepare_data: dict):
    """Ask the server to validate the GitHub credentials."""
    payload = {
        "username": prepare_data["github_username"],
        "token": prepare_data["github_token"],
    }
    response = requests.post(
        "http://localhost:5000/api/builder/summarizer/validate-github",
        json=payload,
    )
    if response.status_code != 200:
        raise Exception(f"GitHub validation failed: {response.text}")
    if not response.json().get("valid"):
        raise Exception("GitHub credentials are not valid")
    return True

View File

@ -1,85 +0,0 @@
"""Test step definitions."""
from prometheus_test import TestStep
from functools import partial
from .stages import (
validate_api_keys,
validate_github,
fetch_summarizer_todo,
generate_summary,
submit_summary,
audit_summary,
)
# Declarative table of test stages:
# (step name, description, stage module, executing worker, audit target or None).
# An audit target means prepare() is bound to that peer via functools.partial.
_STEP_SPECS = [
    ("validate_api_keys", "Validate Anthropic API key", validate_api_keys, "worker1", None),
    ("validate_github", "Validate GitHub credentials", validate_github, "worker1", None),
    ("fetch_todo_worker1", "Fetch summarizer todo for worker1", fetch_summarizer_todo, "worker1", None),
    ("fetch_todo_worker2", "Fetch summarizer todo for worker2", fetch_summarizer_todo, "worker2", None),
    ("generate_summary_worker1", "Generate summary for worker1's todo", generate_summary, "worker1", None),
    ("generate_summary_worker2", "Generate summary for worker2's todo", generate_summary, "worker2", None),
    ("submit_summary_worker1", "Submit summary for worker1", submit_summary, "worker1", None),
    ("submit_summary_worker2", "Submit summary for worker2", submit_summary, "worker2", None),
    ("audit_worker1", "Worker1 audits Worker2's submission", audit_summary, "worker1", "worker2"),
    ("audit_worker2", "Worker2 audits Worker1's submission", audit_summary, "worker2", "worker1"),
]

steps = [
    TestStep(
        name=name,
        description=description,
        prepare=(
            stage.prepare
            if target is None
            else partial(stage.prepare, target_name=target)
        ),
        execute=stage.execute,
        worker=worker,
    )
    for name, description, stage, worker, target in _STEP_SPECS
]