Compare commits

..

11 Commits

12 changed files with 188 additions and 39 deletions

worker/.gitignore vendored

@@ -15,3 +15,4 @@ taskStateInfoKeypair.json
 localKOIIDB.db
 metadata.json
 .npmrc
+**/*.log


@@ -124,7 +124,7 @@ environment: "TEST"
 #################################### FOR UPDATING TASKS ONLY ####################################
 ## Old Task ID ##
-task_id: "5bc74eTjGgNigupFBZXtfzAYVksPqSGBEVgRLubk7ak7"
+task_id: "A1UwX31uCMhZN4x9ZeH5xv3dzZcKLXsXytk6r7PzDLn3"
 ## Migration Description ##
-migrationDescription: "Log Reminder, Time Based Logic"
+migrationDescription: "Time Based Logic, Poll Task"


@@ -69,6 +69,7 @@ def start_task():
             swarmBountyId=swarmBountyId,
             repo_url=repo_url,
             db=db,  # Pass db instance
+            podcall_signature=podcall_signature,
         )
         return jsonify(result)
     else:


@@ -11,7 +11,7 @@ from src.database.models import Submission
 load_dotenv()
-def handle_task_creation(task_id, swarmBountyId, repo_url, db=None):
+def handle_task_creation(task_id, swarmBountyId, repo_url, db=None, podcall_signature=None):
     """Handle task creation request."""
     try:
         if db is None:
@@ -22,6 +22,8 @@ def handle_task_creation(task_id, swarmBountyId, repo_url, db=None):
             client=client,
             prompts=PROMPTS,
             repo_url=repo_url,
+            podcall_signature=podcall_signature,
+            task_id=task_id,
         )
         result = workflow.run()


@@ -19,7 +19,7 @@ class RepoClassificationPhase(WorkflowPhase):
         super().__init__(
             workflow=workflow,
             prompt_name="classify_repository",
-            available_tools=["read_file", "list_files", "classify_repository"],
+            available_tools=["read_file", "search_code", "list_directory_contents", "classify_repository"],
             conversation_id=conversation_id,
             name="Repository Classification",
         )
@@ -32,7 +32,8 @@ class ReadmeSectionGenerationPhase(WorkflowPhase):
             prompt_name="generate_readme_section",
             available_tools=[
                 "read_file",
-                "list_files",
+                "search_code",
+                "list_directory_contents",
                 "create_readme_section",
             ],
             conversation_id=conversation_id,
@@ -56,7 +57,7 @@ class ReadmeReviewPhase(WorkflowPhase):
         super().__init__(
             workflow=workflow,
             prompt_name="review_readme_file",
-            available_tools=["read_file", "list_files", "review_readme_file"],
+            available_tools=["read_file", "search_code", "list_directory_contents", "review_readme_file"],
             conversation_id=conversation_id,
             name="Readme Review",
         )
@@ -67,7 +68,7 @@ class CreatePullRequestPhase(WorkflowPhase):
         super().__init__(
             workflow=workflow,
             prompt_name="create_pr",
-            available_tools=["read_file", "list_files", "create_pull_request_legacy"],
+            available_tools=["read_file", "search_code", "list_directory_contents", "create_pull_request_legacy"],
             conversation_id=conversation_id,
             name="Create Pull Request",
         )


@@ -67,7 +67,7 @@ PROMPTS = {
         "The content will be added automatically, your job is just to create a good title."
     ),
     "create_pr": (
-        "You are creating a pull request for the file README_Prometheus.md you have generated. "
+        "You are creating a pull request."
         "The repository has been cloned to the current directory.\n"
         "Use the `create_pull_request_legacy` tool to create the pull request.\n"
        "IMPORTANT: Always use relative paths (e.g., 'src/file.py' not '/src/file.py')\n\n"


@@ -2,6 +2,7 @@
 import os
 from github import Github
+import requests
 from prometheus_swarm.workflows.base import Workflow
 from prometheus_swarm.utils.logging import log_section, log_key_value, log_error
 from src.workflows.repoSummarizer import phases
@@ -11,12 +12,16 @@ from prometheus_swarm.workflows.utils import (
     validate_github_auth,
     setup_repository,
 )
+from kno_sdk import index_repo
+from prometheus_swarm.tools.kno_sdk_wrapper.implementations import build_tools_wrapper
 from src.workflows.repoSummarizer.prompts import PROMPTS
 from src.workflows.repoSummarizer.docs_sections import (
     DOCS_SECTIONS,
     INITIAL_SECTIONS,
     FINAL_SECTIONS,
 )
+from pathlib import Path
 from prometheus_swarm.tools.git_operations.implementations import commit_and_push
@@ -50,6 +55,8 @@ class RepoSummarizerWorkflow(Workflow):
         client,
         prompts,
         repo_url,
+        podcall_signature=None,
+        task_id=None,
     ):
         # Extract owner and repo name from URL
         # URL format: https://github.com/owner/repo
@@ -63,8 +70,30 @@ class RepoSummarizerWorkflow(Workflow):
             repo_url=repo_url,
             repo_owner=repo_owner,
             repo_name=repo_name,
+            podcall_signature=podcall_signature,
+            task_id=task_id,
         )
+
+    def submit_draft_pr(self, pr_url):
+        """Submit the draft PR."""
+        try:
+            response = requests.post(
+                f"http://host.docker.internal:30017/task/{self.task_id}/add-todo-draft-pr",
+                json={
+                    "prUrl": pr_url,
+                    "signature": self.podcall_signature,
+                    "swarmBountyId": self.swarmBountyId,
+                    "success": True,
+                    "message": "",
+                },
+            )
+        except Exception as e:
+            log_error(e, "Failed to submit draft PR")
+            return {
+                "success": False,
+                "message": "Failed to submit draft PR",
+                "data": None,
+            }
+
     def setup(self):
         """Set up repository and workspace."""
         check_required_env_vars(["GITHUB_TOKEN", "GITHUB_USERNAME"])
@@ -103,12 +132,22 @@ class RepoSummarizerWorkflow(Workflow):
         # Enter repo directory
         os.chdir(self.context["repo_path"])
+
+        tools_build_result = self.build_tools_setup()
+        if not tools_build_result:
+            log_error(Exception("Failed to build tools setup"), "Failed to build tools setup")
+            return {
+                "success": False,
+                "message": "Failed to build tools setup",
+                "data": None,
+            }
+
         # Configure Git user info
         # setup_git_user_config(self.context["repo_path"])
         # Get current files for context
+
+    def build_tools_setup(self):
+        index = index_repo(Path(self.context["repo_path"]))
+        tools = build_tools_wrapper(index)
+        return tools
+
     def cleanup(self):
         """Cleanup workspace."""
         # Make sure we're not in the repo directory before cleaning up
@@ -139,7 +178,17 @@ class RepoSummarizerWorkflow(Workflow):
             log_key_value("Branch created", self.context["head"])
         try:
             commit_and_push(message="empty commit", allow_empty=True)
-            self.create_pull_request()
+            draft_pr_result = self.create_pull_request()
+            if draft_pr_result.get("success"):
+                print("DRAFT PR RESULT", draft_pr_result)
+                self.submit_draft_pr(draft_pr_result.get("data").get("pr_url"))
+            else:
+                return {
+                    "success": False,
+                    "message": "Failed to create pull request",
+                    "data": None,
+                }
         except Exception as e:
             log_error(e, "Failed to commit and push")
             return {
@@ -260,6 +309,9 @@ class RepoSummarizerWorkflow(Workflow):
             readme_result = generate_readme_section_phase.execute()
             # Check README Generation Result
+            log_key_value("README RESULT", readme_result)
             if not readme_result or not readme_result.get("success"):
                 log_error(
                     Exception(readme_result.get("error", "No result")),


@@ -7,8 +7,8 @@ PROMPTS = {
         "and creating clear, structured documentation."
     ),
     "check_readme_file": (
-        "A pull request has been checked out for you. Review the file README_Prometheus.md in the repository "
-        "and evaluate its quality and relevance to the repository.\n\n"
+        "Review the README_Prometheus.md in the repository and evaluate its quality and "
+        "relevance to the repository.\n\n"
         "Please analyze:\n"
         "1. Is the README_Prometheus.md file related to this specific repository? (Does it describe the actual code "
         "and purpose of this repo?)\n"
@@ -16,7 +16,7 @@ PROMPTS = {
         "3. Is it comprehensive enough to help users understand and use the repository?\n"
         "4. Does it follow best practices for README documentation?\n\n"
         "Use the `review_readme_file` tool to submit your findings.\n"
-        "IMPORTANT: Do not assume that an existing README is correct. "
+        # "IMPORTANT: Do not assume that an existing README is correct. "
         "Evaluate README_Prometheus.md against the codebase.\n"
         "DO NOT consider the filename in your analysis, only the content.\n"
         "STOP after submitting the review report."


@@ -2,13 +2,11 @@ import { getOrcaClient } from "@_koii/task-manager/extensions";
 import { middleServerUrl, status } from "../utils/constant";
 import { submissionJSONSignatureDecode } from "../utils/submissionJSONSignatureDecode";
 // import { status } from '../utils/constant'
-export async function audit(cid: string, roundNumber: number, submitterKey: string): Promise<boolean | void> {
-  /**
-   * Audit a submission
-   * This function should return true if the submission is correct, false otherwise
-   * The default implementation retrieves the proofs from IPFS
-   * and sends them to your container for auditing
-   */
+
+const TIMEOUT_MS = 180000; // 3 minutes in milliseconds
+const MAX_RETRIES = 3;
+
+async function auditWithTimeout(cid: string, roundNumber: number, submitterKey: string): Promise<boolean | void> {
   let orcaClient;
   try {
     orcaClient = await getOrcaClient();
@@ -83,3 +81,28 @@ export async function audit(cid: string, roundNumber: number, submitterKey: stri
     console.log("[AUDIT] Cleaning up resources");
   }
 }
+
+export async function audit(cid: string, roundNumber: number, submitterKey: string): Promise<boolean | void> {
+  let retries = 0;
+
+  while (retries < MAX_RETRIES) {
+    try {
+      const result = await Promise.race<boolean | void>([
+        auditWithTimeout(cid, roundNumber, submitterKey),
+        new Promise((_, reject) => setTimeout(() => reject(new Error("Audit timeout")), TIMEOUT_MS)),
+      ]);
+      return result;
+    } catch (error) {
+      retries++;
+      console.log(`[AUDIT] Attempt ${retries} failed:`, error);
+
+      if (retries === MAX_RETRIES) {
+        console.log(`[AUDIT] Max retries (${MAX_RETRIES}) reached. Giving up.`);
+        return true; // Return true as a fallback
+      }
+
+      // Wait for a short time before retrying
+      await new Promise(resolve => setTimeout(resolve, 5000));
+    }
+  }
+}
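Note on the change above: the exported `audit` now wraps `auditWithTimeout` in a `Promise.race` against a timer and retries up to `MAX_RETRIES` times, falling back to `true` if every attempt fails. A minimal sketch of the same timeout-plus-retry pattern in isolation (the helper name and signature below are hypothetical, not part of this change):

```typescript
// Hypothetical helper illustrating the timeout + bounded-retry pattern used by audit().
// The name withTimeoutAndRetries and its parameters are illustrative only.
async function withTimeoutAndRetries<T>(
  operation: () => Promise<T>,
  timeoutMs: number,
  maxRetries: number,
  fallback: T,
): Promise<T> {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      // Race the operation against a timer; whichever promise settles first wins.
      return await Promise.race<T>([
        operation(),
        new Promise<T>((_, reject) => setTimeout(() => reject(new Error("Timeout")), timeoutMs)),
      ]);
    } catch (error) {
      console.log(`Attempt ${attempt} failed:`, error);
      if (attempt === maxRetries) return fallback; // give up, like audit() returning true
      await new Promise((resolve) => setTimeout(resolve, 5000)); // short pause before retrying
    }
  }
  return fallback;
}
```

As in the diffed code, `Promise.race` does not cancel the losing promise, so a timed-out audit keeps running in the background until it settles on its own.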


@@ -54,6 +54,68 @@ export async function routes() {
     const submitDistributionResult = await taskRunner.submitDistributionList(Number(roundNumber));
     res.status(200).json({ result: submitDistributionResult });
   });
+
+  app.post("/add-todo-draft-pr", async (req, res) => {
+    const signature = req.body.signature;
+    const prUrl = req.body.prUrl;
+    const swarmBountyId = req.body.swarmBountyId;
+    console.log("[TASK] req.body", req.body);
+    try {
+      const publicKey = await namespaceWrapper.getMainAccountPubkey();
+      const stakingKeypair = await namespaceWrapper.getSubmitterAccount();
+      if (!stakingKeypair) {
+        throw new Error("No staking key found");
+      }
+      const stakingKey = stakingKeypair.publicKey.toBase58();
+      const secretKey = stakingKeypair.secretKey;
+      if (!publicKey) {
+        throw new Error("No public key found");
+      }
+      const payload = await namespaceWrapper.verifySignature(signature, stakingKey);
+      if (!payload) {
+        throw new Error("Invalid signature");
+      }
+      console.log("[TASK] payload: ", payload);
+      const data = payload.data;
+      if (!data) {
+        throw new Error("No signature data found");
+      }
+      const jsonData = JSON.parse(data);
+      if (jsonData.taskId !== TASK_ID) {
+        throw new Error(`Invalid task ID from signature: ${jsonData.taskId}. Actual task ID: ${TASK_ID}`);
+      }
+      const middleServerPayload = {
+        taskId: jsonData.taskId,
+        swarmBountyId,
+        prUrl,
+        stakingKey,
+        publicKey,
+        action: "add-todo-draft-pr",
+      };
+      const middleServerSignature = await namespaceWrapper.payloadSigning(middleServerPayload, secretKey);
+      const middleServerResponse = await fetch(`${middleServerUrl}/summarizer/worker/add-todo-draft-pr`, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+        },
+        body: JSON.stringify({ signature: middleServerSignature, stakingKey: stakingKey }),
+      });
+      console.log("[TASK] Add Draft PR Response: ", middleServerResponse);
+      if (middleServerResponse.status !== 200) {
+        throw new Error(`Posting to middle server failed: ${middleServerResponse.statusText}`);
+      }
+      res.status(200).json({ result: "Successfully saved PR" });
+    } catch (error) {
+      console.error("[TASK] Error adding PR to summarizer todo:", error);
+      // await namespaceWrapper.storeSet(`result-${roundNumber}`, status.SAVING_TODO_PR_FAILED);
+      res.status(400).json({ error: "Failed to save PR" });
+    }
+  });
+
   app.post("/add-todo-pr", async (req, res) => {
     const signature = req.body.signature;
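For context, the new `/add-todo-draft-pr` route is what the Python `submit_draft_pr` helper (earlier in this diff) posts to from inside the pod. A rough sketch of that caller, translated to TypeScript; the host, port, path, and body fields mirror the Python code, while the function itself and its signature are hypothetical:

```typescript
// Illustrative caller for the /add-todo-draft-pr route, mirroring the Python submit_draft_pr helper.
async function submitDraftPr(taskId: string, prUrl: string, podcallSignature: string, swarmBountyId: string) {
  const response = await fetch(`http://host.docker.internal:30017/task/${taskId}/add-todo-draft-pr`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      prUrl,                       // draft PR created by the repo summarizer workflow
      signature: podcallSignature, // signature handed to the pod when the task was started
      swarmBountyId,
    }),
  });
  if (!response.ok) {
    throw new Error(`add-todo-draft-pr failed: ${response.status} ${response.statusText}`);
  }
  return response.json(); // { result: "Successfully saved PR" } on success
}
```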


@@ -57,6 +57,6 @@ export const actionMessage = {
 export const defaultBountyMarkdownFile =
   "https://raw.githubusercontent.com/koii-network/prometheus-swarm-bounties/master/README.md";
-export const customReward = 1; // This should be in ROE!
+export const customReward = 400 * 10 ** 9; // This should be in ROE!
-export const middleServerUrl = "https://builder247-test.dev1.koii.network";
+export const middleServerUrl = "https://builder247-prod.dev.koii.network";
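For scale, assuming ROE is the smallest KOII denomination at 10^9 ROE per KOII (an assumption; the diff only says the value "should be in ROE"), the reward moves from 1 ROE to 400 KOII:

```typescript
// Assumption: 1 KOII = 1_000_000_000 ROE. Not stated in this diff.
const ROE_PER_KOII = 10 ** 9;
const customReward = 400 * 10 ** 9;       // new value from the diff above
console.log(customReward / ROE_PER_KOII); // 400 (KOII), versus the previous value of 1 ROE
```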


@@ -28,7 +28,7 @@ export async function task() {
           taskId: TASK_ID,
           // roundNumber: roundNumber,
           action: "fetch-todo",
-          githubUsername: stakingKey,
+          githubUsername: process.env.GITHUB_USERNAME,
           stakingKey: stakingKey,
         },
         stakingKeypair.secretKey,
@@ -57,14 +57,16 @@ export async function task() {
       // check if the response is 200 after all retries
       if (!requiredWorkResponse || requiredWorkResponse.status !== 200) {
-        return;
+        // return;
+        continue;
       }
       const requiredWorkResponseData = await requiredWorkResponse.json();
       console.log("[TASK] requiredWorkResponseData: ", requiredWorkResponseData);
       // const uuid = uuidv4();
       const alreadyAssigned = await namespaceWrapper.storeGet(JSON.stringify(requiredWorkResponseData.data.id));
       if (alreadyAssigned) {
-        return;
+        continue;
+        // return;
       } else {
         await namespaceWrapper.storeSet(JSON.stringify(requiredWorkResponseData.data.id), "initialized");
       }
@@ -90,30 +92,35 @@ export async function task() {
       while (retryCount < maxRetries) {
         try {
-          repoSummaryResponse = await Promise.race([
-            orcaClient.podCall(`worker-task`, {
-              method: "POST",
-              headers: {
-                "Content-Type": "application/json",
-              },
-              body: JSON.stringify(jsonBody),
-            }),
-            new Promise((_, reject) => setTimeout(() => reject(new Error("Timeout")), timeout)),
-          ]);
+          const podcallPromise = orcaClient.podCall(`worker-task`, {
+            method: "POST",
+            headers: {
+              "Content-Type": "application/json",
+            },
+            body: JSON.stringify(jsonBody),
+          });
+
+          const timeoutPromise = new Promise((_, reject) =>
+            setTimeout(() => reject(new Error("Podcall timeout after 100 seconds")), timeout)
+          );
+
+          repoSummaryResponse = await Promise.race([podcallPromise, timeoutPromise]);
           console.log("[TASK] repoSummaryResponse: ", repoSummaryResponse);
           break; // If successful, break the retry loop
-        } catch (error) {
+        } catch (error: any) {
+          console.log(`[TASK] Podcall attempt ${retryCount + 1} failed:`, error);
           retryCount++;
           if (retryCount === maxRetries) {
-            throw error; // If we've exhausted retries, throw the error
+            throw new Error(`Podcall failed after ${maxRetries} attempts: ${error.message}`);
           }
-          console.log(`[TASK] Attempt ${retryCount} failed, retrying...`);
+          console.log(`[TASK] Retrying in 10 seconds...`);
           await new Promise((resolve) => setTimeout(resolve, 10000)); // Wait 10 seconds before retry
         }
       }
     } catch (error) {
       // await namespaceWrapper.storeSet(`result-${roundNumber}`, status.ISSUE_SUMMARIZATION_FAILED);
       console.error("[TASK] EXECUTE TASK ERROR:", error);
+      continue;
     }
   } catch (error) {
     console.error("[TASK] EXECUTE TASK ERROR:", error);