chore: apply prettier

2025-05-05 22:51:21 -03:00
parent 290bebb112
commit 3f10aacb72
26 changed files with 2126 additions and 2051 deletions

View File

@@ -1,26 +1,26 @@
# Earn Crypto with AI Agents: Prometheus Document & Summarize Task (Beta v0)

## Overview

The **Prometheus Document & Summarize Task** spins up an **AI agent** capable of continuously summarizing repositories, **earning you KOII**. Automated document summarization agents can constantly process and summarize information, increasing the value of the network _and_ your node. Our ultimate goal is to have **AI agents summarizing Koii tasks**, growing the network with **more opportunities for node operators to earn rewards**.

## Releases

### Beta v0

- This is the **first beta release** of the task.
- The AI agent reads documents and generates summaries automatically.
- Documentation is sent to the user repository.
- Future versions will introduce **enhanced AI logic, more complex summarization tasks, and more!**

## Task Setup

**[How to set up a Claude API key and a GitHub API key for the 247 Document & Summarize Task.](https://www.koii.network/blog/Earn-Crypto-With-AI-Agent)**

## How It Works

1. The Koii Node **launches an AI agent** inside a lightweight runtime.
2. The agent reads an active **repository list** from the bounty repository.
3. It picks a **repository**, generates the necessary **documentation**, and submits a **GitHub pull request** (a request to have its documentation added to the repository).
4. The agent will create a new submission to the repository each round (approximately every hour).
5. Koii Nodes **earn rewards** for running the AI agent and contributing documentation.

View File

@@ -1,130 +1,130 @@
######################## ALL FIELDS ARE REQUIRED UNLESS OTHERWISE NOTED #########################
######################################### TASK METADATA #########################################
############################ Will be displayed in the desktop node ##############################

## Task Name ##
# Maximum 24 characters.
task_name: "Prometheus Docs Agent"

## Task Author ##
author: "Prometheus"

## Task Description Markdown ##
# If you specify a markdown file, the description field will be ignored.
# Markdown is recommended for better formatting.
markdownDescriptionPath: "./README.md"

## Task Description ##
# Ignored if you specify a markdown file.
description: "Task description."

## Repository URL ##
# Must be public for whitelisted tasks.
repositoryUrl: "https://github.com/koii-network/builder-247"

## Image URL ##
# 230x86 pixels.
imageUrl: "https://koii-k2-task-metadata.s3.us-east-2.amazonaws.com/Docs.png"

## Info URL ##
infoUrl: "https://www.koii.network/blog/Earn-Crypto-With-AI-Agent"

####################################### TASK CONFIGURATION ######################################

## Task Executable Network ##
# IPFS or DEVELOPMENT
# Keep this as IPFS unless you know you need to change it.
task_executable_network: "IPFS"

## Task Audit Program ##
# Task Executable Network IPFS: Path to your executable.
# Task Executable Network DEVELOPMENT: The value should be 'main'.
# Keep this as-is unless you know you need to change it.
task_audit_program: "dist/main.js"

## Round Time ##
# Duration of task, measured in slots (with each slot approximately equal to 408ms). Should be at least 800 slots.
# See https://www.koii.network/docs/concepts/what-are-tasks/what-are-tasks/gradual-consensus for more information on how round time, audit window, and submission window work.
round_time: 3000

## Audit Window ##
# The audit window should be at least 1/3 of the round time.
audit_window: 1300

## Submission Window ##
# The submission window should be at least 1/3 of the round time.
submission_window: 1300

## Minimum Stake Amount ##
# The minimum amount of KOII or KPL that a user must stake in order to participate in the task.
minimum_stake_amount: 0.01

## Task Bounty Type ##
# KOII or KPL
task_type: "KOII"

## Token Mint Address (ONLY for KPL tasks) ##
# The Fire Token address is provided as an example.
token_type: "4qayyw53kWz6GzypcejjT1cvwMXS1qYLSMQRE8se3gTv"

## Total Bounty Amount ##
# The total bounty amount that will be available for distribution over all rounds.
# Does nothing when updating a task.
total_bounty_amount: 12000

## Bounty Amount per Round ##
# The maximum amount that can be distributed per round.
# If the actual distribution per round exceeds this amount, the distribution list will fail.
bounty_amount_per_round: 2001

## Allowed Failed Distributions ##
# Number of retries allowed for the distribution list if it fails audit.
# If all retries fail, the task will not distribute anything for the round.
# This is also the number of rounds of submissions it will keep.
allowed_failed_distributions: 8

## Space ##
# Expected Task Data Size in MBs for the account size.
# Minimums: 2 for whitelisted tasks, 1 for production, 0.1 for testing.
# See https://www.koii.network/docs/develop/command-line-tool/create-task-cli/create-task#space for calculation details.
space: 5

## Requirement Tags (Optional) ##
# To add more global variables and task variables, please refer to the type, value, description format shown below.
# The ORCA_TASK addon is REQUIRED.
requirementsTags:
  - type: ADDON
    value: "ORCA_TASK"
  - type: CPU
    value: "4-core"
  - type: RAM
    value: "5 GB"
  - type: STORAGE
    value: "5 GB"
  - type: TASK_VARIABLE
    value: "ANTHROPIC_API_KEY"
    description: "Your Anthropic API key. You can get one here: https://console.anthropic.com/settings/keys"
  - type: TASK_VARIABLE
    value: "GITHUB_USERNAME"
    description: "Your GitHub username. You can sign up for an account here: https://github.com/join"
  - type: TASK_VARIABLE
    value: "GITHUB_TOKEN"
    description: "Your GitHub Personal Access Token. You can create one here: https://github.com/settings/tokens"

## Tags ##
# See https://www.koii.network/docs/develop/command-line-tool/create-task-cli/create-task#tags for available tag options.
tags: ["AI"]

## Environment ##
# TEST or PRODUCTION
# Production mode will expose your task to all the task runners, even if not whitelisted.
environment: "TEST"

#################################### FOR UPDATING TASKS ONLY ####################################

## Old Task ID ##
task_id: "5bc74eTjGgNigupFBZXtfzAYVksPqSGBEVgRLubk7ak7"

## Migration Description ##
migrationDescription: "Log Reminder, Time Based Logic"

View File

@ -1,130 +1,130 @@
######################## ALL FIELDS ARE REQUIRED UNLESS OTHERWISE NOTED ######################### ######################## ALL FIELDS ARE REQUIRED UNLESS OTHERWISE NOTED #########################
######################################### TASK METADATA ######################################### ######################################### TASK METADATA #########################################
############################ Will be displayed in the desktop node ############################## ############################ Will be displayed in the desktop node ##############################
## Task Name ## ## Task Name ##
# Maximum 24 characters. # Maximum 24 characters.
task_name: "Prometheus Docs Agent" task_name: "Prometheus Docs Agent"
## Task Author ## ## Task Author ##
author: "Prometheus" author: "Prometheus"
# Task Description Markdown ## # Task Description Markdown ##
# If you specify a markdown file, the description field will be ignored. # If you specify a markdown file, the description field will be ignored.
# Markdown is recommended for better formatting. # Markdown is recommended for better formatting.
markdownDescriptionPath: "./README.md" markdownDescriptionPath: "./README.md"
## Task Description ## ## Task Description ##
# Ignored if you specify a markdown file. # Ignored if you specify a markdown file.
description: "Task description." description: "Task description."
## Repository URL ## ## Repository URL ##
# Must be public for whitelisted tasks. # Must be public for whitelisted tasks.
repositoryUrl: "https://github.com/koii-network/builder-247" repositoryUrl: "https://github.com/koii-network/builder-247"
## Image URL ## ## Image URL ##
# 230x86 pixels. # 230x86 pixels.
imageUrl: "https://koii-k2-task-metadata.s3.us-east-2.amazonaws.com/Docs.png" imageUrl: "https://koii-k2-task-metadata.s3.us-east-2.amazonaws.com/Docs.png"
## Info URL ## ## Info URL ##
infoUrl: "https://www.koii.network/blog/Earn-Crypto-With-AI-Agent" infoUrl: "https://www.koii.network/blog/Earn-Crypto-With-AI-Agent"
####################################### TASK CONFIGURATION ###################################### ####################################### TASK CONFIGURATION ######################################
## Task Executable Network ## ## Task Executable Network ##
# IPFS or DEVELOPMENT # IPFS or DEVELOPMENT
# Keep this as IPFS unless you know you need to change it. # Keep this as IPFS unless you know you need to change it.
task_executable_network: "IPFS" task_executable_network: "IPFS"
## Task Audit Program ## ## Task Audit Program ##
# Task Executable Network IPFS: Path to your executable. # Task Executable Network IPFS: Path to your executable.
# Task Executable Network DEVELOPMENT: The value should be 'main'. # Task Executable Network DEVELOPMENT: The value should be 'main'.
# Keep this as-is unless you know you need to change it. # Keep this as-is unless you know you need to change it.
task_audit_program: "dist/main.js" task_audit_program: "dist/main.js"
## Round Time ## ## Round Time ##
# Duration of task, measured in slots (with each slot approximately equal to 408ms). Should be at least 800 slots. # Duration of task, measured in slots (with each slot approximately equal to 408ms). Should be at least 800 slots.
# See https://www.koii.network/docs/concepts/what-are-tasks/what-are-tasks/gradual-consensus for more information on how round time, audit window, and submission window work. # See https://www.koii.network/docs/concepts/what-are-tasks/what-are-tasks/gradual-consensus for more information on how round time, audit window, and submission window work.
round_time: 3000 round_time: 3000
## Audit Window ## ## Audit Window ##
# The audit window should be at least 1/3 of the round time. # The audit window should be at least 1/3 of the round time.
audit_window: 1300 audit_window: 1300
## Submission Window ## ## Submission Window ##
# The submission window should be at least 1/3 of the round time. # The submission window should be at least 1/3 of the round time.
submission_window: 1300 submission_window: 1300
## Minimum Stake Amount ## ## Minimum Stake Amount ##
# The minimum amount of KOII or KPL that a user must stake in order to participate in the task. # The minimum amount of KOII or KPL that a user must stake in order to participate in the task.
minimum_stake_amount: 0.01 minimum_stake_amount: 0.01
## Task Bounty Type ## ## Task Bounty Type ##
# KOII or KPL # KOII or KPL
task_type: "KOII" task_type: "KOII"
## Token Mint Address (ONLY for KPL tasks) ## ## Token Mint Address (ONLY for KPL tasks) ##
# The Fire Token address is provided as an example. # The Fire Token address is provided as an example.
token_type: "4qayyw53kWz6GzypcejjT1cvwMXS1qYLSMQRE8se3gTv" token_type: "4qayyw53kWz6GzypcejjT1cvwMXS1qYLSMQRE8se3gTv"
## Total Bounty Amount ## ## Total Bounty Amount ##
# The total bounty amount that will be available for distribution over all rounds. # The total bounty amount that will be available for distribution over all rounds.
# Does nothing when updating a task. # Does nothing when updating a task.
total_bounty_amount: 11 total_bounty_amount: 11
## Bounty Amount per Round ## ## Bounty Amount per Round ##
# The maximum amount that can be distributed per round. # The maximum amount that can be distributed per round.
# If the actual distribution per round exceeds this amount, the distribution list will fail. # If the actual distribution per round exceeds this amount, the distribution list will fail.
bounty_amount_per_round: 1 bounty_amount_per_round: 1
## Allowed Failed Distributions ## ## Allowed Failed Distributions ##
# Number of retries allowed for the distribution list if it is fails audit. # Number of retries allowed for the distribution list if it is fails audit.
# If all retries fail, the task will not distribute anything for the round. # If all retries fail, the task will not distribute anything for the round.
# This is also the number of rounds of submissions it will keep. # This is also the number of rounds of submissions it will keep.
allowed_failed_distributions: 8 allowed_failed_distributions: 8
## Space ## ## Space ##
# Expected Task Data Size in MBs for the account size. # Expected Task Data Size in MBs for the account size.
# Minimums: 2 for whitelisted tasks, 1 for production, 0.1 for testing. # Minimums: 2 for whitelisted tasks, 1 for production, 0.1 for testing.
# See https://www.koii.network/docs/develop/command-line-tool/create-task-cli/create-task#space for calculation details. # See https://www.koii.network/docs/develop/command-line-tool/create-task-cli/create-task#space for calculation details.
space: 0.1 space: 0.1
## Requirement Tags (Optional) ## ## Requirement Tags (Optional) ##
# To add more global variables and task variables, please refer to the type, value, description format shown below. # To add more global variables and task variables, please refer to the type, value, description format shown below.
# The ORCA_TASK addon is REQUIRED # The ORCA_TASK addon is REQUIRED
requirementsTags: requirementsTags:
- type: ADDON - type: ADDON
value: "ORCA_TASK" value: "ORCA_TASK"
- type: CPU - type: CPU
value: "4-core" value: "4-core"
- type: RAM - type: RAM
value: "5 GB" value: "5 GB"
- type: STORAGE - type: STORAGE
value: "5 GB" value: "5 GB"
- type: TASK_VARIABLE - type: TASK_VARIABLE
value: "ANTHROPIC_API_KEY" value: "ANTHROPIC_API_KEY"
description: "Your Anthropic API key. You can get one here: https://console.anthropic.com/settings/keys" description: "Your Anthropic API key. You can get one here: https://console.anthropic.com/settings/keys"
- type: TASK_VARIABLE - type: TASK_VARIABLE
value: "GITHUB_USERNAME" value: "GITHUB_USERNAME"
description: "Your GitHub username. You can sign up for an account here: https://github.com/join" description: "Your GitHub username. You can sign up for an account here: https://github.com/join"
- type: TASK_VARIABLE - type: TASK_VARIABLE
value: "GITHUB_TOKEN" value: "GITHUB_TOKEN"
description: "Your GitHub Personal Access Token. You can create one here: https://github.com/settings/tokens" description: "Your GitHub Personal Access Token. You can create one here: https://github.com/settings/tokens"
## Tags ## ## Tags ##
# See https://www.koii.network/docs/develop/command-line-tool/create-task-cli/create-task#tags for available tag options. # See https://www.koii.network/docs/develop/command-line-tool/create-task-cli/create-task#tags for available tag options.
tags: ["AI"] tags: ["AI"]
# Environment ## # Environment ##
# TEST or PRODUCTION # TEST or PRODUCTION
# Production mode will expose your task to all the task runners, even if not whitelisted. # Production mode will expose your task to all the task runners, even if not whitelisted.
environment: "TEST" environment: "TEST"
#################################### FOR UPDATING TASKS ONLY #################################### #################################### FOR UPDATING TASKS ONLY ####################################
## Old Task ID ## ## Old Task ID ##
task_id: "48h3f4r3AR7MdgCMkET4v3yh7PpPHuqGDWzqgH52rny1" task_id: "48h3f4r3AR7MdgCMkET4v3yh7PpPHuqGDWzqgH52rny1"
## Migration Description ## ## Migration Description ##
migrationDescription: "Fix audit bug" migrationDescription: "Fix audit bug"

View File

@@ -1,7 +1,6 @@
export default {
  transform: { "^.+\\.tsx?$": "babel-jest" },
  transformIgnorePatterns: ["/node_modules/(?!@babel/runtime)"],
  moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"],
  testEnvironment: "node",
};

View File

@@ -21,7 +21,6 @@ export async function task(roundNumber: number): Promise<void> {
  // FORCE TO PAUSE 30 SECONDS
  // No submission on Round 0 so no need to trigger fetch audit result before round 3
  // Changed from 3 to 4 to have more time
  // if (roundNumber >= 4) {
  //   const auditRound = roundNumber - 4;
  //   const response = await fetch(`${middleServerUrl}/summarizer/worker/update-audit-result`, {
@@ -32,4 +31,4 @@ export async function task(roundNumber: number): Promise<void> {
  //   console.log(`[TASK] Fetched audit result for round ${auditRound}. Status: ${response.status}`);
  // }
  // console.log(`[TASK] EXECUTE TASK FOR ROUND ${roundNumber}`);
}

View File

@@ -24,7 +24,7 @@ export async function submission(roundNumber: number): Promise<string | void> {
   * The default implementation handles uploading the proofs to IPFS
   * and returning the CID
   */
  if (!(await preRunCheck(roundNumber.toString()))) {
    return;
  }
  const stakingKeypair = await namespaceWrapper.getSubmitterAccount();
@@ -34,14 +34,14 @@ export async function submission(roundNumber: number): Promise<string | void> {
    throw new Error("No staking keypair or public key found");
  }
  const stakingKey = stakingKeypair.publicKey.toBase58();
  const secretKey = stakingKeypair.secretKey;
  console.log(`[SUBMISSION] Starting submission process for round ${roundNumber}`);
  try {
    const orcaClient = await initializeOrcaClient();
    const shouldMakeSubmission = await namespaceWrapper.storeGet(`shouldMakeSubmission`);
    if (!shouldMakeSubmission || shouldMakeSubmission !== "true") {
      return;
    }
@@ -51,7 +51,7 @@ export async function submission(roundNumber: number): Promise<string | void> {
      roundNumber,
      stakingKey,
      publicKey: pubKey,
      secretKey,
    });
    return cid || void 0;
@@ -64,19 +64,19 @@ export async function submission(roundNumber: number): Promise<string | void> {
async function initializeOrcaClient() {
  console.log("[SUBMISSION] Initializing Orca client...");
  const orcaClient = await getOrcaClient();
  if (!orcaClient) {
    console.error("[SUBMISSION] Failed to initialize Orca client");
    throw new Error("Failed to initialize Orca client");
  }
  console.log("[SUBMISSION] Orca client initialized successfully");
  return orcaClient;
}
async function makeSubmission(params: SubmissionParams): Promise<string | void> {
  const { orcaClient, roundNumber, stakingKey, publicKey, secretKey } = params;
  const swarmBountyId = await namespaceWrapper.storeGet(`swarmBountyId`);
  if (!swarmBountyId) {
    console.log("[SUBMISSION] No swarm bounty id found for this round");
@@ -94,35 +94,36 @@ async function makeSubmission(params: SubmissionParams): Promise<string | void>
    prUrl: submissionData.prUrl,
    stakingKey,
    publicKey,
    secretKey,
  });
  const signature = await signSubmissionPayload(
    {
      taskId: TASK_ID,
      roundNumber,
      stakingKey,
      pubKey: publicKey,
      ...submissionData,
    },
    secretKey,
  );
  const cid = await storeSubmissionOnIPFS(signature);
  await cleanupSubmissionState();
  return cid;
}
async function fetchSubmissionData(orcaClient: any, swarmBountyId: string): Promise<SubmissionData | null> {
  console.log(`[SUBMISSION] Fetching submission data for swarm bounty ${swarmBountyId}`);
  const result = await orcaClient.podCall(`submission/${swarmBountyId}`);
  if (!result || result.data === "No submission") {
    console.log("[SUBMISSION] No existing submission found");
    return null;
  }
  const submission = typeof result.data === "object" && "data" in result.data ? result.data.data : result.data;
  if (!submission?.prUrl) {
    throw new Error("Submission is missing PR URL");
@@ -140,7 +141,7 @@ async function notifyMiddleServer(params: {
  secretKey: Uint8Array<ArrayBufferLike>;
}) {
  const { taskId, swarmBountyId, prUrl, stakingKey, publicKey, secretKey } = params;
  const payload = {
    taskId,
    swarmBountyId,
@@ -184,4 +185,4 @@ async function storeSubmissionOnIPFS(signature: string): Promise<string> {
async function cleanupSubmissionState(): Promise<void> {
  await namespaceWrapper.storeSet(`shouldMakeSubmission`, "false");
  await namespaceWrapper.storeSet(`swarmBountyId`, "");
}
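The payload signed in makeSubmission is only partially visible in this hunk. A rough sketch of its shape, inferred from the spread of submissionData here and from the fields the auditor checks in submissionJSONSignatureDecode further down, could look like the following; the interface name and exact types are assumptions, not part of the repository:

```ts
// Assumed shape of the signed submission payload; field list inferred from the
// audit-side checks (taskId, roundNumber, stakingKey, pubKey) plus prUrl.
interface SignedSubmissionPayload {
  taskId: string;      // compared against TASK_ID during audit
  roundNumber: number; // must match the audited round
  stakingKey: string;  // submitter's staking public key
  pubKey: string;      // main account public key
  prUrl: string;       // pull request produced by the documentation agent
}
```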

View File

@@ -82,4 +82,4 @@ export async function audit(cid: string, roundNumber: number, submitterKey: stri
    // When Error---NO RETURN;
    // return true;
  }
}

View File

@@ -14,7 +14,6 @@ import { middleServerUrl, status } from "../utils/constant";
//Example route
export async function routes() {
  app.get("/value", async (_req, res) => {
    const value = await namespaceWrapper.storeGet("value");
    console.log("value", value);
@@ -64,7 +63,7 @@ export async function routes() {
    const message = req.body.message;
    console.log("[TASK] req.body", req.body);
    try {
      if (!success) {
        console.error("[TASK] Error summarizing repository:", message);
        return;
      }
@@ -128,7 +127,6 @@ export async function routes() {
  });
}
// TODO: To be completed
app.post("/failed-task", async (req, res) => {
  res.status(200).json({ result: "Successfully saved task result" });

View File

@ -1,36 +1,35 @@
export function isValidAnthropicApiKey(key: string) { export function isValidAnthropicApiKey(key: string) {
const regex = /^sk-ant-[a-zA-Z0-9_-]{32,}$/; const regex = /^sk-ant-[a-zA-Z0-9_-]{32,}$/;
return regex.test(key); return regex.test(key);
} }
export async function checkAnthropicAPIKey(apiKey: string) { export async function checkAnthropicAPIKey(apiKey: string) {
const response = await fetch('https://api.anthropic.com/v1/messages', { const response = await fetch("https://api.anthropic.com/v1/messages", {
method: 'POST', method: "POST",
headers: { headers: {
'x-api-key': apiKey, "x-api-key": apiKey,
'anthropic-version': '2023-06-01', "anthropic-version": "2023-06-01",
'content-type': 'application/json', "content-type": "application/json",
}, },
body: JSON.stringify({ body: JSON.stringify({
model: 'claude-3-opus-20240229', // or a cheaper model model: "claude-3-opus-20240229", // or a cheaper model
max_tokens: 1, // minimal usage max_tokens: 1, // minimal usage
messages: [{ role: 'user', content: 'Hi' }], messages: [{ role: "user", content: "Hi" }],
}), }),
}); });
if (response.status === 200) { if (response.status === 200) {
console.log('✅ API key is valid and has credit.'); console.log("✅ API key is valid and has credit.");
return true; return true;
} else { } else {
const data = await response.json().catch(() => ({})); const data = await response.json().catch(() => ({}));
if (response.status === 401) { if (response.status === 401) {
console.log('❌ Invalid API key.'); console.log("❌ Invalid API key.");
} else if (response.status === 403 && data.error?.message?.includes('billing')) { } else if (response.status === 403 && data.error?.message?.includes("billing")) {
console.log('❌ API key has no credit or is not authorized.'); console.log("❌ API key has no credit or is not authorized.");
} else { } else {
console.log('⚠️ Unexpected error:', data); console.log("⚠️ Unexpected error:", data);
} }
return false; return false;
} }
} }
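A minimal usage sketch for the two helpers above, assuming they are exported from this module (the checks file below imports them from "./anthropicCheck") and that ANTHROPIC_API_KEY is set in the environment:

```ts
import { isValidAnthropicApiKey, checkAnthropicAPIKey } from "./anthropicCheck";

// Cheap format check first, then a 1-token live call to confirm the key has credit.
async function verifyAnthropicKey(): Promise<boolean> {
  const key = process.env.ANTHROPIC_API_KEY ?? "";
  if (!isValidAnthropicApiKey(key)) return false;
  return checkAnthropicAPIKey(key);
}
```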

View File

@@ -5,8 +5,8 @@ import { LogLevel } from "@_koii/namespace-wrapper/dist/types";
import { errorMessage, actionMessage, status } from "../constant";
import { checkAnthropicAPIKey } from "./anthropicCheck";
import { checkGitHub } from "./githubCheck";
export async function preRunCheck(roundNumber: string) {
  if (!process.env.ANTHROPIC_API_KEY) {
    await namespaceWrapper.logMessage(
      LogLevel.Error,
      errorMessage.ANTHROPIC_API_KEY_INVALID,
@@ -54,4 +54,4 @@ if (!process.env.ANTHROPIC_API_KEY) {
    return false;
  }
  return true;
}

View File

@@ -1,36 +1,36 @@
export async function checkGitHub(username: string, token: string) {
  // 1. Check username
  const userRes = await fetch(`https://api.github.com/users/${username}`);
  const isUsernameValid = userRes.status === 200;

  // 2. Check token
  const tokenRes = await fetch("https://api.github.com/user", {
    headers: {
      Authorization: `token ${token}`,
    },
  });
  const isTokenValid = tokenRes.status === 200;

  const isIdentityValid = await checkGitHubIdentity(username, token);

  return isIdentityValid && isUsernameValid && isTokenValid;
}

async function checkGitHubIdentity(username: string, token: string) {
  const res = await fetch("https://api.github.com/user", {
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/vnd.github.v3+json",
    },
  });

  if (res.status !== 200) {
    return false;
  }

  const data = await res.json();

  if (data.login.toLowerCase() !== username.toLowerCase()) {
    return false;
  }

  return true;
}

View File

@@ -59,4 +59,4 @@ export const defaultBountyMarkdownFile =
export const customReward = 400 * 10 ** 9; // This should be in ROE!
export const middleServerUrl = "https://builder247-prod.dev.koii.network";

View File

@@ -12,90 +12,90 @@ interface BountyIssue {
}

export async function getExistingIssues(): Promise<BountyIssue[]> {
  try {
    // read from the bounty markdown file
    // console.log('Fetching markdown file from:', defaultBountyMarkdownFile);
    const bountyMarkdownFile = await fetch(defaultBountyMarkdownFile);
    const bountyMarkdownFileText = await bountyMarkdownFile.text();

    // console.log('Raw markdown content:', bountyMarkdownFileText);

    const bountyMarkdownFileLines = bountyMarkdownFileText.split("\n");
    // console.log('Number of lines:', bountyMarkdownFileLines.length);
    const issues: BountyIssue[] = [];
    let isTableStarted = false;
    for (const line of bountyMarkdownFileLines) {
      // Skip empty lines
      if (line.trim() === "") {
        // console.log('Skipping empty line');
        continue;
      }
      // console.log('Processing line:', line);
      // Skip the title line starting with #
      if (line.startsWith("#")) {
        // console.log('Found title line:', line);
        continue;
      }
      // Skip the header and separator lines
      if (line.startsWith("|") && line.includes("GitHub URL")) {
        //console.log('Found header line');
        continue;
      }
      if (line.startsWith("|") && line.includes("-----")) {
        // console.log('Found separator line');
        continue;
      }
      // Process table rows
      if (line.startsWith("|")) {
        isTableStarted = true;
        // Remove first and last | and split by |
        const cells = line
          .slice(1, -1)
          .split("|")
          .map((cell) => cell.trim());
        // console.log('Parsed cells:', cells);
        // Extract GitHub URL and name from markdown link format [name](url)
        const githubUrlMatch = cells[0].match(/\[(.*?)\]\((.*?)\)/);
        // console.log('GitHub URL match:', githubUrlMatch);
        const projectName = githubUrlMatch ? githubUrlMatch[1] : "";
        const githubUrl = githubUrlMatch ? githubUrlMatch[2] : "";
        const issue: BountyIssue = {
          githubUrl,
          projectName,
          bountyTask: cells[1],
          description: cells[3],
          bountyAmount: cells[4],
          bountyType: cells[5],
          transactionHash: cells[6],
          status: cells[7],
        };
        // console.log('Created issue object:', issue);
        issues.push(issue);
      }
    }
    // Filter all issues with status "Initialized" && Bounty Task is Document & Summarize
    console.log("Final parsed issues number:", issues.length);
    return issues;
  } catch (error) {
    // console.error('Error processing markdown:', error);
    throw error;
  }
}

export async function getInitializedDocumentSummarizeIssues(issues: BountyIssue[]) {
  return issues.filter((issue) => issue.status === "Initialized" && issue.bountyTask === "Document & Summarize");
}

// async function main(){
//   const existingIssues = await getExistingIssues();
//   const transactionHashs = [
@@ -146,7 +146,7 @@ export async function getInitializedDocumentSummarizeIssues(issues: BountyIssue[
//   if (initializedDocumentSummarizeIssues.length == 0) {
//     console.log("No issues pending to be summarized");
//     return;
//   }
//   console.log("Initialized Document & Summarize issues number:", initializedDocumentSummarizeIssues.length);
// }
// async function main() {
@@ -158,4 +158,4 @@ export async function getInitializedDocumentSummarizeIssues(issues: BountyIssue[
//   }
// }
// main();
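The two exports above are typically used together; a minimal sketch is below. The module path in the import and the sample table row in the comment are guesses for illustration, since neither the file name nor the bounty markdown itself is visible in this diff.

```ts
// Sketch of how the two helpers compose. The sample row only illustrates the
// column order the parser assumes: cells[0] holds the [name](url) link, cells[1]
// the bounty task, cells[3] to cells[7] hold description, amount, type,
// transaction hash and status, and cells[2] is skipped.
//
// | [my-project](https://github.com/org/my-project) | Document & Summarize | ... | A short description | 400 | KOII | 0xabc... | Initialized |
import { getExistingIssues, getInitializedDocumentSummarizeIssues } from "./existingIssues";

async function pendingSummarizeIssues() {
  const issues = await getExistingIssues();
  return getInitializedDocumentSummarizeIssues(issues);
}
```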

View File

@@ -140,13 +140,13 @@ export async function getRandomNodes(roundNumber: number, numberOfNodes: number)
  const lastRoundSubmissions = lastRoundSubmission.submissions;
  console.log("Last round submissions:", lastRoundSubmissions);
  // Get the last round number
  const lastRound = Object.keys(lastRoundSubmissions).pop();
  if (!lastRound) {
    return [];
  }
  // Get the submissions for that round
  const submissions = lastRoundSubmissions[lastRound];
  console.log("Submissions:", submissions);
@@ -156,12 +156,12 @@ export async function getRandomNodes(roundNumber: number, numberOfNodes: number)
  if (availableKeys.length <= numberOfNodes) {
    return availableKeys;
  }
  const seed = TASK_ID + roundNumber.toString() || "default" + roundNumber;
  const rng = seedrandom(seed);
  // Use the keys from the submissions object
  const randomKeys = availableKeys.sort(() => rng() - 0.5).slice(0, numberOfNodes);
  console.log("Random keys:", randomKeys);
  return randomKeys;
}
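Because the RNG is seeded with TASK_ID plus the round number, every node that evaluates the same round derives the same subset of keys. A small sketch of that property, using the same seedrandom package imported in this file and illustrative keys that are not from the repository:

```ts
import seedrandom from "seedrandom";

const availableKeys = ["keyA", "keyB", "keyC", "keyD"];

// Deterministic shuffle-and-slice, mirroring the selection logic above.
const pick = (taskId: string, round: number, n: number) => {
  const rng = seedrandom(taskId + round.toString());
  return [...availableKeys].sort(() => rng() - 0.5).slice(0, n);
};

console.log(pick("demo-task", 7, 2)); // same output on every node for round 7
```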

View File

@@ -3,38 +3,45 @@ import { getFile } from "./ipfs";
import { Submission } from "@_koii/namespace-wrapper/dist/types";
import { Submitter } from "@_koii/task-manager/dist/types/global";
import { namespaceWrapper } from "@_koii/namespace-wrapper";

export async function submissionJSONSignatureDecode({
  submission_value,
  submitterPublicKey,
  roundNumber,
}: {
  submission_value: string;
  submitterPublicKey: string;
  roundNumber: number;
}) {
  let submissionString;
  try {
    console.log("Getting file from IPFS", submission_value);
    submissionString = await getFile(submission_value);
    console.log("submissionString", submissionString);
  } catch (error) {
    console.log("error", error);
    console.error("INVALID SIGNATURE DATA");
    return null;
  }
  // verify the signature of the submission
  const submission = JSON.parse(submissionString);
  console.log("submission", submission);
  const signaturePayload = await namespaceWrapper.verifySignature(submission.signature, submitterPublicKey);
  if (!signaturePayload.data) {
    console.error("INVALID SIGNATURE");
    return null;
  }
  const data = JSON.parse(signaturePayload.data);
  console.log("signaturePayload", signaturePayload);
  console.log("data", data);
  if (
    data.taskId !== TASK_ID ||
    data.roundNumber !== roundNumber ||
    data.stakingKey !== submitterPublicKey ||
    !data.pubKey ||
    !data.prUrl
  ) {
    console.error("INVALID SIGNATURE DATA");
    return null;
  }
  return data;
}

View File

@@ -5,105 +5,107 @@ import { actionMessage, errorMessage, middleServerUrl } from "../constant";
import { TASK_ID, namespaceWrapper } from "@_koii/namespace-wrapper";
import { LogLevel } from "@_koii/namespace-wrapper/dist/types";

export async function task() {
  while (true) {
    try {
      let requiredWorkResponse;
      const orcaClient = await getOrcaClient();
      // check if the env variable is valid
      const stakingKeypair = await namespaceWrapper.getSubmitterAccount()!;
      const pubKey = await namespaceWrapper.getMainAccountPubkey();
      if (!orcaClient || !stakingKeypair || !pubKey) {
        await namespaceWrapper.logMessage(LogLevel.Error, errorMessage.NO_ORCA_CLIENT, actionMessage.NO_ORCA_CLIENT);
        // Wait for 1 minute before retrying
        await new Promise((resolve) => setTimeout(resolve, 60000));
        continue;
      }
      const stakingKey = stakingKeypair.publicKey.toBase58();
      /****************** All these issues need to be generate a markdown file ******************/
      const signature = await namespaceWrapper.payloadSigning(
        {
          taskId: TASK_ID,
          // roundNumber: roundNumber,
          action: "fetch-todo",
          githubUsername: stakingKey,
          stakingKey: stakingKey,
        },
        stakingKeypair.secretKey,
      );

      const retryDelay = 10000; // 10 seconds in milliseconds

      while (true) {
        requiredWorkResponse = await fetch(`${middleServerUrl}/summarizer/worker/fetch-todo`, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({ signature: signature, stakingKey: stakingKey }),
        });

        if (requiredWorkResponse.status === 200) {
          break;
        }

        console.log(
          `[TASK] Server returned status ${requiredWorkResponse.status}, retrying in ${retryDelay / 1000} seconds...`,
        );
        await new Promise((resolve) => setTimeout(resolve, retryDelay));
      }
      // check if the response is 200 after all retries
      if (!requiredWorkResponse || requiredWorkResponse.status !== 200) {
        // await namespaceWrapper.storeSet(`result-${roundNumber}`, status.NO_ISSUES_PENDING_TO_BE_SUMMARIZED);
        return;
      }
      const requiredWorkResponseData = await requiredWorkResponse.json();
      console.log("[TASK] requiredWorkResponseData: ", requiredWorkResponseData);
      // const uuid = uuidv4();
      const alreadyAssigned = await namespaceWrapper.storeGet(JSON.stringify(requiredWorkResponseData.data.id));
      if (alreadyAssigned) {
        return;
      } else {
        await namespaceWrapper.storeSet(JSON.stringify(requiredWorkResponseData.data.id), "initialized");
      }

      const podcallPayload = {
        taskId: TASK_ID,
      };

      const podCallSignature = await namespaceWrapper.payloadSigning(podcallPayload, stakingKeypair.secretKey);

      const jsonBody = {
        task_id: TASK_ID,
        swarmBountyId: requiredWorkResponseData.data.id,
        repo_url: `https://github.com/${requiredWorkResponseData.data.repo_owner}/${requiredWorkResponseData.data.repo_name}`,
        podcall_signature: podCallSignature,
      };
      console.log("[TASK] jsonBody: ", jsonBody);
      try {
        const repoSummaryResponse = await orcaClient.podCall(`worker-task`, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify(jsonBody),
        });
        console.log("[TASK] repoSummaryResponse: ", repoSummaryResponse);
        if (repoSummaryResponse.status !== 200) {
          // await namespaceWrapper.storeSet(`result-${roundNumber}`, status.ISSUE_SUMMARIZATION_FAILED);
        }
      } catch (error) {
        // await namespaceWrapper.storeSet(`result-${roundNumber}`, status.ISSUE_SUMMARIZATION_FAILED);
        console.error("[TASK] EXECUTE TASK ERROR:", error);
      }
    } catch (error) {
      console.error("[TASK] EXECUTE TASK ERROR:", error);
      // Wait for 1 minute before retrying on error
      await new Promise((resolve) => setTimeout(resolve, 60000));
    }
    // Wait for 1 minute before starting the next iteration
    await new Promise((resolve) => setTimeout(resolve, 60000));
  }
}

View File

@@ -19,11 +19,13 @@ tests/
## Prerequisites

1. Install the test framework:

```bash
pip install -e test-framework/
```

2. Set up environment variables in `.env`:

```
ANTHROPIC_API_KEY=your_test_key
GITHUB_USERNAME=your_test_username
@@ -47,12 +49,15 @@ python -m tests.e2e --reset
## Test Flow

1. API Key Validation
   - Validates Anthropic API key
2. GitHub Validation
   - Validates GitHub credentials
3. Todo Management
   - Fetches todos for each worker
   - Generates summaries
   - Submits results

View File

@@ -1,12 +1,8 @@
import "dotenv/config";

export const TASK_ID = process.env.TASK_ID || "BXbYKFdXZhQgEaMFbeShaisQBYG1FD4MiSf9gg4n6mVn";
export const WEBPACKED_FILE_PATH = process.env.WEBPACKED_FILE_PATH || "../dist/main.js";

const envKeywords = process.env.TEST_KEYWORDS ?? "";

export const TEST_KEYWORDS = envKeywords ? envKeywords.split(",") : ["TEST", "EZ TESTING"];

View File

@@ -1,188 +1,188 @@
import { initializeTaskManager, taskRunner } from "@_koii/task-manager";
import { setup } from "../src/task/0-setup";
import { task } from "../src/task/1-task";
import { submission } from "../src/task/2-submission";
import { audit } from "../src/task/3-audit";
import { distribution } from "../src/task/4-distribution";
import { routes } from "../src/task/5-routes";
import { namespaceWrapper, _server } from "@_koii/task-manager/namespace-wrapper";
import Joi from "joi";
import axios from "axios";
import { Submitter } from "@_koii/task-manager";

beforeAll(async () => {
  await namespaceWrapper.defaultTaskSetup();
  initializeTaskManager({
    setup,
    task,
    submission,
    audit,
    distribution,
    routes,
  });
});

describe("Performing the task", () => {
  it("should performs the core logic task", async () => {
    const round = 1;
    await taskRunner.task(round);
    const value = await namespaceWrapper.storeGet("value");
    expect(value).toBeDefined();
    expect(value).not.toBeNull();
  });

  it("should make the submission to k2 for dummy round 1", async () => {
    const round = 1;
    await taskRunner.submitTask(round);
    const taskState = await namespaceWrapper.getTaskState({});
    const schema = Joi.object()
      .pattern(
        Joi.string(),
        Joi.object().pattern(
          Joi.string(),
          Joi.object({
            submission_value: Joi.string().required(),
            slot: Joi.number().integer().required(),
            round: Joi.number().integer().required(),
          }),
        ),
      )
      .required()
      .min(1);
    const validationResult = schema.validate(taskState?.submissions);
    try {
      expect(validationResult.error).toBeUndefined();
    } catch (e) {
      throw new Error("Submission doesn't exist or is incorrect");
    }
  });

  it("should make an audit on submission", async () => {
    const round = 1;
    await taskRunner.auditTask(round);
    const taskState = await namespaceWrapper.getTaskState({});
    console.log("TASK STATE", taskState);
    console.log("audit task", taskState?.submissions_audit_trigger);
    const schema = Joi.object()
      .pattern(
        Joi.string(),
        Joi.object().pattern(
          Joi.string(),
          Joi.object({
            trigger_by: Joi.string().required(),
            slot: Joi.number().integer().required(),
            votes: Joi.array().required(),
          }),
        ),
      )
      .required();
    const validationResult = schema.validate(taskState?.submissions_audit_trigger);
    try {
      expect(validationResult.error).toBeUndefined();
    } catch (e) {
      throw new Error("Submission audit is incorrect");
    }
  });

  it("should make the distribution submission to k2 for dummy round 1", async () => {
    const round = 1;
    await taskRunner.submitDistributionList(round);
    const taskState = await namespaceWrapper.getTaskState({});
    const schema = Joi.object()
      .pattern(
        Joi.string(),
        Joi.object().pattern(
          Joi.string(),
          Joi.object({
            submission_value: Joi.string().required(),
            slot: Joi.number().integer().required(),
            round: Joi.number().integer().required(),
}), }),
), ),
) )
.required() .required()
.min(1); .min(1);
console.log("Distribution submission", taskState?.distribution_rewards_submission); console.log("Distribution submission", taskState?.distribution_rewards_submission);
const validationResult = schema.validate(taskState?.distribution_rewards_submission); const validationResult = schema.validate(taskState?.distribution_rewards_submission);
try { try {
expect(validationResult.error).toBeUndefined(); expect(validationResult.error).toBeUndefined();
} catch (e) { } catch (e) {
throw new Error("Distribution submission doesn't exist or is incorrect"); throw new Error("Distribution submission doesn't exist or is incorrect");
} }
}); });
it("should make an audit on distribution submission", async () => { it("should make an audit on distribution submission", async () => {
const round = 1; const round = 1;
await taskRunner.auditDistribution(round); await taskRunner.auditDistribution(round);
const taskState = await namespaceWrapper.getTaskState({}); const taskState = await namespaceWrapper.getTaskState({});
console.log("audit task", taskState?.distributions_audit_trigger); console.log("audit task", taskState?.distributions_audit_trigger);
const schema = Joi.object() const schema = Joi.object()
.pattern( .pattern(
Joi.string(), Joi.string(),
Joi.object().pattern( Joi.object().pattern(
Joi.string(), Joi.string(),
Joi.object({ Joi.object({
trigger_by: Joi.string().required(), trigger_by: Joi.string().required(),
slot: Joi.number().integer().required(), slot: Joi.number().integer().required(),
votes: Joi.array().required(), votes: Joi.array().required(),
}), }),
), ),
) )
.required(); .required();
const validationResult = schema.validate(taskState?.distributions_audit_trigger); const validationResult = schema.validate(taskState?.distributions_audit_trigger);
try { try {
expect(validationResult.error).toBeUndefined(); expect(validationResult.error).toBeUndefined();
} catch (e) { } catch (e) {
throw new Error("Distribution audit is incorrect"); throw new Error("Distribution audit is incorrect");
} }
}); });
it("should make sure the submitted distribution list is valid", async () => { it("should make sure the submitted distribution list is valid", async () => {
const round = 1; const round = 1;
const distributionList = await namespaceWrapper.getDistributionList("", round); const distributionList = await namespaceWrapper.getDistributionList("", round);
console.log("Generated distribution List", JSON.parse(distributionList.toString())); console.log("Generated distribution List", JSON.parse(distributionList.toString()));
const schema = Joi.object().pattern(Joi.string().required(), Joi.number().integer().required()).required(); const schema = Joi.object().pattern(Joi.string().required(), Joi.number().integer().required()).required();
const validationResult = schema.validate(JSON.parse(distributionList.toString())); const validationResult = schema.validate(JSON.parse(distributionList.toString()));
console.log(validationResult); console.log(validationResult);
try { try {
expect(validationResult.error).toBeUndefined(); expect(validationResult.error).toBeUndefined();
} catch (e) { } catch (e) {
throw new Error("Submitted distribution list is not valid"); throw new Error("Submitted distribution list is not valid");
} }
}); });
it("should test the endpoint", async () => { it("should test the endpoint", async () => {
const response = await axios.get("http://localhost:3000"); const response = await axios.get("http://localhost:3000");
expect(response.status).toBe(200); expect(response.status).toBe(200);
expect(response.data).toEqual({ message: "Running", status: 200 }); expect(response.data).toEqual({ message: "Running", status: 200 });
}); });
it("should generate a empty distribution list when submission is 0", async () => { it("should generate a empty distribution list when submission is 0", async () => {
const submitters: Submitter[] = []; const submitters: Submitter[] = [];
const bounty = Math.floor(Math.random() * 1e15) + 1; const bounty = Math.floor(Math.random() * 1e15) + 1;
const roundNumber = Math.floor(Math.random() * 1e5) + 1; const roundNumber = Math.floor(Math.random() * 1e5) + 1;
const distributionList = await distribution(submitters, bounty, roundNumber); const distributionList = await distribution(submitters, bounty, roundNumber);
expect(distributionList).toEqual({}); expect(distributionList).toEqual({});
}); });
it("should generate a distribution list contains all the submitters", async () => { it("should generate a distribution list contains all the submitters", async () => {
const simulatedSubmitters = 5; const simulatedSubmitters = 5;
const submitters: Submitter[] = []; const submitters: Submitter[] = [];
// 10k is the rough maximum number of submitters // 10k is the rough maximum number of submitters
for (let i = 0; i < simulatedSubmitters; i++) { for (let i = 0; i < simulatedSubmitters; i++) {
const publicKey = `mockPublicKey${i}`; const publicKey = `mockPublicKey${i}`;
submitters.push({ submitters.push({
publicKey, publicKey,
votes: Math.floor(Math.random() * simulatedSubmitters) - 5000, votes: Math.floor(Math.random() * simulatedSubmitters) - 5000,
stake: Math.floor(Math.random() * 1e9) + 1, stake: Math.floor(Math.random() * 1e9) + 1,
}); });
} }
const bounty = Math.floor(Math.random() * 1e15) + 1; const bounty = Math.floor(Math.random() * 1e15) + 1;
const roundNumber = 1; const roundNumber = 1;
const distributionList = await distribution(submitters, bounty, roundNumber); const distributionList = await distribution(submitters, bounty, roundNumber);
expect(Object.keys(distributionList).length).toBe(submitters.length); expect(Object.keys(distributionList).length).toBe(submitters.length);
expect(Object.keys(distributionList).sort()).toEqual(submitters.map((submitter) => submitter.publicKey).sort()); expect(Object.keys(distributionList).sort()).toEqual(submitters.map((submitter) => submitter.publicKey).sort());
}); });
}); });
afterAll(async () => { afterAll(async () => {
_server.close(); _server.close();
}); });


@ -1,84 +1,84 @@
import { taskRunner } from "@_koii/task-manager"; import { taskRunner } from "@_koii/task-manager";
import "../src/index.js"; import "../src/index.js";
import { namespaceWrapper } from "@_koii/task-manager/namespace-wrapper"; import { namespaceWrapper } from "@_koii/task-manager/namespace-wrapper";
import { Keypair } from "@_koii/web3.js"; import { Keypair } from "@_koii/web3.js";
const numRounds = parseInt(process.argv[2]) || 1; const numRounds = parseInt(process.argv[2]) || 1;
const roundDelay = parseInt(process.argv[3]) || 5000; const roundDelay = parseInt(process.argv[3]) || 5000;
const functionDelay = parseInt(process.argv[4]) || 1000; const functionDelay = parseInt(process.argv[4]) || 1000;
let TASK_TIMES: number[] = []; let TASK_TIMES: number[] = [];
let SUBMISSION_TIMES: number[] = []; let SUBMISSION_TIMES: number[] = [];
let AUDIT_TIMES: number[] = []; let AUDIT_TIMES: number[] = [];
function sleep(ms: number) { function sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms)); return new Promise((resolve) => setTimeout(resolve, ms));
} }
async function executeTasks() { async function executeTasks() {
const keypair = Keypair.generate(); const keypair = Keypair.generate();
await namespaceWrapper.stakeOnChain(keypair.publicKey, keypair, keypair.publicKey, 10000); await namespaceWrapper.stakeOnChain(keypair.publicKey, keypair, keypair.publicKey, 10000);
for (let round = 0; round < numRounds; round++) { for (let round = 0; round < numRounds; round++) {
const taskStartTime = Date.now(); const taskStartTime = Date.now();
await taskRunner.task(round); await taskRunner.task(round);
const taskEndTime = Date.now(); const taskEndTime = Date.now();
TASK_TIMES.push(taskEndTime - taskStartTime); TASK_TIMES.push(taskEndTime - taskStartTime);
await sleep(functionDelay); await sleep(functionDelay);
const taskSubmissionStartTime = Date.now(); const taskSubmissionStartTime = Date.now();
await taskRunner.submitTask(round); await taskRunner.submitTask(round);
const taskSubmissionEndTime = Date.now(); const taskSubmissionEndTime = Date.now();
SUBMISSION_TIMES.push(taskSubmissionEndTime - taskSubmissionStartTime); SUBMISSION_TIMES.push(taskSubmissionEndTime - taskSubmissionStartTime);
await sleep(functionDelay); await sleep(functionDelay);
const auditStartTime = Date.now(); const auditStartTime = Date.now();
await taskRunner.auditTask(round); await taskRunner.auditTask(round);
const auditEndTime = Date.now(); const auditEndTime = Date.now();
AUDIT_TIMES.push(auditEndTime - auditStartTime); AUDIT_TIMES.push(auditEndTime - auditStartTime);
await sleep(functionDelay); await sleep(functionDelay);
await taskRunner.selectAndGenerateDistributionList(round); await taskRunner.selectAndGenerateDistributionList(round);
await sleep(functionDelay); await sleep(functionDelay);
await taskRunner.auditDistribution(round); await taskRunner.auditDistribution(round);
if (round < numRounds - 1) { if (round < numRounds - 1) {
await sleep(roundDelay); await sleep(roundDelay);
} }
} }
console.log("TIME METRICS BELOW"); console.log("TIME METRICS BELOW");
function metrics(name: string, times: number[]) { function metrics(name: string, times: number[]) {
const average = (arr: number[]) => arr.reduce((a, b) => a + b, 0) / arr.length; const average = (arr: number[]) => arr.reduce((a, b) => a + b, 0) / arr.length;
const formatTime = (ms: number) => (ms / 1000).toFixed(4); const formatTime = (ms: number) => (ms / 1000).toFixed(4);
const formatSlot = (ms: number) => Math.ceil(ms / 408); const formatSlot = (ms: number) => Math.ceil(ms / 408);
const min = Math.min(...times); const min = Math.min(...times);
const max = Math.max(...times); const max = Math.max(...times);
const avg = average(times); const avg = average(times);
const timeMin = formatTime(min); const timeMin = formatTime(min);
const timeMax = formatTime(max); const timeMax = formatTime(max);
const timeAvg = formatTime(avg); const timeAvg = formatTime(avg);
const slotMin = formatSlot(min); const slotMin = formatSlot(min);
const slotMax = formatSlot(max); const slotMax = formatSlot(max);
const slotAvg = formatSlot(avg); const slotAvg = formatSlot(avg);
return { return {
Metric: `SIMULATED ${name} WINDOW`, Metric: `SIMULATED ${name} WINDOW`,
"Avg Time (s)": timeAvg, "Avg Time (s)": timeAvg,
"Avg Slots": slotAvg, "Avg Slots": slotAvg,
"Min Time (s)": timeMin, "Min Time (s)": timeMin,
"Min Slots": slotMin, "Min Slots": slotMin,
"Max Time (s)": timeMax, "Max Time (s)": timeMax,
"Max Slots": slotMax, "Max Slots": slotMax,
}; };
} }
const timeMetrics = metrics("TASK", TASK_TIMES); const timeMetrics = metrics("TASK", TASK_TIMES);
const submissionMetrics = metrics("SUBMISSION", SUBMISSION_TIMES); const submissionMetrics = metrics("SUBMISSION", SUBMISSION_TIMES);
const auditMetrics = metrics("AUDIT", AUDIT_TIMES); const auditMetrics = metrics("AUDIT", AUDIT_TIMES);
console.table([timeMetrics, submissionMetrics, auditMetrics]); console.table([timeMetrics, submissionMetrics, auditMetrics]);
console.log("All tasks executed. Test completed."); console.log("All tasks executed. Test completed.");
process.exit(0); process.exit(0);
} }
setTimeout(executeTasks, 1500); setTimeout(executeTasks, 1500);


@ -4,7 +4,7 @@
// headers: { // headers: {
// "Content-Type": "application/json", // "Content-Type": "application/json",
// }, // },
// body: JSON.stringify({ // body: JSON.stringify({
// text: `[TASK] Error summarizing issue:\n ${JSON.stringify({ // text: `[TASK] Error summarizing issue:\n ${JSON.stringify({
// status: "error", // status: "error",
// data: { // data: {
@ -16,4 +16,4 @@
// console.log("[TASK] slackResponse: ", slackResponse); // console.log("[TASK] slackResponse: ", slackResponse);
// } // }
// testSlackWebhook(); // testSlackWebhook();

File diff suppressed because it is too large


@ -1,225 +1,225 @@
/* tslint:disable */ /* tslint:disable */
/* eslint-disable */ /* eslint-disable */
/** /**
* @param {any} val * @param {any} val
* @returns {any} * @returns {any}
*/ */
export function bincode_js_deserialize(val: any): any; export function bincode_js_deserialize(val: any): any;
/** /**
* @param {any} val * @param {any} val
* @returns {any} * @returns {any}
*/ */
export function borsh_bpf_js_deserialize(val: any): any; export function borsh_bpf_js_deserialize(val: any): any;
/** /**
* Initialize Javascript logging and panic handler * Initialize Javascript logging and panic handler
*/ */
export function solana_program_init(): void; export function solana_program_init(): void;
/** /**
* A hash; the 32-byte output of a hashing algorithm. * A hash; the 32-byte output of a hashing algorithm.
* *
* This struct is used most often in `solana-sdk` and related crates to contain * This struct is used most often in `solana-sdk` and related crates to contain
* a [SHA-256] hash, but may instead contain a [blake3] hash, as created by the * a [SHA-256] hash, but may instead contain a [blake3] hash, as created by the
* [`blake3`] module (and used in [`Message::hash`]). * [`blake3`] module (and used in [`Message::hash`]).
* *
* [SHA-256]: https://en.wikipedia.org/wiki/SHA-2 * [SHA-256]: https://en.wikipedia.org/wiki/SHA-2
* [blake3]: https://github.com/BLAKE3-team/BLAKE3 * [blake3]: https://github.com/BLAKE3-team/BLAKE3
* [`blake3`]: crate::blake3 * [`blake3`]: crate::blake3
* [`Message::hash`]: crate::message::Message::hash * [`Message::hash`]: crate::message::Message::hash
*/ */
export class Hash { export class Hash {
free(): void; free(): void;
/** /**
* Create a new Hash object * Create a new Hash object
* *
* * `value` - optional hash as a base58 encoded string, `Uint8Array`, `[number]` * * `value` - optional hash as a base58 encoded string, `Uint8Array`, `[number]`
* @param {any} value * @param {any} value
*/ */
constructor(value: any); constructor(value: any);
/** /**
* Return the base58 string representation of the hash * Return the base58 string representation of the hash
* @returns {string} * @returns {string}
*/ */
toString(): string; toString(): string;
/** /**
* Checks if two `Hash`s are equal * Checks if two `Hash`s are equal
* @param {Hash} other * @param {Hash} other
* @returns {boolean} * @returns {boolean}
*/ */
equals(other: Hash): boolean; equals(other: Hash): boolean;
/** /**
* Return the `Uint8Array` representation of the hash * Return the `Uint8Array` representation of the hash
* @returns {Uint8Array} * @returns {Uint8Array}
*/ */
toBytes(): Uint8Array; toBytes(): Uint8Array;
} }
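// A minimal usage sketch for the Hash binding declared above. The relative
// module path "./solana_program" and the all-ones base58 literal are
// illustrative assumptions, not taken from this repository.
import { Hash } from "./solana_program";

const a = new Hash("11111111111111111111111111111111"); // from a base58 string
const b = new Hash(a.toBytes()); // round-trips through the Uint8Array form
console.log(a.toString()); // base58 representation of the 32-byte value
console.log(a.equals(b)); // true: both wrap the same bytes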
/** /**
* A directive for a single invocation of a Solana program. * A directive for a single invocation of a Solana program.
* *
* An instruction specifies which program it is calling, which accounts it may * An instruction specifies which program it is calling, which accounts it may
* read or modify, and additional data that serves as input to the program. One * read or modify, and additional data that serves as input to the program. One
* or more instructions are included in transactions submitted by Solana * or more instructions are included in transactions submitted by Solana
* clients. Instructions are also used to describe [cross-program * clients. Instructions are also used to describe [cross-program
* invocations][cpi]. * invocations][cpi].
* *
* [cpi]: https://docs.solana.com/developing/programming-model/calling-between-programs * [cpi]: https://docs.solana.com/developing/programming-model/calling-between-programs
* *
* During execution, a program will receive a list of account data as one of * During execution, a program will receive a list of account data as one of
* its arguments, in the same order as specified during `Instruction` * its arguments, in the same order as specified during `Instruction`
* construction. * construction.
* *
* While Solana is agnostic to the format of the instruction data, it has * While Solana is agnostic to the format of the instruction data, it has
* built-in support for serialization via [`borsh`] and [`bincode`]. * built-in support for serialization via [`borsh`] and [`bincode`].
* *
* [`borsh`]: https://docs.rs/borsh/latest/borsh/ * [`borsh`]: https://docs.rs/borsh/latest/borsh/
* [`bincode`]: https://docs.rs/bincode/latest/bincode/ * [`bincode`]: https://docs.rs/bincode/latest/bincode/
* *
* # Specifying account metadata * # Specifying account metadata
* *
* When constructing an [`Instruction`], a list of all accounts that may be * When constructing an [`Instruction`], a list of all accounts that may be
* read or written during the execution of that instruction must be supplied as * read or written during the execution of that instruction must be supplied as
* [`AccountMeta`] values. * [`AccountMeta`] values.
* *
* Any account whose data may be mutated by the program during execution must * Any account whose data may be mutated by the program during execution must
* be specified as writable. During execution, writing to an account that was * be specified as writable. During execution, writing to an account that was
* not specified as writable will cause the transaction to fail. Writing to an * not specified as writable will cause the transaction to fail. Writing to an
* account that is not owned by the program will cause the transaction to fail. * account that is not owned by the program will cause the transaction to fail.
* *
* Any account whose lamport balance may be mutated by the program during * Any account whose lamport balance may be mutated by the program during
* execution must be specified as writable. During execution, mutating the * execution must be specified as writable. During execution, mutating the
* lamports of an account that was not specified as writable will cause the * lamports of an account that was not specified as writable will cause the
* transaction to fail. While _subtracting_ lamports from an account not owned * transaction to fail. While _subtracting_ lamports from an account not owned
* by the program will cause the transaction to fail, _adding_ lamports to any * by the program will cause the transaction to fail, _adding_ lamports to any
 * account is allowed, as long as it is mutable. * account is allowed, as long as it is mutable.
* *
* Accounts that are not read or written by the program may still be specified * Accounts that are not read or written by the program may still be specified
* in an `Instruction`'s account list. These will affect scheduling of program * in an `Instruction`'s account list. These will affect scheduling of program
* execution by the runtime, but will otherwise be ignored. * execution by the runtime, but will otherwise be ignored.
* *
* When building a transaction, the Solana runtime coalesces all accounts used * When building a transaction, the Solana runtime coalesces all accounts used
* by all instructions in that transaction, along with accounts and permissions * by all instructions in that transaction, along with accounts and permissions
* required by the runtime, into a single account list. Some accounts and * required by the runtime, into a single account list. Some accounts and
* account permissions required by the runtime to process a transaction are * account permissions required by the runtime to process a transaction are
* _not_ required to be included in an `Instruction`s account list. These * _not_ required to be included in an `Instruction`s account list. These
* include: * include:
* *
* - The program ID &mdash; it is a separate field of `Instruction` * - The program ID &mdash; it is a separate field of `Instruction`
* - The transaction's fee-paying account &mdash; it is added during [`Message`] * - The transaction's fee-paying account &mdash; it is added during [`Message`]
* construction. A program may still require the fee payer as part of the * construction. A program may still require the fee payer as part of the
* account list if it directly references it. * account list if it directly references it.
* *
* [`Message`]: crate::message::Message * [`Message`]: crate::message::Message
* *
* Programs may require signatures from some accounts, in which case they * Programs may require signatures from some accounts, in which case they
* should be specified as signers during `Instruction` construction. The * should be specified as signers during `Instruction` construction. The
* program must still validate during execution that the account is a signer. * program must still validate during execution that the account is a signer.
*/ */
export class Instruction { export class Instruction {
free(): void; free(): void;
} }
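// The wasm binding above only exposes free(), so to make the account-metadata
// rules in the comment concrete, here is a hedged sketch using the separate
// @solana/web3.js package instead (a deliberate substitution; it models the
// same signer/writable flags described above).
import { Keypair, PublicKey, TransactionInstruction } from "@solana/web3.js";

const programId = new PublicKey("11111111111111111111111111111111"); // system program id
const from = Keypair.generate();
const to = Keypair.generate();

// Both lamport balances may change, so both accounts are writable; only the sender signs.
const ix = new TransactionInstruction({
  programId,
  keys: [
    { pubkey: from.publicKey, isSigner: true, isWritable: true },
    { pubkey: to.publicKey, isSigner: false, isWritable: true },
  ],
  data: Buffer.alloc(0), // instruction-specific payload, left empty for illustration
});
console.log(ix.keys.length); // 2 accounts; the program id is carried separately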
/** /**
*/ */
export class Instructions { export class Instructions {
free(): void; free(): void;
/** /**
*/ */
constructor(); constructor();
/** /**
* @param {Instruction} instruction * @param {Instruction} instruction
*/ */
push(instruction: Instruction): void; push(instruction: Instruction): void;
} }
/** /**
* A Solana transaction message (legacy). * A Solana transaction message (legacy).
* *
* See the [`message`] module documentation for further description. * See the [`message`] module documentation for further description.
* *
* [`message`]: crate::message * [`message`]: crate::message
* *
* Some constructors accept an optional `payer`, the account responsible for * Some constructors accept an optional `payer`, the account responsible for
* paying the cost of executing a transaction. In most cases, callers should * paying the cost of executing a transaction. In most cases, callers should
* specify the payer explicitly in these constructors. In some cases though, * specify the payer explicitly in these constructors. In some cases though,
* the caller is not _required_ to specify the payer, but is still allowed to: * the caller is not _required_ to specify the payer, but is still allowed to:
* in the `Message` structure, the first account is always the fee-payer, so if * in the `Message` structure, the first account is always the fee-payer, so if
* the caller has knowledge that the first account of the constructed * the caller has knowledge that the first account of the constructed
* transaction's `Message` is both a signer and the expected fee-payer, then * transaction's `Message` is both a signer and the expected fee-payer, then
* redundantly specifying the fee-payer is not strictly required. * redundantly specifying the fee-payer is not strictly required.
*/ */
export class Message { export class Message {
free(): void; free(): void;
/** /**
* The id of a recent ledger entry. * The id of a recent ledger entry.
*/ */
recent_blockhash: Hash; recent_blockhash: Hash;
} }
/** /**
* The address of a [Solana account][acc]. * The address of a [Solana account][acc].
* *
* Some account addresses are [ed25519] public keys, with corresponding secret * Some account addresses are [ed25519] public keys, with corresponding secret
* keys that are managed off-chain. Often, though, account addresses do not * keys that are managed off-chain. Often, though, account addresses do not
* have corresponding secret keys &mdash; as with [_program derived * have corresponding secret keys &mdash; as with [_program derived
* addresses_][pdas] &mdash; or the secret key is not relevant to the operation * addresses_][pdas] &mdash; or the secret key is not relevant to the operation
* of a program, and may have even been disposed of. As running Solana programs * of a program, and may have even been disposed of. As running Solana programs
* can not safely create or manage secret keys, the full [`Keypair`] is not * can not safely create or manage secret keys, the full [`Keypair`] is not
* defined in `solana-program` but in `solana-sdk`. * defined in `solana-program` but in `solana-sdk`.
* *
* [acc]: https://docs.solana.com/developing/programming-model/accounts * [acc]: https://docs.solana.com/developing/programming-model/accounts
* [ed25519]: https://ed25519.cr.yp.to/ * [ed25519]: https://ed25519.cr.yp.to/
* [pdas]: https://docs.solana.com/developing/programming-model/calling-between-programs#program-derived-addresses * [pdas]: https://docs.solana.com/developing/programming-model/calling-between-programs#program-derived-addresses
* [`Keypair`]: https://docs.rs/solana-sdk/latest/solana_sdk/signer/keypair/struct.Keypair.html * [`Keypair`]: https://docs.rs/solana-sdk/latest/solana_sdk/signer/keypair/struct.Keypair.html
*/ */
export class Pubkey { export class Pubkey {
free(): void; free(): void;
/** /**
* Create a new Pubkey object * Create a new Pubkey object
* *
* * `value` - optional public key as a base58 encoded string, `Uint8Array`, `[number]` * * `value` - optional public key as a base58 encoded string, `Uint8Array`, `[number]`
* @param {any} value * @param {any} value
*/ */
constructor(value: any); constructor(value: any);
/** /**
* Return the base58 string representation of the public key * Return the base58 string representation of the public key
* @returns {string} * @returns {string}
*/ */
toString(): string; toString(): string;
/** /**
* Check if a `Pubkey` is on the ed25519 curve. * Check if a `Pubkey` is on the ed25519 curve.
* @returns {boolean} * @returns {boolean}
*/ */
isOnCurve(): boolean; isOnCurve(): boolean;
/** /**
* Checks if two `Pubkey`s are equal * Checks if two `Pubkey`s are equal
* @param {Pubkey} other * @param {Pubkey} other
* @returns {boolean} * @returns {boolean}
*/ */
equals(other: Pubkey): boolean; equals(other: Pubkey): boolean;
/** /**
* Return the `Uint8Array` representation of the public key * Return the `Uint8Array` representation of the public key
* @returns {Uint8Array} * @returns {Uint8Array}
*/ */
toBytes(): Uint8Array; toBytes(): Uint8Array;
/** /**
* Derive a Pubkey from another Pubkey, string seed, and a program id * Derive a Pubkey from another Pubkey, string seed, and a program id
* @param {Pubkey} base * @param {Pubkey} base
* @param {string} seed * @param {string} seed
* @param {Pubkey} owner * @param {Pubkey} owner
* @returns {Pubkey} * @returns {Pubkey}
*/ */
static createWithSeed(base: Pubkey, seed: string, owner: Pubkey): Pubkey; static createWithSeed(base: Pubkey, seed: string, owner: Pubkey): Pubkey;
/** /**
* Derive a program address from seeds and a program id * Derive a program address from seeds and a program id
* @param {any[]} seeds * @param {any[]} seeds
* @param {Pubkey} program_id * @param {Pubkey} program_id
* @returns {Pubkey} * @returns {Pubkey}
*/ */
static createProgramAddress(seeds: any[], program_id: Pubkey): Pubkey; static createProgramAddress(seeds: any[], program_id: Pubkey): Pubkey;
/** /**
* Find a valid program address * Find a valid program address
* *
* Returns: * Returns:
* * `[PubKey, number]` - the program address and bump seed * * `[PubKey, number]` - the program address and bump seed
* @param {any[]} seeds * @param {any[]} seeds
* @param {Pubkey} program_id * @param {Pubkey} program_id
* @returns {any} * @returns {any}
*/ */
static findProgramAddress(seeds: any[], program_id: Pubkey): any; static findProgramAddress(seeds: any[], program_id: Pubkey): any;
} }
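// A minimal usage sketch for the Pubkey binding declared above. The relative
// module path and the all-ones base58 literal are illustrative assumptions.
import { Pubkey } from "./solana_program";

const programId = new Pubkey("11111111111111111111111111111111");
console.log(programId.toString(), programId.isOnCurve());

// Derive an address from a base key, a string seed, and an owning program.
const seeded = Pubkey.createWithSeed(programId, "example-seed", programId);
console.log(seeded.toString());

// findProgramAddress returns the program-derived address plus its bump seed.
const [pda, bump] = Pubkey.findProgramAddress(
  [new TextEncoder().encode("example")],
  programId,
);
console.log(pda.toString(), bump);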


@ -8,13 +8,37 @@ export function __wbg_get_message_recent_blockhash(a: number): number;
export function __wbg_set_message_recent_blockhash(a: number, b: number): void; export function __wbg_set_message_recent_blockhash(a: number, b: number): void;
export function solana_program_init(): void; export function solana_program_init(): void;
export function systeminstruction_createAccount(a: number, b: number, c: number, d: number, e: number): number; export function systeminstruction_createAccount(a: number, b: number, c: number, d: number, e: number): number;
export function systeminstruction_createAccountWithSeed(
  a: number,
  b: number,
  c: number,
  d: number,
  e: number,
  f: number,
  g: number,
  h: number,
): number;
export function systeminstruction_assign(a: number, b: number): number; export function systeminstruction_assign(a: number, b: number): number;
export function systeminstruction_assignWithSeed(a: number, b: number, c: number, d: number, e: number): number; export function systeminstruction_assignWithSeed(a: number, b: number, c: number, d: number, e: number): number;
export function systeminstruction_transfer(a: number, b: number, c: number): number; export function systeminstruction_transfer(a: number, b: number, c: number): number;
export function systeminstruction_transferWithSeed(
  a: number,
  b: number,
  c: number,
  d: number,
  e: number,
  f: number,
  g: number,
): number;
export function systeminstruction_allocate(a: number, b: number): number; export function systeminstruction_allocate(a: number, b: number): number;
export function systeminstruction_allocateWithSeed(
  a: number,
  b: number,
  c: number,
  d: number,
  e: number,
  f: number,
): number;
export function systeminstruction_createNonceAccount(a: number, b: number, c: number, d: number): number; export function systeminstruction_createNonceAccount(a: number, b: number, c: number, d: number): number;
export function systeminstruction_advanceNonceAccount(a: number, b: number): number; export function systeminstruction_advanceNonceAccount(a: number, b: number): number;
export function systeminstruction_withdrawNonceAccount(a: number, b: number, c: number, d: number): number; export function systeminstruction_withdrawNonceAccount(a: number, b: number, c: number, d: number): number;


@ -1,31 +1,31 @@
import path from 'path' import path from "path";
import Dotenv from 'dotenv-webpack' import Dotenv from "dotenv-webpack";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
const __filename = fileURLToPath(import.meta.url); const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename); const __dirname = path.dirname(__filename);
export default { export default {
mode: "development", mode: "development",
entry: "./src/index.ts", entry: "./src/index.ts",
output: { output: {
filename: "main.js", filename: "main.js",
path: path.resolve(__dirname, "dist"), path: path.resolve(__dirname, "dist"),
libraryTarget: "commonjs2", libraryTarget: "commonjs2",
}, },
target: "node", target: "node",
resolve: { resolve: {
extensions: [".ts", ".js"], extensions: [".ts", ".js"],
}, },
module: { module: {
rules: [ rules: [
{ {
test: /\.ts$/, test: /\.ts$/,
use: "ts-loader", use: "ts-loader",
exclude: /node_modules/, exclude: /node_modules/,
}, },
], ],
}, },
devtool: "source-map", devtool: "source-map",
plugins: [new Dotenv()], plugins: [new Dotenv()],
}; };


@ -11,14 +11,14 @@ export default {
filename: "main.js", filename: "main.js",
path: path.resolve(__dirname, "dist"), path: path.resolve(__dirname, "dist"),
libraryTarget: "commonjs2", libraryTarget: "commonjs2",
clean: true clean: true,
}, },
target: "node", target: "node",
resolve: { resolve: {
extensions: [".ts", ".js"] extensions: [".ts", ".js"],
}, },
module: { module: {
rules: [ rules: [
{ {
@ -26,16 +26,14 @@ export default {
use: { use: {
loader: "ts-loader", loader: "ts-loader",
options: { options: {
transpileOnly: true transpileOnly: true,
} },
}, },
exclude: /node_modules/ exclude: /node_modules/,
} },
] ],
}, },
  plugins: [],
devtool: "source-map", devtool: "source-map",
}; };