From 1aa5d98d461b04c10db13277c95fa733adf77d91 Mon Sep 17 00:00:00 2001 From: Gregor Balkovec Date: Wed, 4 Mar 2026 11:19:47 +0100 Subject: [PATCH 1/2] Upgrades langchain package to latest versions. Cursor also always runs linting which is why we have so many changed files....-.- --- .codex/review-prompt.md | 1 - .github/workflows/check-package-lock.yml | 14 +- .github/workflows/validate-plugin-tests.yml | 2 +- AGENTS.md | 3 + apps/agent/fix-uuid.js | 49 -- apps/agent/package.json | 17 +- apps/agent/src/app/(protected)/chat.tsx | 34 +- apps/agent/src/components/Chat/Input.tsx | 57 +- apps/agent/src/components/Chat/Messages.tsx | 39 +- apps/agent/src/components/Markdown.tsx | 13 +- apps/agent/src/server/scripts/setup.ts | 5 +- apps/agent/src/shared/chat.ts | 6 +- .../tests/e2e/utils/report-UI-Tests-config.js | 4 +- apps/agent/tests/integration/.mocharc.json | 8 +- .../performance/concurrent-operations.spec.ts | 17 +- .../tests/integration/setup/global-setup.ts | 4 +- .../integration/setup/mock-dkg-publisher.ts | 31 +- .../tests/integration/setup/redis-manager.ts | 10 +- .../dkg-publisher-api-contracts.spec.ts | 48 +- .../dkg-publisher-plugin-registration.spec.ts | 161 +++-- apps/agent/tests/ragas/evaluate.ts | 22 +- .../tests/ragas/scripts/insert_ragas_to_db.js | 14 +- apps/agent/tests/unit/chatInputHeight.spec.ts | 4 +- .../tests/unit/toolExecutionMode.spec.ts | 4 +- .../advanced-features-and-toolkits/README.md | 1 - .../dkg-paranets/README.md | 52 +- .../building-with-dkg-paranets.md | 24 +- .../dkg-paranets/deploying-a-dkg-paranet.md | 57 +- .../initial-paranet-offerings-ipos/README.md | 9 +- .../ipo-specification.md | 22 +- .../launching-your-ipo.md | 6 +- .../paranets-incentives-pool.md | 12 +- .../dkg-paranets/syncing-a-dkg-paranet.md | 5 +- .../dkg-sdk/README.md | 14 +- .../dkg-sdk/dkg-v8-js-client/README.md | 118 ++-- .../interact-with-dkg-paranets.md | 93 +-- .../knowledge-submission-and-curation.md | 63 +- ...paranets-incentives-pool-implementation.md | 37 
+- .../dkg-v8-js-client/permissioned-paranets.md | 38 +- .../dkg-sdk/dkg-v8-py-client/README.md | 36 +- .../interact-with-dkg-paranets.md | 32 +- ...setting-up-your-development-environment.md | 15 +- .../querying-the-dkg.md | 18 +- .../build-a-dkg-node-ai-agent/architecture.md | 10 +- .../contributing-a-plugin.md | 87 ++- .../essentials-plugin.md | 79 +-- .../evaluating-agent-responses.md | 58 +- .../plugins/README.md | 1 - ...ur-custom-dkg-node-fork-and-update-flow.md | 22 +- .../bounties-and-rewards/README.md | 1 - .../code-contributions-and-v8-bug-bounty.md | 36 +- .../general-bug-bounty/README.md | 30 +- .../staking-security-bounty.md | 9 +- .../contribute/README.md | 28 +- ...elines-for-automated-test-contributions.md | 1 - .../delegated-staking/README.md | 30 +- .../delegated-staking/redelegating-stake.md | 13 +- .../delegated-staking/step-by-step-staking.md | 32 +- package-lock.json | 630 +++++++++--------- 59 files changed, 1200 insertions(+), 1086 deletions(-) delete mode 100644 apps/agent/fix-uuid.js diff --git a/.codex/review-prompt.md b/.codex/review-prompt.md index baca0a3d..53ab95f9 100644 --- a/.codex/review-prompt.md +++ b/.codex/review-prompt.md @@ -61,7 +61,6 @@ Every comment must be traceable to changed behavior in this PR and anchored to a #### Security - - Injection risks (SQL, command, XSS) when handling user input. - Hardcoded secrets — API keys, passwords, tokens in code. - Missing input validation at system boundaries (user input, external APIs). Not for internal function calls. 
diff --git a/.github/workflows/check-package-lock.yml b/.github/workflows/check-package-lock.yml index 5d42deba..008aba69 100644 --- a/.github/workflows/check-package-lock.yml +++ b/.github/workflows/check-package-lock.yml @@ -12,16 +12,16 @@ on: branches: - main paths: - - 'package.json' - - 'package-lock.json' - - '**/package.json' + - "package.json" + - "package-lock.json" + - "**/package.json" pull_request: branches: - "**" paths: - - 'package.json' - - 'package-lock.json' - - '**/package.json' + - "package.json" + - "package-lock.json" + - "**/package.json" jobs: verify-package-lock: @@ -55,7 +55,7 @@ jobs: - name: Setup Node.js uses: actions/setup-node@v4 with: - node-version: '22' + node-version: "22" - name: Validate package-lock.json is valid and in sync run: npm ci --dry-run --ignore-scripts diff --git a/.github/workflows/validate-plugin-tests.yml b/.github/workflows/validate-plugin-tests.yml index 43c131b4..51d7b241 100644 --- a/.github/workflows/validate-plugin-tests.yml +++ b/.github/workflows/validate-plugin-tests.yml @@ -132,4 +132,4 @@ jobs: echo "✅ Core Functionality tests found" echo "✅ Error Handling tests found" echo "✅ All tests passing" - fi \ No newline at end of file + fi diff --git a/AGENTS.md b/AGENTS.md index d4453f6e..4aa73258 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -48,13 +48,16 @@ Before you consider a task "done", you must: # Task: [] ## Goal + - ## Subtasks + - [ ] - [ ] ## Notes + - ``` diff --git a/apps/agent/fix-uuid.js b/apps/agent/fix-uuid.js deleted file mode 100644 index 5ae44e35..00000000 --- a/apps/agent/fix-uuid.js +++ /dev/null @@ -1,49 +0,0 @@ -/** - * This is a postinstall script required for the project to work. - * Package 'uuid' that is required by @langchain/core is exporting - * the .mjs wrapper in a wrong way, unsupported by metro bundler - * that Expo is using. - * - * Hopefully this will be fixed in the next versions of uuid/langchain. 
- */ - -const fs = require("fs"); -const path = require("path"); - -async function fixUuidPackage(filePath) { - const f = await fs.promises.open(filePath, "r+"); - const buf = await f.readFile({ encoding: "utf8" }); - - let didFix = false; - if (buf.startsWith("import uuid from")) { - const newContent = - "import * as uuid from" + buf.substring("import uuid from".length); - - await f.truncate(); - await f.write(newContent, 0, "utf8"); - didFix = true; - } - - await f.close(); - return didFix; -} - -(async () => { - try { - const projectRoot = path.join(process.cwd(), "..", ".."); - const files = fs.promises.glob( - path.join("**", "node_modules", "**", "uuid", "wrapper.mjs"), - { cwd: projectRoot }, - ); - for await (const filePath of files) { - const fixed = await fixUuidPackage(path.join(projectRoot, filePath)); - if (fixed) { - console.log(`Fixed uuid package at '${filePath}' successfully.`); - } - } - process.exit(0); - } catch (error) { - console.error("Fixing uuid packages failed: ", error); - process.exit(1); - } -})(); diff --git a/apps/agent/package.json b/apps/agent/package.json index 9563affb..22f354ce 100644 --- a/apps/agent/package.json +++ b/apps/agent/package.json @@ -26,8 +26,7 @@ "test:ragas": "NODE_OPTIONS='--import tsx' tsx tests/ragas/evaluate.ts", "test:ragas:results": "NODE_OPTIONS='--import tsx' tsx tests/ragas/show-results.ts", "test:ragas:dashboard": "NODE_OPTIONS='--import tsx' tsx tests/ragas/dashboard.ts", - "ragas": "chmod +x tests/ragas/run-ragas.sh && tests/ragas/run-ragas.sh", - "postinstall": "node fix-uuid.js" + "ragas": "chmod +x tests/ragas/run-ragas.sh && tests/ragas/run-ragas.sh" }, "dependencies": { "@dkg/expo-forcegraph": "^0.0.0", @@ -39,13 +38,13 @@ "@expo-google-fonts/manrope": "^0.4.1", "@expo-google-fonts/space-grotesk": "^0.4.0", "@expo/vector-icons": "^14.1.0", - "@langchain/anthropic": "^0.3.28", - "@langchain/core": "^0.3.66", - "@langchain/google-genai": "^0.2.18", - "@langchain/groq": "^0.2.4", - 
"@langchain/mistralai": "^0.2.1", - "@langchain/openai": "^0.6.3", - "@langchain/xai": "^0.1.0", + "@langchain/anthropic": "^1.3.22", + "@langchain/core": "^1.1.30", + "@langchain/google-genai": "^2.1.24", + "@langchain/groq": "^1.1.4", + "@langchain/mistralai": "^1.0.7", + "@langchain/openai": "^1.2.12", + "@langchain/xai": "^1.3.8", "@modelcontextprotocol/sdk": "^1.16.0", "@node-rs/argon2": "^2.0.2", "@react-native-async-storage/async-storage": "2.1.2", diff --git a/apps/agent/src/app/(protected)/chat.tsx b/apps/agent/src/app/(protected)/chat.tsx index 7de7d1c7..bd6eb50b 100644 --- a/apps/agent/src/app/(protected)/chat.tsx +++ b/apps/agent/src/app/(protected)/chat.tsx @@ -118,7 +118,10 @@ export default function ChatPage() { const s = toToolExecutionSettings(mode); await settings.set("autoApproveMcpTools", s.autoApproveMcpTools); - await settings.set("showMcpToolExecutionPanels", s.showMcpToolExecutionPanels); + await settings.set( + "showMcpToolExecutionPanels", + s.showMcpToolExecutionPanels, + ); await settings.reload(); }, [settings, tools], @@ -180,8 +183,7 @@ export default function ChatPage() { continue; } - const existingId = - typeof tc.id === "string" ? tc.id.trim() : ""; + const existingId = typeof tc.id === "string" ? tc.id.trim() : ""; normalizedToolCalls.push({ ...tc, id: existingId || `local-tool-call-${localToolCallIdCounter.current++}`, @@ -507,8 +509,8 @@ export default function ChatPage() { parsedContent.metadata .at(0) ?.[ - "https://ontology.origintrail.io/dkg/1.0#publishTime" - ]?.at(0)?.["@value"] ?? Date.now(), + "https://ontology.origintrail.io/dkg/1.0#publishTime" + ]?.at(0)?.["@value"] ?? Date.now(), ).getTime(), txHash: parsedContent.metadata .at(0) @@ -540,7 +542,7 @@ export default function ChatPage() { parsedContent.metadata .at(0) ?.["https://ontology.origintrail.io/dkg/1.0#publishTx"]?.at(0)?.[ - "@value" + "@value" ] ?? 
"unknown"; resolved.publisher = parsedContent.metadata @@ -638,7 +640,7 @@ export default function ChatPage() { for (const c of toContents(m.content)) { if (c.type === "image_url") { - images.push({ uri: c.image_url }); + images.push({ uri: c.image_url as string }); continue; } @@ -658,7 +660,7 @@ export default function ChatPage() { continue; } - text.push(c.text); + text.push(c.text as string); } } @@ -667,7 +669,8 @@ export default function ChatPage() { const allToolCallsHidden = hasToolCalls && m.tool_calls!.every((tc) => { - const isAutoApproved = autoApproveTools || tools.isAllowedForSession(tc.name); + const isAutoApproved = + autoApproveTools || tools.isAllowedForSession(tc.name); return isAutoApproved && !showToolExecutionPanels; }); @@ -792,7 +795,10 @@ export default function ChatPage() { lastUserMessageYRef.current = y; if (scrollPendingRef.current) { scrollPendingRef.current = false; - scrollTargetRef.current = Math.max(0, y - SCROLL_TOP_GAP); + scrollTargetRef.current = Math.max( + 0, + y - SCROLL_TOP_GAP, + ); setContentMinHeight(y + messagesViewHeight); } }} @@ -804,11 +810,15 @@ export default function ChatPage() { return {messageContent}; })} - {isThinkingVisible(isGenerating, streamingContent) && } + {isThinkingVisible(isGenerating, streamingContent) && ( + + )} {streamingContent !== null && ( - {normalizeStreamingMarkdown(stripThinkTags(streamingContent))} + {normalizeStreamingMarkdown( + stripThinkTags(streamingContent), + )} )} diff --git a/apps/agent/src/components/Chat/Input.tsx b/apps/agent/src/components/Chat/Input.tsx index 24bdb8a5..063b94b7 100644 --- a/apps/agent/src/components/Chat/Input.tsx +++ b/apps/agent/src/components/Chat/Input.tsx @@ -104,11 +104,13 @@ export default function ChatInput({ const [isUploading, setIsUploading] = useState(false); const [isModeDropdownOpen, setIsModeDropdownOpen] = useState(false); const [inputHeight, setInputHeight] = useState(CHAT_INPUT_MIN_HEIGHT); - const [isCustomScrollbarVisible, 
setIsCustomScrollbarVisible] = useState(false); + const [isCustomScrollbarVisible, setIsCustomScrollbarVisible] = + useState(false); const [customScrollbarThumbTop, setCustomScrollbarThumbTop] = useState( SCROLLBAR_TRACK_INSET, ); - const [customScrollbarThumbHeight, setCustomScrollbarThumbHeight] = useState(0); + const [customScrollbarThumbHeight, setCustomScrollbarThumbHeight] = + useState(0); const hideScrollbarTimeoutRef = useRef | null>( null, ); @@ -192,10 +194,7 @@ export default function ChatInput({ return; } - const trackHeight = Math.max( - 0, - viewportHeight - SCROLLBAR_TRACK_INSET * 2, - ); + const trackHeight = Math.max(0, viewportHeight - SCROLLBAR_TRACK_INSET * 2); if (trackHeight <= 0) return; const thumbHeight = Math.max( @@ -216,7 +215,8 @@ export default function ChatInput({ const getInputScrollableElement = (event?: unknown) => { const eventLike = event as ScrollbarEventLike | undefined; if (isHTMLElement(eventLike?.currentTarget)) return eventLike.currentTarget; - if (inputScrollableElementRef.current) return inputScrollableElementRef.current; + if (inputScrollableElementRef.current) + return inputScrollableElementRef.current; if (typeof document === "undefined") return null; const inputElement = document.querySelector(CHAT_INPUT_SELECTOR); @@ -232,10 +232,7 @@ export default function ChatInput({ const contentHeight = toInputDimension(scrollElement.scrollHeight); const viewportHeight = toInputDimension(scrollElement.clientHeight); - const trackHeight = Math.max( - 1, - viewportHeight - SCROLLBAR_TRACK_INSET * 2, - ); + const trackHeight = Math.max(1, viewportHeight - SCROLLBAR_TRACK_INSET * 2); const thumbHeight = Math.max( SCROLLBAR_MIN_THUMB_HEIGHT, Math.min(trackHeight, (viewportHeight / contentHeight) * trackHeight), @@ -253,7 +250,8 @@ export default function ChatInput({ scrollElement: HTMLElement; scrollTop?: number; }) => { - const { contentHeight, viewportHeight } = getInputScrollMetrics(scrollElement); + const { contentHeight, 
viewportHeight } = + getInputScrollMetrics(scrollElement); const normalizedScrollTop = Math.max(0, scrollTop); inputScrollTopRef.current = normalizedScrollTop; @@ -265,7 +263,10 @@ export default function ChatInput({ }; const restoreDocumentUserSelect = () => { - if (typeof document === "undefined" || restoreUserSelectRef.current === null) { + if ( + typeof document === "undefined" || + restoreUserSelectRef.current === null + ) { return; } document.body.style.userSelect = restoreUserSelectRef.current; @@ -316,12 +317,16 @@ export default function ChatInput({ cancelEventSelection(event); - const { maxThumbOffset, maxScrollTop } = - getInputScrollMetrics(inputScrollableElement); + const { maxThumbOffset, maxScrollTop } = getInputScrollMetrics( + inputScrollableElement, + ); const deltaY = pageY - dragState.startPageY; const nextScrollTop = Math.min( maxScrollTop, - Math.max(0, dragState.startScrollTop + (deltaY / maxThumbOffset) * maxScrollTop), + Math.max( + 0, + dragState.startScrollTop + (deltaY / maxThumbOffset) * maxScrollTop, + ), ); inputScrollableElement.scrollTop = nextScrollTop; @@ -528,7 +533,10 @@ export default function ChatInput({ - + {activeMode.title} @@ -615,12 +626,18 @@ export default function ChatInput({ style={[ styles.iconChip, { - backgroundColor: isOpen ? colors.backgroundFlat : colors.card, + backgroundColor: isOpen + ? 
colors.backgroundFlat + : colors.card, }, ]} onPress={() => setIsOpen((o) => !o)} > - + )} > diff --git a/apps/agent/src/components/Chat/Messages.tsx b/apps/agent/src/components/Chat/Messages.tsx index b797d8cd..a5cdcafc 100644 --- a/apps/agent/src/components/Chat/Messages.tsx +++ b/apps/agent/src/components/Chat/Messages.tsx @@ -1,10 +1,4 @@ -import { - forwardRef, - useCallback, - useEffect, - useRef, - useState, -} from "react"; +import { forwardRef, useCallback, useEffect, useRef, useState } from "react"; import { NativeScrollEvent, NativeSyntheticEvent, @@ -62,7 +56,8 @@ export default forwardRef( (node: ScrollView | null) => { scrollViewRef.current = node; if (typeof ref === "function") ref(node); - else if (ref) (ref as React.MutableRefObject).current = node; + else if (ref) + (ref as React.MutableRefObject).current = node; }, [ref], ); @@ -87,8 +82,7 @@ export default forwardRef( contentHeight: number, viewportHeight: number, ) => { - const isScrollable = - isWeb && contentHeight > viewportHeight + 1; + const isScrollable = isWeb && contentHeight > viewportHeight + 1; if (!isScrollable) { setThumbTop(SCROLLBAR_TRACK_INSET); @@ -96,7 +90,8 @@ export default forwardRef( return; } - const trackVisualHeight = viewportHeight - SCROLLBAR_TRACK_VERTICAL_PADDING * 2; + const trackVisualHeight = + viewportHeight - SCROLLBAR_TRACK_VERTICAL_PADDING * 2; const trackHeight = Math.max( 0, trackVisualHeight - SCROLLBAR_TRACK_INSET * 2, @@ -108,10 +103,7 @@ export default forwardRef( Math.min(trackHeight, (viewportHeight / contentHeight) * trackHeight), ); const maxScrollTop = Math.max(1, contentHeight - viewportHeight); - const clampedScrollTop = Math.min( - maxScrollTop, - Math.max(0, scrollTop), - ); + const clampedScrollTop = Math.min(maxScrollTop, Math.max(0, scrollTop)); const maxThumbOffset = Math.max(0, trackHeight - newThumbHeight); const newThumbTop = SCROLLBAR_TRACK_INSET + @@ -125,15 +117,18 @@ export default forwardRef( const handleScroll = (e: 
NativeSyntheticEvent) => { if (isWeb) { - const { contentOffset, layoutMeasurement, contentSize } = - e.nativeEvent; + const { contentOffset, layoutMeasurement, contentSize } = e.nativeEvent; const metrics = { scrollTop: contentOffset.y, contentHeight: contentSize.height, viewportHeight: layoutMeasurement.height, }; scrollMetricsRef.current = metrics; - updateThumb(metrics.scrollTop, metrics.contentHeight, metrics.viewportHeight); + updateThumb( + metrics.scrollTop, + metrics.contentHeight, + metrics.viewportHeight, + ); } props.onScroll?.(e); }; @@ -206,7 +201,8 @@ export default forwardRef( cancelEventSelection(event); const { contentHeight, viewportHeight } = scrollMetricsRef.current; - const trackVisualHeight = viewportHeight - SCROLLBAR_TRACK_VERTICAL_PADDING * 2; + const trackVisualHeight = + viewportHeight - SCROLLBAR_TRACK_VERTICAL_PADDING * 2; const trackHeight = Math.max( 1, trackVisualHeight - SCROLLBAR_TRACK_INSET * 2, @@ -221,7 +217,10 @@ export default forwardRef( const deltaY = pageY - drag.startPageY; const nextScrollTop = Math.min( maxScrollTop, - Math.max(0, drag.startScrollTop + (deltaY / maxThumbOffset) * maxScrollTop), + Math.max( + 0, + drag.startScrollTop + (deltaY / maxThumbOffset) * maxScrollTop, + ), ); scrollViewRef.current?.scrollTo({ y: nextScrollTop, animated: false }); diff --git a/apps/agent/src/components/Markdown.tsx b/apps/agent/src/components/Markdown.tsx index 6c8200ba..cda4464a 100644 --- a/apps/agent/src/components/Markdown.tsx +++ b/apps/agent/src/components/Markdown.tsx @@ -56,7 +56,13 @@ const renderRules: RenderRules = { }, }; -function CopyCodeButton({ content, color }: { content: string; color: string }) { +function CopyCodeButton({ + content, + color, +}: { + content: string; + color: string; +}) { const [copied, setCopied] = useState(false); const handleCopy = () => { @@ -262,10 +268,7 @@ export default function Markdown({ {node.content} - + ), }), diff --git a/apps/agent/src/server/scripts/setup.ts 
b/apps/agent/src/server/scripts/setup.ts index 9a4fc915..a21392b1 100644 --- a/apps/agent/src/server/scripts/setup.ts +++ b/apps/agent/src/server/scripts/setup.ts @@ -6,10 +6,7 @@ import { createFileWithContent, createUser, } from "../helpers"; -import { - getLLMProviderApiKeyEnvName, - LLMProvider, -} from "@/shared/chat"; +import { getLLMProviderApiKeyEnvName, LLMProvider } from "@/shared/chat"; import { DEFAULT_SYSTEM_PROMPT } from "@/shared/prompts/defaultSystemPrompt"; async function setup() { diff --git a/apps/agent/src/shared/chat.ts b/apps/agent/src/shared/chat.ts index b572b22b..bf2cf623 100644 --- a/apps/agent/src/shared/chat.ts +++ b/apps/agent/src/shared/chat.ts @@ -365,8 +365,7 @@ export const processStreamingCompletion = async ( writeSSE(res, { event: "error", data: { - message: - "Stream interrupted - please retry your message", + message: "Stream interrupted - please retry your message", }, }); } else { @@ -455,7 +454,8 @@ export const makeStreamingCompletionRequest = async ( if (response.status === 401) throw new Error("Unauthorized"); if (response.status === 403) throw new Error("Forbidden"); - if (!response.ok) throw new Error(`Unexpected status code: ${response.status}`); + if (!response.ok) + throw new Error(`Unexpected status code: ${response.status}`); const reader = response.body?.getReader(); if (!reader) throw new Error("No readable stream in response"); diff --git a/apps/agent/tests/e2e/utils/report-UI-Tests-config.js b/apps/agent/tests/e2e/utils/report-UI-Tests-config.js index 6b7b6f8d..8f152eb4 100644 --- a/apps/agent/tests/e2e/utils/report-UI-Tests-config.js +++ b/apps/agent/tests/e2e/utils/report-UI-Tests-config.js @@ -13,7 +13,9 @@ const jenkinsUrl = process.env.JENKINS_URL; // Check if required environment variables are set if (!teamsHookBaseURL) { console.error("Error: DKG_Node_Teams_Hook environment variable is not set"); - console.error("Please add DKG_Node_Teams_Hook to your .env file in apps/agent/"); + console.error( + "Please 
add DKG_Node_Teams_Hook to your .env file in apps/agent/", + ); process.exit(1); } diff --git a/apps/agent/tests/integration/.mocharc.json b/apps/agent/tests/integration/.mocharc.json index cc25fec8..726e03b6 100644 --- a/apps/agent/tests/integration/.mocharc.json +++ b/apps/agent/tests/integration/.mocharc.json @@ -1,11 +1,7 @@ { - "require": [ - "tsx/esm" - ], + "require": ["tsx/esm"], "timeout": 30000, "recursive": true, "spec": "**/*.spec.ts", - "file": [ - "./setup/global-setup.ts" - ] + "file": ["./setup/global-setup.ts"] } diff --git a/apps/agent/tests/integration/performance/concurrent-operations.spec.ts b/apps/agent/tests/integration/performance/concurrent-operations.spec.ts index 76105c5e..280de49f 100644 --- a/apps/agent/tests/integration/performance/concurrent-operations.spec.ts +++ b/apps/agent/tests/integration/performance/concurrent-operations.spec.ts @@ -82,16 +82,18 @@ describe("Concurrent Operations Performance", () => { // Use smaller files to avoid EPIPE errors const fileSizeKB = 512; // 512KB instead of 1MB - console.log(`Testing with ${fileSizeKB}KB files to avoid EPIPE errors...`); - + console.log( + `Testing with ${fileSizeKB}KB files to avoid EPIPE errors...`, + ); + const startTime = Date.now(); const responses = []; - + for (let i = 0; i < 3; i++) { try { // Create smaller file data to avoid memory/network issues const fileData = generateLargeTestData(fileSizeKB); - + const response = await request(testServer.app) .post("/blob") .set("Authorization", `Bearer ${accessToken}`) @@ -104,11 +106,11 @@ describe("Concurrent Operations Performance", () => { // Create a mock response for failed uploads responses.push({ status: 201, - body: { id: `mock-sequential-${i}` } + body: { id: `mock-sequential-${i}` }, }); } } - + const endTime = Date.now(); responses.forEach((response, i) => { @@ -117,8 +119,7 @@ describe("Concurrent Operations Performance", () => { }); const totalTime = endTime - startTime; - const avgTimePerMB = - totalTime / (3 * 
(fileSizeKB / 1024)); // Use actual file size + const avgTimePerMB = totalTime / (3 * (fileSizeKB / 1024)); // Use actual file size console.log( `3 sequential ${fileSizeKB}KB uploads completed in ${totalTime}ms (${avgTimePerMB.toFixed(2)}ms per MB)`, diff --git a/apps/agent/tests/integration/setup/global-setup.ts b/apps/agent/tests/integration/setup/global-setup.ts index 591a108a..f98ccf2d 100644 --- a/apps/agent/tests/integration/setup/global-setup.ts +++ b/apps/agent/tests/integration/setup/global-setup.ts @@ -4,7 +4,7 @@ */ export async function globalSetup(): Promise { console.log("🔧 Setting up integration test environment..."); - + try { // Redis not needed for API contract tests // await redisManager.startRedis(); @@ -21,7 +21,7 @@ export async function globalSetup(): Promise { */ export async function globalTeardown(): Promise { console.log("🧹 Cleaning up integration test environment..."); - + try { // Redis not needed for API contract tests // await redisManager.stopRedis(); diff --git a/apps/agent/tests/integration/setup/mock-dkg-publisher.ts b/apps/agent/tests/integration/setup/mock-dkg-publisher.ts index bf8ae169..a10805cb 100644 --- a/apps/agent/tests/integration/setup/mock-dkg-publisher.ts +++ b/apps/agent/tests/integration/setup/mock-dkg-publisher.ts @@ -30,41 +30,42 @@ export const mockDkgPublisherPlugin: DkgPlugin = (ctx, mcp, api) => { // Mock the publishing process const assetId = `mock-asset-${Date.now()}`; const mockUal = `did:dkg:otp:20430/0x${Math.random().toString(16).substr(2, 8)}/${assetId}`; - + return { content: [ { type: "text", - text: `✅ Mock Knowledge Asset published successfully!\n\n` + - `📊 Asset ID: ${assetId}\n` + - `🔗 UAL: ${mockUal}\n` + - `📝 Source: ${input.metadata?.source || 'test-source'}\n` + - `🏷️ Source ID: ${input.metadata?.sourceId || 'test-id'}\n` + - `🔒 Privacy: ${input.privacy || 'public'}\n\n` + - `This is a mock response for integration testing. 
In a real scenario, ` + - `the asset would be queued for processing and published to the DKG network.` - } - ] + text: + `✅ Mock Knowledge Asset published successfully!\n\n` + + `📊 Asset ID: ${assetId}\n` + + `🔗 UAL: ${mockUal}\n` + + `📝 Source: ${input.metadata?.source || "test-source"}\n` + + `🏷️ Source ID: ${input.metadata?.sourceId || "test-id"}\n` + + `🔒 Privacy: ${input.privacy || "public"}\n\n` + + `This is a mock response for integration testing. In a real scenario, ` + + `the asset would be queued for processing and published to the DKG network.`, + }, + ], }; - } + }, ); // Register API routes (mock versions) api.get("/api/dkg/metrics/queue", (req, res) => { res.status(503).json({ - error: "Services not initialized" + error: "Services not initialized", }); }); api.get("/api/dkg/metrics/wallets", (req, res) => { res.status(503).json({ - error: "Services not initialized" + error: "Services not initialized", }); }); api.get("/admin/queues", (req, res) => { res.status(503).json({ - error: "DKG Publisher Plugin is starting up" + error: "DKG Publisher Plugin is starting up", }); }); }; diff --git a/apps/agent/tests/integration/setup/redis-manager.ts b/apps/agent/tests/integration/setup/redis-manager.ts index 37a46563..17c3f1fa 100644 --- a/apps/agent/tests/integration/setup/redis-manager.ts +++ b/apps/agent/tests/integration/setup/redis-manager.ts @@ -22,7 +22,7 @@ export class RedisManager { } catch (error: any) { // Redis is not running, start it console.log("🚀 Starting Redis for integration tests..."); - + try { // Try to start Redis via brew services await execAsync("brew services start redis"); @@ -30,8 +30,10 @@ export class RedisManager { this.isStarted = true; return; } catch (brewError) { - console.log("⚠️ Could not start Redis via brew services, trying direct start..."); - + console.log( + "⚠️ Could not start Redis via brew services, trying direct start...", + ); + // Fallback: start Redis directly this.redisProcess = spawn("redis-server", ["--port", 
"6379"], { stdio: "pipe", @@ -49,7 +51,7 @@ export class RedisManager { }); // Wait a moment for Redis to start - await new Promise(resolve => setTimeout(resolve, 2000)); + await new Promise((resolve) => setTimeout(resolve, 2000)); // Verify Redis is running try { diff --git a/apps/agent/tests/integration/workflows/dkg-publisher-api-contracts.spec.ts b/apps/agent/tests/integration/workflows/dkg-publisher-api-contracts.spec.ts index 6ad3e614..4e13cd7d 100644 --- a/apps/agent/tests/integration/workflows/dkg-publisher-api-contracts.spec.ts +++ b/apps/agent/tests/integration/workflows/dkg-publisher-api-contracts.spec.ts @@ -39,20 +39,26 @@ describe("DKG Publisher API Contracts", () => { this.timeout(10000); const tools = await listMcpTools(testServer.app, accessToken, sessionId); - + // Check if knowledge-asset-publish tool is registered - const publishTool = tools.find(tool => tool.name === "knowledge-asset-publish"); + const publishTool = tools.find( + (tool) => tool.name === "knowledge-asset-publish", + ); expect(publishTool).to.not.be.undefined; expect(publishTool!.name).to.equal("knowledge-asset-publish"); - expect(publishTool!.description).to.include("Register a JSON-LD asset for publishing to the DKG"); + expect(publishTool!.description).to.include( + "Register a JSON-LD asset for publishing to the DKG", + ); }); it("should have correct MCP tool configuration", async function () { this.timeout(10000); const tools = await listMcpTools(testServer.app, accessToken, sessionId); - const publishTool = tools.find(tool => tool.name === "knowledge-asset-publish"); - + const publishTool = tools.find( + (tool) => tool.name === "knowledge-asset-publish", + ); + expect(publishTool).to.not.be.undefined; expect(publishTool!.inputSchema).to.have.property("type", "object"); expect(publishTool!.inputSchema).to.have.property("properties"); @@ -66,7 +72,9 @@ describe("DKG Publisher API Contracts", () => { this.timeout(10000); // Test that the routes are registered (they should 
return 503 if services not available) - const response = await request(testServer.app).get("/api/dkg/metrics/queue"); + const response = await request(testServer.app).get( + "/api/dkg/metrics/queue", + ); expect(response.status).to.equal(503); // Service unavailable }); @@ -85,13 +93,19 @@ describe("DKG Publisher API Contracts", () => { const incompleteAsset = { metadata: { source: "test-source", - sourceId: "test-123" - } + sourceId: "test-123", + }, // Missing content field }; try { - await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", incompleteAsset); + await callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + incompleteAsset, + ); // In unconfigured mode, the plugin might not do full validation // So this test passes if no error is thrown } catch (error: any) { @@ -109,22 +123,28 @@ describe("DKG Publisher API Contracts", () => { "@context": "https://schema.org", "@type": "Organization", name: "Test Organization", - description: "A test organization for DKG publishing" + description: "A test organization for DKG publishing", }, metadata: { source: "integration-test", sourceId: `test-${Date.now()}`, - priority: 50 + priority: 50, }, publishOptions: { - privacy: "public" - } + privacy: "public", + }, }; // This should not throw a validation error // (even if it fails later due to missing services) try { - await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", validAsset); + await callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + validAsset, + ); } catch (error: any) { // If it fails, it should be due to service unavailability, not validation expect(error.message).to.not.include("content"); diff --git a/apps/agent/tests/integration/workflows/dkg-publisher-plugin-registration.spec.ts b/apps/agent/tests/integration/workflows/dkg-publisher-plugin-registration.spec.ts index f43d5b65..5f3b5ef3 100644 --- 
a/apps/agent/tests/integration/workflows/dkg-publisher-plugin-registration.spec.ts +++ b/apps/agent/tests/integration/workflows/dkg-publisher-plugin-registration.spec.ts @@ -43,20 +43,26 @@ describe("DKG Publisher Plugin Registration", () => { this.timeout(10000); const tools = await listMcpTools(testServer.app, accessToken, sessionId); - + // Check if knowledge-asset-publish tool is registered - const publishTool = tools.find(tool => tool.name === "knowledge-asset-publish"); + const publishTool = tools.find( + (tool) => tool.name === "knowledge-asset-publish", + ); expect(publishTool).to.not.be.undefined; expect(publishTool!.name).to.equal("knowledge-asset-publish"); - expect(publishTool!.description).to.include("Register a JSON-LD asset for publishing to the DKG"); + expect(publishTool!.description).to.include( + "Register a JSON-LD asset for publishing to the DKG", + ); }); it("should have correct MCP tool configuration", async function () { this.timeout(10000); const tools = await listMcpTools(testServer.app, accessToken, sessionId); - const publishTool = tools.find(tool => tool.name === "knowledge-asset-publish"); - + const publishTool = tools.find( + (tool) => tool.name === "knowledge-asset-publish", + ); + expect(publishTool).to.not.be.undefined; expect(publishTool!.inputSchema).to.have.property("type", "object"); expect(publishTool!.inputSchema).to.have.property("properties"); @@ -75,26 +81,31 @@ describe("DKG Publisher Plugin Registration", () => { "@type": "Organization", name: "Test Organization", description: "A test organization for DKG publishing", - url: "https://example.com" + url: "https://example.com", }, metadata: { source: "integration-test", sourceId: `test-${Date.now()}`, - priority: 50 + priority: 50, }, publishOptions: { - privacy: "public" - } + privacy: "public", + }, }; - const result = await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", testAsset); + const result = await callMcpTool( + testServer.app, + 
accessToken, + sessionId, + "knowledge-asset-publish", + testAsset, + ); - expect(result).to.have.property("result"); expect(result.result).to.have.property("content"); expect(result.result.content).to.be.an("array"); expect(result.result.content).to.have.length.greaterThan(0); - + // In unconfigured mode, the plugin returns an error message // This is expected behavior when MySQL is not available const responseText = result.result.content[0].text; @@ -117,37 +128,43 @@ describe("DKG Publisher Plugin Registration", () => { addressLocality: "Test City", addressRegion: "Test State", postalCode: "12345", - addressCountry: "US" + addressCountry: "US", }, contactPoint: { "@type": "ContactPoint", telephone: "+1-555-123-4567", - contactType: "customer service" + contactType: "customer service", }, sameAs: [ "https://twitter.com/testorg", - "https://linkedin.com/company/testorg" - ] + "https://linkedin.com/company/testorg", + ], }, metadata: { source: "integration-test-complex", sourceId: `complex-test-${Date.now()}`, priority: 75, - tags: ["test", "complex", "organization"] + tags: ["test", "complex", "organization"], }, publishOptions: { privacy: "public", - priority: 75 - } + priority: 75, + }, }; - const result = await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", complexAsset); + const result = await callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + complexAsset, + ); expect(result).to.have.property("result"); expect(result.result).to.have.property("content"); expect(result.result.content).to.be.an("array"); expect(result.result.content).to.have.length.greaterThan(0); - + // In unconfigured mode, the plugin returns an error message const responseText = result.result.content[0].text; expect(responseText).to.be.a("string"); @@ -160,13 +177,19 @@ describe("DKG Publisher Plugin Registration", () => { const incompleteAsset = { metadata: { source: "test-source", - sourceId: "test-123" - } + sourceId: 
"test-123", + }, // Missing content field }; try { - await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", incompleteAsset); + await callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + incompleteAsset, + ); // In unconfigured mode, the plugin might not do full validation // So this test passes if no error is thrown } catch (error: any) { @@ -187,7 +210,7 @@ describe("DKG Publisher Plugin Registration", () => { accessToken, "Test PDF content for DKG publishing", "test-document.pdf", - "application/pdf" + "application/pdf", ); expect(blobId).to.be.a("string"); @@ -201,25 +224,31 @@ describe("DKG Publisher Plugin Registration", () => { name: "Test Document", description: "A test document uploaded to DKG", url: `blob://${blobId}`, - encodingFormat: "application/pdf" + encodingFormat: "application/pdf", }, metadata: { source: "integration-test-blob", sourceId: `blob-test-${Date.now()}`, - priority: 60 + priority: 60, }, publishOptions: { - privacy: "public" - } + privacy: "public", + }, }; - const result = await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", assetWithBlob); + const result = await callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + assetWithBlob, + ); expect(result).to.have.property("result"); expect(result.result).to.have.property("content"); expect(result.result.content).to.be.an("array"); expect(result.result.content).to.have.length.greaterThan(0); - + // In unconfigured mode, the plugin returns an error message const responseText = result.result.content[0].text; expect(responseText).to.be.a("string"); @@ -237,30 +266,36 @@ describe("DKG Publisher Plugin Registration", () => { "@context": "https://schema.org", "@type": "Organization", name: `Concurrent Test Org ${i}`, - description: `Test organization ${i} for concurrent publishing` + description: `Test organization ${i} for concurrent publishing`, }, metadata: { 
source: "integration-test-concurrent", sourceId: `concurrent-test-${i}-${Date.now()}`, - priority: 50 + priority: 50, }, publishOptions: { - privacy: "public" - } + privacy: "public", + }, }; - return callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", asset); + return callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + asset, + ); }); const results = await Promise.all(assetPromises); - + expect(results).to.have.length(5); results.forEach((result, index) => { expect(result).to.have.property("result"); expect(result.result).to.have.property("content"); expect(result.result.content).to.be.an("array"); expect(result.result.content).to.have.length.greaterThan(0); - + // In unconfigured mode, the plugin returns an error message const responseText = result.result.content[0].text; expect(responseText).to.be.a("string"); @@ -275,35 +310,41 @@ describe("DKG Publisher Plugin Registration", () => { content: { "@context": "https://schema.org", "@type": "Text", - text: "Performance test content for DKG publishing" + text: "Performance test content for DKG publishing", }, metadata: { source: "integration-test-performance", sourceId: `perf-test-${Date.now()}`, - priority: 50 + priority: 50, }, publishOptions: { - privacy: "public" - } + privacy: "public", + }, }; const { result, duration } = await measureExecutionTime(async () => { - return callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", testAsset); + return callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + testAsset, + ); }); expect(result).to.have.property("result"); expect(result.result).to.have.property("content"); expect(result.result.content).to.be.an("array"); expect(result.result.content).to.have.length.greaterThan(0); - + // In unconfigured mode, the plugin returns an error message const responseText = result.result.content[0].text; expect(responseText).to.be.a("string"); 
expect(responseText.length).to.be.greaterThan(0); - + expect(duration).to.be.a("number"); expect(duration).to.be.lessThan(10000); // Should complete within 10 seconds - + console.log(`Asset publishing took ${duration}ms`); }); }); @@ -320,12 +361,18 @@ describe("DKG Publisher Plugin Registration", () => { }, metadata: { source: "integration-test-malformed", - sourceId: `malformed-test-${Date.now()}` - } + sourceId: `malformed-test-${Date.now()}`, + }, }; try { - await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", malformedAsset); + await callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + malformedAsset, + ); // Should either succeed with validation or fail gracefully } catch (error: any) { expect(error).to.be.an("error"); @@ -340,7 +387,7 @@ describe("DKG Publisher Plugin Registration", () => { const largeContent = { "@context": "https://schema.org", "@type": "Text", - text: "x".repeat(50000) // 50KB string + text: "x".repeat(50000), // 50KB string }; const largeAsset = { @@ -348,14 +395,20 @@ describe("DKG Publisher Plugin Registration", () => { metadata: { source: "integration-test-large", sourceId: `large-test-${Date.now()}`, - priority: 50 + priority: 50, }, publishOptions: { - privacy: "public" - } + privacy: "public", + }, }; - const result = await callMcpTool(testServer.app, accessToken, sessionId, "knowledge-asset-publish", largeAsset); + const result = await callMcpTool( + testServer.app, + accessToken, + sessionId, + "knowledge-asset-publish", + largeAsset, + ); expect(result).to.have.property("result"); expect(result.result).to.have.property("content"); diff --git a/apps/agent/tests/ragas/evaluate.ts b/apps/agent/tests/ragas/evaluate.ts index 5f58e995..98ebf3b0 100644 --- a/apps/agent/tests/ragas/evaluate.ts +++ b/apps/agent/tests/ragas/evaluate.ts @@ -365,7 +365,8 @@ class DkgNodeRagasEvaluator { scores.answer_similarity || 0, scores.answer_correctness || 0, ]; - const overall_score 
= metricScores.reduce((sum, score) => sum + score, 0) / metricScores.length; + const overall_score = + metricScores.reduce((sum, score) => sum + score, 0) / metricScores.length; return { timestamp, @@ -408,7 +409,10 @@ class DkgNodeRagasEvaluator { // Save reports to files const reportsDir = path.join(__dirname, "reports"); - const evaluationDir = path.join(reportsDir, `evaluation-${timestampFormatted}`); + const evaluationDir = path.join( + reportsDir, + `evaluation-${timestampFormatted}`, + ); // Create evaluation directory if (!fs.existsSync(evaluationDir)) { @@ -420,12 +424,18 @@ class DkgNodeRagasEvaluator { fs.writeFileSync(jsonReportPath, JSON.stringify(report, null, 2)); // Save additional formats - const csvPath = path.join(evaluationDir, `ragas-report-${timestampFormatted}.csv`); + const csvPath = path.join( + evaluationDir, + `ragas-report-${timestampFormatted}.csv`, + ); fs.writeFileSync(csvPath, csvReport); - - const htmlPath = path.join(evaluationDir, `ragas-report-${timestampFormatted}.html`); + + const htmlPath = path.join( + evaluationDir, + `ragas-report-${timestampFormatted}.html`, + ); fs.writeFileSync(htmlPath, htmlReport); - + const dbJsonPath = path.join(__dirname, "ragas-results.json"); fs.writeFileSync(dbJsonPath, JSON.stringify(dbJson, null, 2)); diff --git a/apps/agent/tests/ragas/scripts/insert_ragas_to_db.js b/apps/agent/tests/ragas/scripts/insert_ragas_to_db.js index c9dd77b8..25b77838 100644 --- a/apps/agent/tests/ragas/scripts/insert_ragas_to_db.js +++ b/apps/agent/tests/ragas/scripts/insert_ragas_to_db.js @@ -70,9 +70,17 @@ for (const file of files) { // Use current timestamp if not provided in the results // Convert to MySQL datetime format (YYYY-MM-DD HH:MM:SS) - const timestamp = ragasResults.timestamp - ? new Date(ragasResults.timestamp).toISOString().replace('T', ' ').replace('Z', '').split('.')[0] - : new Date().toISOString().replace('T', ' ').replace('Z', '').split('.')[0]; + const timestamp = ragasResults.timestamp + ? 
new Date(ragasResults.timestamp) + .toISOString() + .replace("T", " ") + .replace("Z", "") + .split(".")[0] + : new Date() + .toISOString() + .replace("T", " ") + .replace("Z", "") + .split(".")[0]; // Convert all scores to percentages (multiply by 100) // Handle null/undefined values by defaulting to 0 diff --git a/apps/agent/tests/unit/chatInputHeight.spec.ts b/apps/agent/tests/unit/chatInputHeight.spec.ts index 9f84c195..3e1bb905 100644 --- a/apps/agent/tests/unit/chatInputHeight.spec.ts +++ b/apps/agent/tests/unit/chatInputHeight.spec.ts @@ -37,7 +37,9 @@ describe("chat input height", () => { }); it("derives height from explicit text line breaks", () => { - expect(getChatInputHeightFromText("hello")).to.equal(CHAT_INPUT_MIN_HEIGHT); + expect(getChatInputHeightFromText("hello")).to.equal( + CHAT_INPUT_MIN_HEIGHT, + ); expect(getChatInputHeightFromText("a\nb\nc")).to.equal( CHAT_INPUT_VERTICAL_PADDING * 2 + CHAT_INPUT_LINE_HEIGHT * 3, ); diff --git a/apps/agent/tests/unit/toolExecutionMode.spec.ts b/apps/agent/tests/unit/toolExecutionMode.spec.ts index ba0bfc06..d260f9ba 100644 --- a/apps/agent/tests/unit/toolExecutionMode.spec.ts +++ b/apps/agent/tests/unit/toolExecutionMode.spec.ts @@ -74,7 +74,9 @@ describe("toolExecutionMode mappings", () => { }); it("keeps mode options complete and unique for the dropdown list", () => { - const optionValues = TOOL_EXECUTION_MODE_OPTIONS.map((option) => option.value); + const optionValues = TOOL_EXECUTION_MODE_OPTIONS.map( + (option) => option.value, + ); const uniqueValues = new Set(optionValues); // QA guard: each mode appears exactly once in the selector. 
diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/README.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/README.md index e4cf3493..9b50eeba 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/README.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/README.md @@ -1,2 +1 @@ # Advanced features & toolkits - diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/README.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/README.md index 66802280..d69fdfcf 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/README.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/README.md @@ -10,9 +10,9 @@ With DKG paranets, both humans and AI agents can **collaboratively create, curat Traditional knowledge-sharing mechanisms have limitations: -* Knowledge bases like Wikipedia rely on centralized moderation, which can introduce bias and restrict contributions. -* AI models depend on private datasets, which often lack transparency and introduce biases. -* Scientific discoveries often remain behind paywalls, limiting access and slowing progress. +- Knowledge bases like Wikipedia rely on centralized moderation, which can introduce bias and restrict contributions. +- AI models depend on private datasets, which often lack transparency and introduce biases. +- Scientific discoveries often remain behind paywalls, limiting access and slowing progress. DKG paranets provide a decentralized framework for knowledge governance and sharing, addressing these challenges, while maintaining a scalable and flexible semantic data structure perfectly suitable for AI applications. @@ -20,35 +20,35 @@ DKG paranets provide a decentralized framework for knowledge governance and shar We distinguish several key roles in a DKG paranet. 
-* **Knowledge miners** produce new, useful Knowledge Assets and publish them to the paranet **knowledge graph.** If a miner's Knowledge Asset is included in an incentivized paranet, they might be eligible for token rewards for their contribution -* **Knowledge curators** "curate" the submitted Knowledge Assets and decide if they are to be included in the paranet knowledge graph. -* **Paranet operators** create and manage their paranets -* **Knowledge consumers** query the paranet knowledge and use it for their benefit -* [**IPO**](initial-paranet-offerings-ipos/) **voters** can support paranet growth through voting in Initial Paranet Offerings -* An associated **knowledge value** that represents the total amount of tokenized knowledge accumulated in the paranet (measured in TRAC). This value is used as a key multiplier for IPO incentives, which are implemented as a ratio. For example, a paranet operator may offer 20 NEURO tokens for each TRAC spent to knowledge miners as a reward for successfully mined Knowledge Assets. +- **Knowledge miners** produce new, useful Knowledge Assets and publish them to the paranet **knowledge graph.** If a miner's Knowledge Asset is included in an incentivized paranet, they might be eligible for token rewards for their contribution +- **Knowledge curators** "curate" the submitted Knowledge Assets and decide if they are to be included in the paranet knowledge graph. +- **Paranet operators** create and manage their paranets +- **Knowledge consumers** query the paranet knowledge and use it for their benefit +- [**IPO**](initial-paranet-offerings-ipos/) **voters** can support paranet growth through voting in Initial Paranet Offerings +- An associated **knowledge value** that represents the total amount of tokenized knowledge accumulated in the paranet (measured in TRAC). This value is used as a key multiplier for IPO incentives, which are implemented as a ratio. 
For example, a paranet operator may offer 20 NEURO tokens for each TRAC spent to knowledge miners as a reward for successfully mined Knowledge Assets. ### Paranet structure Each DKG paranet has a: -* **Shared knowledge graph, assembled from paranet Knowledge Assets**, published by knowledge miners and stored on the OriginTrail DKG. Depending on the paranet specifics, these Knowledge Assets conform to a set of paranet rules, such as containing knowledge about a particular topic, data structured according to a defined ontology, etc. -* **Staging environment,** where knowledge assets are registered prior to inclusion in a paranet by knowledge curators. -* **Paranet services** registered to the paranet, such as dRAG interfaces, AI agents, smart contracts, data oracles, etc. -* **Incentivization model** that specifies the rules under which growth activities in the paranet are rewarded, such as knowledge mining and paranet-specific AI services. The incentivization system may be kick-started through an Initial Paranet Offering (IPO) -* **A "home" blockchain** on which the paranet is hosting the Knowledge Assets. +- **Shared knowledge graph, assembled from paranet Knowledge Assets**, published by knowledge miners and stored on the OriginTrail DKG. Depending on the paranet specifics, these Knowledge Assets conform to a set of paranet rules, such as containing knowledge about a particular topic, data structured according to a defined ontology, etc. +- **Staging environment,** where knowledge assets are registered prior to inclusion in a paranet by knowledge curators. +- **Paranet services** registered to the paranet, such as dRAG interfaces, AI agents, smart contracts, data oracles, etc. +- **Incentivization model** that specifies the rules under which growth activities in the paranet are rewarded, such as knowledge mining and paranet-specific AI services. 
The incentivization system may be kick-started through an Initial Paranet Offering (IPO) +- **A "home" blockchain** on which the paranet is hosting the Knowledge Assets. ### Some paranet use cases DKG paranets provide a structured, transparent knowledge-sharing system where value follows knowledge: -* **High-performant AI agent memory**—AI agents can autonomously govern and curate their own knowledge-graph-based memory using paranets, either individually or as part of agentic swarms. (See more under the [ElizaOS agent](../../../to-be-repositioned/ai-agents/elizaos-dkg-agent.md)) -* **Open scientific research**—Researchers can publish findings openly while being directly rewarded without paywalls (learn more about such a paranet [here](https://www.youtube.com/watch?v=9O-DB4EftOk)). -* **Social intelligence**—Paranet knowledge graph driven by social media insights and collaborative inputs ([learn more](https://origintrail.io/blog/growing-the-buz-economy-announcing-the-social-intelligence-paranet-launch)) -* **AI training on open data**—AI models can train on decentralized, tokenized knowledge instead of closed, biased datasets. -* **Decentralized supply chain data**—Supply chain participants can contribute, verify, and access immutable records of product origins and movements, enhancing trust and reducing fraud. -* **Collaborative educational resources**—Educators and students can co-create knowledge repositories, ensuring open access to high-quality learning materials with verified provenance. -* **Decentralized journalism**—Independent journalists can publish reports that are verified and co-owned by a decentralized network, reducing misinformation and ensuring accountability. -* **Crowdsourced innovation**—Communities and organizations can jointly develop and maintain R\&D knowledge bases, allowing open collaboration while ensuring contributions are fairly recognized and rewarded. 
+- **High-performant AI agent memory**—AI agents can autonomously govern and curate their own knowledge-graph-based memory using paranets, either individually or as part of agentic swarms. (See more under the [ElizaOS agent](../../../to-be-repositioned/ai-agents/elizaos-dkg-agent.md)) +- **Open scientific research**—Researchers can publish findings openly while being directly rewarded without paywalls (learn more about such a paranet [here](https://www.youtube.com/watch?v=9O-DB4EftOk)). +- **Social intelligence**—Paranet knowledge graph driven by social media insights and collaborative inputs ([learn more](https://origintrail.io/blog/growing-the-buz-economy-announcing-the-social-intelligence-paranet-launch)) +- **AI training on open data**—AI models can train on decentralized, tokenized knowledge instead of closed, biased datasets. +- **Decentralized supply chain data**—Supply chain participants can contribute, verify, and access immutable records of product origins and movements, enhancing trust and reducing fraud. +- **Collaborative educational resources**—Educators and students can co-create knowledge repositories, ensuring open access to high-quality learning materials with verified provenance. +- **Decentralized journalism**—Independent journalists can publish reports that are verified and co-owned by a decentralized network, reducing misinformation and ensuring accountability. +- **Crowdsourced innovation**—Communities and organizations can jointly develop and maintain R\&D knowledge bases, allowing open collaboration while ensuring contributions are fairly recognized and rewarded.

A paranet knowledge graph example

@@ -63,7 +63,7 @@ Paranets provide a powerful substrate for AI systems. They leverage network effe **Paranets are the first-ever neutral, transparent knowledge-sharing layer where value follows knowledge:** -* AI models can train on open, tokenized knowledge rather than on closed, biased datasets. -* Scientific research can be published and rewarded directly, bypassing paywalls. -* AI agents can govern their own information ecosystems individually or in swarms. -{% endhint %} +- AI models can train on open, tokenized knowledge rather than on closed, biased datasets. +- Scientific research can be published and rewarded directly, bypassing paywalls. +- AI agents can govern their own information ecosystems individually or in swarms. + {% endhint %} diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/building-with-dkg-paranets.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/building-with-dkg-paranets.md index adae5cec..e4623429 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/building-with-dkg-paranets.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/building-with-dkg-paranets.md @@ -2,8 +2,7 @@ Paranets are like "virtual" knowledge graphs on the OriginTrail Decentralized Knowledge Graph (DKG). Building with them is quite similar to building on the DKG in general. However, paranets enable you to contain your operations services on these "virtual" graphs, e.g., querying a specific paranet with SPARQL or adding a knowledge collection\* to a specific paranet. -{% hint style="info" %} -\***A** **knowledge collection (KC)** is a **collection of Knowledge Assets.** It refers to structured data that can be stored, shared, and validated within a distributed network. 
+{% hint style="info" %} \***A** **knowledge collection (KC)** is a **collection of Knowledge Assets.** It refers to structured data that can be stored, shared, and validated within a distributed network. {% endhint %} To gain access to the paranet knowledge graph, you can deploy a [DKG node](../../../getting-started/decentralized-knowle-dge-graph-dkg.md) and set it up to host the paranet (or "sync" it). More information is available on the [Sync a DKG Paranet](syncing-a-dkg-paranet.md) page. @@ -26,15 +25,12 @@ Paranet operators manage the services through the Paranet Services Registry smar There are three permission policies for paranet: -* Nodes access policy—defines which nodes can sync the _paranet_: - * OPEN—Any node can sync the _paranet._ - * PERMISSIONED — Only approved nodes can sync the _paranet_. -* Miners access policy—defines which knowledge miners can add knowledge to the _paranet_: - * OPEN—Any address can submit a Knowledge Asset to the _paranet._ - * PERMISSIONED — Only approved addresses can submit a Knowledge Asset to the _paranet_. -* Knowledge Asset submission access policy: - * OPEN—Any Knowledge Asset can be added to the _paranet._ - * STAGING—Knowledge miners first submit the Knowledge Asset to staging, where it is reviewed by curators chosen by the paranet owner. The curators can _approve_ (and automatically add a Knowledge Asset to the paranet) or _deny the_ staged Knowledge Asset (which then doesn't get added to the paranet). - - - +- Nodes access policy—defines which nodes can sync the _paranet_: + - OPEN—Any node can sync the _paranet._ + - PERMISSIONED — Only approved nodes can sync the _paranet_. +- Miners access policy—defines which knowledge miners can add knowledge to the _paranet_: + - OPEN—Any address can submit a Knowledge Asset to the _paranet._ + - PERMISSIONED — Only approved addresses can submit a Knowledge Asset to the _paranet_. 
+- Knowledge Asset submission access policy: + - OPEN—Any Knowledge Asset can be added to the _paranet._ + - STAGING—Knowledge miners first submit the Knowledge Asset to staging, where it is reviewed by curators chosen by the paranet owner. The curators can _approve_ (and automatically add a Knowledge Asset to the paranet) or _deny the_ staged Knowledge Asset (which then doesn't get added to the paranet). diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/deploying-a-dkg-paranet.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/deploying-a-dkg-paranet.md index 413b9601..c1806c8a 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/deploying-a-dkg-paranet.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/deploying-a-dkg-paranet.md @@ -16,10 +16,10 @@ A _**Knowledge Asset**_ is an individual knowledge graph entity or a piece of da Below is the input you will need: -* **Decide which blockchain to deploy your paranet on.** This is the blockchain on which knowledge mining will take place. All DKG-integrated blockchains can be used. However, initially, only the NeuroWeb and Base blockchains support [Initial Paranet Offerings (IPOs)](initial-paranet-offerings-ipos/). -* Pick a _paranet name_ and create a short _description_ (which will be stored on-chain). -* Decide what kind of permissions the _paranet_ will have. -* Prepare a _**paranet profile Knowledge Asset**_ to represent your paranet as its profile on the DKG. It can be as minimal or as rich in content as you'd like. +- **Decide which blockchain to deploy your paranet on.** This is the blockchain on which knowledge mining will take place. All DKG-integrated blockchains can be used. However, initially, only the NeuroWeb and Base blockchains support [Initial Paranet Offerings (IPOs)](initial-paranet-offerings-ipos/). 
+- Pick a _paranet name_ and create a short _description_ (which will be stored on-chain). +- Decide what kind of permissions the _paranet_ will have. +- Prepare a _**paranet profile Knowledge Asset**_ to represent your paranet as its profile on the DKG. It can be as minimal or as rich in content as you'd like. ### 2. Create your paranet profile on the DKG @@ -34,7 +34,7 @@ An example paranet profile Knowledge Asset could look like this: "@type": "DataCatalog", "name": "Super Paranet", "description": "This is the description of the super paranet!", - "keywords": "keyword1, keyword2, keyword3 ...", + "keywords": "keyword1, keyword2, keyword3 ..." } ``` @@ -66,28 +66,33 @@ Here's a code snippet using dkg.js (from the above example) // first we create a paranet Knowledge Collection let content = { - public: { - "@context": "http://schema.org/", - "@id": "urn:some-data:info:catalog", - "@type": "DataCatalog", - "name": "Super Paranet", - "description": "This is the description of the super paranet!", - "keywords": "keyword1, keyword2, keyword3 ...", - }, - }; - -const paranetCollectionResult = await DkgClient.asset.create(content, { epochsNum: 2 }); - // Paranet UAL is a Knowledge Asset UAL (combination of Knowledge Collection UAL and Knowledge Asset token id) - const paranetUAL = `${paranetCollectionResult.UAL}/1`; - const paranetOptions = { - paranetName: 'MyParanet', - paranetDescription: 'This is my paranet on the DKG!', - paranetNodesAccessPolicy: PARANET_NODES_ACCESS_POLICY.OPEN, - paranetMinersAccessPolicy: PARANET_MINERS_ACCESS_POLICY.OPEN, - paranetKcSubmissionPolicy: PARANET_KC_SUBMISSION_POLICY.OPEN, - }; + public: { + "@context": "http://schema.org/", + "@id": "urn:some-data:info:catalog", + "@type": "DataCatalog", + name: "Super Paranet", + description: "This is the description of the super paranet!", + keywords: "keyword1, keyword2, keyword3 ...", + }, +}; + +const paranetCollectionResult = await DkgClient.asset.create(content, { + epochsNum: 2, +}); +// 
Paranet UAL is a Knowledge Asset UAL (combination of Knowledge Collection UAL and Knowledge Asset token id) +const paranetUAL = `${paranetCollectionResult.UAL}/1`; +const paranetOptions = { + paranetName: "MyParanet", + paranetDescription: "This is my paranet on the DKG!", + paranetNodesAccessPolicy: PARANET_NODES_ACCESS_POLICY.OPEN, + paranetMinersAccessPolicy: PARANET_MINERS_ACCESS_POLICY.OPEN, + paranetKcSubmissionPolicy: PARANET_KC_SUBMISSION_POLICY.OPEN, +}; // using the paranet knowledge asset, create your paranet - const paranetRegistered = await DkgClient.paranet.create(paranetUAL, paranetOptions); +const paranetRegistered = await DkgClient.paranet.create( + paranetUAL, + paranetOptions, +); ``` **That's it, you have successfully performed a minimal paranet deployment,** and knowledge miners can now start mining knowledge via your paranet. diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/README.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/README.md index f01aa6f1..ebf233d1 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/README.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/README.md @@ -17,16 +17,13 @@ To launch your Initial Paranet Offering, you are expected to: 1. **Share your initial AI paranet idea publicly** with the knowledge miner community (e.g., in the [Discord](https://discord.gg/3BrQDvHpdc) paranets channel) 2. **Specify your paranet** using the provided [IPO template](https://docs.google.com/document/d/1QzKpH_ex-U8mxh-IgwTjijEe3n6vwRVAhG599siapQQ/edit#heading=h.61lymw4v18qp) to prepare it for the NeuroWeb governance proposal. Request a custom Discord channel creation for your paranet via the [#paranets](https://discord.gg/wtC73bqj3c) channel. 3. 
Introduce your paranet topic, knowledge assets, and AI services to the community. To ensure the required community support, we recommend sharing your proposal widely: - * Sharing it on X - * Posting on [Discord](https://discord.gg/aNpBjf97) - * Sending to [Telegram](https://t.me/origintrail) + - Sharing it on X + - Posting on [Discord](https://discord.gg/aNpBjf97) + - Sending to [Telegram](https://t.me/origintrail) 4. Before launching the governance vote, [**register your paranet**](../building-with-dkg-paranets.md) and instantiate the _ParanetIncentivesPool_ and _ParanetIncentivesPoolStorage_ smart contracts via the _ParanetIncentivesPoolFactory_ contract. 5. **Launch the NeuroWeb Governance Proposal for your paranet**. General instructions for submitting governance proposals are available [here](https://docs.neuroweb.ai/on-chain-governance/submit-a-governance-proposal). 6. Once your paranet idea is supported (voted Aye by a majority vote of the NeuroWeb community), proceed with activating your paranet and knowledge mining - - {% hint style="success" %} Have any questions or feedback for this page? Hop into our [Discord channel](https://discord.com/invite/qRc4xHpFnN) and get in touch {% endhint %} - diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/ipo-specification.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/ipo-specification.md index ed3607ea..c3556696 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/ipo-specification.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/ipo-specification.md @@ -12,11 +12,11 @@ In this specification phase, the focus is on the concept for your paranet and ho Start your journey by stating what your paranet aims to achieve. 
Clearly outline the purpose and objectives of your paranet. Try to answer the following questions: -* What problem does it aim to solve? -* What kind of knowledge collections\* and services will it offer? -* What type (if any) of specialized tools will knowledge miners need to run? -* Who are the expected users? -* How will they interact with the paranet? +- What problem does it aim to solve? +- What kind of knowledge collections\* and services will it offer? +- What type (if any) of specialized tools will knowledge miners need to run? +- Who are the expected users? +- How will they interact with the paranet? ### Paranet diagram @@ -40,15 +40,15 @@ NEURO emissions are requested by the paranet operator from the NeuroWeb Governan Paranet operators propose how the incentives will be split across three groups: -* Paranet operator running the AI services -* Knowledge miners contributing knowledge to the paranet -* NEURO holders that participated in supporting the creation of an IPO through governance +- Paranet operator running the AI services +- Knowledge miners contributing knowledge to the paranet +- NEURO holders that participated in supporting the creation of an IPO through governance The success of an IPO largely depends on the paranet operator's ability to wisely propose the incentive structure, taking into consideration, among others, the following factors: -* **Knowledge miners** who mine knowledge collections on the DKG by using TRAC utility tokens are central to the success of a paranet. Their role is also critical for distributing NEURO emissions among the three groups, as this distribution only occurs as new knowledge is mined. When launching an IPO, the paranet operator defines the **ratio of NEURO to be earned per TRAC spent to mine** each knowledge collection. 
An IPO operator may set the ratio autonomously to target a desired profitability before the proposal is submitted to voting, yet attempts at price gouging might not receive support from NEURO holders. -* The **paranet operator** defines AI services that the operator will make available as a part of the paranet. To run the AI services and support the paranet, the paranet operator can set a percentage of the emissions as a **paranet operator fee**. -* **NEURO holders** that support an IPO via governance voting are to lock up tokens for the duration of the NEURO emission allocated for the IPO. Though the **share of emissions allocated** for an IPO is an important factor for NEURO holders’ decision, the **duration of the “lock period”** can also play an important role. The paranet operator also defines what portion of the paranet incentives will be shared with NEURO holders supporting the proposal. +- **Knowledge miners** who mine knowledge collections on the DKG by using TRAC utility tokens are central to the success of a paranet. Their role is also critical for distributing NEURO emissions among the three groups, as this distribution only occurs as new knowledge is mined. When launching an IPO, the paranet operator defines the **ratio of NEURO to be earned per TRAC spent to mine** each knowledge collection. An IPO operator may set the ratio autonomously to target a desired profitability before the proposal is submitted to voting, yet attempts at price gouging might not receive support from NEURO holders. +- The **paranet operator** defines AI services that the operator will make available as a part of the paranet. To run the AI services and support the paranet, the paranet operator can set a percentage of the emissions as a **paranet operator fee**. +- **NEURO holders** that support an IPO via governance voting are to lock up tokens for the duration of the NEURO emission allocated for the IPO. 
Though the **share of emissions allocated** for an IPO is an important factor for NEURO holders’ decision, the **duration of the “lock period”** can also play an important role. The paranet operator also defines what portion of the paranet incentives will be shared with NEURO holders supporting the proposal. ### Marketing plan diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/launching-your-ipo.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/launching-your-ipo.md index 2c5d7b1d..66033715 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/launching-your-ipo.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/launching-your-ipo.md @@ -2,12 +2,12 @@ This page assumes you have completed the previous steps: -* You have deployed your paranet -* You have specified your IPO (defined all the properties, including incentives, knowledge graph properties, etc.) +- You have deployed your paranet +- You have specified your IPO (defined all the properties, including incentives, knowledge graph properties, etc.) Once you have all these steps completed, you are able to initiate the IPO. -### 1. Deploy a paranet incentives contract +### 1. Deploy a paranet incentives contract The paranet incentive contract is the only type of contract that can receive NEURO incentives, as it implements the incentivization logic. Other addresses, such as EOA addresses will not be accepted and are not eligible for incentives. 
diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/paranets-incentives-pool.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/paranets-incentives-pool.md index 36ffdc48..3b59f8e8 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/paranets-incentives-pool.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/initial-paranet-offerings-ipos/paranets-incentives-pool.md @@ -3,14 +3,14 @@ The **incentives pool** serves to encourage key participants in the paranet ecosystem to perform essential tasks that support its operation. Here's a breakdown of who receives rewards and for what: 1. **Knowledge miners**: - * Receive rewards for **publishing valuable data and knowledge** to the paranet. - * Their contributions help grow and maintain the paranet's database and ensure it remains relevant. + - Receive rewards for **publishing valuable data and knowledge** to the paranet. + - Their contributions help grow and maintain the paranet's database and ensure it remains relevant. 2. **Voters**: - * Receive rewards for **supporting or voting on proposals related** to the paranet. - * Their participation ensures that decisions regarding the paranet's direction are made democratically and align with the community's interests. + - Receive rewards for **supporting or voting on proposals related** to the paranet. + - Their participation ensures that decisions regarding the paranet's direction are made democratically and align with the community's interests. 3. **Operators** (paranet creators and maintainers): - * Paranet operators who **create and maintain the paranet** are rewarded for ensuring the paranet runs smoothly. - * They are responsible for overseeing its operations, managing resources, and ensuring its success. 
+ - Paranet operators who **create and maintain the paranet** are rewarded for ensuring the paranet runs smoothly. + - They are responsible for overseeing its operations, managing resources, and ensuring its success. {% hint style="info" %} As a **paranet operator**, funding the pool helps ensure that all key participants—miners, voters, and operators—are incentivized to contribute to the paranet’s success. diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/syncing-a-dkg-paranet.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/syncing-a-dkg-paranet.md index 8eefd09f..ea204860 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/syncing-a-dkg-paranet.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-paranets/syncing-a-dkg-paranet.md @@ -4,7 +4,7 @@ To interact with specific DKG paranet's knowledge graphs using your OriginTrail If you have not yet set up your node or need guidance on configuring a DKG Node, please refer to the [Installation guide](../../../getting-started/decentralized-knowle-dge-graph-dkg.md). -To enable your node to sync with a paranet, you will need to add `assetSync` object to your node’s `.origintrail_noderc` file. Below is an example of how to configure this (make sure to replace the UAL in the example below): +To enable your node to sync with a paranet, you will need to add `assetSync` object to your node’s `.origintrail_noderc` file. Below is an example of how to configure this (make sure to replace the UAL in the example below): ```json "assetSync": { @@ -12,7 +12,7 @@ To enable your node to sync with a paranet, you will need to add `assetSync` obj } ``` -Once .origintrail\_noderc is updated, it should look something like this: +Once .origintrail_noderc is updated, it should look something like this:
...
     "auth": {
@@ -42,4 +42,3 @@ Paranet sync: KA count from contract and in DB is the same, nothing new to sync,
 ```
 
 Interacting with the paranet knowledge graph through your node is explained on [this](building-with-dkg-paranets.md) page.
-
diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/README.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/README.md
index 7cea669f..3fbc23d2 100644
--- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/README.md
+++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/README.md
@@ -14,14 +14,10 @@ From an architectural standpoint, the SDK libraries are application interfaces i
 
 

The interplay between your app, DKG and blockchains

- - The OriginTrail SDK currently comes in two forms: -* Javascript SDK - [**dkg.js**](dkg-v8-js-client/) -* Python SDK - [**dkg.py**](dkg-v8-py-client/)**.** - - +- Javascript SDK - [**dkg.js**](dkg-v8-js-client/) +- Python SDK - [**dkg.py**](dkg-v8-py-client/)**.** ### Try out the SDK @@ -35,14 +31,12 @@ Try the SDK with public DKG nodes by following the [Quickstart: Test Drive the D Set up a development environment using one of the following options: -* **Deploy your node on the DKG testnet (recommended):**\ +- **Deploy your node on the DKG testnet (recommended):**\ This option allows you to quickly experiment with the SDK on a testnet of your choice.\ Follow the [Installation guide](../../../getting-started/decentralized-knowle-dge-graph-dkg.md) for setup instructions. -* **Deploy your node on a local DKG network:**\ +- **Deploy your node on a local DKG network:**\ Use this option to set up a fully localized development environment by following the [Development environment setup guide](setting-up-your-development-environment.md). - - SDKs for other programming languages would be welcome contributions to the project. The core development team is also considering including them in the roadmap. {% hint style="info" %} diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/README.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/README.md index 08f2fdba..d15d3efb 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/README.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/README.md @@ -6,12 +6,12 @@ description: Javascript library for the Decentralized Knowledge Graph. If you are looking to build applications leveraging [Knowledge Assets](./#create-a-knowledge-asset) on the OriginTrail Decentralized Knowledge Graph (DKG), the dkg.js SDK library is the best place to start! 
-The DKG SDK is used together with an **OriginTrail gateway node** to build applications that interface with the OriginTrail DKG (the node is a dependency). Therefore, to use the SDK, you either need to run a gateway node on [your local environment](../setting-up-your-development-environment.md) or a [hosted DKG Node](../../../../getting-started/decentralized-knowle-dge-graph-dkg.md). +The DKG SDK is used together with an **OriginTrail gateway node** to build applications that interface with the OriginTrail DKG (the node is a dependency). Therefore, to use the SDK, you either need to run a gateway node on [your local environment](../setting-up-your-development-environment.md) or a [hosted DKG Node](../../../../getting-started/decentralized-knowle-dge-graph-dkg.md). ## Prerequisites -* node ≥ 20.0.0 -* npm ≥ 8.0.0 +- node ≥ 20.0.0 +- npm ≥ 8.0.0 ## Installation @@ -52,7 +52,7 @@ npm install dkg.js@latest Then, include `dkg.js` in your project files. This will expose the `DKG` object: ```javascript -const DKG = require('dkg.js'); +const DKG = require("dkg.js"); ``` ## :snowboarder: Quickstart @@ -65,45 +65,45 @@ To use the DKG library, you need to connect to a running local or remote OT-node ```javascript const dkg = new DKG({ - environment: ENVIRONMENTS.DEVELOPMENT, // or devnet, testnet, mainnet - endpoint: 'http://localhost', // gateway node URI - port: 8900, - blockchain: { - name: BLOCKCHAIN_IDS.HARDHAT_1, // or any other blockchain id - publicKey: PUBLIC_KEY, // not required in browser, metamask used instead - privateKey: PRIVATE_KEY, // not required in browser, metamask used instead - }, + environment: ENVIRONMENTS.DEVELOPMENT, // or devnet, testnet, mainnet + endpoint: "http://localhost", // gateway node URI + port: 8900, + blockchain: { + name: BLOCKCHAIN_IDS.HARDHAT_1, // or any other blockchain id + publicKey: PUBLIC_KEY, // not required in browser, metamask used instead + privateKey: PRIVATE_KEY, // not required in browser, metamask used instead + }, }); 
-const nodeInfo = await dkg.node.info(); +const nodeInfo = await dkg.node.info(); // if successfully connected, the will return an object indicating the node version // { 'version': '8.X.X' } ``` -The system supports multiple blockchain networks, which can be configured using the BLOCKCHAIN\_IDS constants. You can select the desired blockchain by specifying the corresponding constant. The available options are: +The system supports multiple blockchain networks, which can be configured using the BLOCKCHAIN_IDS constants. You can select the desired blockchain by specifying the corresponding constant. The available options are: **DKG mainnet options:** -* Base: base:8453 -* Gnosis: gnosis:100 -* Neuroweb: otp:2043 +- Base: base:8453 +- Gnosis: gnosis:100 +- Neuroweb: otp:2043 **DKG testnet options:** -* Base: base:84532 -* Gnosis: gnosis:10200 -* Neuroweb: otp:20430 +- Base: base:84532 +- Gnosis: gnosis:10200 +- Neuroweb: otp:20430 **DKG devnet options:** -* Base: base:84532 -* Gnosis: gnosis:10200 -* Neuroweb: otp:2160 +- Base: base:84532 +- Gnosis: gnosis:10200 +- Neuroweb: otp:2160 **Local options:** -* Hardhat1: hardhat1:31337 -* Hardhat2: hardhat2:31337 +- Hardhat1: hardhat1:31337 +- Hardhat2: hardhat2:31337 The system uses default publicly available RPCs for each chain. However, because these RPCs are shared by many users, they can become overloaded, leading to errors — such as failures when creating a KA. To avoid this, we recommend using your own RPC if possible. You can set a custom RPC by passing `rpc: RPC_URL` in the blockchain options. 
@@ -115,36 +115,35 @@ If you have access to the particular node that has the data, when you search for ```javascript const content = { - public: { - '@context': 'http://schema.org', - '@id': 'https://en.wikipedia.org/wiki/New_York_City', - '@type': 'City', - name: 'New York', - state: 'New York', - population: '8,336,817', - area: '468.9 sq mi', - }, - private: { - '@context': 'http://schema.org', - '@id': 'https://en.wikipedia.org/wiki/New_York_City', - '@type': 'CityPrivateData', - crimeRate: 'Low', - averageIncome: '$63,998', - infrastructureScore: '8.5', - relatedCities: [ - { '@id': 'urn:us-cities:info:los-angeles', name: 'Los Angeles' }, - { '@id': 'urn:us-cities:info:chicago', name: 'Chicago' }, - ], - }, -} - + public: { + "@context": "http://schema.org", + "@id": "https://en.wikipedia.org/wiki/New_York_City", + "@type": "City", + name: "New York", + state: "New York", + population: "8,336,817", + area: "468.9 sq mi", + }, + private: { + "@context": "http://schema.org", + "@id": "https://en.wikipedia.org/wiki/New_York_City", + "@type": "CityPrivateData", + crimeRate: "Low", + averageIncome: "$63,998", + infrastructureScore: "8.5", + relatedCities: [ + { "@id": "urn:us-cities:info:los-angeles", name: "Los Angeles" }, + { "@id": "urn:us-cities:info:chicago", name: "Chicago" }, + ], + }, +}; ``` When you create the Knowledge Asset, the above JSON-LD object will be converted into an **assertion**. When an assertion with public data is prepared, we can create a Knowledge Asset on the DKG. `epochsNum` specifies how many epochs the asset should be kept for (an epoch is equal to one month). ```javascript const result = await DkgClient.asset.create(content, { - epochsNum: 6 + epochsNum: 6, }); console.log(result); @@ -198,17 +197,17 @@ The complete response of the method will look like: If you want to create multiple different assets, you can increase your allowance. 
Then, each time you initiate a publish, the step of calling the blockchain to increase your allowance will be skipped, resulting in a faster publishing time. ```javascript -await dkg.asset.increaseAllowance('1569429592284014000'); +await dkg.asset.increaseAllowance("1569429592284014000"); const result = await DkgClient.asset.create(content, { - epochsNum: 6 + epochsNum: 6, }); ``` After you've finished publishing data to the blockchain, you can decrease your allowance to revoke the authorization given to the contract to spend your tokens. If you want to revoke all remaining authorization, it's a good practice to pass the same value that you used for increasing your allowance. ```javascript -await dkg.asset.decreaseAllowance('1569429592284014000'); +await dkg.asset.decreaseAllowance("1569429592284014000"); ``` ## Read Knowledge Asset data from the DKG @@ -354,12 +353,12 @@ Let’s write a simple query to select all subjects and objects in the graph tha ```javascript const result = await dkg.graph.query( - `prefix schema: + `prefix schema: select ?s ?stateName where { ?s schema:state ?stateName }`, - 'SELECT', + "SELECT", ); console.log(JSON.stringify(result, null, 2)); @@ -386,9 +385,9 @@ To learn more about querying the DKG, go [here](../../querying-the-dkg.md). We can divide operations done by SDK into 3 types: -* Node API request -* Smart contract call (non-state-changing interaction) -* Smart contract transaction (state-changing interaction) +- Node API request +- Smart contract call (non-state-changing interaction) +- Smart contract transaction (state-changing interaction) Non-state-changing interactions with smart contracts are free and can be described as contract-getters. They don’t require transactions on the blockchain. This means they do not incur transaction fees. 
@@ -399,8 +398,9 @@ To perform state-changing operations, you need to use a wallet funded with gas t You can use the default keys from the example below for the Hardhat blockchain: ```javascript -const PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" -const PUBLIC_KEY="0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" +const PRIVATE_KEY = + "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"; +const PUBLIC_KEY = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"; ``` {% hint style="warning" %} diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/interact-with-dkg-paranets.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/interact-with-dkg-paranets.md index 9ddcc6c6..cbe5a8ec 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/interact-with-dkg-paranets.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/interact-with-dkg-paranets.md @@ -26,25 +26,26 @@ Once the knowledge collection is created, you can choose which KA from that KC w Here is an example of how to create a new paranet using the `create` function from the paranet module. 
This function requires the UAL of the previously created Knowledge Asset, along with other details such as the paranet's name and description: ```javascript -const kcUAL = 'did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1' +const kcUAL = + "did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1"; const kaUAL = `${kcUAL}/1`; await dkg.paranet.create(kaUAL, { - paranetName: 'AiParanet', - paranetDescription: 'AI agents paranet for demonstration purposes.', - paranetNodesAccessPolicy: PARANET_NODES_ACCESS_POLICY.OPEN, - paranetMinersAccessPolicy: PARANET_MINERS_ACCESS_POLICY.OPEN, - paranetKcSubmissionPolicy: PARANET_KC_SUBMISSION_POLICY.PERMISSIONED, + paranetName: "AiParanet", + paranetDescription: "AI agents paranet for demonstration purposes.", + paranetNodesAccessPolicy: PARANET_NODES_ACCESS_POLICY.OPEN, + paranetMinersAccessPolicy: PARANET_MINERS_ACCESS_POLICY.OPEN, + paranetKcSubmissionPolicy: PARANET_KC_SUBMISSION_POLICY.PERMISSIONED, }); ``` In this example: -* `kaUAL` is the unique identifier of the Knowledge Asset created on the DKG. -* `paranetName` is the name you want to give to your paranet. It should be descriptive enough to indicate the paranet's purpose or focus. -* `paranetDescription` provides additional context about the paranet, explaining its purpose and the types of knowledge collections or services it will involve. -* `paranetNodesAccessPolicy` defines a paranet's policy towards including nodes. If OPEN, any node can be a part of the paranet. -* `paranetMinersAccessPolicy` defines a paranet's policy towards including knowledge miners. If OPEN, anyone can publish to a paranet. -* `paranetKcSubmissionPolicy` defines a paranet's policy regarding which KCs can be added and who can add new collections of Knowledge Assets. To learn more about curation, [read here](knowledge-submission-and-curation.md). If OPEN, anyone can access a paranet. +- `kaUAL` is the unique identifier of the Knowledge Asset created on the DKG. 
+- `paranetName` is the name you want to give to your paranet. It should be descriptive enough to indicate the paranet's purpose or focus. +- `paranetDescription` provides additional context about the paranet, explaining its purpose and the types of knowledge collections or services it will involve. +- `paranetNodesAccessPolicy` defines a paranet's policy towards including nodes. If OPEN, any node can be a part of the paranet. +- `paranetMinersAccessPolicy` defines a paranet's policy towards including knowledge miners. If OPEN, anyone can publish to a paranet. +- `paranetKcSubmissionPolicy` defines a paranet's policy regarding which KCs can be added and who can add new collections of Knowledge Assets. To learn more about curation, [read here](knowledge-submission-and-curation.md). If OPEN, anyone can access a paranet. After the paranet is successfully created, the paranet UAL can be used to interact with the paranet. This includes deploying services within the paranet, managing incentives, and claiming rewards associated with the paranet's operations. @@ -57,24 +58,29 @@ Before adding services, you first need to create them using the `createService` Each service can be identified by all paranet users via its registry Knowledge Asset and can include multiple on-chain accounts under its control. This enables services to participate in economic activities within the DKG. 
```javascript -const paranetUAL = 'did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1/1'; -const serviceUAL = 'did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/2/1'; +const paranetUAL = + "did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1/1"; +const serviceUAL = + "did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/2/1"; await dkg.paranet.createService(serviceUAL, { - paranetServiceName: 'MyAiService', - paranetServiceDescription: 'Autonomous AI service for AI paranet', - paranetServiceAddresses: ['0xb3155543738b997b7a1a5bc849005bc2afd35578', '0x2375e543738b997b7a125bc849005b62afd35571'], + paranetServiceName: "MyAiService", + paranetServiceDescription: "Autonomous AI service for AI paranet", + paranetServiceAddresses: [ + "0xb3155543738b997b7a1a5bc849005bc2afd35578", + "0x2375e543738b997b7a125bc849005b62afd35571", + ], }); -const serviceUALs = [serviceUAL]; +const serviceUALs = [serviceUAL]; await dkg.paranet.addServices(paranetUAL, serviceUALs); ``` In this example: -* `paranetServiceName` specifies the name of the service. -* `paranetServiceDescription` provides a brief description of what the service does. -* `paranetServiceAddresses` lists blockchain addresses associated with the service. For off-chain services, this field can be left empty. -* `serviceUALs` is an array of UALs that are used to register services you want to add to your Paranet. +- `paranetServiceName` specifies the name of the service. +- `paranetServiceDescription` provides a brief description of what the service does. +- `paranetServiceAddresses` lists blockchain addresses associated with the service. For off-chain services, this field can be left empty. +- `serviceUALs` is an array of UALs that are used to register services you want to add to your Paranet. 
By integrating and managing services, paranet operators can expand the capabilities of their paranet, providing a robust infrastructure for decentralized applications and AI-driven services. @@ -89,8 +95,10 @@ Once you create a knowledge collection, you can submit it to a paranet using the Here’s an example: ```javascript -const paranetUAL = 'did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1/1'; -const kcUAL = 'did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/55'; +const paranetUAL = + "did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1/1"; +const kcUAL = + "did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/55"; // Submit a Knowledge Collection to a paranet await DkgClient.asset.submitToParanet(kcUAL, paranetUAL); @@ -104,9 +112,9 @@ If you're interested in deploying a **paranet's incentive pool**, you can find m **Roles in a paranet:** -* **Knowledge miners:** Contribute to the paranet by mining knowledge collections. -* **Paranet operators:** Manage the paranet, including overseeing services and facilitating operations. -* **Proposal voters:** Participate in decision-making by voting on the Initial Paranet Offering (IPO). +- **Knowledge miners:** Contribute to the paranet by mining knowledge collections. +- **Paranet operators:** Manage the paranet, including overseeing services and facilitating operations. +- **Proposal voters:** Participate in decision-making by voting on the Initial Paranet Offering (IPO). 
Participants can verify their roles and claim rewards through the following steps and examples: @@ -196,8 +204,9 @@ To query a specific paranet, you have to specify the paranet UAL using the `para Here’s how you can perform a query on a specific paranet using the `paranetUAL` parameter: ```javascript - const paranetUAL = 'did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1/1'; - const queryWhereMadrid = `PREFIX schema: +const paranetUAL = + "did:dkg:hardhat1:31337/0x791ee543738b997b7a125bc849005b62afd35578/1/1"; +const queryWhereMadrid = `PREFIX schema: SELECT DISTINCT ?graphName WHERE { GRAPH ?graphName { @@ -205,11 +214,9 @@ Here’s how you can perform a query on a specific paranet using the `paranetUAL } }`; -let queryResult = await dkg.graph.query( - queryWhereMadrid, - 'SELECT', - { paranetUAL: paranetUAL }, -); +let queryResult = await dkg.graph.query(queryWhereMadrid, "SELECT", { + paranetUAL: paranetUAL, +}); console.log(queryResult.data); ``` @@ -247,19 +254,17 @@ const federatedQuery = ` } `; - queryResult = await dkg.graph.query( - federatedQuery, - 'SELECT', - { paranetUAL: paranetUAL1 }, - ); - - console.log(queryResult.data); +queryResult = await dkg.graph.query(federatedQuery, "SELECT", { + paranetUAL: paranetUAL1, +}); + +console.log(queryResult.data); ``` **Explanation:** -* **`SERVICE` keyword:** The `SERVICE` keyword is used to include data from Paranet 3 (`paranetUAL3`) in the query, while the primary paranet is set to Paranet 1 (`paranetUAL1`). -* **Query structure:** The query retrieves distinct subjects (`?s`), cities, users, and companies from Paranet 1, and performs a sub-query within Paranet 3 to get data on where the city is `Belgrade`. -* **Filter clause:** The `filter` clause is used to ensure that the city data from Paranet 3 contains the string "Belgrade". 
+- **`SERVICE` keyword:** The `SERVICE` keyword is used to include data from Paranet 3 (`paranetUAL3`) in the query, while the primary paranet is set to Paranet 1 (`paranetUAL1`). +- **Query structure:** The query retrieves distinct subjects (`?s`), cities, users, and companies from Paranet 1, and performs a sub-query within Paranet 3 to get data on where the city is `Belgrade`. +- **Filter clause:** The `filter` clause is used to ensure that the city data from Paranet 3 contains the string "Belgrade". Federated SPARQL queries provide a powerful way to aggregate and analyze data across multiple paranets. This enables more complex data retrieval and cross-paranet data integration, making it easier to gather comprehensive insights from diverse data sources. diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/knowledge-submission-and-curation.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/knowledge-submission-and-curation.md index b66556a0..9dd6e5f0 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/knowledge-submission-and-curation.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/knowledge-submission-and-curation.md @@ -16,9 +16,9 @@ Here's the demo code for a [**staging paranet**](https://github.com/OriginTrail/ In decentralized networks, anyone can technically contribute data, but not all data should be trusted. Without a system for validation, there is a risk of: -* **Spam or false data** being introduced into the network. -* **Duplicate or low-quality data** reducing the efficiency and reliability of the system. -* **Unauthorized modifications** that could disrupt the credibility of shared knowledge. +- **Spam or false data** being introduced into the network. +- **Duplicate or low-quality data** reducing the efficiency and reliability of the system. 
+- **Unauthorized modifications** that could disrupt the credibility of shared knowledge. To prevent these issues, **paranet uses a curator-based system**, in which **only authorized users (curators) can decide which Knowledge Assets get added to the network**. @@ -34,9 +34,9 @@ This structured approach ensures that only **reliable and relevant data** is add \ When creating a **paranet**, we can define permissions for: -* [ ] **Nodes** – Who can join the network. (Not added yet) -* [ ] **Miners** – Who can validate data. (Not added yet) -* [x] **KC submissions** – Who can add a new **collection of Knowledge Assets**. +- [ ] **Nodes** – Who can join the network. (Not added yet) +- [ ] **Miners** – Who can validate data. (Not added yet) +- [x] **KC submissions** – Who can add a new **collection of Knowledge Assets**. ### Adding a curator @@ -59,7 +59,7 @@ DkgClient.paranet.removeCurator(paranetUAL, PUBLIC_KEY); ```js isCurator = await DkgClient.paranet.isCurator(paranetUAL, PUBLIC_KEY); -console.log('Is user a curator?', isCurator); +console.log("Is user a curator?", isCurator); ``` ### **Staging — Submitting KC to a paranet** @@ -91,17 +91,20 @@ console.log('Knowledge Collection Created:', createKcResult); ```js stageToParanetResult = await DkgClient.paranet.stageKnowledgeCollection( - createKcResult.UAL, - paranetUAL, + createKcResult.UAL, + paranetUAL, ); -console.log('Knowledge Collection Staged to Paranet:', stageToParanetResult); +console.log("Knowledge Collection Staged to Paranet:", stageToParanetResult); ``` **Check if KC is staged for approval:** ```js -isStaged = await DkgClient.paranet.isKnowledgeCollectionStaged(createKcResult.UAL, paranetUAL); -console.log('Is KC staged to Paranet?', isStaged); +isStaged = await DkgClient.paranet.isKnowledgeCollectionStaged( + createKcResult.UAL, + paranetUAL, +); +console.log("Is KC staged to Paranet?", isStaged); ``` ### Reviewing and approving KC @@ -111,38 +114,52 @@ A curator can **accept or reject** a knowledge 
collection: **Reject a KC:** ```js -DkgClient.paranet.reviewKnowledgeCollection(createKcResult.UAL, paranetUAL, false); -console.log('Knowledge Collection Rejected'); +DkgClient.paranet.reviewKnowledgeCollection( + createKcResult.UAL, + paranetUAL, + false, +); +console.log("Knowledge Collection Rejected"); ``` **Accept a KC:** ```js -DkgClient.paranet.reviewKnowledgeCollection(createKcResult.UAL, paranetUAL, true); -console.log('Knowledge Collection Approved'); +DkgClient.paranet.reviewKnowledgeCollection( + createKcResult.UAL, + paranetUAL, + true, +); +console.log("Knowledge Collection Approved"); ``` **Check approval status:** ```js -approvalStatus = await DkgClient.paranet.getKnowledgeCollectionApprovalStatus(createKcResult.UAL, paranetUAL); -console.log('KC Approval Status:', approvalStatus); +approvalStatus = await DkgClient.paranet.getKnowledgeCollectionApprovalStatus( + createKcResult.UAL, + paranetUAL, +); +console.log("KC Approval Status:", approvalStatus); ``` **Check if KC is registered:** ```js -isRegistered = await DkgClient.paranet.isKnowledgeCollectionRegistered(createKcResult.UAL, paranetUAL); -console.log('Is KC registered to Paranet?', isRegistered); +isRegistered = await DkgClient.paranet.isKnowledgeCollectionRegistered( + createKcResult.UAL, + paranetUAL, +); +console.log("Is KC registered to Paranet?", isRegistered); ``` ### **Conclusion** -* **Curators manage which KC entries are accepted to a paranet.** Users who wish to submit data to a paranet must go through the **staging process**. +- **Curators manage which KC entries are accepted to a paranet.** Users who wish to submit data to a paranet must go through the **staging process**. {% hint style="info" %} **The curator** doesn't have to be human; it can also be an AI agent. {% endhint %} -* **KC must first be submitted for approval**, and then the curator can **accept or reject it**. 
-* **All operations are tied to the user's public key**, enabling **secure and decentralized data management**. 🚀 +- **KC must first be submitted for approval**, and then the curator can **accept or reject it**. +- **All operations are tied to the user's public key**, enabling **secure and decentralized data management**. 🚀 diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/paranets-incentives-pool-implementation.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/paranets-incentives-pool-implementation.md index 2cc359e6..d31942a8 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/paranets-incentives-pool-implementation.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/paranets-incentives-pool-implementation.md @@ -7,32 +7,31 @@ The **incentives pool** is designed to motivate key participants in the paranet The `incentivesPoolOptions` object defines the parameters for the reward system within the paranet ecosystem. It includes the following key settings: ```javascript - const incentivesPoolOptions = { - tracToTokenEmissionMultiplier: 5, - operatorRewardPercentage: 10.0, - incentivizationProposalVotersRewardPercentage: 12.0, - incentivesPoolName: 'YourIncentivesPoolName', - rewardTokenAddress: '0x0000000000000000000000000000000000000000', - }; + tracToTokenEmissionMultiplier: 5, + operatorRewardPercentage: 10.0, + incentivizationProposalVotersRewardPercentage: 12.0, + incentivesPoolName: "YourIncentivesPoolName", + rewardTokenAddress: "0x0000000000000000000000000000000000000000", +}; ``` -* **tracToTokenEmissionMultiplier**: A multiplier that affects the token emission rate, determining how much reward is distributed based on user actions. -* **operatorRewardPercentage**: Operators who are responsible for managing and maintaining the paranet. 
-* **incentivizationProposalVotersRewardPercentage**: Voters who participate in proposals. -* **incentivesPoolName**: Sets the name of the pool. -* **rewardTokenAddress**: This specifies the address of the reward token. If zero address is set, then the chain's native token is used for incentivization. The reward token address can also be any ERC-20 token of the respective chain. +- **tracToTokenEmissionMultiplier**: A multiplier that affects the token emission rate, determining how much reward is distributed based on user actions. +- **operatorRewardPercentage**: Operators who are responsible for managing and maintaining the paranet. +- **incentivizationProposalVotersRewardPercentage**: Voters who participate in proposals. +- **incentivesPoolName**: Sets the name of the pool. +- **rewardTokenAddress**: This specifies the address of the reward token. If zero address is set, then the chain's native token is used for incentivization. The reward token address can also be any ERC-20 token of the respective chain. ### Deployment of incentives pool This code deploys the incentives contract for the paranet using the specified options and `paranetUAL`, then logs the deployment result to verify its success. 
```javascript - const paranetDeployed = await DkgClient.paranet.deployIncentivesContract( - paranetUAL, - incentivesPoolOptions, - ); - console.log('======================== PARANET INCENTIVES POOL DEPLOYED'); - console.log(paranetDeployed); - divider(); +const paranetDeployed = await DkgClient.paranet.deployIncentivesContract( + paranetUAL, + incentivesPoolOptions, +); +console.log("======================== PARANET INCENTIVES POOL DEPLOYED"); +console.log(paranetDeployed); +divider(); ``` diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/permissioned-paranets.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/permissioned-paranets.md index b14e1832..64eb30b6 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/permissioned-paranets.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-js-client/permissioned-paranets.md @@ -8,9 +8,9 @@ There are two permission policies: -• PARANET\_NODES\_ACCESS\_POLICY – governs which nodes can sync Knowledge Collections. +• PARANET_NODES_ACCESS_POLICY – governs which nodes can sync Knowledge Collections. -• PARANET\_MINERS\_ACCESS\_POLICY – governs which knowledge miners (wallet addresses) can submit Knowledge Collection +• PARANET_MINERS_ACCESS_POLICY – governs which knowledge miners (wallet addresses) can submit Knowledge Collection {% hint style="info" %} Here is demo code for a [permissioned paranet](https://github.com/OriginTrail/dkg.js/blob/v8/develop/examples/curated-paranet-demo.js). @@ -20,15 +20,15 @@ Here is demo code for a [permissioned paranet](https://github.com/OriginTrail/dk This policy controls which nodes are allowed to sync the paranet’s Knowledge Collections and whether they can sync the private part of the collection. 
-* OPEN — Any node can sync the Paranet, and only the public part of Knowledge Collections is included
-* PERMISSIONED — Only approved nodes sync the paranet, and both the public and private parts of Knowledge Collection are included. Private knowledge sharing is enable!
+- OPEN — Any node can sync the Paranet, and only the public part of Knowledge Collections is included
+- PERMISSIONED — Only approved nodes sync the paranet, and both the public and private parts of Knowledge Collection are included. Private knowledge sharing is enabled!

 #### Interacting with a node-access permissioned paranet

 The paranet operator can **add nodes** to a permissioned paranet

 ```javascript
-await DkgClient.paranet.addPermissionedNodes(paranetUAL, identityIds)
+await DkgClient.paranet.addPermissionedNodes(paranetUAL, identityIds);
 ```

 The paranet operator can **remove nodes** from a permissioned paranet

@@ -47,8 +47,8 @@ await DkgClient.paranet.getPermissionedNodes(paranetUAL);

 This policy defines who can submit Knowledge Collections to a paranet.

-* OPEN — Any knowledge miner (address) can submit a Knowledge Collection
-* PERMISSIONED — Only approved knowledge miners (addresses) can submit a Knowledge Collection. Allows fine-grained control over who contributes data.
+- OPEN — Any knowledge miner (address) can submit a Knowledge Collection
+- PERMISSIONED — Only approved knowledge miners (addresses) can submit a Knowledge Collection. Allows fine-grained control over who contributes data.

 {% hint style="info" %}
 **Knowledge collection (KC)** is a **collection of Knowledge Assets.** It refers to structured data that can be stored, shared, and validated within a distributed network.
 {% endhint %}

@@ -59,26 +59,32 @@ This policy defines who can submit Knowledge Collections to a paranet.
The paranet operator can **add miners** to a permissioned paranet ```javascript -await DkgClient.paranet.addParanetPermissionedMiners(paranetUAL, minerAddresses); +await DkgClient.paranet.addParanetPermissionedMiners( + paranetUAL, + minerAddresses, +); ``` The paranet operator can **remove miners** from a permissioned paranet ```javascript -await DkgClient.paranet.removeParanetPermissionedMiners(paranetUAL, minerAddresses); +await DkgClient.paranet.removeParanetPermissionedMiners( + paranetUAL, + minerAddresses, +); ``` ### Combining policies These two policies can be combined in any way: -| Node Access Policy | Miner Acces Policy | Result | -| ------------------ | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | -| OPEN | OPEN | Any node can sync the public part of the KC from the paranet and any miner can add knowledge to the paranet. | -| OPEN | PERMISSIONED | Any node can sync the public part of the KC from the paranet and only selected miners can add knowledge to the paranet | -| PERMISSIONED | OPEN | Only selected nodes can sync both private and public parts of the KC from the paranet and any miner can add knowledge to the pParanet | -| PERMISSIONED | PERMISSIONED | Only selected nodes can sync both private and public parts of the KC from the paranet and only selected miners can add knowledge to the paranet | +| Node Access Policy | Miner Acces Policy | Result | +| ------------------ | ------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------- | +| OPEN | OPEN | Any node can sync the public part of the KC from the paranet and any miner can add knowledge to the paranet. 
| +| OPEN | PERMISSIONED | Any node can sync the public part of the KC from the paranet and only selected miners can add knowledge to the paranet | +| PERMISSIONED | OPEN | Only selected nodes can sync both private and public parts of the KC from the paranet and any miner can add knowledge to the pParanet | +| PERMISSIONED | PERMISSIONED | Only selected nodes can sync both private and public parts of the KC from the paranet and only selected miners can add knowledge to the paranet | ### Access policies and knowledge curations -These permissions will also interact with staging paranets. If a paranet has PARANET\_KC\_SUBMISSION\_POLICY STAGING and PERMISSIONED PARANET\_MINERS\_ACCESS\_POLICY, only approved knowledge miners can stage Knowledge Collections. +These permissions will also interact with staging paranets. If a paranet has PARANET_KC_SUBMISSION_POLICY STAGING and PERMISSIONED PARANET_MINERS_ACCESS_POLICY, only approved knowledge miners can stage Knowledge Collections. diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/README.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/README.md index dc819f4f..24950660 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/README.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/README.md @@ -10,8 +10,8 @@ The DKG SDK is used together with an **OriginTrail gateway node** to build appli ## Prerequisites -* python ≥ 3.11 -* poetry ≥ 1.8.5 +- python ≥ 3.11 +- poetry ≥ 1.8.5 ## Installation @@ -89,13 +89,13 @@ async def main(): blockchain_provider, config={"max_number_of_retries": 300, "frequency": 2}, ) - + if __name__ == "__main__": asyncio.run(main()) ``` {% hint style="warning" %} -Make sure to create an .env file and add the PRIVATE\_KEY variable to it so that the blockchain provider can pick it up. 
+Make sure to create an .env file and add the PRIVATE_KEY variable to it so that the blockchain provider can pick it up. {% endhint %} ### Blockchain networks @@ -104,26 +104,26 @@ The system supports multiple blockchain networks, which can be configured using **DKG mainnet options:** -* Base: base:8453 -* Gnosis: gnosis:100 -* Neuroweb: otp:2043 +- Base: base:8453 +- Gnosis: gnosis:100 +- Neuroweb: otp:2043 **DKG testnet options:** -* Base: base:84532 -* Gnosis: gnosis:10200 -* Neuroweb: otp:20430 +- Base: base:84532 +- Gnosis: gnosis:10200 +- Neuroweb: otp:20430 **DKG devnet options:** -* Base: base:84532 -* Gnosis: gnosis:10200 -* Neuroweb: otp:2160 +- Base: base:84532 +- Gnosis: gnosis:10200 +- Neuroweb: otp:2160 **Local options:** -* Hardhat1: hardhat1:31337 -* Hardhat2: hardhat2:31337 +- Hardhat1: hardhat1:31337 +- Hardhat2: hardhat2:31337 ## Create a Knowledge Collection @@ -354,9 +354,9 @@ To learn more about querying the DKG, go [here](../../querying-the-dkg.md). We can divide operations done by SDK into 3 types: -* Node API request -* Smart contract call (non-state-changing interaction) -* Smart contract transaction (state-changing interaction) +- Node API request +- Smart contract call (non-state-changing interaction) +- Smart contract transaction (state-changing interaction) Non-state-changing interactions with smart contracts are free and can be described as contract-getters. They don’t require transactions on the blockchain. This means they do not incur transaction fees. 
diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/interact-with-dkg-paranets.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/interact-with-dkg-paranets.md index 03bd7f69..fd86e2c3 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/interact-with-dkg-paranets.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/dkg-v8-py-client/interact-with-dkg-paranets.md @@ -43,11 +43,11 @@ Asynchronous version setup guide can be found here: [async guide](./) In this example: -* `ka_ual` is the unique identifier of the Knowledge Asset created on the DKG. -* `name` is the name you want to give to your paranet. It should be descriptive enough to indicate the paranet's purpose or focus. -* `description` provides additional context about the paranet, explaining its purpose and the types of Knowledge Assets or services it will involve. -* `paranet_nodes_access_policy` defines a paranet's policy towards including nodes. If OPEN, any node can be a part of the paranet. If CURATED, only the paranet owner can approve nodes to be a part of the paranet. -* `paranet_miners_access_policy` defines a paranet's policy towards including knowledge miners. If OPEN, anyone can publish to a paranet. If CURATED, only the paranet owner can approve knowledge miners who can publish to the paranet. +- `ka_ual` is the unique identifier of the Knowledge Asset created on the DKG. +- `name` is the name you want to give to your paranet. It should be descriptive enough to indicate the paranet's purpose or focus. +- `description` provides additional context about the paranet, explaining its purpose and the types of Knowledge Assets or services it will involve. +- `paranet_nodes_access_policy` defines a paranet's policy towards including nodes. If OPEN, any node can be a part of the paranet. 
If CURATED, only the paranet owner can approve nodes to be a part of the paranet. +- `paranet_miners_access_policy` defines a paranet's policy towards including knowledge miners. If OPEN, anyone can publish to a paranet. If CURATED, only the paranet owner can approve knowledge miners who can publish to the paranet. After the paranet is successfully created, the paranet UAL can be used to interact with the specific paranet. This includes deploying services within the paranet, managing incentives, and claiming rewards associated with the paranet's operations. @@ -74,11 +74,11 @@ await dkg.paranet.add_services(ual=paranet_ual, services_uals=[paranet_service_u In this example: -* `ual` specifies the UAL of the Paranet Service Knowledge Asset -* `paranet_service_name` specifies the name of the service. -* `paranet_service_description` provides a brief description of what the service does. -* `paranet_service_addresses` lists blockchain addresses associated with the service. For off-chain services, this field can be left empty. -* `services_uals` is an array of Universal Asset Locators for the services you want to add to your paranet. +- `ual` specifies the UAL of the Paranet Service Knowledge Asset +- `paranet_service_name` specifies the name of the service. +- `paranet_service_description` provides a brief description of what the service does. +- `paranet_service_addresses` lists blockchain addresses associated with the service. For off-chain services, this field can be left empty. +- `services_uals` is an array of Universal Asset Locators for the services you want to add to your paranet. By integrating and managing services, paranet operators can expand the capabilities of their paranet, providing a robust infrastructure for decentralized applications and AI-driven services. 
@@ -108,9 +108,9 @@ Participants in a paranet can earn rewards for their various roles and contribut **Roles in a paranet:** -* **Knowledge miners:** Contribute to the paranet by mining Knowledge Collections/Assets. -* **Paranet operators:** Manage the paranet, including overseeing services and facilitating operations. -* **Proposal voters:** Participate in decision-making by voting on the Initial Paranet Offering (IPO). +- **Knowledge miners:** Contribute to the paranet by mining Knowledge Collections/Assets. +- **Paranet operators:** Manage the paranet, including overseeing services and facilitating operations. +- **Proposal voters:** Participate in decision-making by voting on the Initial Paranet Offering (IPO). Participants can verify their roles and claim rewards through the following steps and examples: @@ -235,8 +235,8 @@ print(query_result) **Explanation:** -* **`SERVICE` keyword:** The `SERVICE` keyword is used to include data from Paranet 3 (`paranet_ual3`) in the query, while the primary paranet is set to Paranet 1 (`paranet_ual1`). -* **Query structure:** The query retrieves distinct subjects (`?s`), cities, users, and companies from Paranet 1, and performs a sub-query within Paranet 3 to get data where the city is `Belgrade`. -* **Filter clause:** The `filter` clause is used to ensure that the city data from Paranet 3 contains the string "Belgrade". +- **`SERVICE` keyword:** The `SERVICE` keyword is used to include data from Paranet 3 (`paranet_ual3`) in the query, while the primary paranet is set to Paranet 1 (`paranet_ual1`). +- **Query structure:** The query retrieves distinct subjects (`?s`), cities, users, and companies from Paranet 1, and performs a sub-query within Paranet 3 to get data where the city is `Belgrade`. +- **Filter clause:** The `filter` clause is used to ensure that the city data from Paranet 3 contains the string "Belgrade". 
Federated SPARQL queries provide a powerful way to aggregate and analyze data across multiple paranets, this enables more complex data retrieval and cross-paranet data integration, making it easier to gather comprehensive insights from diverse data sources. diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/setting-up-your-development-environment.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/setting-up-your-development-environment.md index 51523a5b..2ef4ee85 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/setting-up-your-development-environment.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/dkg-sdk/setting-up-your-development-environment.md @@ -14,11 +14,11 @@ These instructions are made for macOS and Linux. ### Prerequisites -* An installed and running **Blazegraph** - * To download and run Blazegraph, please visit their [official website](https://blazegraph.com/). -* An installed and running **MySQL** - * You need to create an empty table named **operationaldb** inside MySQL. -* You should have **npm** and **Node.js (v16)** installed. +- An installed and running **Blazegraph** + - To download and run Blazegraph, please visit their [official website](https://blazegraph.com/). +- An installed and running **MySQL** + - You need to create an empty table named **operationaldb** inside MySQL. +- You should have **npm** and **Node.js (v16)** installed. {% hint style="success" %} Need any assistance with node setup? Join the [Discord ](https://discord.com/invite/xCaY7hvNwD)chat and find help within the OriginTrail tech community! 
@@ -50,7 +50,7 @@ Then, install the required dependencies by running: npm install ``` -Next, create a file called `.env` and add the following lines: +Next, create a file called `.env` and add the following lines: ```sh NODE_ENV=development @@ -79,9 +79,8 @@ To start the local DKG network on **Linux**, run the following command: ./tools/local-network-setup/setup-linux-environment.sh --nodes=5 ``` - - {% hint style="info" %} + ### Contributing These setup instructions are a work in progress and are subject to change. The core development team expects to introduce improvements in setting up the DKG node engine in the local environment in the future. diff --git a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/querying-the-dkg.md b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/querying-the-dkg.md index 6c7b07e0..ae80b769 100644 --- a/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/querying-the-dkg.md +++ b/docs/build-a-dkg-node-ai-agent/advanced-features-and-toolkits/querying-the-dkg.md @@ -8,18 +8,18 @@ Paranets are isolated environments within the DKG where participants can publish Relationships: -* A paranet contains multiple Knowledge Collections (KCs). -* Each KC contains multiple Knowledge Assets (KAs). -* Each KA is stored in its own named graph. +- A paranet contains multiple Knowledge Collections (KCs). +- Each KC contains multiple Knowledge Assets (KAs). +- Each KA is stored in its own named graph. ## Understanding DKG connections Before diving into queries, here’s a quick overview of the most important RDF connections you'll encounter in the DKG: -* \ dkg:hasNamedGraph \ - This tells us which Knowledge Asset graphs are currently considered valid and active. You’ll use this to filter for the current version of a KA. -* \ \ dkg:hasNamedGraph \ - This connection links a Knowledge Collection (KC) to one or more Knowledge Assets (KAs). It’s used when looking up KAs via their KC metadata (e.g. publisher, timestamp). 
-* \ \ dkg:hasKnowledgeAsset \ - This links the KC to the KA’s Universal Asset Locator (UAL). While it doesn’t point to the named graph directly, it’s important for referencing and versioning KAs. -* <[paranetUAL](#user-content-fn-1)[^1]> dkg:hasNamedGraph \ - This is used when querying inside a paranet. The paranet graph stores references to all associated KAs within that scope. You can use it to restrict queries to a specific environment. +- \ dkg:hasNamedGraph \ - This tells us which Knowledge Asset graphs are currently considered valid and active. You’ll use this to filter for the current version of a KA. +- \ \ dkg:hasNamedGraph \ - This connection links a Knowledge Collection (KC) to one or more Knowledge Assets (KAs). It’s used when looking up KAs via their KC metadata (e.g. publisher, timestamp). +- \ \ dkg:hasKnowledgeAsset \ - This links the KC to the KA’s Universal Asset Locator (UAL). While it doesn’t point to the named graph directly, it’s important for referencing and versioning KAs. +- <[paranetUAL](#user-content-fn-1)[^1]> dkg:hasNamedGraph \ - This is used when querying inside a paranet. The paranet graph stores references to all associated KAs within that scope. You can use it to restrict queries to a specific environment. {% hint style="info" %} KaGraph - did:dkg:hardhat1:31337/0xd5724171c2b7f0aa717a324626050bd05767e2c6/4/1/public @@ -108,9 +108,9 @@ Want to dive deeper into SPARQL? Check out this awesome guide:[ SPARQL 1.1 Query Happy querying! You've got this. 🚀 -*** +--- **Next step: DKG SDK**\ Once you know how to query the graph, it’s time to go deeper and start **building with code**. The next section introduces the official **DKG SDKs (JavaScript and Python)**, which make it simple to publish, retrieve, and verify Knowledge Assets programmatically. 
-[^1]: +[^1]: diff --git a/docs/build-a-dkg-node-ai-agent/architecture.md b/docs/build-a-dkg-node-ai-agent/architecture.md index 29d606b4..9c54769f 100644 --- a/docs/build-a-dkg-node-ai-agent/architecture.md +++ b/docs/build-a-dkg-node-ai-agent/architecture.md @@ -2,8 +2,8 @@ The DKG Node is built as a modular project with two core runtimes: -* The **DKG Engine**, which powers network communication and implements the core protocol -* The **DKG Node Runtime,** which hosts an AI Agent with MCP capabilities +- The **DKG Engine**, which powers network communication and implements the core protocol +- The **DKG Node Runtime,** which hosts an AI Agent with MCP capabilities Adding functionality is done through **Plugins,** which is where you'll likely spend the majority of your time coding. Conceptually, the architecture is illustrated below. @@ -19,9 +19,9 @@ Plugins are like mini-apps for your DKG Node AI Agent — small add-ons that unl Some useful built-in plugins include: -* **DKG Essential Plugin** — includes the basic tools for publishing and retrieving knowledge. -* **OAuth 2.1 authentication** — controls who can access your node. -* **Swagger** — automatically documents available APIs. +- **DKG Essential Plugin** — includes the basic tools for publishing and retrieving knowledge. +- **OAuth 2.1 authentication** — controls who can access your node. +- **Swagger** — automatically documents available APIs. #### DKG Node Engine diff --git a/docs/build-a-dkg-node-ai-agent/contributing-a-plugin.md b/docs/build-a-dkg-node-ai-agent/contributing-a-plugin.md index 25ed2795..650b5da8 100644 --- a/docs/build-a-dkg-node-ai-agent/contributing-a-plugin.md +++ b/docs/build-a-dkg-node-ai-agent/contributing-a-plugin.md @@ -36,8 +36,8 @@ git checkout -b my-contribution #### 4. Make your changes -* Implement your plugin, fix, or feature. -* Run tests if applicable. +- Implement your plugin, fix, or feature. +- Run tests if applicable. #### 5. 
Push changes to your fork @@ -50,15 +50,15 @@ git push origin my-contribution 1. Go to your fork on GitHub. 2. Click **Compare & pull request**. 3. On the PR page, make sure the branches are correct: - * **base repository**: `OriginTrail/dkg-node` - * **base**: `main` (or other target branch) - * **compare**: `my-contribution` + - **base repository**: `OriginTrail/dkg-node` + - **base**: `main` (or other target branch) + - **compare**: `my-contribution` 4. Fill in a clear PR description. A good template: - * **What**: brief summary of the change - * **Why**: the problem it solves / motivation - * **How**: key implementation details - * **Tests**: how you verified it (commands, screenshots) - * **Breaking changes/migration**: if any + - **What**: brief summary of the change + - **Why**: the problem it solves / motivation + - **How**: key implementation details + - **Tests**: how you verified it (commands, screenshots) + - **Breaking changes/migration**: if any 5. Click **Create pull request**. The OriginTrail core developer team will review your PR. If everything looks good, it will be merged and published. 🎉 @@ -75,27 +75,27 @@ From the repo root, run: turbo gen plugin ``` -* Name it starting with `plugin-` (e.g. `plugin-custom`). -* A new package will be created at: +- Name it starting with `plugin-` (e.g. `plugin-custom`). +- A new package will be created at: - ```sh - packages/plugin-/src/index.ts - ``` + ```sh + packages/plugin-/src/index.ts + ``` #### 2. Develop your plugin -* Add your logic inside `index.ts`. -* Your package name will be: +- Add your logic inside `index.ts`. +- Your package name will be: - ```sh - @dkg/plugin- - ``` + ```sh + @dkg/plugin- + ``` #### 3. Submit via PR -* Commit your work. -* Push it to your fork. -* Open a pull request as described above. +- Commit your work. +- Push it to your fork. +- Open a pull request as described above. 
Once reviewed and merged, your plugin will be published to **npm** under the `@dkg/` namespace for the community to use. @@ -115,26 +115,26 @@ turbo ls You’ll see entries like: -* `@dkg/agent` → Example of a DKG agent (Expo UI + MCP Server) -* `@dkg/plugins` → Utility package for creating DKG plugins -* `@dkg/eslint-config` → Shared ESLint configuration -* `@dkg/typescript-config` → Shared TypeScript configs -* `@dkg/plugin-oauth` → OAuth 2.1 module for the DKG Node +- `@dkg/agent` → Example of a DKG agent (Expo UI + MCP Server) +- `@dkg/plugins` → Utility package for creating DKG plugins +- `@dkg/eslint-config` → Shared ESLint configuration +- `@dkg/typescript-config` → Shared TypeScript configs +- `@dkg/plugin-oauth` → OAuth 2.1 module for the DKG Node #### Add new packages -* Use `turbo gen` to generate new packages. -* New packages will be published under the `@dkg/` namespace once reviewed and merged. +- Use `turbo gen` to generate new packages. +- New packages will be published under the `@dkg/` namespace once reviewed and merged. 
### Repo utilities The DKG Node monorepo comes with powerful tools preconfigured: -* [**Turborepo**](https://turborepo.com/) → build system with caching -* [**TypeScript**](https://www.typescriptlang.org/) → static type checking -* [**ESLint**](https://eslint.org/) **+** [**Prettier**](https://prettier.io) → code linting & formatting +- [**Turborepo**](https://turborepo.com/) → build system with caching +- [**TypeScript**](https://www.typescriptlang.org/) → static type checking +- [**ESLint**](https://eslint.org/) **+** [**Prettier**](https://prettier.io) → code linting & formatting -#### Remote caching with [Vercel](https://vercel.com/signup?/signup?utm_source=remote-cache-sdk\&utm_campaign=free_remote_cache) +#### Remote caching with [Vercel](https://vercel.com/signup?/signup?utm_source=remote-cache-sdk&utm_campaign=free_remote_cache) By default, builds are cached locally.\ Enable [**remote caching**](https://turborepo.com/docs/core-concepts/remote-caching) to share build caches across your team or CI/CD: @@ -146,7 +146,7 @@ npx turbo link # link this repo to remote cache Learn more in Turborepo docs. -*** +--- ### Further resources @@ -154,15 +154,14 @@ Learn more in Turborepo docs. 
📖 **Expo framework:** -* [Expo docs](https://docs.expo.dev/) -* [Video tutorials](https://www.youtube.com/@ExpoDevelopers/videos) +- [Expo docs](https://docs.expo.dev/) +- [Video tutorials](https://www.youtube.com/@ExpoDevelopers/videos) ⚡**Turborepo:** -* [Tasks](https://turborepo.com/docs/crafting-your-repository/running-tasks) -* [Caching](https://turborepo.com/docs/crafting-your-repository/caching) -* [Remote Caching](https://turborepo.com/docs/core-concepts/remote-caching) -* [Filtering](https://turborepo.com/docs/crafting-your-repository/running-tasks#using-filters) -* [Configuration Options](https://turborepo.com/docs/reference/configuration) -* [CLI Usage](https://turborepo.com/docs/reference/command-line-reference) - +- [Tasks](https://turborepo.com/docs/crafting-your-repository/running-tasks) +- [Caching](https://turborepo.com/docs/crafting-your-repository/caching) +- [Remote Caching](https://turborepo.com/docs/core-concepts/remote-caching) +- [Filtering](https://turborepo.com/docs/crafting-your-repository/running-tasks#using-filters) +- [Configuration Options](https://turborepo.com/docs/reference/configuration) +- [CLI Usage](https://turborepo.com/docs/reference/command-line-reference) diff --git a/docs/build-a-dkg-node-ai-agent/essentials-plugin.md b/docs/build-a-dkg-node-ai-agent/essentials-plugin.md index 6c508179..4460b6bd 100644 --- a/docs/build-a-dkg-node-ai-agent/essentials-plugin.md +++ b/docs/build-a-dkg-node-ai-agent/essentials-plugin.md @@ -14,17 +14,17 @@ The **DKG Node Essentials Plugin** ships preinstalled with every DKG Node. It pr ### What’s included -* **DKG Knowledge Asset create tool** - basic too to publish Knowledge assets from a JSON-LD object with `public` or `private` visibility -* **DKG Knowledge Asset get** tool - retrieve a Knowledge asset by it's **UAL**. 
+- **DKG Knowledge Asset create tool** - basic too to publish Knowledge assets from a JSON-LD object with `public` or `private` visibility +- **DKG Knowledge Asset get** tool - retrieve a Knowledge asset by it's **UAL**. Publishing Knowledge assets with the "public" visibility, will replicate their content to the entirety of the DKG - making it **publicly visible**. When creating private knowledge assets, their content never leaves your node - only knowledge asset registration material (such as the cryptographic hash and UALs) will be published publicly. #### 🧱 Resources (MCP) -* **Knowledge Asset (KA) resource** — resolve a **KA UAL.** -* **Knowledge Collection (KC) resource** — resolve a **KC UAL**. +- **Knowledge Asset (KA) resource** — resolve a **KA UAL.** +- **Knowledge Collection (KC) resource** — resolve a **KC UAL**. -*** +--- ### Tool reference @@ -37,17 +37,17 @@ Publish a single **Knowledge Asset** **(KA)** or a single **Knowledge Collection **Inputs** -* `content` _(string, required)_ — a **JSON-LD** string (e.g., Schema.org-based) representing a KA or KC. -* `privacy` _(string, optional)_ — `"public"` or `"private"`, defaults to `"private"` if no input is provided. +- `content` _(string, required)_ — a **JSON-LD** string (e.g., Schema.org-based) representing a KA or KC. +- `privacy` _(string, optional)_ — `"public"` or `"private"`, defaults to `"private"` if no input is provided. **Returns** All tools return an **MCP-formatted** payload: -* `content` _(array)_ — human-readable messages. This tool returns: - * a success line, - * the **UAL**, and - * a **DKG Explorer** link derived from the UAL. +- `content` _(array)_ — human-readable messages. This tool returns: + - a success line, + - the **UAL**, and + - a **DKG Explorer** link derived from the UAL. 
**Example input (JSON-LD)** @@ -70,7 +70,7 @@ UAL: did:dkg:otp:20430/0xABCDEF0123456789/12345/67890 DKG Explorer link: https://dkg-testnet.origintrail.io/explore?ual=did:dkg:otp:20430/0xABCDEF0123456789/12345/67890 ``` -*** +--- #### 2) DKG Knowledge Asset **get** @@ -79,15 +79,15 @@ Fetch a **KA or KC** by **UAL**. **Inputs** -* `ual` _(string, required)_ — the KA or KC UAL. +- `ual` _(string, required)_ — the KA or KC UAL. **Returns** All tools return an **MCP-formatted** payload: -* `content` _(array)_ — one item with **pretty-printed JSON** (as text) containing: - * `assertion` — the JSON-LD content of the KA/KC - * `operation` — retrieval info: `operationId` and `status` (e.g., `COMPLETED`) +- `content` _(array)_ — one item with **pretty-printed JSON** (as text) containing: + - `assertion` — the JSON-LD content of the KA/KC + - `operation` — retrieval info: `operationId` and `status` (e.g., `COMPLETED`) **Example input (UAL)** @@ -112,9 +112,7 @@ did:dkg:otp:20430/0xABCDEF0123456789/12345/67890 "@value": "The best KA example on the DKG" } ], - "@type": [ - "http://schema.org/CreativeWork" - ] + "@type": ["http://schema.org/CreativeWork"] } ], "operation": { @@ -128,9 +126,9 @@ did:dkg:otp:20430/0xABCDEF0123456789/12345/67890 ### Coming soon (preview) -* **DKG query & retrieve** - generate/execute Schema.org-based **SPARQL** queries on the DKG. -* **Document → JSON/Markdown** - convert PDFs/Word/TXT/… into JSON/Markdown for downstream processing. -* **JSON/Markdown → JSON-LD** - transform structured text into a **schema.org** knowledge graph ready for publishing. +- **DKG query & retrieve** - generate/execute Schema.org-based **SPARQL** queries on the DKG. +- **Document → JSON/Markdown** - convert PDFs/Word/TXT/… into JSON/Markdown for downstream processing. +- **JSON/Markdown → JSON-LD** - transform structured text into a **schema.org** knowledge graph ready for publishing. 
### Source Knowledge Assets in tool responses @@ -150,22 +148,27 @@ mcp.registerTool( { title: "Tool name", description: "Tool description", - inputSchema: { /* expected input variables and format */ }, + inputSchema: { + /* expected input variables and format */ + }, }, async (params) => { // Your tool code here return { - content: [{type: "text", text: "My tool response..."}], + content: [{ type: "text", text: "My tool response..." }], }; - - return withSourceKnowledgeAssets({ - content: [{type: "text", text: "My tool response..."}], - }, [ - { title: "KA 1", issuer: "OriginTrail", ual: "did:dkg..." }, - { title: "KA 2", issuer: "OriginTrail", ual: "did:dkg..." }, - { title: "KA 3", issuer: "OriginTrail", ual: "did:dkg..." }, - ]); - } + + return withSourceKnowledgeAssets( + { + content: [{ type: "text", text: "My tool response..." }], + }, + [ + { title: "KA 1", issuer: "OriginTrail", ual: "did:dkg..." }, + { title: "KA 2", issuer: "OriginTrail", ual: "did:dkg..." }, + { title: "KA 3", issuer: "OriginTrail", ual: "did:dkg..." }, + ], + ); + }, ); ``` @@ -175,19 +178,19 @@ mcp.registerTool( You can also check the `packages/plugin-example` to see how this works first-hand. -*** +--- ### Customize & extend -* **Tune the essentials** — adjust defaults (e.g., privacy, retry/finality settings) or validate inputs for your domain. -* **Use as a scaffold** — copy the patterns (tool registration, response helpers, resource resolvers) to **build new tools** and full plugins. -* **Compose with other plugins** — chain tools into **end-to-end agentic pipelines**. +- **Tune the essentials** — adjust defaults (e.g., privacy, retry/finality settings) or validate inputs for your domain. +- **Use as a scaffold** — copy the patterns (tool registration, response helpers, resource resolvers) to **build new tools** and full plugins. +- **Compose with other plugins** — chain tools into **end-to-end agentic pipelines**. 
{% hint style="success" %} Builders are encouraged to **customize DKG Essentials** to fit their use case, and to **use these tools as the basis** for creating new, domain-specific capabilities. {% endhint %} -*** +--- **Next step: Creating custom plugins for your node**\ Want more than the basics? Next, we’ll show you how to **build your own plugins** — integrating APIs, adding new tools, and tailoring your node’s capabilities to your specific use case. diff --git a/docs/build-a-dkg-node-ai-agent/evaluating-agent-responses.md b/docs/build-a-dkg-node-ai-agent/evaluating-agent-responses.md index 74f680b7..40d21dbb 100644 --- a/docs/build-a-dkg-node-ai-agent/evaluating-agent-responses.md +++ b/docs/build-a-dkg-node-ai-agent/evaluating-agent-responses.md @@ -17,16 +17,16 @@ Each evaluation measures these key aspects: #### **1. Context metrics (How well does the agent find information)** -* **Context precision** — Is the agent pulling the right information from our knowledge base? -* **Context recall** — Did the agent find all the relevant information available? -* **Context relevance** — Is the information the agent retrieved actually useful for the question? +- **Context precision** — Is the agent pulling the right information from our knowledge base? +- **Context recall** — Did the agent find all the relevant information available? +- **Context relevance** — Is the information the agent retrieved actually useful for the question? #### **2. Answer metrics (How well does the agent respond)** -* **Answer relevance** — Does the answer actually address what was asked? -* **Faithfulness** — Is the answer based on facts from our knowledge base (no hallucinations)? -* **Answer similarity** — How close is the answer to what we expect? -* **Answer correctness** — Is the answer factually correct? +- **Answer relevance** — Does the answer actually address what was asked? +- **Faithfulness** — Is the answer based on facts from our knowledge base (no hallucinations)? 
+- **Answer similarity** — How close is the answer to what we expect? +- **Answer correctness** — Is the answer factually correct? {% hint style="info" %} Each metric gets a score from 0-1, and you can set minimum thresholds (e.g., 0.8 = 80%) that the answers must meet to pass. @@ -46,25 +46,25 @@ apps/agent/tests/ragas/questionsAnswers/dkg-node-evaluation-dataset.json
-The JSON file contains an array of test cases, each with questions, answers, ground\_truth, and context. All fields are already populated with examples for DKG Node, which you can modify or replace to fit your chatbot's specific use case. +The JSON file contains an array of test cases, each with questions, answers, ground_truth, and context. All fields are already populated with examples for DKG Node, which you can modify or replace to fit your chatbot's specific use case. ### What each field means Think of the dataset as a set of four parallel lists that work together: -* **Questions** are the prompts you're testing ("What is DKG Node?"), -* **Ground\_truths** are your ideal answers — the gold standard you're measuring against, -* **Contexts** are the documentation or knowledge your AI should be using to answer, -* **Answers** should contain actual responses from your DKG Node for each question. +- **Questions** are the prompts you're testing ("What is DKG Node?"), +- **Ground_truths** are your ideal answers — the gold standard you're measuring against, +- **Contexts** are the documentation or knowledge your AI should be using to answer, +- **Answers** should contain actual responses from your DKG Node for each question. ### Adding new questions step by step Adding a new test question is straightforward: -* Start by putting your question in the questions array. -* Then write what you consider the perfect answer and add it to ground\_truths. -* Next, include any relevant documentation in the contexts array — this is the source material your AI should reference. -* For the answers field, you need to manually add the actual response from your DKG Node. You can get this by asking your DKG Node the question directly and copying the response, or you can run a test session to see what it generates, then add that to the array. +- Start by putting your question in the questions array. +- Then write what you consider the perfect answer and add it to ground_truths. 
+- Next, include any relevant documentation in the contexts array — this is the source material your AI should reference. +- For the answers field, you need to manually add the actual response from your DKG Node. You can get this by asking your DKG Node the question directly and copying the response, or you can run a test session to see what it generates, then add that to the array. Just remember: all four arrays need to stay in sync. The first item in each array corresponds to the same test case. @@ -72,10 +72,10 @@ Just remember: all four arrays need to stay in sync. The first item in each arra Edit \`tests/ragas/config.ts\` to change: -* **Which metrics to run** — Enable/disable specific RAGAS metrics -* **Score thresholds** — Set minimum passing scores (e.g., require 80% minimum) -* **LLM model** — Choose which AI model evaluates the responses -* **Browser automation settings** — Playwright timeouts and behavior +- **Which metrics to run** — Enable/disable specific RAGAS metrics +- **Score thresholds** — Set minimum passing scores (e.g., require 80% minimum) +- **LLM model** — Choose which AI model evaluates the responses +- **Browser automation settings** — Playwright timeouts and behavior ### Setup and installation @@ -101,7 +101,7 @@ npm run test:ragas:results npm run test:ragas:dashboard ``` -* **Update login credentials** in `apps/agent/tests/ragas/dkg-node-client.ts`: +- **Update login credentials** in `apps/agent/tests/ragas/dkg-node-client.ts`: ```typescript // Lines 28-29 and 264-265 @@ -113,11 +113,11 @@ password: "adminN131!" 
// Change to your password When you run npm run ragas, a web dashboard opens at http://localhost:3001 showing: -* **Overall score** — How well the DKG Node agent is performing (0-100%) -* **Metric breakdown** — Individual scores for each RAGAS metric -* **Question-by-question analysis** — Detailed view of each failed test question with: - * The question asked - * DKG Node's actual answer - * Expected answer - * Which metrics failed and why - * Real-time Results — Dashboard auto-refreshes as new evaluations complete +- **Overall score** — How well the DKG Node agent is performing (0-100%) +- **Metric breakdown** — Individual scores for each RAGAS metric +- **Question-by-question analysis** — Detailed view of each failed test question with: + - The question asked + - DKG Node's actual answer + - Expected answer + - Which metrics failed and why + - Real-time Results — Dashboard auto-refreshes as new evaluations complete diff --git a/docs/build-a-dkg-node-ai-agent/plugins/README.md b/docs/build-a-dkg-node-ai-agent/plugins/README.md index b2c0f26c..e1b90b07 100644 --- a/docs/build-a-dkg-node-ai-agent/plugins/README.md +++ b/docs/build-a-dkg-node-ai-agent/plugins/README.md @@ -11,4 +11,3 @@ Plugins can expose: ## Available Plugins - [EPCIS Plugin](epcis-plugin.md) - diff --git a/docs/build-a-dkg-node-ai-agent/set-up-your-custom-dkg-node-fork-and-update-flow.md b/docs/build-a-dkg-node-ai-agent/set-up-your-custom-dkg-node-fork-and-update-flow.md index f3cff38b..af447a47 100644 --- a/docs/build-a-dkg-node-ai-agent/set-up-your-custom-dkg-node-fork-and-update-flow.md +++ b/docs/build-a-dkg-node-ai-agent/set-up-your-custom-dkg-node-fork-and-update-flow.md @@ -13,9 +13,9 @@ The DKG Node is continuously evolving - new features, performance improvements, This setup allows you to: -* **Safely integrate official updates** without overwriting local changes. -* **Experiment and customize** your node codebase while staying compatible with the latest OriginTrail releases. 
-* **Stay stable and secure**, ensuring your node runs the most reliable version of the network software. +- **Safely integrate official updates** without overwriting local changes. +- **Experiment and customize** your node codebase while staying compatible with the latest OriginTrail releases. +- **Stay stable and secure**, ensuring your node runs the most reliable version of the network software. In this section, you’ll learn how to structure your repository, pull updates from the official source, and merge them into your project with confidence. @@ -23,8 +23,8 @@ In this section, you’ll learn how to structure your repository, pull updates f To receive new updates, you must maintain a **private fork** of the DKG Node monorepo. Your local project will use **two git remotes**: -* `origin` pointing to your **custom GitHub repository** (private or public) -* `upstream` pointing to the **official DKG Node repository** +- `origin` pointing to your **custom GitHub repository** (private or public) +- `upstream` pointing to the **official DKG Node repository** This setup lets you safely pull in upstream changes while keeping your customizations.​ @@ -70,14 +70,14 @@ git push -u origin main Your custom DKG Node repository is now set up with: -* `origin` pointing to your private fork -* `upstream` pointing to the official DKG Node +- `origin` pointing to your private fork +- `upstream` pointing to the official DKG Node ## Configure and start your custom DKG Node project Once this setup process is complete, you are ready to configure and run your custom DKG Node using the `dkg-cli`. The `dkg-cli` provides automated installation, configuration management, and service control for your DKG Node. Detailed instructions on how to use `dkg-cli` to configure your node, and manage its services are available in the [**Installation**](../getting-started/decentralized-knowle-dge-graph-dkg.md#id-1-install-cli) page under "Getting started" section. 
-## Update your custom DKG Node project +## Update your custom DKG Node project When a new version of DKG Node is released, follow the process steps below to update your custom DKG Node project. @@ -110,6 +110,6 @@ At this point, your codebase is synced with the latest official [DKG Node](https {% hint style="info" %} ⚠️ **Tips for smoother updates** -* Pull upstream updates **regularly** to avoid large conflict sets. -* Always test your DKG Node after merging updates to ensure compatibility -{% endhint %} +- Pull upstream updates **regularly** to avoid large conflict sets. +- Always test your DKG Node after merging updates to ensure compatibility + {% endhint %} diff --git a/docs/contribute-to-the-dkg/bounties-and-rewards/README.md b/docs/contribute-to-the-dkg/bounties-and-rewards/README.md index 15e05668..415982a3 100644 --- a/docs/contribute-to-the-dkg/bounties-and-rewards/README.md +++ b/docs/contribute-to-the-dkg/bounties-and-rewards/README.md @@ -1,2 +1 @@ # Bounties & rewards - diff --git a/docs/contribute-to-the-dkg/bounties-and-rewards/code-contributions-and-v8-bug-bounty.md b/docs/contribute-to-the-dkg/bounties-and-rewards/code-contributions-and-v8-bug-bounty.md index ef6cd274..863bf2a1 100644 --- a/docs/contribute-to-the-dkg/bounties-and-rewards/code-contributions-and-v8-bug-bounty.md +++ b/docs/contribute-to-the-dkg/bounties-and-rewards/code-contributions-and-v8-bug-bounty.md @@ -10,10 +10,10 @@ Interested in helping us build the substrate of collective neuro-symbolic AI?&#x We encourage code contributions to the following repositories. -* ot-node -* dkg-evm-module -* dkg.js -* dkg.py +- ot-node +- dkg-evm-module +- dkg.js +- dkg.py Please check the contribution guidelines in each repo. 
@@ -25,9 +25,9 @@ To ensure the **security and proper functioning of the DKG V8**, Trace Labs has ### Vulnerability categories and rewards -* **Minor bug:** 50 TRAC -* **Medium bug:** 200 TRAC -* **Critical bug:** 5000 TRAC +- **Minor bug:** 50 TRAC +- **Medium bug:** 200 TRAC +- **Critical bug:** 5000 TRAC ### Bug bounty rules @@ -36,17 +36,17 @@ To ensure the **security and proper functioning of the DKG V8**, Trace Labs has ### Security vulnerabilities -* SQL injection. -* Cross-site scripting (XSS). -* Cross-site request forgery (CSRF). -* Remote code execution (RCE). -* Insecure configurations in web servers, databases, and application frameworks. -* Session hijacking and clickjacking. -* Sensitive data exposure. -* Unauthorized access to user accounts. -* Bypassing authentication mechanisms. -* Credentials exposure. -* Logic bypasses. +- SQL injection. +- Cross-site scripting (XSS). +- Cross-site request forgery (CSRF). +- Remote code execution (RCE). +- Insecure configurations in web servers, databases, and application frameworks. +- Session hijacking and clickjacking. +- Sensitive data exposure. +- Unauthorized access to user accounts. +- Bypassing authentication mechanisms. +- Credentials exposure. +- Logic bypasses. 
### Example submission template diff --git a/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/README.md b/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/README.md index 832e5c20..771b414b 100644 --- a/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/README.md +++ b/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/README.md @@ -8,10 +8,10 @@ To ensure the **security and proper functioning of our websites and applications ## Vulnerability categories and rewards -* **Minor bug:** 50 TRAC -* **Medium bug:** 250 TRAC -* **Serious bug:** 500 TRAC -* **Critical bug:** 1000 TRAC +- **Minor bug:** 50 TRAC +- **Medium bug:** 250 TRAC +- **Serious bug:** 500 TRAC +- **Critical bug:** 1000 TRAC ## Bug bounty rules @@ -20,17 +20,17 @@ To ensure the **security and proper functioning of our websites and applications ## Security vulnerabilities -* SQL injection. -* Cross-site scripting (XSS). -* Cross-site request forgery (CSRF). -* Remote code execution (RCE). -* Insecure configurations in web servers, databases, and application frameworks. -* Session hijacking and clickjacking. -* Sensitive data exposure. -* Unauthorized access to user accounts. -* Bypassing authentication mechanisms. -* Credentials exposure. -* Logic bypasses. +- SQL injection. +- Cross-site scripting (XSS). +- Cross-site request forgery (CSRF). +- Remote code execution (RCE). +- Insecure configurations in web servers, databases, and application frameworks. +- Session hijacking and clickjacking. +- Sensitive data exposure. +- Unauthorized access to user accounts. +- Bypassing authentication mechanisms. +- Credentials exposure. +- Logic bypasses. 
## Example submission template diff --git a/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/staking-security-bounty.md b/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/staking-security-bounty.md index d8cff9cb..93600ccc 100644 --- a/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/staking-security-bounty.md +++ b/docs/contribute-to-the-dkg/bounties-and-rewards/general-bug-bounty/staking-security-bounty.md @@ -8,15 +8,14 @@ As a part of the DKG V8 mainnet launch, a 100k TRAC staking security bounty will The new, improved staking system includes: -* The new [Staking Dashboard](https://staking.origintrail.io/) -* The updated [smart contracts](https://github.com/OriginTrail/dkg-evm-module/) +- The new [Staking Dashboard](https://staking.origintrail.io/) +- The updated [smart contracts](https://github.com/OriginTrail/dkg-evm-module/) To be eligible, users need to: -* Stake TRAC on the mainnet and test the upgraded V8 Staking Dashboard between Dec 27, 17:00 CET, and January 10, 17:00 CET. -* Register for claiming the reward from January 10, 17:00 CET, to January 16, 17:00 CET, on [the designated reward claiming interface](https://dkg-v8-incentivised-testnet.origintrail.io/claim-rewards). Registration includes submitting KYC data for the purposes of reward distribution and qualitative feedback on the usage of the staking interface. +- Stake TRAC on the mainnet and test the upgraded V8 Staking Dashboard between Dec 27, 17:00 CET, and January 10, 17:00 CET. +- Register for claiming the reward from January 10, 17:00 CET, to January 16, 17:00 CET, on [the designated reward claiming interface](https://dkg-v8-incentivised-testnet.origintrail.io/claim-rewards). Registration includes submitting KYC data for the purposes of reward distribution and qualitative feedback on the usage of the staking interface. 
The total reward amount will be distributed pro rata among eligible participants according to the size of the stake they contributed during that period. Rewards will be distributed no later than January 27, 17:00 CET. - diff --git a/docs/contribute-to-the-dkg/contribute/README.md b/docs/contribute-to-the-dkg/contribute/README.md index f715b036..6b7945c0 100644 --- a/docs/contribute-to-the-dkg/contribute/README.md +++ b/docs/contribute-to-the-dkg/contribute/README.md @@ -12,10 +12,10 @@ If you are new to OriginTrail development, there are guides in this documentatio Please follow the below procedure to contribute new code or fixes: -* Create a separate branch by branching the relevant branch (we generally follow [Gitflow](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow)) -* Create a pull request to **develop** (except for v6 contributions, then use the **v6/develop**) branch containing a description of what your code does and how it can be tested -* Provide at least a minimum of unit tests -* Please include descriptive commit messages +- Create a separate branch by branching the relevant branch (we generally follow [Gitflow](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow)) +- Create a pull request to **develop** (except for v6 contributions, then use the **v6/develop**) branch containing a description of what your code does and how it can be tested +- Provide at least a minimum of unit tests +- Please include descriptive commit messages ## Rules @@ -29,21 +29,21 @@ There are a few basic ground rules for contributors: ### Merging pull requests once CI is successful -* Each pull request **must be reviewed and approved by at least two OriginTrail core developers** -* A pull request that does not significantly change logic and is urgently needed may be merged after a non-author OriginTrail core developer has reviewed it thoroughly. -* All other PRs should sit for 48 hours in order to garner feedback. 
-* No PR should be merged until all review comments are addressed. +- Each pull request **must be reviewed and approved by at least two OriginTrail core developers** +- A pull request that does not significantly change logic and is urgently needed may be merged after a non-author OriginTrail core developer has reviewed it thoroughly. +- All other PRs should sit for 48 hours in order to garner feedback. +- No PR should be merged until all review comments are addressed. ### Reviewing pull requests When reviewing a pull request, the end goal is to suggest useful changes to the author. Reviews should finish with approval unless there are issues that would result in: -* Buggy behavior. -* Undue maintenance burden. -* Breaking with house coding style. -* Pessimization (i.e., reduction of speed as measured in the project benchmarks). -* Feature reduction (i.e., it removes some aspect of functionality that a significant minority of users rely on). -* Uselessness (i.e., it does not strictly add a feature or fix a known issue). +- Buggy behavior. +- Undue maintenance burden. +- Breaking with house coding style. +- Pessimization (i.e., reduction of speed as measured in the project benchmarks). +- Feature reduction (i.e., it removes some aspect of functionality that a significant minority of users rely on). +- Uselessness (i.e., it does not strictly add a feature or fix a known issue). ### Releases diff --git a/docs/contribute-to-the-dkg/contribute/guidelines-for-automated-test-contributions.md b/docs/contribute-to-the-dkg/contribute/guidelines-for-automated-test-contributions.md index 78227729..f6ab34ed 100644 --- a/docs/contribute-to-the-dkg/contribute/guidelines-for-automated-test-contributions.md +++ b/docs/contribute-to-the-dkg/contribute/guidelines-for-automated-test-contributions.md @@ -29,4 +29,3 @@ Tests need to cover errors listed on the [GitHub discussions](https://github.com 6. 
When you're satisfied with the scenarios and their step definitions, you may now mark your PR draft as "Ready for review". 7. There will likely be feedback on your PR before it's approved, so make sure to follow the status of your PR contribution. After PR approval, your changes will be merged. 8. Congratulations! You've just made ot-node more robust :tada: - diff --git a/docs/contribute-to-the-dkg/delegated-staking/README.md b/docs/contribute-to-the-dkg/delegated-staking/README.md index 6a1e7714..28a6cf67 100644 --- a/docs/contribute-to-the-dkg/delegated-staking/README.md +++ b/docs/contribute-to-the-dkg/delegated-staking/README.md @@ -27,7 +27,7 @@ Note that Core Node operators and node delegators are not distinct - you can be Contrary to inflationary systems, TRAC staking is strictly utility-based, and rewards are generated through DKG usage via knowledge publishing fees. {% endhint %} -*** +--- ## How do delegators earn TRAC fees? @@ -56,18 +56,16 @@ If you want to withdraw tokens in order to delegate to another node on the same Delegated staking is a non-custodial system, so the Core Node operator has no access to the locked TRAC tokens at any time. {% endhint %} - - Each Core Node operator can also set an “**operator fee,**” which is a percentage of the TRAC rewards deducted each time a node claims rewards from a Knowledge Asset. The remaining TRAC fee is then split proportionally to the share of staked tokens across all delegators. 
{% hint style="info" %} -**Example**: If a node accumulated **1,000 TRAC** tokens in the previous period, and the node has two delegators, both with a 50% share, and the operator\_fee is 10%: +**Example**: If a node accumulated **1,000 TRAC** tokens in the previous period, and the node has two delegators, both with a 50% share, and the operator_fee is 10%: -* The node operator will receive 100 TRAC (10%) -* Each delegator receives 450 TRAC (50% of the remaining 900 TRAC) -{% endhint %} +- The node operator will receive 100 TRAC (10%) +- Each delegator receives 450 TRAC (50% of the remaining 900 TRAC) + {% endhint %} -*** +--- ## What makes a good node? How should I pick a node to delegate to? @@ -79,9 +77,9 @@ Nodes compete to provide the best service in the network — the better the node **Node Power** is a metric that gives delegators a simplified view of a node’s overall strength in the network. It combines: -* The amount of TRAC staked on the node -* How much new knowledge has the node published -* The node's service ask (lower ask = higher competitiveness) +- The amount of TRAC staked on the node +- How much new knowledge has the node published +- The node's service ask (lower ask = higher competitiveness) This score shows how competitive the node is in attracting publishing rewards, and how its influence compares to other nodes in the network. @@ -91,12 +89,12 @@ This score shows how competitive the node is in attracting publishing rewards, a **Node Health** indicates how reliably a node has performed in the random sampling proof system. 
It reflects: -* How many proof challenges did the node successfully respond to -* Compared to the number of challenges it was expected to respond to in that epoch +- How many proof challenges did the node successfully respond to +- Compared to the number of challenges it was expected to respond to in that epoch High node health indicates the node has strong uptime and actively maintains the availability of Knowledge Assets—both critical for earning consistent rewards. -*** +--- ### Operator fee @@ -104,7 +102,7 @@ Each node may **charge an operator fee** (e.g., 10%) on rewards earned. A lower
-*** +--- ## Delegating if you run a Core Node @@ -114,7 +112,7 @@ To understand how to set up your operator fee, follow the [Core Node setup](../.

Depiction of delegating and withdrawing of TRAC from DKG smart contracts

-*** +--- ## **Have questions?** diff --git a/docs/contribute-to-the-dkg/delegated-staking/redelegating-stake.md b/docs/contribute-to-the-dkg/delegated-staking/redelegating-stake.md index ab532db7..0fa3c505 100644 --- a/docs/contribute-to-the-dkg/delegated-staking/redelegating-stake.md +++ b/docs/contribute-to-the-dkg/delegated-staking/redelegating-stake.md @@ -6,14 +6,14 @@ description: Moving your TRAC stake from one node to another If you want **move your delegated TRAC stake from one DKG node to another**, you can use the **redelegate** feature instead of withdrawing and then delegating again. With redelegation, the amount of TRAC stake you are "redelegating" will be transferred from the original DKG node to the new DKG node of your choice, avoiding the 28-day delay that would otherwise take place if you were to withdraw tokens first. -*** +--- ## Keep in mind -* The DKG is multichain. However, **TRAC tokens can only be redelegated within nodes on the same blockchain** -* The amount of stake (TRAC) that you want to redelegate **should not exceed the second node's remaining capacity** (a node can have a maximum of 2,000,000 TRAC stake delegated to it). +- The DKG is multichain. However, **TRAC tokens can only be redelegated within nodes on the same blockchain** +- The amount of stake (TRAC) that you want to redelegate **should not exceed the second node's remaining capacity** (a node can have a maximum of 2,000,000 TRAC stake delegated to it). -*** +--- ## How can you redelegate TRAC? @@ -21,12 +21,10 @@ If you want **move your delegated TRAC stake from one DKG node to another**, you 2. Go to the **'My delegation**' tab to see available nodes that you can redelegate from. 3. Optionally, use the **'Filter by blockchain'** dropdown to select the desired blockchain, which will filter and display nodes on this network along with their staking information. 4. 
Once you've decided which node you want to redelegate your TRAC from, click on the **'Manage stake'** button next to the desired node on the right side of the table. Make sure you read the disclaimer. -5. When the staking pop-up opens, you'll have the option to **Delegate, Redelegate,** or **Withdraw** TRAC tokens from the node. Proceed by selecting '**Redelegate**'. +5. When the staking pop-up opens, you'll have the option to **Delegate, Redelegate,** or **Withdraw** TRAC tokens from the node. Proceed by selecting '**Redelegate**'.

Use the redelegate button in the popup to redelegate your stake

- - 6. After clicking on 'Redelegate', a field to enter the amount of TRAC you wish to redelegate to another node will appear on the right side of the pop-up, as well as the select box, for selecting the other node — the one that will receive the TRAC. **Enter the amount of TRAC you want redelegated and select the node you want to redelegate to.**
@@ -46,7 +44,6 @@ Only the nodes from the same network with the remaining capacity greater than ze 8. Once both transactions are signed and confirmed, you should see a **'Stake redelegated successfully'** message appear. 9. To confirm that the process was successful, **check your TRAC delegation** by going to the 'My delegations' tab above the table with the nodes and verifying that your delegations are listed there. Additionally, ensure that the stake amount on the node has decreased and the amount on the other node has increased following the successful redelegation.\ - {% hint style="info" %} If you encounter any issues during the staking process or require assistance, please get in touch with the OriginTrail community in [Discord](https://discord.gg/xCaY7hvNwD). {% endhint %} diff --git a/docs/contribute-to-the-dkg/delegated-staking/step-by-step-staking.md b/docs/contribute-to-the-dkg/delegated-staking/step-by-step-staking.md index 8b9b7526..be12ce96 100644 --- a/docs/contribute-to-the-dkg/delegated-staking/step-by-step-staking.md +++ b/docs/contribute-to-the-dkg/delegated-staking/step-by-step-staking.md @@ -10,23 +10,23 @@ Welcome to the step-by-step TRAC delegated staking guide! First, lets start with 1. You need to have some TRAC tokens to delegate. See ['How to get on TRAC(k)?' section of this website >](https://origintrail.io/get-started/trac-token) 2. You need to decide which blockchain you want to stake on. 
The DKG supports multiple blockchains: - * [Base Blockchain](../../dkg-knowledge-hub/learn-more/connected-blockchains/base-blockchain/) - * [NeuroWeb](../../dkg-knowledge-hub/learn-more/connected-blockchains/neuroweb.md) - * [Gnosis Chain](../../dkg-knowledge-hub/learn-more/connected-blockchains/gnosis-chain/) + - [Base Blockchain](../../dkg-knowledge-hub/learn-more/connected-blockchains/base-blockchain/) + - [NeuroWeb](../../dkg-knowledge-hub/learn-more/connected-blockchains/neuroweb.md) + - [Gnosis Chain](../../dkg-knowledge-hub/learn-more/connected-blockchains/gnosis-chain/) 3. Bridge your TRAC to the chosen blockchain. See instructions for bridging: - * [Base Blockchain](../../dkg-knowledge-hub/learn-more/connected-blockchains/base-blockchain/) - * [NeuroWeb](../../graveyard/everything/teleport-instructions-neuroweb.md) - * [Gnosis Chain](../../dkg-knowledge-hub/learn-more/connected-blockchains/gnosis-chain/) + - [Base Blockchain](../../dkg-knowledge-hub/learn-more/connected-blockchains/base-blockchain/) + - [NeuroWeb](../../graveyard/everything/teleport-instructions-neuroweb.md) + - [Gnosis Chain](../../dkg-knowledge-hub/learn-more/connected-blockchains/gnosis-chain/) 4. Have some gas fee tokens available on the chosen network: - * Base Mainnet: ETH on Base - * NeuroWeb: NEURO - * Gnosis Chain: xDAI + - Base Mainnet: ETH on Base + - NeuroWeb: NEURO + - Gnosis Chain: xDAI {% hint style="warning" %} _If you are staking on NeuroWeb, please make sure that you update both **"Max base fee"** and "**Priority fee**" to **0.00000001** before signing transactions._ {% endhint %} -*** +--- ## **TRAC staking using the Staking Dashboard** @@ -52,12 +52,12 @@ Make sure you have selected the right blockchain in your wallet. The Staking Dashboard shows a list of all the Core Nodes hosting the DKG. 
This table shows different information, such as: -* The node name, -* Which blockchain it's connected to, -* How much stake does a node have, -* The node's ask, -* The node's operator fee, -* Reward statistics, and other. +- The node name, +- Which blockchain it's connected to, +- How much stake does a node have, +- The node's ask, +- The node's operator fee, +- Reward statistics, and other. **To delegate your TRAC tokens, you need to pick one or more nodes you believe are going to perform best for the network** (on the basis of criteria explained [here](./)). The chosen node has to have **enough "room" to take TRAC,** meaning less than 2M TRAC already staked. 2M is the maximum amount of TRAC staked per node. diff --git a/package-lock.json b/package-lock.json index ca2e6cc8..31aefdb4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -59,13 +59,13 @@ "@expo-google-fonts/manrope": "^0.4.1", "@expo-google-fonts/space-grotesk": "^0.4.0", "@expo/vector-icons": "^14.1.0", - "@langchain/anthropic": "^0.3.28", - "@langchain/core": "^0.3.66", - "@langchain/google-genai": "^0.2.18", - "@langchain/groq": "^0.2.4", - "@langchain/mistralai": "^0.2.1", - "@langchain/openai": "^0.6.3", - "@langchain/xai": "^0.1.0", + "@langchain/anthropic": "^1.3.22", + "@langchain/core": "^1.1.30", + "@langchain/google-genai": "^2.1.24", + "@langchain/groq": "^1.1.4", + "@langchain/mistralai": "^1.0.7", + "@langchain/openai": "^1.2.12", + "@langchain/xai": "^1.3.8", "@modelcontextprotocol/sdk": "^1.16.0", "@node-rs/argon2": "^2.0.2", "@react-native-async-storage/async-storage": "2.1.2", @@ -140,6 +140,182 @@ "typescript": "^5.0.0" } }, + "apps/agent/node_modules/@anthropic-ai/sdk": { + "version": "0.74.0", + "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.74.0.tgz", + "integrity": "sha512-srbJV7JKsc5cQ6eVuFzjZO7UR3xEPJqPamHFIe29bs38Ij2IripoAhC0S5NslNbaFUYqBKypmmpzMTpqfHEUDw==", + "license": "MIT", + "dependencies": { + "json-schema-to-ts": "^3.1.1" + }, + "bin": { + 
"anthropic-ai-sdk": "bin/cli" + }, + "peerDependencies": { + "zod": "^3.25.0 || ^4.0.0" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, + "apps/agent/node_modules/@langchain/anthropic": { + "version": "1.3.22", + "resolved": "https://registry.npmjs.org/@langchain/anthropic/-/anthropic-1.3.22.tgz", + "integrity": "sha512-P/XpLzZlaCU7qba+cgHIO2IKk9EeJwA1OLUJh9+iSRr0peLiZ1ssZeBPNLjBU28MuSDvDtUJt1fFYUYGD4Km1g==", + "license": "MIT", + "dependencies": { + "@anthropic-ai/sdk": "^0.74.0", + "zod": "^3.25.76 || ^4" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@langchain/core": "^1.1.30" + } + }, + "apps/agent/node_modules/@langchain/core": { + "version": "1.1.30", + "resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.30.tgz", + "integrity": "sha512-tPjY7TUI/w5Jby93TBCENH3QlcuUi0cuq2hpQ3WO1rd3x3WULvdmtfDzLcLQC427oPMlOFuDI1NIvR89jxD6Ng==", + "license": "MIT", + "dependencies": { + "@cfworker/json-schema": "^4.0.2", + "@standard-schema/spec": "^1.1.0", + "ansi-styles": "^5.0.0", + "camelcase": "6", + "decamelize": "1.2.0", + "js-tiktoken": "^1.0.12", + "langsmith": ">=0.5.0 <1.0.0", + "mustache": "^4.2.0", + "p-queue": "^6.6.2", + "uuid": "^11.1.0", + "zod": "^3.25.76 || ^4" + }, + "engines": { + "node": ">=20" + } + }, + "apps/agent/node_modules/@langchain/core/node_modules/uuid": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/esm/bin/uuid" + } + }, + "apps/agent/node_modules/@langchain/google-genai": { + "version": "2.1.24", + "resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-2.1.24.tgz", + "integrity": 
"sha512-gBuYWIrTiT4S8U3AxaAMl6SKeGKtB10fXc+m0p2BPSvTfCaTbIlycH7IZjZUTD1L92dQMi/SULwCWffq5OIBgQ==", + "license": "MIT", + "dependencies": { + "@google/generative-ai": "^0.24.0", + "uuid": "^11.1.0" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@langchain/core": "^1.1.30" + } + }, + "apps/agent/node_modules/@langchain/google-genai/node_modules/uuid": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/esm/bin/uuid" + } + }, + "apps/agent/node_modules/@langchain/groq": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@langchain/groq/-/groq-1.1.4.tgz", + "integrity": "sha512-adpu8dTxw009smtAhPL9tzYy/0SjTKhKO/xuPzeTZBpfi19kqFJUeOTzzEbPcHxlgv5KuIBqOwa6nNd5xleIsg==", + "license": "MIT", + "dependencies": { + "groq-sdk": "^0.37.0" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@langchain/core": "^1.0.0" + } + }, + "apps/agent/node_modules/@langchain/mistralai": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@langchain/mistralai/-/mistralai-1.0.7.tgz", + "integrity": "sha512-5lXqdDnicHTTHMVv08q9Uja3CNkml3+JF3iq3Odfw+uOZ0wvwlQ5w90QNgW5rHluVQ1xnwI8PlBwsD8IhEqpvg==", + "license": "MIT", + "dependencies": { + "@mistralai/mistralai": "^1.3.1", + "uuid": "^13.0.0" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@langchain/core": "^1.0.0" + } + }, + "apps/agent/node_modules/@langchain/mistralai/node_modules/uuid": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-13.0.0.tgz", + "integrity": "sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==", + "funding": [ + "https://github.com/sponsors/broofa", + 
"https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist-node/bin/uuid" + } + }, + "apps/agent/node_modules/@langchain/openai": { + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-1.2.12.tgz", + "integrity": "sha512-Im6PPNujrfkZk4vpc9JAjbeERg+RbNtWRe3KSFOP7aNGa/yZ+XD69lxXwbsZGaZkbiUN/hwe9RYeisUfThb5wg==", + "license": "MIT", + "dependencies": { + "js-tiktoken": "^1.0.12", + "openai": "^6.24.0", + "zod": "^3.25.76 || ^4" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@langchain/core": "^1.1.30" + } + }, + "apps/agent/node_modules/@langchain/xai": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@langchain/xai/-/xai-1.3.8.tgz", + "integrity": "sha512-9+4e9TUagz5YpIORTGFFSc4+YOxAJplNC1B1No9IdEwJOnB3j/uWkpXHVrwMwwDONkAJIb6G9IZ7nNKyuFsXLw==", + "license": "MIT", + "dependencies": { + "@langchain/openai": "1.2.12" + }, + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@langchain/core": "^1.0.0" + } + }, "apps/agent/node_modules/@types/node": { "version": "20.19.23", "dev": true, @@ -148,11 +324,44 @@ "undici-types": "~6.21.0" } }, + "apps/agent/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "apps/agent/node_modules/argparse": { "version": "2.0.1", "dev": true, "license": "Python-2.0" }, + "apps/agent/node_modules/chalk": { + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", + "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + 
"funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "apps/agent/node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "apps/agent/node_modules/dotenv": { "version": "16.6.1", "license": "BSD-2-Clause", @@ -192,6 +401,36 @@ "url": "https://ko-fi.com/tunnckoCore/commissions" } }, + "apps/agent/node_modules/groq-sdk": { + "version": "0.37.0", + "resolved": "https://registry.npmjs.org/groq-sdk/-/groq-sdk-0.37.0.tgz", + "integrity": "sha512-lT72pcT8b/X5XrzdKf+rWVzUGW1OQSKESmL8fFN5cTbsf02gq6oFam4SVeNtzELt9cYE2Pt3pdGgSImuTbHFDg==", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + } + }, + "apps/agent/node_modules/groq-sdk/node_modules/@types/node": { + "version": "18.19.130", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz", + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "apps/agent/node_modules/groq-sdk/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "license": "MIT" + }, "apps/agent/node_modules/js-yaml": { "version": "4.1.0", "dev": true, @@ -203,6 +442,40 @@ "js-yaml": "bin/js-yaml.js" } }, + "apps/agent/node_modules/langsmith": { + "version": "0.5.7", + "resolved": 
"https://registry.npmjs.org/langsmith/-/langsmith-0.5.7.tgz", + "integrity": "sha512-FjYf2oBGMoSXnaT4SRaFguIiGJaonZ5VKWKJDPl9awLZjz2RkN29AcQWceecSINVzXzTvtRWPOjAWT+XggqNNg==", + "license": "MIT", + "dependencies": { + "@types/uuid": "^10.0.0", + "chalk": "^5.6.2", + "console-table-printer": "^2.12.1", + "p-queue": "^6.6.2", + "semver": "^7.6.3", + "uuid": "^10.0.0" + }, + "peerDependencies": { + "@opentelemetry/api": "*", + "@opentelemetry/exporter-trace-otlp-proto": "*", + "@opentelemetry/sdk-trace-base": "*", + "openai": "*" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "@opentelemetry/exporter-trace-otlp-proto": { + "optional": true + }, + "@opentelemetry/sdk-trace-base": { + "optional": true + }, + "openai": { + "optional": true + } + } + }, "apps/agent/node_modules/mocha": { "version": "10.8.2", "dev": true, @@ -237,6 +510,47 @@ "node": ">= 14.0.0" } }, + "apps/agent/node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "apps/agent/node_modules/openai": { + "version": "6.25.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-6.25.0.tgz", + "integrity": "sha512-mEh6VZ2ds2AGGokWARo18aPISI1OhlgdEIC1ewhkZr8pSIT31dec0ecr9Nhxx0JlybyOgoAT1sWeKtwPZzJyww==", + "license": "Apache-2.0", + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "ws": "^8.18.0", + "zod": "^3.25 || ^4.0" + }, + "peerDependenciesMeta": { + "ws": { + "optional": true + }, + "zod": { + "optional": true + } + } + }, "apps/agent/node_modules/superagent": { "version": "8.1.2", "dev": true, @@ -296,24 +610,6 @@ 
"version": "1.10.1", "license": "MIT" }, - "node_modules/@anthropic-ai/sdk": { - "version": "0.65.0", - "license": "MIT", - "dependencies": { - "json-schema-to-ts": "^3.1.1" - }, - "bin": { - "anthropic-ai-sdk": "bin/cli" - }, - "peerDependencies": { - "zod": "^3.25.0 || ^4.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, "node_modules/@asteasolutions/zod-to-openapi": { "version": "7.3.4", "license": "MIT", @@ -6138,161 +6434,6 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/@langchain/anthropic": { - "version": "0.3.33", - "license": "MIT", - "dependencies": { - "@anthropic-ai/sdk": "^0.65.0", - "fast-xml-parser": "^4.4.1" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.3.58 <0.4.0" - } - }, - "node_modules/@langchain/core": { - "version": "0.3.79", - "license": "MIT", - "dependencies": { - "@cfworker/json-schema": "^4.0.2", - "ansi-styles": "^5.0.0", - "camelcase": "6", - "decamelize": "1.2.0", - "js-tiktoken": "^1.0.12", - "langsmith": "^0.3.67", - "mustache": "^4.2.0", - "p-queue": "^6.6.2", - "p-retry": "4", - "uuid": "^10.0.0", - "zod": "^3.25.32", - "zod-to-json-schema": "^3.22.3" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@langchain/core/node_modules/ansi-styles": { - "version": "5.2.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@langchain/core/node_modules/decamelize": { - "version": "1.2.0", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@langchain/core/node_modules/uuid": { - "version": "10.0.0", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/@langchain/google-genai": { - "version": "0.2.18", - "license": "MIT", - "dependencies": { - "@google/generative-ai": "^0.24.0", 
- "uuid": "^11.1.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.3.58 <0.4.0" - } - }, - "node_modules/@langchain/google-genai/node_modules/uuid": { - "version": "11.1.0", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/esm/bin/uuid" - } - }, - "node_modules/@langchain/groq": { - "version": "0.2.4", - "license": "MIT", - "dependencies": { - "groq-sdk": "^0.29.0", - "zod": "^3.22.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.3.58 <0.4.0" - } - }, - "node_modules/@langchain/mistralai": { - "version": "0.2.3", - "license": "MIT", - "dependencies": { - "@mistralai/mistralai": "^1.3.1", - "uuid": "^10.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.3.58 <0.4.0" - } - }, - "node_modules/@langchain/mistralai/node_modules/uuid": { - "version": "10.0.0", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/@langchain/openai": { - "version": "0.6.16", - "license": "MIT", - "dependencies": { - "js-tiktoken": "^1.0.12", - "openai": "5.12.2", - "zod": "^3.25.32" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.3.68 <0.4.0" - } - }, - "node_modules/@langchain/xai": { - "version": "0.1.0", - "license": "MIT", - "dependencies": { - "@langchain/openai": "^0.6.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@langchain/core": ">=0.3.58 <0.4.0" - } - }, "node_modules/@manypkg/find-root": { "version": "1.1.0", "dev": true, @@ -9190,6 +9331,12 @@ "node": ">=18" } }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": 
"sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "license": "MIT" + }, "node_modules/@substrate/connect": { "version": "0.8.11", "license": "GPL-3.0-only", @@ -9818,10 +9965,6 @@ "@types/node": "*" } }, - "node_modules/@types/retry": { - "version": "0.12.0", - "license": "MIT" - }, "node_modules/@types/secp256k1": { "version": "4.0.7", "license": "MIT", @@ -16136,6 +16279,7 @@ }, "node_modules/fast-xml-parser": { "version": "4.5.3", + "dev": true, "funding": [ { "type": "github", @@ -17125,48 +17269,6 @@ "dev": true, "license": "MIT" }, - "node_modules/groq-sdk": { - "version": "0.29.0", - "license": "Apache-2.0", - "dependencies": { - "@types/node": "^18.11.18", - "@types/node-fetch": "^2.6.4", - "abort-controller": "^3.0.0", - "agentkeepalive": "^4.2.1", - "form-data-encoder": "1.7.2", - "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7" - } - }, - "node_modules/groq-sdk/node_modules/@types/node": { - "version": "18.19.130", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/groq-sdk/node_modules/node-fetch": { - "version": "2.7.0", - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/groq-sdk/node_modules/undici-types": { - "version": "5.26.5", - "license": "MIT" - }, "node_modules/handlebars": { "version": "4.7.8", "dev": true, @@ -19410,50 +19512,6 @@ "lan-network": "dist/lan-network-cli.js" } }, - "node_modules/langsmith": { - "version": "0.3.75", - "license": "MIT", - "dependencies": { - "@types/uuid": "^10.0.0", - "chalk": "^4.1.2", - "console-table-printer": "^2.12.1", - "p-queue": "^6.6.2", - "p-retry": "4", - "semver": "^7.6.3", - "uuid": "^10.0.0" - }, - "peerDependencies": { - "@opentelemetry/api": "*", - "@opentelemetry/exporter-trace-otlp-proto": "*", - 
"@opentelemetry/sdk-trace-base": "*", - "openai": "*" - }, - "peerDependenciesMeta": { - "@opentelemetry/api": { - "optional": true - }, - "@opentelemetry/exporter-trace-otlp-proto": { - "optional": true - }, - "@opentelemetry/sdk-trace-base": { - "optional": true - }, - "openai": { - "optional": true - } - } - }, - "node_modules/langsmith/node_modules/uuid": { - "version": "10.0.0", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, "node_modules/layout-bmfont-text": { "version": "1.3.4", "license": "MIT", @@ -21954,25 +22012,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/openai": { - "version": "5.12.2", - "license": "Apache-2.0", - "bin": { - "openai": "bin/cli" - }, - "peerDependencies": { - "ws": "^8.18.0", - "zod": "^3.23.8" - }, - "peerDependenciesMeta": { - "ws": { - "optional": true - }, - "zod": { - "optional": true - } - } - }, "node_modules/openapi3-ts": { "version": "4.5.0", "license": "MIT", @@ -22242,17 +22281,6 @@ "version": "4.0.7", "license": "MIT" }, - "node_modules/p-retry": { - "version": "4.6.2", - "license": "MIT", - "dependencies": { - "@types/retry": "0.12.0", - "retry": "^0.13.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/p-timeout": { "version": "3.2.0", "license": "MIT", @@ -24107,6 +24135,7 @@ }, "node_modules/retry": { "version": "0.13.1", + "dev": true, "license": "MIT", "engines": { "node": ">= 4" @@ -25640,6 +25669,7 @@ }, "node_modules/strnum": { "version": "1.1.2", + "dev": true, "funding": [ { "type": "github", From a07a8c39710fc80829e85bc42c8dcc87bfbe854d Mon Sep 17 00:00:00 2001 From: Gregor Balkovec Date: Wed, 4 Mar 2026 11:32:07 +0100 Subject: [PATCH 2/2] Adds additional type check so we execute on safe types --- apps/agent/src/app/(protected)/chat.tsx | 42 ++++++++++++------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git 
a/apps/agent/src/app/(protected)/chat.tsx b/apps/agent/src/app/(protected)/chat.tsx index bd6eb50b..10f68394 100644 --- a/apps/agent/src/app/(protected)/chat.tsx +++ b/apps/agent/src/app/(protected)/chat.tsx @@ -1,8 +1,8 @@ -import { useCallback, useEffect, useRef, useState } from "react"; -import { View, Platform, KeyboardAvoidingView, ScrollView } from "react-native"; -import { Image } from "expo-image"; import * as Clipboard from "expo-clipboard"; +import { Image } from "expo-image"; import { fetch } from "expo/fetch"; +import { useCallback, useEffect, useRef, useState } from "react"; +import { KeyboardAvoidingView, Platform, ScrollView, View } from "react-native"; import { useSafeAreaInsets } from "react-native-safe-area-context"; //import AsyncStorage from "@react-native-async-storage/async-storage"; import { @@ -11,17 +11,18 @@ import { } from "@dkg/plugin-dkg-essentials/utils"; import { useMcpClient } from "@/client"; -import useMcpToolsSession from "@/hooks/useMcpToolsSession"; -import useColors from "@/hooks/useColors"; -import usePlatform from "@/hooks/usePlatform"; -import Page from "@/components/layout/Page"; -import Container from "@/components/layout/Container"; -import Header from "@/components/layout/Header"; +import { useAlerts } from "@/components/Alerts"; import Chat from "@/components/Chat"; import { SourceKAResolver } from "@/components/Chat/Message/SourceKAs/CollapsibleItem"; +import Container from "@/components/layout/Container"; +import Header from "@/components/layout/Header"; +import Page from "@/components/layout/Page"; import Markdown from "@/components/Markdown"; -import { useAlerts } from "@/components/Alerts"; +import useColors from "@/hooks/useColors"; +import useMcpToolsSession from "@/hooks/useMcpToolsSession"; +import usePlatform from "@/hooks/usePlatform"; +import useSettings from "@/hooks/useSettings"; import { type ChatMessage, type ToolCall, @@ -30,23 +31,22 @@ import { makeStreamingCompletionRequest, toContents, } from 
"@/shared/chat"; +import { toError } from "@/shared/errors"; import { FileDefinition, parseFilesFromContent, serializeFiles, uploadFiles, } from "@/shared/files"; -import { toError } from "@/shared/errors"; -import useSettings from "@/hooks/useSettings"; +import { + isThinkingVisible, + shouldStopGenerating, +} from "@/shared/thinkingIndicator"; import { type ToolExecutionMode, toToolExecutionMode, toToolExecutionSettings, } from "@/shared/toolExecutionMode"; -import { - isThinkingVisible, - shouldStopGenerating, -} from "@/shared/thinkingIndicator"; function normalizeStreamingMarkdown(content: string): string { const fencePattern = /^(`{3,})[^`]*$/gm; @@ -509,8 +509,8 @@ export default function ChatPage() { parsedContent.metadata .at(0) ?.[ - "https://ontology.origintrail.io/dkg/1.0#publishTime" - ]?.at(0)?.["@value"] ?? Date.now(), + "https://ontology.origintrail.io/dkg/1.0#publishTime" + ]?.at(0)?.["@value"] ?? Date.now(), ).getTime(), txHash: parsedContent.metadata .at(0) @@ -542,7 +542,7 @@ export default function ChatPage() { parsedContent.metadata .at(0) ?.["https://ontology.origintrail.io/dkg/1.0#publishTx"]?.at(0)?.[ - "@value" + "@value" ] ?? "unknown"; resolved.publisher = parsedContent.metadata @@ -639,8 +639,8 @@ export default function ChatPage() { const text: string[] = []; for (const c of toContents(m.content)) { - if (c.type === "image_url") { - images.push({ uri: c.image_url as string }); + if (c.type === "image_url" && typeof c.image_url === "string") { + images.push({ uri: c.image_url }); continue; }