diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000..650cb88 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,4 @@ +{ + "singleQuote": true, + "semi": true +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 691b3e9..9104f62 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/). +## [1.4.0] - 2025-12-31 + +### Added +- Integrated Hardhat compilation error reporting for Solidity and Vyper. +- Enhanced VS Code extension to display compilation diagnostics (red squiggles). +- Refactored Hardhat tasks (audit, compile) into separate module files. + ## [1.3.0] - 2025-12-19 ### Added diff --git a/hardhat.config.ts b/hardhat.config.ts index 6b5e7bb..3e6ffaa 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -1,15 +1,15 @@ -import {internalTask, task} from 'hardhat/config'; +import { internalTask, task } from 'hardhat/config'; import { TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT, TASK_COMPILE, } from 'hardhat/builtin-tasks/task-names'; import fs from 'fs-extra'; import path from 'path'; -import {Artifact, BuildInfo} from 'hardhat/types'; +import { Artifact, BuildInfo } from 'hardhat/types'; import murmur128 from 'murmur-128'; // Somewhat counterintuive, @layerzerolabs/hardhat-deploy is a devDependency of itself. // It is required by hardhat-tron-solc who lists this package as a peerDependency. 
-import {HardhatUserConfig} from '@layerzerolabs/hardhat-deploy'; +import { HardhatUserConfig } from '@layerzerolabs/hardhat-deploy'; function addIfNotPresent(array: string[], value: string) { if (array.indexOf(value) === -1) { @@ -18,8 +18,8 @@ function addIfNotPresent(array: string[], value: string) { } function setupExtraSolcSettings(settings: { - metadata?: {useLiteralContent?: boolean}; - outputSelection: {[key: string]: {[key: string]: string[]}}; + metadata?: { useLiteralContent?: boolean }; + outputSelection: { [key: string]: { [key: string]: string[] } }; }): void { settings.metadata = settings.metadata || {}; settings.metadata.useLiteralContent = true; diff --git a/package.json b/package.json index c66e1f1..1fe29cf 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@sun-protocol/sunhat", - "version": "1.3.0", + "version": "1.4.0", "description": "An All-in-One Toolkit for the Complete TRON Smart Contract Lifecycle", "repository": "https://github.com/sun-protocol/sunhat.git", "author": "sun-protocol", diff --git a/src/DeploymentFactory.ts b/src/DeploymentFactory.ts index 2527ba0..05fe3db 100644 --- a/src/DeploymentFactory.ts +++ b/src/DeploymentFactory.ts @@ -43,7 +43,7 @@ export class DeploymentFactory { ); } else if (this.isTron) { let contractName = ''; - if ('contractName' in artifact) ({contractName} = artifact); + if ('contractName' in artifact) ({ contractName } = artifact); this.factory = new TronContractFactory( artifact.abi, artifact.bytecode, @@ -129,14 +129,14 @@ export class DeploymentFactory { const prefix = isTron ? 
'0x41' : '0xff'; return getAddress( '0x' + - solidityKeccak256( - ['bytes'], - [ - `${prefix}${create2DeployerAddress.slice(2)}${salt.slice( - 2 - )}${solidityKeccak256(['bytes'], [deploymentTx.data]).slice(2)}`, - ] - ).slice(-40) + solidityKeccak256( + ['bytes'], + [ + `${prefix}${create2DeployerAddress.slice(2)}${salt.slice( + 2 + )}${solidityKeccak256(['bytes'], [deploymentTx.data]).slice(2)}`, + ] + ).slice(-40) ); } @@ -182,7 +182,7 @@ export class DeploymentFactory { return transaction.data !== newData || currentFlattened != newFlattened; } else if (this.isTron) { - const tronDeployTx = newTransaction as CreateSmartContract; + const tronDeployTx = newTransaction as CreateSmartContract; const res = await ( this.factory.signer as TronSigner ).getTronWebTransaction(transaction.hash); diff --git a/src/DeploymentsManager.ts b/src/DeploymentsManager.ts index ebdf3e3..904ec4b 100644 --- a/src/DeploymentsManager.ts +++ b/src/DeploymentsManager.ts @@ -8,13 +8,13 @@ import { Export, DeterministicDeploymentInfo, } from '../types'; -import {ExtendedArtifact} from '../types'; -import {PartialExtension} from './internal/types'; +import { ExtendedArtifact } from '../types'; +import { PartialExtension } from './internal/types'; import fs from 'fs-extra'; import path from 'path'; -import {BigNumber} from '@ethersproject/bignumber'; +import { BigNumber } from '@ethersproject/bignumber'; import debug from 'debug'; const log = debug('hardhat:sun-protocol:tron-studio'); @@ -30,11 +30,11 @@ import { getNetworkName, getDeployPaths, } from './utils'; -import {addHelpers, waitForTx} from './helpers'; -import {TransactionResponse} from '@ethersproject/providers'; -import {Artifact, HardhatRuntimeEnvironment, Network} from 'hardhat/types'; -import {store} from './globalStore'; -import {bnReplacer} from './internal/utils'; +import { addHelpers, waitForTx } from './helpers'; +import { TransactionResponse } from '@ethersproject/providers'; +import { Artifact, HardhatRuntimeEnvironment, 
Network } from 'hardhat/types'; +import { store } from './globalStore'; +import { bnReplacer } from './internal/utils'; export class DeploymentsManager { public deploymentsExtension: DeploymentsExtension; @@ -43,7 +43,7 @@ export class DeploymentsManager { gasUsed: BigNumber; bandwith: BigNumber; accountsLoaded: boolean; - namedAccounts: {[name: string]: string}; + namedAccounts: { [name: string]: string }; unnamedAccounts: string[]; deploymentsLoaded: boolean; deployments: Record; @@ -60,12 +60,12 @@ export class DeploymentsManager { }; }; logEnabled: boolean; - pendingTransactions: {[hash: string]: any}; + pendingTransactions: { [hash: string]: any }; savePendingTx: boolean; gasPrice?: string; maxFeePerGas?: string; maxPriorityFeePerGas?: string; - migrations: {[id: string]: number}; + migrations: { [id: string]: number }; onlyArtifacts?: string[]; runAsNode: boolean; }; @@ -75,7 +75,7 @@ export class DeploymentsManager { public impersonateUnknownAccounts: boolean; public impersonatedAccounts: string[]; - public addressesToProtocol: {[address: string]: string} = {}; + public addressesToProtocol: { [address: string]: string } = {}; public readonly isTronNetworkWithTronSolc: boolean = false; private network: Network; @@ -292,7 +292,7 @@ export class DeploymentsManager { } ) => { await this.setup(tags === undefined); - options = {fallbackToGlobal: true, ...options}; + options = { fallbackToGlobal: true, ...options }; if (typeof tags === 'string') { tags = [tags]; } @@ -413,7 +413,7 @@ export class DeploymentsManager { maxPriorityFeePerGas = BigNumber.from(this.db.maxPriorityFeePerGas); } } - return {gasPrice, maxFeePerGas, maxPriorityFeePerGas}; + return { gasPrice, maxFeePerGas, maxPriorityFeePerGas }; }, this.partialExtension.log, print @@ -547,8 +547,8 @@ export class DeploymentsManager { chainId: tx.chainId, }; this.db.pendingTransactions[tx.hash] = name - ? {name, deployment, rawTx, decoded} - : {rawTx, decoded}; + ? 
{ name, deployment, rawTx, decoded } + : { rawTx, decoded }; fs.writeFileSync( pendingTxPath, JSON.stringify(this.db.pendingTransactions, bnReplacer, ' ') @@ -568,7 +568,7 @@ export class DeploymentsManager { ); } this.db.gasUsed = this.db.gasUsed.add(receipt.gasUsed); - if(rawTx){ + if (rawTx) { this.db.bandwith = this.db.bandwith.add(rawTx?.length); } return receipt; @@ -584,7 +584,7 @@ export class DeploymentsManager { return tx; } - public async getNamedAccounts(): Promise<{[name: string]: string}> { + public async getNamedAccounts(): Promise<{ [name: string]: string }> { await this.setupAccounts(); return this.db.namedAccounts; } @@ -632,7 +632,7 @@ export class DeploymentsManager { public async loadDeployments( chainIdExpected = true - ): Promise<{[name: string]: Deployment}> { + ): Promise<{ [name: string]: Deployment }> { let chainId: string | undefined; if (chainIdExpected) { chainId = await this.getChainId(); @@ -867,7 +867,7 @@ export class DeploymentsManager { let numDeployments = 1; const oldDeployment = this.db.deployments[name] - ? {...this.db.deployments[name]} + ? 
{ ...this.db.deployments[name] } : undefined; if (oldDeployment) { numDeployments = (oldDeployment.numDeployments || 1) + 1; @@ -880,7 +880,7 @@ export class DeploymentsManager { JSON.stringify( { address: deployment.address || actualReceipt?.contractAddress, - addressHex : deployment.addressHex, + addressHex: deployment.addressHex, abi: deployment.abi, transactionHash: deployment.transactionHash || actualReceipt?.transactionHash, @@ -980,7 +980,7 @@ export class DeploymentsManager { return true; } - private companionManagers: {[name: string]: DeploymentsManager} = {}; + private companionManagers: { [name: string]: DeploymentsManager } = {}; public addCompanionManager( name: string, networkDeploymentsManager: DeploymentsManager @@ -1009,7 +1009,7 @@ export class DeploymentsManager { writeDeploymentsToFiles: true, savePendingTx: false, } - ): Promise<{[name: string]: Deployment}> { + ): Promise<{ [name: string]: Deployment }> { log('runDeploy'); this.setupNetwork(); if (options.deletePreviousDeployments) { @@ -1122,8 +1122,8 @@ export class DeploymentsManager { }); log('deploy script folder parsed'); - const funcByFilePath: {[filename: string]: DeployFunction} = {}; - const scriptPathBags: {[tag: string]: string[]} = {}; + const funcByFilePath: { [filename: string]: DeployFunction } = {}; + const scriptPathBags: { [tag: string]: string[] } = {}; const scriptFilePaths: string[] = []; for (const filepath of filepaths) { const scriptFilePath = path.resolve(filepath); @@ -1170,7 +1170,7 @@ export class DeploymentsManager { log('tag collected'); // console.log({ scriptFilePaths }); - const scriptsRegisteredToRun: {[filename: string]: boolean} = {}; + const scriptsRegisteredToRun: { [filename: string]: boolean } = {}; const scriptsToRun: Array<{ func: DeployFunction; filePath: string; @@ -1483,7 +1483,7 @@ export class DeploymentsManager { snapshot, data, blockHash: latestBlock.hash, - deployments: {...this.db.deployments}, + deployments: { ...this.db.deployments }, }; } 
catch (err) { log(`failed to create snapshot`); @@ -1519,7 +1519,7 @@ export class DeploymentsManager { ); if (blockRetrieved) { saved.snapshot = await this.network.provider.send('evm_snapshot', []); // it is necessary to re-snapshot it - this.db.deployments = {...saved.deployments}; + this.db.deployments = { ...saved.deployments }; } else { // TODO or should we throw ? return false; @@ -1569,7 +1569,7 @@ export class DeploymentsManager { } public async setupAccounts(): Promise<{ - namedAccounts: {[name: string]: string}; + namedAccounts: { [name: string]: string }; unnamedAccounts: string[]; }> { if (!this.db.accountsLoaded) { diff --git a/src/cli.ts b/src/cli.ts index c6832ab..c3318d5 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -286,15 +286,15 @@ async function handleInit(args: string[]) { colors.cyan( `\n☀️ Initializing SunHat Project in ${ isCurrentDir ? 'current directory' : targetDir - }...\n`, - ), + }...\n` + ) ); if (!isCurrentDir && (await fs.pathExists(targetDir))) { const files = await fs.readdir(targetDir); if (files.length > 0) { console.error( - colors.red(`Error: Directory '${targetName}' is not empty.`), + colors.red(`Error: Directory '${targetName}' is not empty.`) ); process.exit(1); } @@ -319,28 +319,28 @@ async function handleInit(args: string[]) { await fs.outputFile(path.join(targetDir, 'tsconfig.json'), SCAFFOLD.tsconfig); await fs.outputFile( path.join(targetDir, 'contracts/TronGreeter.sol'), - SCAFFOLD.contract, + SCAFFOLD.contract ); await fs.outputFile( path.join(targetDir, 'test/TronGreeter.t.sol'), - SCAFFOLD.foundryTest, + SCAFFOLD.foundryTest ); await fs.outputFile( path.join(targetDir, 'test/TronGreeter.test.ts'), - SCAFFOLD.mochaTest, + SCAFFOLD.mochaTest ); await fs.outputFile( path.join(targetDir, 'deploy/01_deploy_greeter.ts'), - SCAFFOLD.deployScript, + SCAFFOLD.deployScript ); await fs.outputFile( path.join(targetDir, 'hardhat.config.ts'), - SCAFFOLD.hardhatConfig, + SCAFFOLD.hardhatConfig ); const pkgJsonData = 
SCAFFOLD.packageJson( myPackageJson.version, - path.basename(targetDir), + path.basename(targetDir) ); await fs.writeJSON(path.join(targetDir, 'package.json'), pkgJsonData, { spaces: 2, @@ -369,8 +369,8 @@ async function handleInit(args: string[]) { } catch (e) { console.warn( colors.yellow( - "⚠️ Failed to initialize git repository. You may need to run 'git init' manually.", - ), + "⚠️ Failed to initialize git repository. You may need to run 'git init' manually." + ) ); } try { @@ -383,8 +383,8 @@ async function handleInit(args: string[]) { } catch (e) { console.warn( colors.yellow( - "⚠️ Failed to initialize git repository. You may need to run 'npx hardhat init-foundry' manually.", - ), + "⚠️ Failed to initialize git repository. You may need to run 'npx hardhat init-foundry' manually." + ) ); } console.log(colors.green(`\n✅ Project ready!`)); @@ -402,7 +402,7 @@ async function handleProxy(args: string[]) { process.cwd(), 'node_modules', '.bin', - 'hardhat', + 'hardhat' ); let executable = localHardhatBin; @@ -412,12 +412,12 @@ async function handleProxy(args: string[]) { !fs.existsSync(localHardhatBin + '.cmd') ) { console.error( - colors.yellow(`\n⚠️ Could not find local hardhat installation.`), + colors.yellow(`\n⚠️ Could not find local hardhat installation.`) ); console.error( `Please run inside a project directory or run '${colors.cyan( - 'sunhat init', - )}' to create one.\n`, + 'sunhat init' + )}' to create one.\n` ); process.exit(1); } diff --git a/src/defaultArtifacts.ts b/src/defaultArtifacts.ts index ba9e085..dda154c 100644 --- a/src/defaultArtifacts.ts +++ b/src/defaultArtifacts.ts @@ -1,9 +1,9 @@ import * as ea from '../extendedArtifacts'; import * as eaT from '../extendedArtifactsTron'; -import {ExtendedArtifact} from '../types'; +import { ExtendedArtifact } from '../types'; type DefaultArtifactsName = keyof typeof ea & keyof typeof eaT; -type DefaultArtifacts = {[key in DefaultArtifactsName]: ExtendedArtifact}; +type DefaultArtifacts = { [key in 
DefaultArtifactsName]: ExtendedArtifact }; /* const isArtifactName = ( diff --git a/src/errors.ts b/src/errors.ts index 58b4fb2..70aea33 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -1,5 +1,5 @@ -import {BigNumber} from '@ethersproject/bignumber'; -import {bnReplacer} from './internal/utils'; +import { BigNumber } from '@ethersproject/bignumber'; +import { bnReplacer } from './internal/utils'; export class UnknownSignerError extends Error { constructor( @@ -8,7 +8,7 @@ export class UnknownSignerError extends Error { to?: string; data?: string; value?: string | BigNumber; - contract?: {name: string; method: string; args: unknown[]}; + contract?: { name: string; method: string; args: unknown[] }; } ) { super( diff --git a/src/etherscan.ts b/src/etherscan.ts index 4d02e85..9561f58 100644 --- a/src/etherscan.ts +++ b/src/etherscan.ts @@ -3,8 +3,8 @@ import fs from 'fs'; import axios from 'axios'; import qs from 'qs'; import path from 'path'; -import {defaultAbiCoder, ParamType} from '@ethersproject/abi'; -import {HardhatRuntimeEnvironment} from 'hardhat/types'; +import { defaultAbiCoder, ParamType } from '@ethersproject/abi'; +import { HardhatRuntimeEnvironment } from 'hardhat/types'; import chalk from 'chalk'; import matchAll from 'match-all'; @@ -64,7 +64,7 @@ function extractOneLicenseFromSourceFile(source: string): string | undefined { function extractLicenseFromSources(metadata: string): string[] { const regex = /\/\/\s*\t*SPDX-License-Identifier:\s*\t*(.*?)[\s\\]/g; const matches = matchAll(metadata, regex).toArray(); - const licensesFound: {[license: string]: boolean} = {}; + const licensesFound: { [license: string]: boolean } = {}; const licenses = []; if (matches) { for (const match of matches) { @@ -207,7 +207,7 @@ export async function submitSources( break; case '1284': host = 'https://api-moonbeam.moonscan.io'; - break; + break; case '1285': host = 'https://api-moonriver.moonscan.io'; break; @@ -233,11 +233,11 @@ export async function submitSources( 
host = 'https://api.snowtrace.io'; break; case '338': - host = 'https://api-testnet.cronoscan.com/api'; - break; + host = 'https://api-testnet.cronoscan.com/api'; + break; case '25': - host = 'https://api.cronoscan.com/api'; - break; + host = 'https://api.cronoscan.com/api'; + break; case '11155111': host = 'https://api-sepolia.etherscan.io'; break; @@ -250,11 +250,11 @@ export async function submitSources( async function submit(name: string, useSolcInput?: boolean) { const deployment = all[name]; - const {address, metadata: metadataString} = deployment; + const { address, metadata: metadataString } = deployment; const abiResponse = await axios.get( `${host}/api?module=contract&action=getabi&address=${address}&apikey=${etherscanApiKey}` ); - const {data: abiData} = abiResponse; + const { data: abiData } = abiResponse; let contractABI; if (abiData.status !== '0') { try { @@ -332,7 +332,7 @@ export async function submitSources( let solcInput: { language: string; settings: any; - sources: Record; + sources: Record; }; if (useSolcInput) { const solcInputHash = deployment.solcInputHash; @@ -350,7 +350,7 @@ export async function submitSources( } solcInput = JSON.parse(solcInputStringFromDeployment); } else { - const settings = {...metadata.settings}; + const settings = { ...metadata.settings }; delete settings.compilationTarget; solcInput = { language: metadata.language, @@ -384,7 +384,7 @@ export async function submitSources( let constructorArguements: string | undefined; if (deployment.args) { - const constructor: {inputs: ParamType[]} = deployment.abi.find( + const constructor: { inputs: ParamType[] } = deployment.abi.find( (v) => v.type === 'constructor' ); if (constructor) { @@ -415,10 +415,10 @@ export async function submitSources( const submissionResponse = await axios.request({ url: `${host}/api`, method: 'POST', - headers: {'content-type': 'application/x-www-form-urlencoded'}, + headers: { 'content-type': 'application/x-www-form-urlencoded' }, data: 
formDataAsString, }); - const {data: submissionData} = submissionResponse; + const { data: submissionData } = submissionResponse; let guid: string; if (submissionData.status === '1') { @@ -461,7 +461,7 @@ export async function submitSources( }, } ); - const {data: statusData} = statusResponse; + const { data: statusData } = statusResponse; // blockscout seems to return status == 1 in case of failure // so we check string first diff --git a/src/globalStore.ts b/src/globalStore.ts index 4644c68..c0504cd 100644 --- a/src/globalStore.ts +++ b/src/globalStore.ts @@ -1,8 +1,8 @@ -import {Network} from 'hardhat/types'; +import { Network } from 'hardhat/types'; // used a fallback as some plugin might override network fields, see for example : https://github.com/sc-forks/solidity-coverage/issues/624 export const store: { - networks: {[name: string]: Network}; + networks: { [name: string]: Network }; } = { networks: {}, }; diff --git a/src/helpers.ts b/src/helpers.ts index 810e4ca..bd740f2 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -1,19 +1,19 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ -import {Signer} from '@ethersproject/abstract-signer'; +import { Signer } from '@ethersproject/abstract-signer'; import { Web3Provider, TransactionResponse, TransactionRequest, } from '@ethersproject/providers'; -import {getAddress} from '@ethersproject/address'; -import {Contract, PayableOverrides} from '@ethersproject/contracts'; +import { getAddress } from '@ethersproject/address'; +import { Contract, PayableOverrides } from '@ethersproject/contracts'; import * as zk from 'zksync-ethers'; -import {AddressZero} from '@ethersproject/constants'; -import {BigNumber} from '@ethersproject/bignumber'; -import {Wallet} from '@ethersproject/wallet'; -import {keccak256 as solidityKeccak256} from '@ethersproject/solidity'; -import {zeroPad, hexlify} from '@ethersproject/bytes'; -import {Interface, FunctionFragment} from '@ethersproject/abi'; +import { AddressZero } from 
'@ethersproject/constants'; +import { BigNumber } from '@ethersproject/bignumber'; +import { Wallet } from '@ethersproject/wallet'; +import { keccak256 as solidityKeccak256 } from '@ethersproject/solidity'; +import { zeroPad, hexlify } from '@ethersproject/bytes'; +import { Interface, FunctionFragment } from '@ethersproject/abi'; import { Deployment, DeployResult, @@ -34,26 +34,26 @@ import { ArtifactData, ABI, } from '../types'; -import {PartialExtension} from './internal/types'; -import {UnknownSignerError} from './errors'; -import {filterABI, mergeABIs, recode, countElements} from './utils'; +import { PartialExtension } from './internal/types'; +import { UnknownSignerError } from './errors'; +import { filterABI, mergeABIs, recode, countElements } from './utils'; import fs from 'fs-extra'; import oldDiamonBase from './old_diamondbase.json'; -import {Artifact, EthereumProvider} from 'hardhat/types'; -import {DeploymentsManager} from './DeploymentsManager'; +import { Artifact, EthereumProvider } from 'hardhat/types'; +import { DeploymentsManager } from './DeploymentsManager'; import enquirer from 'enquirer'; import { parse as parseTransaction, Transaction, } from '@ethersproject/transactions'; -import {getDerivationPath} from './hdpath'; -import {bnReplacer} from './internal/utils'; -import {DeploymentFactory} from './DeploymentFactory'; -import {TronWeb3Provider} from './tron/provider'; -import {TronSigner} from './tron/signer'; -import {CreateSmartContract} from './tron/types'; -import {getDefaultArtifact} from './defaultArtifacts'; +import { getDerivationPath } from './hdpath'; +import { bnReplacer } from './internal/utils'; +import { DeploymentFactory } from './DeploymentFactory'; +import { TronWeb3Provider } from './tron/provider'; +import { TronSigner } from './tron/signer'; +import { CreateSmartContract } from './tron/types'; +import { getDefaultArtifact } from './defaultArtifacts'; import TronWeb from 'tronweb'; let LedgerSigner: any; // TODO type @@ -119,7 
+119,7 @@ function fixProvider(providerGiven: any): any { providerGiven .send(req.method, req.params) .then((result: any) => - callback(null, {result, id: req.id, jsonrpc: req.jsonrpc}) + callback(null, { result, id: req.id, jsonrpc: req.jsonrpc }) ) .catch((error: any) => callback(error, null)); }; @@ -162,7 +162,7 @@ function linkRawLibrary( function linkRawLibraries( bytecode: string, - libraries: {[libraryName: string]: Address} + libraries: { [libraryName: string]: Address } ): string { for (const libName of Object.keys(libraries)) { const libAddress = libraries[libName]; @@ -176,11 +176,11 @@ function linkLibraries( bytecode: string; linkReferences?: { [libraryFileName: string]: { - [libraryName: string]: Array<{length: number; start: number}>; + [libraryName: string]: Array<{ length: number; start: number }>; }; }; }, - libraries?: {[libraryName: string]: Address} + libraries?: { [libraryName: string]: Address } ) { let bytecode = artifact.bytecode; @@ -270,7 +270,7 @@ export function addHelpers( }; } { let provider: Web3Provider | zk.Web3Provider | TronWeb3Provider; - const availableAccounts: {[name: string]: boolean} = {}; + const availableAccounts: { [name: string]: boolean } = {}; async function init(): Promise< Web3Provider | zk.Web3Provider | TronWeb3Provider @@ -492,20 +492,20 @@ export function addHelpers( artifactName = name; artifact = await getArtifact(artifactName); } - return {artifact, artifactName}; + return { artifact, artifactName }; } async function getLinkedArtifact( name: string, options: DeployOptions - ): Promise<{artifact: Artifact; artifactName: string | undefined}> { + ): Promise<{ artifact: Artifact; artifactName: string | undefined }> { // TODO get linked artifact - const {artifact, artifactName} = await getArtifactFromOptions( + const { artifact, artifactName } = await getArtifactFromOptions( name, options ); const byteCode = linkLibraries(artifact, options.libraries); - return {artifact: {...artifact, bytecode: byteCode}, 
artifactName}; + return { artifact: { ...artifact, bytecode: byteCode }, artifactName }; } async function _deploy( @@ -521,7 +521,7 @@ export function addHelpers( unknown, } = await getFrom(options.from); - const {artifact: linkedArtifact, artifactName} = await getLinkedArtifact( + const { artifact: linkedArtifact, artifactName } = await getLinkedArtifact( name, options ); @@ -624,7 +624,7 @@ export function addHelpers( ...linkedArtifact, transactionHash: tx.hash, args, - linkedData: options.linkedData + linkedData: options.linkedData, }; if (artifactName && willSaveToDisk()) { const extendedArtifact = await partialExtension.getExtendedArtifact( @@ -653,33 +653,31 @@ export function addHelpers( let address; let addressHex; let bandwith; - if (network.tron && ethersSigner instanceof TronSigner){ - + if (network.tron && ethersSigner instanceof TronSigner) { const tronwebCurr = ethersSigner.getTronWeb(); - const url = tronwebCurr.fullNode.host.replace(/\/jsonrpc$/, ""); - const tronweb = new TronWeb( - url, - url, - false, - false - ); + const url = tronwebCurr.fullNode.host.replace(/\/jsonrpc$/, ''); + const tronweb = new TronWeb(url, url, false, false); receipt = receiptPre; receipt.from = tronweb.address.fromHex(receiptPre.from); receipt.to = tronweb.address.fromHex(receiptPre.to); - receipt.contractAddress = tronweb.address.fromHex(receiptPre.contractAddress); + receipt.contractAddress = tronweb.address.fromHex( + receiptPre.contractAddress + ); address = tronweb.address.fromHex(addressPre); addressHex = tronweb.address.toHex(receiptPre.contractAddress); - const resp = await tronweb.trx.getUnconfirmedTransactionInfo(tx.hash) as TronTxInfo; + const resp = (await tronweb.trx.getUnconfirmedTransactionInfo( + tx.hash + )) as TronTxInfo; if (resp?.receipt?.net_usage != null) { bandwith = resp?.receipt?.net_usage; } else if (resp?.fee != null) { bandwith = resp.fee; } - }else{ + } else { receipt = receiptPre; address = addressPre; } - + const argNumbers = 
countElements(preDeployment.args); const deployment = { ...preDeployment, @@ -711,7 +709,7 @@ export function addHelpers( implementationAddress?: Address; deploy: () => Promise; }> { - options = {...options}; // ensure no change + options = { ...options }; // ensure no change await init(); const deployFunction = () => @@ -742,9 +740,9 @@ export function addHelpers( } = await _getProxyInfo(name, options); /* eslint-enable prefer-const */ - const {address: implementationAddress} = await deterministic( + const { address: implementationAddress } = await deterministic( implementationName, - {...implementationOptions, salt: options.salt} + { ...implementationOptions, salt: options.salt } ); const implementationContract = new Contract( @@ -774,7 +772,7 @@ export function addHelpers( } if (!proxyAdminDeployed) { - const {address: proxyAdminAddress} = await deterministic( + const { address: proxyAdminAddress } = await deterministic( proxyAdminName, { from: options.from, @@ -798,7 +796,7 @@ export function addHelpers( } } - const proxyOptions = {...options}; // ensure no change + const proxyOptions = { ...options }; // ensure no change delete proxyOptions.proxy; delete proxyOptions.libraries; proxyOptions.contract = proxyContract; @@ -807,7 +805,7 @@ export function addHelpers( proxyAdmin, data, }); - const {address: proxyAddress} = await deterministic(proxyName, { + const { address: proxyAddress } = await deterministic(proxyName, { ...proxyOptions, salt: options.salt, }); @@ -825,10 +823,8 @@ export function addHelpers( address: from, } = await getFrom(options.from); - const {artifact: linkedArtifact, artifactName} = await getLinkedArtifact( - name, - options - ); + const { artifact: linkedArtifact, artifactName } = + await getLinkedArtifact(name, options); const factory = new DeploymentFactory( getArtifact, linkedArtifact, @@ -873,13 +869,13 @@ export function addHelpers( async function fetchIfDifferent( name: string, options: DeployOptions - ): Promise<{differences: 
boolean; address?: string}> { - options = {...options}; // ensure no change + ): Promise<{ differences: boolean; address?: string }> { + options = { ...options }; // ensure no change const args = options.args ? [...options.args] : []; await init(); - const {ethersSigner} = await getFrom(options.from); - const {artifact: linkedArtifact} = await getLinkedArtifact(name, options); + const { ethersSigner } = await getFrom(options.from); + const { artifact: linkedArtifact } = await getLinkedArtifact(name, options); const factory = new DeploymentFactory( getArtifact, linkedArtifact, @@ -901,15 +897,15 @@ export function addHelpers( ); const code = await provider.getCode(create2Address); if (code === '0x') { - return {differences: true, address: undefined}; + return { differences: true, address: undefined }; } else { - return {differences: false, address: create2Address}; + return { differences: false, address: create2Address }; } } const deployment = await partialExtension.getOrNull(name); if (deployment) { if (options.skipIfAlreadyDeployed) { - return {differences: false, address: undefined}; // TODO check receipt, see below + return { differences: false, address: undefined }; // TODO check receipt, see below } // TODO transactionReceipt + check for status let transactionDetailsAvailable = false; @@ -929,9 +925,9 @@ export function addHelpers( if (transaction) { const differences = await factory.compareDeploymentTransaction( transaction, - deployment + deployment ); - return {differences, address: deployment.address}; + return { differences, address: deployment.address }; } else { if (transactionDetailsAvailable) { throw new Error( @@ -941,11 +937,11 @@ export function addHelpers( console.error( `no transaction details found for ${name}'s previous deployment, if the deployment is t be discarded, please delete the file` ); - return {differences: false, address: deployment.address}; + return { differences: false, address: deployment.address }; } } } - return {differences: 
true, address: undefined}; + return { differences: true, address: undefined }; } async function _deployOne( @@ -954,7 +950,7 @@ export function addHelpers( failsOnExistingDeterminisitc?: boolean ): Promise { const argsArray = options.args ? [...options.args] : []; - options = {...options, args: argsArray}; + options = { ...options, args: argsArray }; let result: DeployResult; const diffResult = await fetchIfDifferent(name, options); @@ -973,7 +969,7 @@ export function addHelpers( diffResult.address && diffResult.address.toLowerCase() !== deployment.address.toLowerCase() ) { - const {artifact: linkedArtifact, artifactName} = + const { artifact: linkedArtifact, artifactName } = await getLinkedArtifact(name, options); // receipt missing @@ -1000,7 +996,7 @@ export function addHelpers( ); } - const {artifact: linkedArtifact, artifactName} = + const { artifact: linkedArtifact, artifactName } = await getLinkedArtifact(name, options); // receipt missing @@ -1034,7 +1030,7 @@ export function addHelpers( } if (upgradeIndex === 0) { if (oldDeployment) { - return {...oldDeployment, newlyDeployed: false}; + return { ...oldDeployment, newlyDeployed: false }; } } else if (upgradeIndex === 1) { if (!oldDeployment) { @@ -1046,7 +1042,7 @@ export function addHelpers( (oldDeployment.history && oldDeployment.history.length > 0) || (oldDeployment.numDeployments && oldDeployment.numDeployments > 1) ) { - return {...oldDeployment, newlyDeployed: false}; + return { ...oldDeployment, newlyDeployed: false }; } } else { if (!oldDeployment) { @@ -1058,7 +1054,7 @@ export function addHelpers( if (!oldDeployment.history) { if (oldDeployment.numDeployments && oldDeployment.numDeployments > 1) { if (oldDeployment.numDeployments > upgradeIndex) { - return {...oldDeployment, newlyDeployed: false}; + return { ...oldDeployment, newlyDeployed: false }; } else if (oldDeployment.numDeployments < upgradeIndex) { throw new Error( `upgradeIndex === ${upgradeIndex} : expects Deployments numDeployments to 
be at least ${upgradeIndex}` @@ -1070,7 +1066,7 @@ export function addHelpers( ); } } else if (oldDeployment.history.length > upgradeIndex - 1) { - return {...oldDeployment, newlyDeployed: false}; + return { ...oldDeployment, newlyDeployed: false }; } else if (oldDeployment.history.length < upgradeIndex - 1) { throw new Error( `upgradeIndex === ${upgradeIndex} : expects Deployments history length to be at least ${ @@ -1087,7 +1083,7 @@ export function addHelpers( ): Promise<{ viaAdminContract: | string - | {name: string; artifact?: string | ArtifactData} + | { name: string; artifact?: string | ArtifactData } | undefined; proxyAdminName: string | undefined; proxyAdminDeployed: Deployment | undefined; @@ -1111,7 +1107,7 @@ export function addHelpers( upgradeMethod: string | undefined; upgradeArgsTemplate: any[]; }> { - const {isTronNetworkWithTronSolc} = deploymentManager; + const { isTronNetworkWithTronSolc } = deploymentManager; const oldDeployment = await getDeploymentOrNUll(name); let contractName = options.contract; let implementationName = name + '_Implementation'; @@ -1127,7 +1123,7 @@ export function addHelpers( let checkProxyAdmin = true; let viaAdminContract: | string - | {name: string; artifact?: string | ArtifactData} + | { name: string; artifact?: string | ArtifactData } | undefined; let proxyArgsTemplate = ['{implementation}', '{admin}', '{data}']; let upgradeMethod: string | undefined; @@ -1232,7 +1228,7 @@ export function addHelpers( } const proxyName = name + '_Proxy'; - const {address: owner} = await getProxyOwner(options); + const { address: owner } = await getProxyOwner(options); const implementationArgs = options.args ? 
[...options.args] : []; // --- Implementation Deployment --- @@ -1254,7 +1250,7 @@ export function addHelpers( waitConfirmations: options.waitConfirmations, }; - const {artifact} = await getArtifactFromOptions( + const { artifact } = await getArtifactFromOptions( name, implementationOptions ); @@ -1270,7 +1266,7 @@ export function addHelpers( mergedABI.push(proxyContractConstructor); // use proxy constructor abi const constructor = artifact.abi.find( - (fragment: {type: string; inputs: any[]}) => + (fragment: { type: string; inputs: any[] }) => fragment.type === 'constructor' ); @@ -1290,7 +1286,7 @@ Please specify the correct number of arguments as part of the deploy options: "a inputs: any[]; name: string; } = artifact.abi.find( - (fragment: {type: string; inputs: any[]; name: string}) => + (fragment: { type: string; inputs: any[]; name: string }) => fragment.type === 'function' && fragment.name === updateMethod ); if (!updateMethodFound) { @@ -1507,7 +1503,7 @@ Note that in this case, the contract deployment will not behave the same if depl let proxy = await getDeploymentOrNUll(proxyName); if (!proxy) { - const proxyOptions = {...options}; // ensure no change + const proxyOptions = { ...options }; // ensure no change delete proxyOptions.proxy; delete proxyOptions.libraries; proxyOptions.contract = proxyContract; @@ -1543,7 +1539,7 @@ Note that in this case, the contract deployment will not behave the same if depl } const oldProxy = proxy.abi.find( - (frag: {name: string}) => frag.name === 'changeImplementation' + (frag: { name: string }) => frag.name === 'changeImplementation' ); if (oldProxy) { upgradeMethod = 'changeImplementation'; @@ -1573,14 +1569,14 @@ Note that in this case, the contract deployment will not behave the same if depl executeReceipt = await execute( proxyAdminName, - {...options, from: currentProxyAdminOwner}, + { ...options, from: currentProxyAdminOwner }, upgradeMethod, ...upgradeArgs ); } else { executeReceipt = await execute( name, - 
{...options, from}, + { ...options, from }, upgradeMethod, ...upgradeArgs ); @@ -1722,7 +1718,7 @@ Note that in this case, the contract deployment will not behave the same if depl ethersSigner.sendTransaction = async ( txRequest: TransactionRequest ) => { - const response: {hash: string} = await enquirer.prompt({ + const response: { hash: string } = await enquirer.prompt({ type: 'input', name: 'hash', message: ` @@ -1838,7 +1834,7 @@ Note that in this case, the contract deployment will not behave the same if depl ethersSigner = provider.getSigner(from); } - return {address: from, ethersSigner, hardwareWallet, unknown}; + return { address: from, ethersSigner, hardwareWallet, unknown }; } // async function findEvents(contract: Contract, event: string, blockHash: string): Promise { @@ -1860,7 +1856,7 @@ Note that in this case, the contract deployment will not behave the same if depl name: string, options: DiamondOptions ): Promise { - const {isTronNetworkWithTronSolc} = deploymentManager; + const { isTronNetworkWithTronSolc } = deploymentManager; let proxy: Deployment | undefined; const proxyName = name + '_DiamondProxy'; const oldDeployment = await getDeploymentOrNUll(name); @@ -1894,7 +1890,7 @@ Note that in this case, the contract deployment will not behave the same if depl } } - const {address: owner} = await getDiamondOwner(options); + const { address: owner } = await getDiamondOwner(options); const newSelectors: string[] = []; const facetSnapshot: Facet[] = []; let oldFacets: Facet[] = []; @@ -2001,7 +1997,7 @@ Note that in this case, the contract deployment will not behave the same if depl } } const constructor = artifact.abi.find( - (fragment: {type: string; inputs: any[]}) => + (fragment: { type: string; inputs: any[] }) => fragment.type === 'constructor' ); if (!argsSpecific && (!constructor || constructor.inputs.length === 0)) { @@ -2084,7 +2080,7 @@ Note that in this case, the contract deployment will not behave the same if depl } const oldSelectors: 
string[] = []; - const oldSelectorsFacetAddress: {[selector: string]: string} = {}; + const oldSelectorsFacetAddress: { [selector: string]: string } = {}; for (const oldFacet of oldFacets) { for (const selector of oldFacet.functionSelectors) { oldSelectors.push(selector); @@ -2450,7 +2446,7 @@ Note that in this case, the contract deployment will not behave the same if depl value: options.value, }); - await saveDeployment(proxyName, {...proxy, abi}); + await saveDeployment(proxyName, { ...proxy, abi }); await saveDeployment(name, { ...proxy, linkedData: options.linkedData, @@ -2477,7 +2473,7 @@ Note that in this case, the contract deployment will not behave the same if depl const executeReceipt = await execute( name, - {...options, from: currentOwner}, + { ...options, from: currentOwner }, 'diamondCut', facetCuts, executeData === '0x' @@ -2540,7 +2536,7 @@ Note that in this case, the contract deployment will not behave the same if depl name: string, options: DeployOptions ): Promise { - options = {...options}; // ensure no change + options = { ...options }; // ensure no change await init(); if (!options.proxy) { return _deployOne(name, options); @@ -2552,13 +2548,13 @@ Note that in this case, the contract deployment will not behave the same if depl name: string, options: DiamondOptions ): Promise { - options = {...options}; // ensure no change + options = { ...options }; // ensure no change await init(); return _deployViaDiamondProxy(name, options); } async function rawTx(tx: SimpleTx): Promise { - tx = {...tx}; + tx = { ...tx }; await init(); const { address: from, @@ -2612,7 +2608,7 @@ Note that in this case, the contract deployment will not behave the same if depl async function catchUnknownSigner( action: Promise | (() => Promise), - options?: {log?: boolean} + options?: { log?: boolean } ): Promise { - options = {...options}; // ensure no change + options = { ...options }; // ensure no change await init(); const { address: from, @@ -2742,7 +2738,7 @@ data: 
${data} if (unknown) { const ethersArgs = args ? args.concat([overrides]) : [overrides]; - const {data} = await ethersContract.populateTransaction[methodName]( + const { data } = await ethersContract.populateTransaction[methodName]( ...ethersArgs ); throw new UnknownSignerError({ @@ -2831,7 +2827,7 @@ data: ${data} methodName = options; options = {}; } - options = {...options}; // ensure no change + options = { ...options }; // ensure no change await init(); if (typeof args === 'undefined') { args = []; @@ -2843,7 +2839,7 @@ data: ${data} | zk.Signer | TronWeb3Provider | TronSigner = provider; - const {ethersSigner} = await getOptionalFrom(options.from); + const { ethersSigner } = await getOptionalFrom(options.from); if (ethersSigner) { caller = ethersSigner; } @@ -2896,7 +2892,7 @@ data: ${data} } async function getSigner(address: string): Promise { await init(); - const {ethersSigner} = await getFrom(address); + const { ethersSigner } = await getFrom(address); return ethersSigner; } @@ -3054,7 +3050,7 @@ data: ${data} console.log('waiting for newly broadcasted tx ...'); } else { console.log('resigning the tx...'); - const {ethersSigner, hardwareWallet} = await getOptionalFrom( + const { ethersSigner, hardwareWallet } = await getOptionalFrom( tx.from ); if (!ethersSigner) { @@ -3104,7 +3100,7 @@ data: ${data} if (!tx) { throw new Error(`cannot resubmit a tx if info not available`); } - const {ethersSigner, hardwareWallet} = await getOptionalFrom( + const { ethersSigner, hardwareWallet } = await getOptionalFrom( tx.from ); if (!ethersSigner) { @@ -3259,11 +3255,11 @@ data: ${data} } const proxyName = name + '_DiamondProxy'; - const {address: owner} = await getDiamondOwner(options); + const { address: owner } = await getDiamondOwner(options); const newSelectors: string[] = []; const facetSnapshot: Facet[] = []; const oldFacets: Facet[] = []; - const selectorToNotTouch: {[selector: string]: boolean} = {}; + const selectorToNotTouch: { [selector: string]: boolean } = 
{}; for (const selector of [ '0xcdffacc6', '0x52ef6b2c', @@ -3359,7 +3355,7 @@ data: ${data} } } const constructor = artifact.abi.find( - (fragment: {type: string; inputs: any[]}) => + (fragment: { type: string; inputs: any[] }) => fragment.type === 'constructor' ); if ((!argsSpecific && !constructor) || constructor.inputs.length === 0) { @@ -3406,7 +3402,7 @@ data: ${data} } const oldSelectors: string[] = []; - const oldSelectorsFacetAddress: {[selector: string]: string} = {}; + const oldSelectorsFacetAddress: { [selector: string]: string } = {}; for (const oldFacet of oldFacets) { for (const selector of oldFacet.functionSelectors) { oldSelectors.push(selector); @@ -3501,7 +3497,7 @@ data: ${data} const executeReceipt = await execute( name, - {...options, from: currentOwner}, + { ...options, from: currentOwner }, 'diamondCut', facetCuts, data === '0x' @@ -3546,7 +3542,7 @@ data: ${data} } } - return {extension, utils}; + return { extension, utils }; } function pause(duration: number): Promise { diff --git a/src/index.ts b/src/index.ts index 010803d..5cc821e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,6 @@ import './type-extensions'; +import './tasks/compile'; +import './tasks/audit'; import path from 'path'; import fs from 'fs-extra'; import murmur128 from 'murmur-128'; @@ -26,9 +28,6 @@ import { } from 'hardhat/builtin-tasks/task-names'; import { lazyObject } from 'hardhat/plugins'; import { loadTronSolc } from './tron/solc'; -import { OpenAI, AzureOpenAI } from 'openai'; -import { GoogleGenerativeAI } from '@google/generative-ai'; -import { glob } from 'glob'; import debug from 'debug'; const log = debug('hardhat:sun-protocol:tron-studio'); @@ -40,7 +39,6 @@ import { submitSourcesToSourcify } from './sourcify'; import { Network } from 'hardhat/types/runtime'; import { store } from './globalStore'; import { getDeployPaths, getNetworkName } from './utils'; -import { LlmConfig, LlmProviderConfig } from './llm'; export { getNetworkName }; @@ -76,7 +74,7 
@@ function normalizePathArray(config: HardhatConfig, paths: string[]): string[] { function normalizePath( config: HardhatConfig, userPath: string | undefined, - defaultPath: string, + defaultPath: string ): string { if (userPath === undefined) { userPath = path.join(config.paths.root, defaultPath); @@ -93,13 +91,13 @@ extendConfig( config.paths.deployments = normalizePath( config, userConfig.paths?.deployments, - 'deployments', + 'deployments' ); config.paths.imports = normalizePath( config, userConfig.paths?.imports, - 'imports', + 'imports' ); if (userConfig.paths?.deploy) { @@ -110,7 +108,7 @@ extendConfig( deployPaths = userConfig.paths.deploy; } config.paths.deploy = deployPaths.map((p) => - normalizePath(config, p, 'deploy'), + normalizePath(config, p, 'deploy') ); } else { config.paths.deploy = [normalizePath(config, undefined, 'deploy')]; @@ -144,7 +142,7 @@ extendConfig( ? normalizePath( config, userDefinedExternalContracts.deploy, - userDefinedExternalContracts.deploy, + userDefinedExternalContracts.deploy ) : undefined, }); @@ -155,7 +153,7 @@ extendConfig( for (const key of Object.keys(userConfig.external.deployments)) { config.external.deployments[key] = normalizePathArray( config, - userConfig.external.deployments[key], + userConfig.external.deployments[key] ); } } @@ -178,13 +176,13 @@ extendConfig( config.verify.etherscan = (userConfig as any).etherscan; } } - }, + } ); function createNetworkFromConfig( env: HardhatRuntimeEnvironment, networkName: string, - config: NetworkConfig, + config: NetworkConfig ): Network { const tags: { [tag: string]: boolean } = {}; const tagsCollected = config.tags || []; @@ -210,7 +208,7 @@ function createNetworkFromConfig( function networkFromConfig( env: HardhatRuntimeEnvironment, network: Network, - companion: boolean, + companion: boolean ) { let live = true; const networkName = network.name; // cannot use fork here as this could be set via task, T @@ -292,7 +290,7 @@ extendEnvironment((env) => { if 
(deploymentsManager === undefined || env.deployments === undefined) { deploymentsManager = new DeploymentsManager( env, - lazyObject(() => env.network), // IMPORTANT, else other plugin cannot set env.network before end, like solidity-coverage does here in the coverage task : https://github.com/sc-forks/solidity-coverage/blob/3c0f3a5c7db26e82974873bbf61cf462072a7c6d/plugins/resources/nomiclabs.utils.js#L93-L98 + lazyObject(() => env.network) // IMPORTANT, else other plugin cannot set env.network before end, like solidity-coverage does here in the coverage task : https://github.com/sc-forks/solidity-coverage/blob/3c0f3a5c7db26e82974873bbf61cf462072a7c6d/plugins/resources/nomiclabs.utils.js#L93-L98 ); env.deployments = deploymentsManager.deploymentsExtension; env.getNamedAccounts = @@ -311,7 +309,7 @@ extendEnvironment((env) => { store.networks[networkName] = createNetworkFromConfig( env, networkName, - config, + config ); } } @@ -362,9 +360,9 @@ function setupExtraSolcSettings(settings: { addIfNotPresent(settings.outputSelection['*']['*'], 'storageLayout'); addIfNotPresent(settings.outputSelection['*']['*'], 'evm.methodIdentifiers'); addIfNotPresent(settings.outputSelection['*']['*'], 'evm.gasEstimates'); - // addIfNotPresent(settings.outputSelection["*"][""], "ir"); - // addIfNotPresent(settings.outputSelection["*"][""], "irOptimized"); - // addIfNotPresent(settings.outputSelection["*"][""], "ast"); + // addIfNotPresent(settings.outputSelection['*'][''], 'ir'); + // addIfNotPresent(settings.outputSelection['*'][''], 'irOptimized'); + // addIfNotPresent(settings.outputSelection['*'][''], 'ast'); } function initCompanionNetworks(hre: HardhatRuntimeEnvironment) { @@ -387,7 +385,7 @@ function initCompanionNetworks(hre: HardhatRuntimeEnvironment) { const config = hre.config.networks[networkName]; if (!('url' in config) || networkName === 'hardhat') { throw new Error( - `in memory network like hardhat are not supported as companion network`, + `in memory network like 
hardhat are not supported as companion network` ); } @@ -420,36 +418,36 @@ subtask(TASK_DEPLOY_RUN_DEPLOY, 'deploy run only') 'tags', 'specify which deploy script to execute via tags, separated by commas', undefined, - types.string, + types.string ) .addFlag( 'tagsRequireAll', - 'execute only deploy scripts containing all the tags specified', + 'execute only deploy scripts containing all the tags specified' ) .addOptionalParam( 'write', 'whether to write deployments to file', true, - types.boolean, + types.boolean ) .addOptionalParam( 'pendingtx', 'whether to save pending tx', false, - types.boolean, + types.boolean ) .addOptionalParam( 'gasprice', 'gas price to use for transactions', undefined, - types.string, + types.string ) .addOptionalParam('maxfee', 'max fee per gas', undefined, types.string) .addOptionalParam( 'priorityfee', 'max priority fee per gas', undefined, - types.string, + types.string ) .addFlag('reset', 'whether to delete deployments files first') .addFlag('log', 'whether to output log') @@ -484,36 +482,36 @@ subtask(TASK_DEPLOY_MAIN, 'deploy') 'tags', 'specify which deploy script to execute via tags, separated by commas', undefined, - types.string, + types.string ) .addFlag( 'tagsRequireAll', - 'execute only deploy scripts containing all the tags specified', + 'execute only deploy scripts containing all the tags specified' ) .addOptionalParam( 'write', 'whether to write deployments to file', true, - types.boolean, + types.boolean ) .addOptionalParam( 'pendingtx', 'whether to save pending tx', false, - types.boolean, + types.boolean ) .addOptionalParam( 'gasprice', 'gas price to use for transactions', undefined, - types.string, + types.string ) .addOptionalParam('maxfee', 'max fee per gas', undefined, types.string) .addOptionalParam( 'priorityfee', 'max priority fee per gas', undefined, - types.string, + types.string ) .addFlag('noCompile', 'disable pre compilation') .addFlag('reset', 'whether to delete deployments files first') @@ -521,13 +519,13 
@@ subtask(TASK_DEPLOY_MAIN, 'deploy') .addFlag('watch', 'redeploy on every change of contract or deploy script') .addFlag( 'watchOnly', - 'do not actually deploy, just watch and deploy if changes occurs', + 'do not actually deploy, just watch and deploy if changes occurs' ) .addFlag('reportGas', 'report gas use') .setAction(async (args, hre) => { if (args.reset) { await deploymentsManager.deletePreviousDeployments( - args.runAsNode ? 'localhost' : undefined, + args.runAsNode ? 'localhost' : undefined ); } @@ -548,11 +546,11 @@ subtask(TASK_DEPLOY_MAIN, 'deploy') { ignored: /(^|[/\\])\../, // ignore dotfiles persistent: true, - }, + } ); watcher.on('ready', () => - console.log('Initial scan complete. Ready for changes'), + console.log('Initial scan complete. Ready for changes') ); // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -642,37 +640,37 @@ task(TASK_DEPLOY, 'Deploy contracts') 'tags', 'specify which deploy script to execute via tags, separated by commas', undefined, - types.string, + types.string ) .addFlag( 'tagsRequireAll', - 'execute only deploy scripts containing all the tags specified', + 'execute only deploy scripts containing all the tags specified' ) .addOptionalParam( 'write', 'whether to write deployments to file', undefined, - types.boolean, + types.boolean ) // TODO pendingtx .addOptionalParam( 'gasprice', 'gas price to use for transactions', undefined, - types.string, + types.string ) .addOptionalParam('maxfee', 'max fee per gas', undefined, types.string) .addOptionalParam( 'priorityfee', 'max priority fee per gas', undefined, - types.string, + types.string ) .addOptionalParam( 'deployScripts', 'override deploy script folder path', undefined, - types.string, + types.string ) .addFlag('noImpersonation', 'do not impersonate unknown accounts') .addFlag('noCompile', 'disable pre compilation') @@ -704,7 +702,7 @@ task(TASK_DEPLOY, 'Deploy contracts') task( TASK_EXPORT, - 'export contract deployment of the specified network into one 
file', + 'export contract deployment of the specified network into one file' ) .addOptionalParam('export', 'export current network deployments') .addOptionalParam('exportAll', 'export all deployments into one file') @@ -718,7 +716,7 @@ task( async function enableProviderLogging( provider: EthereumProvider, - enabled: boolean, + enabled: boolean ) { await provider.request({ method: 'hardhat_setLoggingEnabled', @@ -733,26 +731,26 @@ task(TASK_NODE, 'Starts a JSON-RPC server on top of Hardhat EVM') 'tags', 'specify which deploy script to execute via tags, separated by commas', undefined, - types.string, + types.string ) .addOptionalParam( 'write', 'whether to write deployments to file', true, - types.boolean, + types.boolean ) .addOptionalParam( 'gasprice', 'gas price to use for transactions', undefined, - types.string, + types.string ) .addOptionalParam('maxfee', 'max fee per gas', undefined, types.string) .addOptionalParam( 'priorityfee', 'max priority fee per gas', undefined, - types.string, + types.string ) // TODO --unlock-accounts .addFlag('noReset', 'do not delete deployments files already present') @@ -770,7 +768,7 @@ task(TASK_NODE, 'Starts a JSON-RPC server on top of Hardhat EVM') ` Unsupported network for JSON-RPC server. Only hardhat is currently supported. 
you can specifiy hardhat via "--network hardhat" -`, +` ); } @@ -801,7 +799,7 @@ subtask(TASK_NODE_GET_PROVIDER).setAction( // copy existing deployment from specified netwotk into localhost deployment folder await fs.copy( path.join(hre.config.paths.deployments, networkName), - path.join(hre.config.paths.deployments, 'localhost'), + path.join(hre.config.paths.deployments, 'localhost') ); } @@ -817,7 +815,7 @@ subtask(TASK_NODE_GET_PROVIDER).setAction( await enableProviderLogging(provider, true); return provider; - }, + } ); subtask(TASK_NODE_SERVER_READY).setAction(async (args, hre, runSuper) => { @@ -838,35 +836,35 @@ task(TASK_ETHERSCAN_VERIFY, 'submit contract source code to etherscan') 'license', 'SPDX license (useful if SPDX is not listed in the sources), need to be supported by etherscan: https://etherscan.io/contract-license-types', undefined, - types.string, + types.string ) .addOptionalParam( 'apiUrl', 'specify the url manually', undefined, - types.string, + types.string ) .addOptionalParam( 'contractName', 'specific contract name to verify', undefined, - types.string, + types.string ) .addFlag( 'forceLicense', - 'force the use of the license specified by --license option', + 'force the use of the license specified by --license option' ) .addFlag( 'sleep', - 'sleep 500ms between each verification, so API rate limit is not exceeded', + 'sleep 500ms between each verification, so API rate limit is not exceeded' ) .addFlag( 'solcInput', - 'fallback on solc-input (useful when etherscan fails on the minimum sources, see https://github.com/ethereum/solidity/issues/9573)', + 'fallback on solc-input (useful when etherscan fails on the minimum sources, see https://github.com/ethereum/solidity/issues/9573)' ) .addFlag( 'writePostData', - 'write the post data on file in "etherscan_requests/" folder, for debugging purpose', + 'write the post data on file in "etherscan_requests/" folder, for debugging purpose' ) .setAction(async (args, hre) => { const etherscanApiKey = 
@@ -876,7 +874,7 @@ task(TASK_ETHERSCAN_VERIFY, 'submit contract source code to etherscan') hre.config.verify?.etherscan?.apiKey; if (!etherscanApiKey) { throw new Error( - `No Etherscan API KEY provided. Set it through command line option, in hardhat.config.ts, or by setting the "ETHERSCAN_API_KEY" env variable`, + `No Etherscan API KEY provided. Set it through command line option, in hardhat.config.ts, or by setting the "ETHERSCAN_API_KEY" env variable` ); } const solcInputsPath = await deploymentsManager.getSolcInputPath(); @@ -894,23 +892,23 @@ task(TASK_ETHERSCAN_VERIFY, 'submit contract source code to etherscan') task( TASK_SOURCIFY, - 'submit contract source code to sourcify (https://sourcify.dev)', + 'submit contract source code to sourcify (https://sourcify.dev)' ) .addOptionalParam( 'endpoint', 'endpoint url for sourcify', undefined, - types.string, + types.string ) .addOptionalParam( 'contractName', 'specific contract name to verify', undefined, - types.string, + types.string ) .addFlag( 'writeFailingMetadata', - 'write to disk failing metadata for easy debugging', + 'write to disk failing metadata for easy debugging' ) .setAction(async (args, hre) => { await submitSourcesToSourcify(hre, args); @@ -921,41 +919,41 @@ task('export-artifacts') 'dest', 'destination folder where the extended artifacts files will be written to', undefined, - types.string, + types.string ) .addFlag( 'solcInput', - 'if set, artifacts will have an associated solcInput files (required for old version of solidity to ensure verifiability', + 'if set, artifacts will have an associated solcInput files (required for old version of solidity to ensure verifiability' ) .addFlag( 'includingEmptyBytecode', - 'if set, even contract without bytecode (like interfaces) will be exported', + 'if set, even contract without bytecode (like interfaces) will be exported' ) .addFlag( 'includingNoPublicFunctions', - 'if set, even contract without public interface (like imternal libraries) will be 
exported', + 'if set, even contract without public interface (like imternal libraries) will be exported' ) .addOptionalParam( 'exclude', 'list of contract names separated by commas to exclude', undefined, - types.string, + types.string ) .addOptionalParam( 'include', 'list of contract names separated by commas to include. If specified, only these will be considered', undefined, - types.string, + types.string ) .addFlag( 'hideSources', - 'if set, the artifacts files will not contain source code (metadata or other data exposing it) unless specified via --sources-for', + 'if set, the artifacts files will not contain source code (metadata or other data exposing it) unless specified via --sources-for' ) .addOptionalParam( 'sourcesFor', 'list of contract names separated by commas to include source (metadata,etc...) for (see --hide-sources)', undefined, - types.string, + types.string ) .setAction(async (args, hre) => { await hre.run('compile'); @@ -966,7 +964,7 @@ task('export-artifacts') result[item] = true; return result; }, - {}, + {} ); const argsExclude: string[] = args.exclude ? args.exclude.split(',') : []; const exclude = argsExclude.reduce( @@ -974,7 +972,7 @@ task('export-artifacts') result[item] = true; return result; }, - {}, + {} ); const argsSourcesFor: string[] = args.sourcesFor ? 
args.sourcesFor.split(',') @@ -984,7 +982,7 @@ task('export-artifacts') result[item] = true; return result; }, - {}, + {} ); const extendedArtifactFolderpath = args.dest; fs.emptyDirSync(extendedArtifactFolderpath); @@ -1000,12 +998,12 @@ task('export-artifacts') } const artifactDBGPath = path.join( path.dirname(artifactPath), - artifactName + '.dbg.json', + artifactName + '.dbg.json' ); const artifactDBG = await fs.readJSON(artifactDBGPath); const buildinfoPath = path.join( path.dirname(artifactDBGPath), - artifactDBG.buildInfo, + artifactDBG.buildInfo ); const buildInfo: BuildInfo = await fs.readJSON(buildinfoPath); const output = @@ -1078,20 +1076,20 @@ task('export-artifacts') let filepath = path.join( extendedArtifactFolderpath, - artifactName + '.json', + artifactName + '.json' ); if (dataToWrite.sourceName) { if (dataToWrite.contractName) { filepath = path.join( extendedArtifactFolderpath, dataToWrite.sourceName, - dataToWrite.contractName + '.json', + dataToWrite.contractName + '.json' ); } else { filepath = path.join( extendedArtifactFolderpath, dataToWrite.sourceName, - artifactName + '.json', + artifactName + '.json' ); } } @@ -1108,7 +1106,7 @@ subtask( solcVersion: string; }, hre, - runSuper, + runSuper ) => { const nw = hre.hardhatArguments['network'] ? hre.hardhatArguments['network'] @@ -1118,362 +1116,5 @@ subtask( return await loadTronSolc(args.solcVersion); } return runSuper(); - }, -); - -type ProviderName = keyof LlmConfig['providers']; -type SupportedLanguage = 'solidity' | 'vyper'; - -/** - * 统一的 LLM 调用函数 - * @param {string} provider - LLM 提供商 (e.g., 'openai', 'gemini') - * @param {object} config - 该 provider 的配置 (apiKey, model, baseURL?) 
- * @param {string} prompt - 发送给模型的 Prompt - * @returns {Promise} - 返回模型的分析结果文本 - */ -async function callLLM( - provider: ProviderName, - config: LlmProviderConfig, - prompt: string, -): Promise { - console.log(`[INFO] Using provider: ${provider}, model: ${config.model}`); - // console.log(`[INFO] prompt: ${prompt}`); - - switch (provider) { - case 'openai': - case 'qwen': - case 'deepseek': { - // 这些模型使用 OpenAI 兼容的 API - const openaiConfig = config as LlmConfig['providers']['openai']; - const openai = new OpenAI({ - apiKey: openaiConfig.apiKey, - baseURL: openaiConfig.baseURL, - }); - const response = await openai.chat.completions.create({ - model: openaiConfig.model, - messages: [ - { - role: 'system', - content: - 'You are a professional and meticulous smart contract auditor.', - }, - { role: 'user', content: prompt }, - ], - temperature: 0.3, - }); - return response.choices[0].message.content ?? ''; - } - case 'azure_openai': { - const azureConfig = config as LlmConfig['providers']['azure_openai']; - - const azureClient = new AzureOpenAI({ - endpoint: azureConfig.endpoint, - apiKey: azureConfig.apiKey, - apiVersion: azureConfig.apiVersion, - deployment: azureConfig.deploymentName, - }); - - console.log(`[INFO] Using model deployment: ${azureConfig.model}`); - const response = await azureClient.chat.completions.create({ - model: azureConfig.model, - messages: [ - { - role: 'system', - content: - 'You are a professional and meticulous smart contract auditor.', - }, - { role: 'user', content: prompt }, - ], - temperature: 0.3, - }); - return response.choices[0].message.content ?? 
''; - } - case 'gemini': { - const geminiConfig = config as LlmConfig['providers']['gemini']; - const genAI = new GoogleGenerativeAI(geminiConfig.apiKey); - const model = genAI.getGenerativeModel({ model: geminiConfig.model }); - const result = await model.generateContent(prompt); - const response = await result.response; - return response.text(); - } - - default: - throw new Error(`Unsupported LLM provider: ${provider}`); } -} - -/** - * 根据配置获取基础审计 Prompt - * @param llmConfig - LLM 的配置对象 - * @returns {string} - 基础审计指令 - */ -function getBasePromptTemplate(llmConfig: LlmConfig): string { - const defaultPromptTemplate = `As an expert smart contract auditor, please analyze the following {language} code. -The file name is "{contractName}". - -Your analysis should cover: -1. **Security Vulnerabilities**: Identify potential risks. -2. **Gas Optimization**: Suggest gas-saving improvements. -3. **Best Practices**: Check for code style and common practices.`; - - const userPrompt = llmConfig.promptTemplate; - return userPrompt || defaultPromptTemplate; -} - -/** - * 生成最终的审计 Prompt,结合了用户自定义部分和固定的格式要求 - * @param contractName - 合约名称 - * @param contractCode - 带行号的合约代码 - * @param format - 输出格式 - * @param llmConfig - LLM 配置,用于获取自定义 prompt - * @param language - 合约语言 - * @returns {string} - 完整的 Prompt - */ -function getAuditPrompt( - contractName: string, - contractCode: string, - format: 'text' | 'json', - llmConfig: LlmConfig, - language: SupportedLanguage, -): string { - const codeWithLineNumbers = contractCode - .split('\n') - .map((line, index) => `${index + 1}: ${line}`) // 为每行加上 "行号: " 前缀 - .join('\n'); - - let basePrompt = getBasePromptTemplate(llmConfig); - basePrompt = basePrompt - .replace('{contractName}', contractName) - .replace('{codeWithLineNumbers}', codeWithLineNumbers) - .replace('{language}', language); - - if (format === 'json') { - return ` - ${basePrompt} - - Your response MUST be a single, valid JSON array of objects, enclosed in a single \`\`\`json code 
block. Do not add any text before or after the JSON block. - Each object in the array represents a single issue you've found and must conform to this exact structure: - { - "severity": "HIGH" | "MEDIUM" | "LOW" | "INFO", - "filePath": "${contractName}", - "lineNumber": , - "message": "", - "detailedDescription": "", - "suggestion": "" - } - - If you find no issues, return an empty array []. - - Now, analyze the following contract code: - \`\`\`${language} - ${codeWithLineNumbers} - \`\`\` - `; - } - - return ` - ${basePrompt} - - **CRITICAL**: For each issue you find, you MUST format the title of the issue on a single line like this: - [SEVERITY]|[FILE_PATH]:[LINE_NUMBER] - [BRIEF_DESCRIPTION] - - - **SEVERITY**: Use one of: HIGH, MEDIUM, LOW, INFO. - - **FILE_PATH**: This MUST be the exact filename provided: ${contractName}. - - **LINE_NUMBER**: The specific line number where the issue occurs. - - **BRIEF_DESCRIPTION**: A short, one-sentence summary of the issue. - - After this title line, provide a detailed explanation and a code snippet with your suggested modification. - - Example of a single issue's format: - --- - MEDIUM|MyContract.sol:42 - Re-entrancy risk in the withdraw function. - - **Details**: The current implementation of the \`withdraw\` function updates the user's balance *after* the external call (transfer), which makes it vulnerable to a re-entrancy attack. 
- - **Recommendation**: - \`\`\`diff - - balance[msg.sender] = 0; - - (bool sent, ) = msg.sender.call{value: amount}(""); - + (bool sent, ) = msg.sender.call{value: amount}(""); - + require(sent, "Failed to send Ether"); - + balance[msg.sender] = 0; - \`\`\` - --- - - Now, analyze the following contract code: - \`\`\`${language} - ${codeWithLineNumbers} - \`\`\` - `; -} - -// 提取 Markdown JSON -function extractJson(rawOutput: string): string { - const match = rawOutput.match(/```json\s*(\[[\s\S]*?\])\s*```/); - if (!match || !match[1]) { - try { - JSON.parse(rawOutput); - return rawOutput; - } catch (e) { - throw new Error( - 'Could not find a valid JSON code block or parse the raw output as JSON.', - ); - } - } - return match[1]; -} - -interface AuditTaskArgs { - contract?: string; - provider?: ProviderName; - format?: 'text' | 'json'; -} - -task('audit', 'Audits a smart contract using a specified LLM provider') - .addOptionalParam( - 'contract', - 'The name of the contract file to audit (e.g., "MyContract.sol"). If not provided, audits all contracts.', - ) - .addOptionalParam( - 'provider', - 'The LLM provider to use (openai, azure_openai, gemini, qwen, deepseek)', - ) - .addOptionalParam('format', "The output format: 'text' (default) or 'json'") - .setAction( - async (taskArgs: AuditTaskArgs, hre: HardhatRuntimeEnvironment) => { - const { - contract: contractArg, - provider: providerArg, - format: formatArg, - } = taskArgs; - const { llm: llmConfig } = hre.config; - const format = formatArg || 'text'; - const provider = providerArg || llmConfig.defaultProvider; - const providerConfig = llmConfig.providers[provider]; - - if (!providerConfig || !providerConfig.apiKey) { - console.error( - `\n[ERROR] Configuration for provider '${provider}' is missing or incomplete. 
Check your hardhat.config.ts and .env file.`, - ); - return; - } - - let contractPaths: string[]; - const sourcesPath = hre.config.paths.sources; - - if (!contractArg || contractArg.toLowerCase() === 'all') { - console.log( - '[INFO] No specific contract provided. Auditing all contracts...', - ); - contractPaths = await glob(`${sourcesPath}/**/*.{sol,vy}`); - } else { - contractPaths = [path.resolve(sourcesPath, contractArg)]; - } - - if (contractPaths.length === 0) { - console.error('\n[ERROR] No contract files found to audit.'); - return; - } - - console.log(`[INFO] Found ${contractPaths.length} contract(s) to audit.`); - - const allIssues: any[] = []; - for (const contractPath of contractPaths) { - const contractName = path.basename(contractPath); - - const extension = path.extname(contractPath).substring(1); // 'sol' or 'vy' - let language: SupportedLanguage; - - if (extension === 'sol') { - language = 'solidity'; - } else if (extension === 'vy') { - language = 'vyper'; - } else { - console.warn( - `[WARN] Unsupported file type ".${extension}" for ${contractName}. 
Skipping.`, - ); - continue; - } - - console.log(`\n---------------------------------------------`); - console.log(` Auditing: ${contractName} (${language})`); - console.log(`---------------------------------------------\n`); - - let contractCode: string; - try { - const contractPath = path.resolve( - hre.config.paths.sources, - contractName, - ); - contractCode = fs.readFileSync(contractPath, 'utf8'); - console.log(`[INFO] Successfully read contract: ${contractName}`); - } catch (error) { - console.error( - `\n[ERROR] Could not read contract file: ${contractName}.`, - ); - continue; - } - - const prompt = getAuditPrompt( - contractName, - contractCode, - format, - llmConfig, - language, - ); - - try { - console.log( - `[INFO] Sending code to LLM for analysis (format: ${format})...`, - ); - const rawAnalysis = await callLLM(provider, providerConfig, prompt); - console.log(`\n=============================================`); - console.log( - ` 🤖 LLM Audit Report for ${contractName} (${provider.toUpperCase()})`, - ); - console.log(`=============================================\n`); - console.log(rawAnalysis); - if (format === 'json') { - const jsonString = extractJson(rawAnalysis); - try { - const parsedJson = JSON.parse(jsonString); - allIssues.push(...parsedJson); - } catch (e) { - console.error( - "\n[ERROR] Failed to parse the JSON extracted from the LLM's response.", - ); - if (e instanceof SyntaxError) { - console.error('Syntax Error:', e.message); - } - console.error( - 'Extracted string that failed to parse:', - jsonString, - ); - } - } - } catch (error: any) { - console.error( - `\n[ERROR] An error occurred during the audit of ${contractName}:`, - ); - console.error(error.message); - } - } - if (format === 'json') { - if (allIssues.length > 0) { - const formattedJsonString = JSON.stringify(allIssues, null, 2); - const outputPath = path.join( - hre.config.paths.root, - 'audit-report.json', - ); - fs.writeFileSync(outputPath, formattedJsonString, 'utf8'); - 
console.log( - `\n✅ [SUCCESS] Combined audit report for ${contractPaths.length} contract(s) has been saved to: ${outputPath}`, - ); - } else { - console.log( - `\n✅ [SUCCESS] All contracts were audited, and no issues were found.`, - ); - } - } - }, - ); +); diff --git a/src/internal/types.ts b/src/internal/types.ts index ffba711..d0686bf 100644 --- a/src/internal/types.ts +++ b/src/internal/types.ts @@ -1,4 +1,4 @@ -import {Artifact} from 'hardhat/types'; +import { Artifact } from 'hardhat/types'; import { Deployment, @@ -18,7 +18,7 @@ export interface PartialExtension { get(name: string): Promise; getOrNull(name: string): Promise; getDeploymentsFromAddress(address: string): Promise; - all(): Promise<{[name: string]: Deployment}>; + all(): Promise<{ [name: string]: Deployment }>; getExtendedArtifact(name: string): Promise; getArtifact(name: string): Promise; run( @@ -30,11 +30,11 @@ export interface PartialExtension { export?: string; exportAll?: string; } - ): Promise<{[name: string]: Deployment}>; + ): Promise<{ [name: string]: Deployment }>; fixture( tags?: string | string[], - options?: {fallbackToGlobal?: boolean; keepExistingDeployments?: boolean} - ): Promise<{[name: string]: Deployment}>; + options?: { fallbackToGlobal?: boolean; keepExistingDeployments?: boolean } + ): Promise<{ [name: string]: Deployment }>; createFixture( func: FixtureFunc, id?: string diff --git a/src/llm.ts b/src/llm.ts index 0eedb8e..aba7ad7 100644 --- a/src/llm.ts +++ b/src/llm.ts @@ -10,7 +10,7 @@ interface OpenAICompatibleConfig extends BaseProviderConfig { } // Gemini 的特定配置 -interface GeminiConfig extends BaseProviderConfig {} +type GeminiConfig = BaseProviderConfig; // Azure 的特定配置 interface AzureConfig extends BaseProviderConfig { diff --git a/src/sourcify.ts b/src/sourcify.ts index 6059d9a..4043501 100644 --- a/src/sourcify.ts +++ b/src/sourcify.ts @@ -1,11 +1,11 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import axios from 'axios'; import FormData from 
'form-data'; -import {HardhatRuntimeEnvironment} from 'hardhat/types'; +import { HardhatRuntimeEnvironment } from 'hardhat/types'; import chalk from 'chalk'; import fs from 'fs-extra'; import path from 'path'; -import {Readable} from 'stream'; +import { Readable } from 'stream'; function log(...args: any[]) { console.log(...args); @@ -51,13 +51,13 @@ export async function submitSourcesToSourcify( async function submit(name: string) { const deployment = all[name]; - const {address, metadata: metadataString} = deployment; + const { address, metadata: metadataString } = deployment; try { const checkResponse = await axios.get( `${url}checkByAddresses?addresses=${address.toLowerCase()}&chainIds=${chainId}` ); - const {data: checkData} = checkResponse; + const { data: checkData } = checkResponse; if (checkData[0].status === 'perfect') { log(`already verified: ${name} (${address}), skipping.`); return; diff --git a/src/tasks/audit.ts b/src/tasks/audit.ts new file mode 100644 index 0000000..d0e5b18 --- /dev/null +++ b/src/tasks/audit.ts @@ -0,0 +1,358 @@ +import { task } from 'hardhat/config'; +import { HardhatRuntimeEnvironment } from 'hardhat/types'; +import fs from 'fs-extra'; +import path from 'path'; +import { glob } from 'glob'; +import OpenAI from 'openai'; +import { AzureOpenAI } from 'openai'; +import { GoogleGenerativeAI } from '@google/generative-ai'; +import { LlmConfig, LlmProviderConfig } from '../llm'; + +type ProviderName = keyof LlmConfig['providers']; +type SupportedLanguage = 'solidity' | 'vyper'; + +interface AuditTaskArgs { + contract?: string; + provider?: ProviderName; + format?: 'text' | 'json'; +} + +/** + * 统一的 LLM 调用函数 + */ +async function callLLM( + provider: ProviderName, + config: LlmProviderConfig, + prompt: string +): Promise { + // eslint-disable-next-line @typescript-eslint/restrict-template-expressions + console.log(`[INFO] Using provider: ${provider}, model: ${config.model}`); + + switch (provider) { + case 'openai': + case 'qwen': + 
case 'deepseek': { + const openaiConfig = config as LlmConfig['providers']['openai']; + const openai = new OpenAI({ + apiKey: openaiConfig.apiKey, + baseURL: openaiConfig.baseURL, + }); + const response = await openai.chat.completions.create({ + model: openaiConfig.model, + messages: [ + { + role: 'system', + content: + 'You are a professional and meticulous smart contract auditor.', + }, + { role: 'user', content: prompt }, + ], + temperature: 0.3, + }); + return response.choices[0].message.content ?? ''; + } + case 'azure_openai': { + const azureConfig = config as LlmConfig['providers']['azure_openai']; + const azureClient = new AzureOpenAI({ + endpoint: azureConfig.endpoint, + apiKey: azureConfig.apiKey, + apiVersion: azureConfig.apiVersion, + deployment: azureConfig.deploymentName, + }); + + console.log(`[INFO] Using model deployment: ${azureConfig.model}`); + const response = await azureClient.chat.completions.create({ + model: azureConfig.model, + messages: [ + { + role: 'system', + content: + 'You are a professional and meticulous smart contract auditor.', + }, + { role: 'user', content: prompt }, + ], + temperature: 0.3, + }); + return response.choices[0].message.content ?? ''; + } + case 'gemini': { + const geminiConfig = config as LlmConfig['providers']['gemini']; + const genAI = new GoogleGenerativeAI(geminiConfig.apiKey); + const model = genAI.getGenerativeModel({ model: geminiConfig.model }); + const result = await model.generateContent(prompt); + const response = await result.response; + return response.text(); + } + + default: + // eslint-disable-next-line @typescript-eslint/restrict-template-expressions + throw new Error(`Unsupported LLM provider: ${provider}`); + } +} + +/** + * 根据配置获取基础审计 Prompt + */ +function getBasePromptTemplate(llmConfig: LlmConfig): string { + const defaultPromptTemplate = `As an expert smart contract auditor, please analyze the following {language} code. +The file name is '{contractName}'. + +Your analysis should cover: +1. 
**Security Vulnerabilities**: Identify potential risks.
+2. **Gas Optimization**: Suggest gas-saving improvements.
+3. **Best Practices**: Check for code style and common practices.`;
+
+  const userPrompt = llmConfig.promptTemplate;
+  return userPrompt || defaultPromptTemplate;
+}
+
+/**
+ * 生成最终的审计 Prompt
+ */
+function getAuditPrompt(
+  contractName: string,
+  contractCode: string,
+  format: 'text' | 'json',
+  llmConfig: LlmConfig,
+  language: SupportedLanguage
+): string {
+  const codeWithLineNumbers = contractCode
+    .split('\n')
+    .map((line, index) => `${index + 1}: ${line}`)
+    .join('\n');
+
+  let basePrompt = getBasePromptTemplate(llmConfig);
+  basePrompt = basePrompt
+    .replace('{contractName}', contractName)
+    .replace('{codeWithLineNumbers}', codeWithLineNumbers)
+    .replace('{language}', language);
+
+  if (format === 'json') {
+    return `
+    ${basePrompt}
+
+    Your response MUST be a single, valid JSON array of objects, enclosed in a single \`\`\`json code block. Do not add any text before or after the JSON block.
+    Each object in the array represents a single issue you've found and must conform to this exact structure (JSON requires double-quoted keys and string values):
+    {
+      "severity": "HIGH" | "MEDIUM" | "LOW" | "INFO",
+      "filePath": "${contractName}",
+      "lineNumber": <line number as an integer>,
+      "message": "<brief one-sentence summary>",
+      "detailedDescription": "<detailed explanation of the issue>",
+      "suggestion": "<suggested fix or code change>"
+    }
+
+    If you find no issues, return an empty array [].
+
+    Now, analyze the following contract code:
+    \`\`\`${language}
+    ${codeWithLineNumbers}
+    \`\`\`
+    `;
+  }
+
+  return `
+    ${basePrompt}
+
+    **CRITICAL**: For each issue you find, you MUST format the title of the issue on a single line like this:
+    [SEVERITY]|[FILE_PATH]:[LINE_NUMBER] - [BRIEF_DESCRIPTION]
+
+    - **SEVERITY**: Use one of: HIGH, MEDIUM, LOW, INFO.
+    - **FILE_PATH**: This MUST be the exact filename provided: ${contractName}.
+    - **LINE_NUMBER**: The specific line number where the issue occurs.
+    - **BRIEF_DESCRIPTION**: A short, one-sentence summary of the issue.
+ + After this title line, provide a detailed explanation and a code snippet with your suggested modification. + + Example of a single issue's format: + --- + MEDIUM|MyContract.sol:42 - Re-entrancy risk in the withdraw function. + + **Details**: The current implementation of the \`withdraw\` function updates the user's balance *after* the external call (transfer), which makes it vulnerable to a re-entrancy attack. + + **Recommendation**: + \`\`\`diff + - balance[msg.sender] = 0; + - (bool sent, ) = msg.sender.call{value: amount}(''); + + (bool sent, ) = msg.sender.call{value: amount}(''); + + require(sent, 'Failed to send Ether'); + + balance[msg.sender] = 0; + \`\`\` + --- + + Now, analyze the following contract code: + \`\`\`${language} + ${codeWithLineNumbers} + \`\`\` + `; +} + +// 提取 Markdown JSON +function extractJson(rawOutput: string): string { + const match = rawOutput.match(/```json\s*(\[[\s\S]*?\])\s*```/); + if (!match || !match[1]) { + try { + JSON.parse(rawOutput); + return rawOutput; + } catch (e) { + throw new Error( + 'Could not find a valid JSON code block or parse the raw output as JSON.' + ); + } + } + return match[1]; +} + +task('audit', 'Audits a smart contract using a specified LLM provider') + .addOptionalParam( + 'contract', + 'The name of the contract file to audit (e.g., "MyContract.sol"). If not provided, audits all contracts.' 
+ ) + .addOptionalParam( + 'provider', + 'The LLM provider to use (openai, azure_openai, gemini, qwen, deepseek)' + ) + .addOptionalParam('format', 'The output format: "text" (default) or "json"') + .setAction( + async (taskArgs: AuditTaskArgs, hre: HardhatRuntimeEnvironment) => { + const { + contract: contractArg, + provider: providerArg, + format: formatArg, + } = taskArgs; + const { llm: llmConfig } = hre.config; + const format = formatArg || 'text'; + const provider = providerArg || llmConfig.defaultProvider; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const providerConfig = llmConfig.providers[provider] as any; + + if (!providerConfig || !providerConfig.apiKey) { + // eslint-disable-next-line @typescript-eslint/restrict-template-expressions + console.error( + `\n[ERROR] Configuration for provider '${provider}' is missing or incomplete. Check your hardhat.config.ts and .env file.` + ); + return; + } + + let contractPaths: string[]; + const sourcesPath = hre.config.paths.sources; + + if (!contractArg || contractArg.toLowerCase() === 'all') { + console.log( + '[INFO] No specific contract provided. Auditing all contracts...' + ); + contractPaths = await glob(`${sourcesPath}/**/*.{sol,vy}`); + } else { + contractPaths = [path.resolve(sourcesPath, contractArg)]; + } + + if (contractPaths.length === 0) { + console.error('\n[ERROR] No contract files found to audit.'); + return; + } + + console.log(`[INFO] Found ${contractPaths.length} contract(s) to audit.`); + + const allIssues: any[] = []; + for (const contractPath of contractPaths) { + const contractName = path.basename(contractPath); + + const extension = path.extname(contractPath).substring(1); // 'sol' or 'vy' + let language: SupportedLanguage; + + if (extension === 'sol') { + language = 'solidity'; + } else if (extension === 'vy') { + language = 'vyper'; + } else { + console.warn( + `[WARN] Unsupported file type '.${extension}' for ${contractName}. 
Skipping.`
+        );
+        continue;
+      }
+
+      console.log(`\n---------------------------------------------`);
+      console.log(`  Auditing: ${contractName} (${language})`);
+      console.log(`---------------------------------------------\n`);
+
+      let contractCode: string;
+      try {
+        // Bug fix: read via the full globbed path (contractPath) instead of
+        // re-resolving by basename, so contracts located in subdirectories
+        // of the sources folder are read correctly.
+        const resolvedPath = path.resolve(contractPath);
+        contractCode = fs.readFileSync(resolvedPath, 'utf8');
+        console.log(`[INFO] Successfully read contract: ${contractName}`);
+      } catch (error) {
+        console.error(
+          `\n[ERROR] Could not read contract file: ${contractName}.`
+        );
+        continue;
+      }
+
+      const prompt = getAuditPrompt(
+        contractName,
+        contractCode,
+        format,
+        llmConfig,
+        language
+      );
+
+      try {
+        console.log(
+          `[INFO] Sending code to LLM for analysis (format: ${format})...`
+        );
+        const rawAnalysis = await callLLM(provider, providerConfig, prompt);
+        console.log(`\n=============================================`);
+        // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+        console.log(
+          `  🤖 LLM Audit Report for ${contractName} (${(
+            provider as string
+          ).toUpperCase()})`
+        );
+        console.log(`=============================================\n`);
+        console.log(rawAnalysis);
+        if (format === 'json') {
+          const jsonString = extractJson(rawAnalysis);
+          try {
+            const parsedJson = JSON.parse(jsonString);
+            allIssues.push(...parsedJson);
+          } catch (e) {
+            console.error(
+              "\n[ERROR] Failed to parse the JSON extracted from the LLM's response."
+ ); + if (e instanceof SyntaxError) { + console.error('Syntax Error:', e.message); + } + console.error( + 'Extracted string that failed to parse:', + jsonString + ); + } + } + } catch (error: any) { + console.error( + `\n[ERROR] An error occurred during the audit of ${contractName}:` + ); + console.error(error.message); + } + } + if (format === 'json') { + if (allIssues.length > 0) { + const formattedJsonString = JSON.stringify(allIssues, null, 2); + const outputPath = path.join( + hre.config.paths.root, + 'audit-report.json' + ); + fs.writeFileSync(outputPath, formattedJsonString, 'utf8'); + console.log( + `\n✅ [SUCCESS] Combined audit report for ${contractPaths.length} contract(s) has been saved to: ${outputPath}` + ); + } else { + console.log( + `\n✅ [SUCCESS] All contracts were audited, and no issues were found.` + ); + } + } + } + ); diff --git a/src/tasks/compile.ts b/src/tasks/compile.ts new file mode 100644 index 0000000..8f353f3 --- /dev/null +++ b/src/tasks/compile.ts @@ -0,0 +1,160 @@ +import { task, subtask } from 'hardhat/config'; +import { + TASK_COMPILE, + TASK_COMPILE_SOLIDITY_LOG_COMPILATION_ERRORS, +} from 'hardhat/builtin-tasks/task-names'; +import fs from 'fs-extra'; +import path from 'path'; + +/** + * Appends issues to the compilation-errors.json file. 
+ */ +function reportIssues(hre: any, issues: any[], append: boolean = false) { + const errorFilePath = path.join( + hre.config.paths.root, + 'compilation-errors.json' + ); + let allIssues = issues; + + if (append && fs.existsSync(errorFilePath)) { + try { + const existing = fs.readJsonSync(errorFilePath); + if (Array.isArray(existing)) { + allIssues = [...existing, ...issues]; + } + } catch (e) { + // Ignore read errors + } + } + + fs.writeJsonSync(errorFilePath, allIssues, { spaces: 2 }); + (hre as any)._sunhatIssuesCount = + ((hre as any)._sunhatIssuesCount || 0) + issues.length; +} + +task(TASK_COMPILE).setAction(async (args, hre, runSuper) => { + (hre as any)._sunhatIssuesCount = 0; + (hre as any)._solidityErrorsCaptured = false; + + try { + await runSuper(args); + + // Clear the file only if no issues (errors/warnings) were reported during the entire process + if (((hre as any)._sunhatIssuesCount || 0) === 0) { + reportIssues(hre, []); + } + } catch (error: any) { + // If Solidity didn't already capture and report detailed errors, + // or if the failure is NOT the standard Solidity failure, try to parse it. 
+ if ( + !(hre as any)._solidityErrorsCaptured || + !error.message.includes('HH600') + ) { + let errors = []; + let filePath = 'hardhat.config.ts'; + + if (error.cmd) { + const cmdMatch = error.cmd.match(/\s([^\s]+\.vy)$/); + if (cmdMatch) filePath = cmdMatch[1]; + } + + const lineColMatch = error.message.match(/line (\d+):(\d+)/); + const lineOnlyMatch = error.message.match(/line (\d+)/); + const arrowMatch = error.message.match( + /(?:-->\s+)?([^:\s\n]+):(\d+):(\d+):/ + ); + + if (lineColMatch || lineOnlyMatch || arrowMatch) { + let line = 0; + let column = 0; + + if (arrowMatch) { + filePath = arrowMatch[1]; + line = parseInt(arrowMatch[2]); + column = parseInt(arrowMatch[3]); + } else if (lineColMatch) { + line = parseInt(lineColMatch[1]); + column = parseInt(lineColMatch[2]); + } else if (lineOnlyMatch) { + line = parseInt(lineOnlyMatch[1]); + } + + const messageLines = error.message.split('\n'); + const exceptionLine = messageLines.find( + (l: string) => l.includes('Exception:') || l.includes('Error:') + ); + const message = exceptionLine + ? exceptionLine.trim() + : error.message.split('\n')[0]; + + errors.push({ + filePath, + line, + column, + severity: 'error', + message, + original: error.message, + }); + } + + if (errors.length === 0) { + errors.push({ + filePath: 'hardhat.config.ts', + line: 0, + column: 0, + severity: 'error', + message: error.message, + original: error.message, + }); + } + + // Append if we already have some (e.g. 
Solidity warnings followed by Vyper error) + reportIssues(hre, errors, ((hre as any)._sunhatIssuesCount || 0) > 0); + } + throw error; + } +}); + +subtask(TASK_COMPILE_SOLIDITY_LOG_COMPILATION_ERRORS).setAction( + async (taskArgs: any, hre, runSuper) => { + const outputErrors = taskArgs.output?.errors || []; + const validIssues = []; + let hasStrictError = false; + + for (const issue of outputErrors) { + let filePath = issue.sourceLocation?.file || 'unknown'; + let line = 0; + let column = 0; + const severity = issue.severity || 'error'; + if (severity === 'error') hasStrictError = true; + + if (issue.formattedMessage) { + const match = issue.formattedMessage.match( + /(?:-->\s+)?([^:\s\n]+):(\d+):(\d+):/ + ); + if (match) { + if (filePath === 'unknown') filePath = match[1]; + line = parseInt(match[2]); + column = parseInt(match[3]); + } + } + + validIssues.push({ + filePath, + line, + column, + severity, + message: issue.message || issue.formattedMessage, + original: issue.formattedMessage, + detailed: issue, + }); + } + + if (hasStrictError) { + (hre as any)._solidityErrorsCaptured = true; + } + + reportIssues(hre, validIssues, ((hre as any)._sunhatIssuesCount || 0) > 0); + return runSuper(taskArgs); + } +); diff --git a/src/tron/contract.ts b/src/tron/contract.ts index 6fc22ae..c35dc97 100644 --- a/src/tron/contract.ts +++ b/src/tron/contract.ts @@ -1,9 +1,9 @@ -import {TronSigner} from './signer'; -import {Contract, ContractFactory, ContractInterface, ethers} from 'ethers'; -import {TransactionRequest} from '@ethersproject/providers'; -import {CreateSmartContract, MethodSymbol, TronTxMethods} from './types'; -import {strip0x} from './utils'; -export {Contract} from 'ethers'; +import { TronSigner } from './signer'; +import { Contract, ContractFactory, ContractInterface, ethers } from 'ethers'; +import { TransactionRequest } from '@ethersproject/providers'; +import { CreateSmartContract, MethodSymbol, TronTxMethods } from './types'; +import { strip0x } from 
'./utils'; +export { Contract } from 'ethers'; /** * A contract factory for deploying and interacting with smart contracts on the TRON network. @@ -68,7 +68,7 @@ export class TronContractFactory extends ContractFactory { override getDeployTransaction( ...args: any[] ): ethers.providers.TransactionRequest { - const {data, value} = super.getDeployTransaction( + const { data, value } = super.getDeployTransaction( ...args ) as TransactionRequest; diff --git a/src/tron/provider.ts b/src/tron/provider.ts index 88154f0..31862ed 100644 --- a/src/tron/provider.ts +++ b/src/tron/provider.ts @@ -8,9 +8,9 @@ import { TransactionResponse, Web3Provider, } from '@ethersproject/providers'; -import {HttpNetworkConfig} from 'hardhat/types'; -import {TronSigner} from './signer'; -import {BigNumber, Wallet} from 'ethers'; +import { HttpNetworkConfig } from 'hardhat/types'; +import { TronSigner } from './signer'; +import { BigNumber, Wallet } from 'ethers'; import { Time, TronTransactionFailedError, @@ -25,7 +25,7 @@ import { parseTransaction, } from 'ethers/lib/utils'; import TronWeb from 'tronweb'; -import {Transaction, TronWebError1} from 'tronweb/interfaces'; +import { Transaction, TronWebError1 } from 'tronweb/interfaces'; /** * A provider for interacting with the TRON blockchain, extending the Web3Provider. 
@@ -52,7 +52,7 @@ import {Transaction, TronWebError1} from 'tronweb/interfaces'; export class TronWeb3Provider extends Web3Provider { protected signers = new Map(); public ro_tronweb: TronWeb; - public gasPrice: {time: number; value?: BigNumber} = {time: Time.NOW}; + public gasPrice: { time: number; value?: BigNumber } = { time: Time.NOW }; public maxFeeLimit?: number; public FALLBACK_MAX_FEE_LIMIT = 15e9; // 15,000 TRX; private readonly fullHost: string; @@ -64,13 +64,13 @@ export class TronWeb3Provider extends Web3Provider { network?: Networkish | undefined ) { super(provider, network); - const {httpHeaders: headers, url, accounts} = config; + const { httpHeaders: headers, url, accounts } = config; let fullHost = url; // the address of the tron node has the jsonrpc path chopped off fullHost = fullHost.replace(/\/jsonrpc\/?$/, ''); this.fullHost = fullHost; this.headers = headers; - this.ro_tronweb = new TronWeb({fullHost, headers}); + this.ro_tronweb = new TronWeb({ fullHost, headers }); // instantiate Tron Signer if (Array.isArray(accounts)) { for (const pk of accounts) { @@ -161,10 +161,10 @@ export class TronWeb3Provider extends Web3Provider { */ override async getGasPrice(): Promise { const DEFAULT_ENERGY_PRICE = BigNumber.from('1000'); - const {time, value} = this.gasPrice; + const { time, value } = this.gasPrice; if (time > Time.NOW - 15 * Time.SECOND && value) return value; const gasPrice = (await super.getGasPrice()) ?? DEFAULT_ENERGY_PRICE; - this.gasPrice = {time: Time.NOW, value: gasPrice}; + this.gasPrice = { time: Time.NOW, value: gasPrice }; return gasPrice; } @@ -182,7 +182,7 @@ export class TronWeb3Provider extends Web3Provider { ): Promise { signedTransaction = await signedTransaction; const deser = parseTransaction(signedTransaction); - const {to, data, from, value} = deser; + const { to, data, from, value } = deser; // is this a send trx transaction? 
if (this.isSendTRX(to, from, data)) { @@ -248,21 +248,21 @@ export class TronWeb3Provider extends Web3Provider { from: string, contract: string, funcSig: string, - params: {type: string; value: string | number}[], + params: { type: string; value: string | number }[], options: { gasLimit?: string | number | BigNumber; value?: string | BigNumber; } ) { const feeLimit = await this.getSigner(from).getFeeLimit( - {to: contract}, + { to: contract }, options ); - const {transaction} = + const { transaction } = await this.ro_tronweb.transactionBuilder.triggerSmartContract( this.ro_tronweb.address.toHex(contract), funcSig, - {feeLimit, callValue: options.value?.toString() ?? 0}, + { feeLimit, callValue: options.value?.toString() ?? 0 }, params, this.ro_tronweb.address.toHex(from) ); @@ -314,14 +314,12 @@ export class TronWeb3Provider extends Web3Provider { let curr_conf = initialConfirmations; while (targetConfirmations && curr_conf < targetConfirmations) { await Time.sleep(Time.SECOND); // sleep 1 sec - const {confirmations: latest_conf} = await this.getTransactionWithRetry( - hash, - 3 - ); + const { confirmations: latest_conf } = + await this.getTransactionWithRetry(hash, 3); curr_conf = latest_conf; } const receipt = await this.getTransactionReceipt(ensure0x(hash)); - const {status} = receipt; + const { status } = receipt; if (status === 0) { throw new TronTransactionFailedError(receipt); } @@ -374,7 +372,7 @@ export class TronWeb3Provider extends Web3Provider { ): Promise { const toDel = ['type', 'maxFeePerGas', 'maxPriorityFeePerGas', 'nonce']; for (const field of toDel) { - delete (transaction as {[key: string]: any})[field]; + delete (transaction as { [key: string]: any })[field]; } return super.estimateGas(transaction); } @@ -432,7 +430,7 @@ export class TronWeb3Provider extends Web3Provider { async getMaxFeeLimit(): Promise { if (this.maxFeeLimit == undefined) { const params = await this.ro_tronweb.trx.getChainParameters(); - const param = params.find(({key}) => 
key === 'getMaxFeeLimit'); + const param = params.find(({ key }) => key === 'getMaxFeeLimit'); this.maxFeeLimit = param?.value ?? this.FALLBACK_MAX_FEE_LIMIT; } return this.maxFeeLimit; diff --git a/src/tron/signer.ts b/src/tron/signer.ts index 2db6199..da6a7bb 100644 --- a/src/tron/signer.ts +++ b/src/tron/signer.ts @@ -3,12 +3,12 @@ * gasPrice in EVM == energyPrice in TVM */ -import {BigNumber, Wallet} from 'ethers'; -import {Deferrable} from 'ethers/lib/utils'; +import { BigNumber, Wallet } from 'ethers'; +import { Deferrable } from 'ethers/lib/utils'; import TronWeb from 'tronweb'; -import {TronWeb3Provider} from './provider'; -import {Time, TronWebGetTransactionError, strip0x} from './utils'; -import {CreateSmartContract, MethodSymbol, TronTxMethods} from './types'; +import { TronWeb3Provider } from './provider'; +import { Time, TronWebGetTransactionError, strip0x } from './utils'; +import { CreateSmartContract, MethodSymbol, TronTxMethods } from './types'; import { BlockTransaction, ContractExecutionParams, @@ -43,8 +43,8 @@ import { */ export class TronSigner extends Wallet { protected tronweb: TronWeb; - public gasPrice: {time: number; value?: BigNumber} = {time: Time.NOW}; - public energyFactors = new Map(); + public gasPrice: { time: number; value?: BigNumber } = { time: Time.NOW }; + public energyFactors = new Map(); public MAX_ENERGY_FACTOR = 1.2; public MAX_ENERGY_DIVISOR = 1000; @@ -250,7 +250,7 @@ export class TronSigner extends Wallet { if (contract_address == '') return energy_factor; const res = await this.tronweb.fullNode.request( 'wallet/getcontractinfo', - {value: contract_address, visible: false}, + { value: contract_address, visible: false }, 'post' ); @@ -281,7 +281,7 @@ export class TronSigner extends Wallet { if ('Error' in res) throw new TronWebGetTransactionError(res); return res; } - + getTronWeb() { return this.tronweb; } diff --git a/src/tron/tronweb.d.ts b/src/tron/tronweb.d.ts index 67db0d3..1ec60ef 100644 --- 
a/src/tron/tronweb.d.ts +++ b/src/tron/tronweb.d.ts @@ -3,7 +3,7 @@ * Original Source: https://github.com/daochild/tronweb-typescript */ declare module 'tronweb' { - import {BigNumber} from 'bignumber.js'; + import { BigNumber } from 'bignumber.js'; import { Account, AccountMnemonic, @@ -660,12 +660,15 @@ declare module 'tronweb/interfaces' { export interface TrxAccount { address: string; balance: number; - frozen: {frozen_balance: number; expire_time: number}[]; + frozen: { frozen_balance: number; expire_time: number }[]; create_time: number; latest_opration_time: number; latest_consume_free_time: number; account_resource: { - frozen_balance_for_energy: {frozen_balance: number; expire_time: number}; + frozen_balance_for_energy: { + frozen_balance: number; + expire_time: number; + }; latest_consume_time_for_energy: number; }; owner_permission: { @@ -681,8 +684,8 @@ declare module 'tronweb/interfaces' { operations: string; keys: [Array]; }[]; - assetV2: {key: string; value: number}[]; - free_asset_net_usageV2: {key: string; value: number}[]; + assetV2: { key: string; value: number }[]; + free_asset_net_usageV2: { key: string; value: number }[]; } export interface ParameterValueOnTriggerSC { @@ -724,11 +727,11 @@ declare module 'tronweb/interfaces' { } export interface TransactionResult { - result: {[key: string]: any} | boolean; + result: { [key: string]: any } | boolean; approved_list?: string[]; transaction: | { - result: {result: boolean}; + result: { result: boolean }; txid: string; transaction: { signature: any[]; @@ -741,11 +744,11 @@ declare module 'tronweb/interfaces' { } export interface RawTransactionResult { - result: {[key: string]: any} | boolean; + result: { [key: string]: any } | boolean; txid: string; approved_list?: string[]; transaction: { - result: {result: boolean}; + result: { result: boolean }; contract_address: string; visible: boolean; signature: string[]; @@ -758,8 +761,8 @@ declare module 'tronweb/interfaces' { export interface 
TronAccountResource { freeNetLimit: number; netLimit: number; - assetNetUsed: {key: string; value: number}[]; - assetNetLimit: {key: string; value: number}[]; + assetNetUsed: { key: string; value: number }[]; + assetNetLimit: { key: string; value: number }[]; totalNetLimit: number; totalNetWeight: number; energyLimit: number; @@ -768,7 +771,7 @@ declare module 'tronweb/interfaces' { } export interface BlockTransaction { - ret: Array<{contractRet: string}>; + ret: Array<{ contractRet: string }>; signature: string[]; txID: string; contract_address: string; @@ -974,7 +977,7 @@ declare module 'tronweb/interfaces' { export interface Proposal { proposal_id: number; proposer_address: string; - parameters: {[key: string]: any}[]; // Assuming the parameters can be of any type + parameters: { [key: string]: any }[]; // Assuming the parameters can be of any type expiration_time: number; create_time: number; approvals: string[]; diff --git a/src/tron/types.ts b/src/tron/types.ts index fc83acb..5376c78 100644 --- a/src/tron/types.ts +++ b/src/tron/types.ts @@ -1,4 +1,4 @@ -import {ContractExecutionParams} from 'tronweb/interfaces'; +import { ContractExecutionParams } from 'tronweb/interfaces'; export const MethodSymbol = Symbol('method'); diff --git a/src/tron/utils.ts b/src/tron/utils.ts index 5b569ab..9387936 100644 --- a/src/tron/utils.ts +++ b/src/tron/utils.ts @@ -1,4 +1,4 @@ -import {TronWebError1, TronWebError2} from 'tronweb/interfaces'; +import { TronWebError1, TronWebError2 } from 'tronweb/interfaces'; export const isArrayofBytes = (data: any): boolean => { return ( @@ -11,7 +11,7 @@ export const isArrayofBytes = (data: any): boolean => { export class TronWebError extends Error { readonly code: string; readonly hash: string; - constructor({code, message, txid}: TronWebError1) { + constructor({ code, message, txid }: TronWebError1) { // need to convert the message to utf-8 message = Buffer.from(message, 'hex').toString(); super(message); @@ -22,7 +22,7 @@ export class 
TronWebError extends Error { } export class TronWebGetTransactionError extends Error { - constructor({Error}: TronWebError2) { + constructor({ Error }: TronWebError2) { super(Error); this.name = this.constructor.name; } diff --git a/src/utils.ts b/src/utils.ts index 567fa2d..cbdc04c 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,18 +1,18 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import * as fs from 'fs-extra'; import * as path from 'path'; -import {Wallet} from '@ethersproject/wallet'; -import {getAddress, isAddress} from '@ethersproject/address'; -import {Interface, FunctionFragment, Fragment} from '@ethersproject/abi'; -import {Artifact, HardhatRuntimeEnvironment, Network} from 'hardhat/types'; -import {BigNumber} from '@ethersproject/bignumber'; -import {ABI, Export, ExtendedArtifact, MultiExport} from '../types'; -import {Artifacts} from 'hardhat/internal/artifacts'; +import { Wallet } from '@ethersproject/wallet'; +import { getAddress, isAddress } from '@ethersproject/address'; +import { Interface, FunctionFragment, Fragment } from '@ethersproject/abi'; +import { Artifact, HardhatRuntimeEnvironment, Network } from 'hardhat/types'; +import { BigNumber } from '@ethersproject/bignumber'; +import { ABI, Export, ExtendedArtifact, MultiExport } from '../types'; +import { Artifacts } from 'hardhat/internal/artifacts'; import murmur128 from 'murmur-128'; -import {Transaction} from '@ethersproject/transactions'; -import {store} from './globalStore'; -import {ERRORS} from 'hardhat/internal/core/errors-list'; -import {HardhatError} from 'hardhat/internal/core/errors'; +import { Transaction } from '@ethersproject/transactions'; +import { store } from './globalStore'; +import { ERRORS } from 'hardhat/internal/core/errors-list'; +import { HardhatError } from 'hardhat/internal/core/errors'; function getOldArtifactSync( name: string, @@ -110,9 +110,9 @@ export function loadAllDeployments( hre: HardhatRuntimeEnvironment, deploymentsPath: string, 
onlyABIAndAddress?: boolean, - externalDeployments?: {[networkName: string]: string[]} + externalDeployments?: { [networkName: string]: string[] } ): MultiExport { - const networksFound: {[networkName: string]: Export} = {}; + const networksFound: { [networkName: string]: Export } = {}; const all: MultiExport = {}; // TODO any is chainConfig fs.readdirSync(deploymentsPath).forEach((fileName) => { const fPath = path.resolve(deploymentsPath, fileName); @@ -167,7 +167,10 @@ export function loadAllDeployments( `mismatch between external deployment network ${networkName} chainId: ${networkChainId} vs existing chainId: ${networkExist.chainId}` ); } - networkExist.contracts = {...contracts, ...networkExist.contracts}; + networkExist.contracts = { + ...contracts, + ...networkExist.contracts, + }; } else { const network = { name: networkName, @@ -203,7 +206,7 @@ function loadDeployments( expectedChainId?: string, truffleChainId?: string ) { - const deploymentsFound: {[name: string]: any} = {}; + const deploymentsFound: { [name: string]: any } = {}; const deployPath = path.join(deploymentsPath, subPath); let filesStats; @@ -304,24 +307,24 @@ export function addDeployments( } function transformNamedAccounts( - configNamedAccounts: {[name: string]: any}, + configNamedAccounts: { [name: string]: any }, chainIdGiven: string | number, accounts: string[], networkConfigName: string ): { - namedAccounts: {[name: string]: string}; + namedAccounts: { [name: string]: string }; unnamedAccounts: string[]; unknownAccounts: string[]; - addressesToProtocol: {[address: string]: string}; + addressesToProtocol: { [address: string]: string }; } { - const addressesToProtocol: {[address: string]: string} = {}; - const unknownAccountsDict: {[address: string]: boolean} = {}; - const knownAccountsDict: {[address: string]: boolean} = {}; + const addressesToProtocol: { [address: string]: string } = {}; + const unknownAccountsDict: { [address: string]: boolean } = {}; + const knownAccountsDict: { 
[address: string]: boolean } = {}; for (const account of accounts) { knownAccountsDict[account.toLowerCase()] = true; } - const namedAccounts: {[name: string]: string} = {}; - const usedAccounts: {[address: string]: boolean} = {}; + const namedAccounts: { [name: string]: string } = {}; + const usedAccounts: { [address: string]: boolean } = {}; // TODO transform into checksum address if (configNamedAccounts) { const accountNames = Object.keys(configNamedAccounts); @@ -338,9 +341,7 @@ function transformNamedAccounts( addressesToProtocol[address.toLowerCase()] = protocolSplit[0].toLowerCase(); // knownAccountsDict[address.toLowerCase()] = true; // TODO ? this would prevent auto impersonation in fork/test - } else if ( - protocolSplit[0].toLowerCase() === 'trezor' - ) { + } else if (protocolSplit[0].toLowerCase() === 'trezor') { address = protocolSplit[1]; addressesToProtocol[address.toLowerCase()] = protocolSplit[0].toLowerCase(); @@ -470,15 +471,15 @@ export function processNamedAccounts( [name: string]: | string | number - | {[network: string]: null | number | string}; + | { [network: string]: null | number | string }; }, accounts: string[], chainIdGiven: string ): { - namedAccounts: {[name: string]: string}; + namedAccounts: { [name: string]: string }; unnamedAccounts: string[]; unknownAccounts: string[]; - addressesToProtocol: {[address: string]: string}; + addressesToProtocol: { [address: string]: string }; } { if (namedAccounts) { return transformNamedAccounts( @@ -561,16 +562,19 @@ export function getDeployPaths(network: Network): string[] { } } -export function filterABI( - abi: ABI, - excludeSighashes: Set, -): any[] { - return abi.filter(fragment => fragment.type !== 'function' || !excludeSighashes.has(Interface.getSighash(Fragment.from(fragment) as FunctionFragment))); +export function filterABI(abi: ABI, excludeSighashes: Set): any[] { + return abi.filter( + (fragment) => + fragment.type !== 'function' || + !excludeSighashes.has( + 
Interface.getSighash(Fragment.from(fragment) as FunctionFragment) + ) + ); } export function mergeABIs( abis: any[][], - options: {check: boolean; skipSupportsInterface: boolean} + options: { check: boolean; skipSupportsInterface: boolean } ): any[] { if (abis.length === 0) { return []; @@ -661,10 +665,10 @@ export function countElements(arr: any) { for (const item of arr) { if (Array.isArray(item)) { - count += countElements(item); + count += countElements(item); } else { count += 1; } } return count; -} \ No newline at end of file +} diff --git a/test/fixture-projects/hardhat-project/hardhat.config.ts b/test/fixture-projects/hardhat-project/hardhat.config.ts index f76fc04..d683934 100644 --- a/test/fixture-projects/hardhat-project/hardhat.config.ts +++ b/test/fixture-projects/hardhat-project/hardhat.config.ts @@ -1,7 +1,7 @@ // We load the plugin here. import '../../../src/index'; -import {HardhatUserConfig} from 'hardhat/types'; +import { HardhatUserConfig } from 'hardhat/types'; const config: HardhatUserConfig = { solidity: { diff --git a/test/helpers.ts b/test/helpers.ts index e34a279..5623d51 100644 --- a/test/helpers.ts +++ b/test/helpers.ts @@ -1,5 +1,5 @@ -import {resetHardhatContext} from 'hardhat/plugins-testing'; -import {HardhatRuntimeEnvironment} from 'hardhat/types'; +import { resetHardhatContext } from 'hardhat/plugins-testing'; +import { HardhatRuntimeEnvironment } from 'hardhat/types'; import path from 'path'; declare module 'mocha' { @@ -40,7 +40,6 @@ export function useTronEnvironment( try { this.env = require('hardhat'); // console.log(this.env); - } catch (error) { console.error('Failed to load Hardhat environment:', error); } @@ -49,4 +48,4 @@ export function useTronEnvironment( afterEach('Resetting hardhat', function () { resetHardhatContext(); }); -} \ No newline at end of file +} diff --git a/test/project.test.ts b/test/project.test.ts index 461d1b7..d87fbd1 100644 --- a/test/project.test.ts +++ b/test/project.test.ts @@ -1,11 +1,11 @@ 
-import {assert} from 'chai'; +import { assert } from 'chai'; -import {useEnvironment,useTronEnvironment} from './helpers'; +import { useEnvironment, useTronEnvironment } from './helpers'; describe('hardhat-deploy hre extension', function () { useEnvironment('hardhat-project', 'hardhat'); it('It should add the deployments field', function () { - assert.isNotNull(this.env.deployments); + assert.isNotNull(this.env.deployments); }); it('The getChainId should give the correct chainId', async function () { @@ -13,14 +13,13 @@ describe('hardhat-deploy hre extension', function () { }); }); - describe('hardhat-deploy hre-tron extension', function () { useTronEnvironment('hardhat-project', 'tron'); it('It should add the deployments field', function () { - assert.isNotNull(this.env.deployments); + assert.isNotNull(this.env.deployments); }); it('The Tron network flag should give the correct vaule', async function () { assert.equal(await this.env.network.config.tron, true); }); -}); \ No newline at end of file +}); diff --git a/types.ts b/types.ts index 07fa617..8d24706 100644 --- a/types.ts +++ b/types.ts @@ -6,8 +6,8 @@ import { Artifact, HardhatRuntimeEnvironment, } from 'hardhat/types'; -import type {BigNumber} from '@ethersproject/bignumber'; -import {Signer} from '@ethersproject/abstract-signer'; +import type { BigNumber } from '@ethersproject/bignumber'; +import { Signer } from '@ethersproject/abstract-signer'; export type ExtendedArtifact = { abi: any[]; @@ -93,7 +93,7 @@ export interface DiamondOptions extends TxOptions { execute?: { contract?: | string - | {name: string; artifact: string | ArtifactData; args?: any[]}; + | { name: string; artifact: string | ArtifactData; args?: any[] }; methodName: string; args: any[]; }; @@ -238,7 +238,7 @@ export interface DeploymentsExtension { fetchIfDifferent( // return true if new compiled code is different than deployed contract name: string, options: DeployOptions - ): Promise<{differences: boolean; address?: string}>; + ): 
Promise<{ differences: boolean; address?: string }>; readDotFile(name: string): Promise; saveDotFile(name: string, content: string): Promise; @@ -249,7 +249,7 @@ export interface DeploymentsExtension { get(name: string): Promise; // fetch a deployment by name, throw if not existing getOrNull(name: string): Promise; // fetch deployment by name, return null if not existing getDeploymentsFromAddress(address: string): Promise; - all(): Promise<{[name: string]: Deployment}>; // return all deployments + all(): Promise<{ [name: string]: Deployment }>; // return all deployments getArtifact(name: string): Promise; // return a hardhat artifact (compiled contract without deployment) getExtendedArtifact(name: string): Promise; // return a extended artifact (with more info) (compiled contract without deployment) run( // execute deployment scripts @@ -261,11 +261,11 @@ export interface DeploymentsExtension { export?: string; exportAll?: string; } - ): Promise<{[name: string]: Deployment}>; + ): Promise<{ [name: string]: Deployment }>; fixture( // execute deployment as fixture for test // use evm_snapshot to revert back tags?: string | string[], - options?: {fallbackToGlobal?: boolean; keepExistingDeployments?: boolean} - ): Promise<{[name: string]: Deployment}>; + options?: { fallbackToGlobal?: boolean; keepExistingDeployments?: boolean } + ): Promise<{ [name: string]: Deployment }>; createFixture( // execute a function as fixture using evm_snaphost to revert back each time func: FixtureFunc, id?: string @@ -284,7 +284,7 @@ export interface DeploymentsExtension { rawTx(tx: SimpleTx): Promise; // execute a simple transaction catchUnknownSigner( // you can wrap other function with this function and it will catch failure due to missing signer with the details of the tx to be executed action: Promise | (() => Promise), - options?: {log?: boolean} + options?: { log?: boolean } ): Promise(); + + // Group issues by file + for (const issue of issues) { + // Normalize path + let 
absolutePath = issue.filePath; + if (!path.isAbsolute(absolutePath)) { + absolutePath = path.join( + workspaceFolders[0].uri.fsPath, + issue.filePath + ); + } + const uri = vscode.Uri.file(absolutePath); + const uriStr = uri.toString(); + + if (!diagnosticsMap.has(uriStr)) { + diagnosticsMap.set(uriStr, []); + } + + const line = issue.line > 0 ? issue.line - 1 : 0; + const column = issue.column > 0 ? issue.column - 1 : 0; + + const range = new vscode.Range(line, column, line, column + 100); // Approximate end + + let severity = vscode.DiagnosticSeverity.Error; + if (issue.severity === 'warning') + severity = vscode.DiagnosticSeverity.Warning; + else if (issue.severity === 'info') + severity = vscode.DiagnosticSeverity.Information; + + const diagnostic = new vscode.Diagnostic(range, issue.message, severity); + diagnostic.source = 'Sunhat'; + diagnosticsMap.get(uriStr)!.push(diagnostic); + } + + compilationDiagnosticCollection.clear(); + diagnosticsMap.forEach((diags, uriStr) => { + compilationDiagnosticCollection.set(vscode.Uri.parse(uriStr), diags); + }); + } catch (e) { + console.error(`[Sunhat] Error reading compilation errors: ${e}`); + } +} function registerHoverProvider(): vscode.Disposable { return vscode.languages.registerHoverProvider(SUPPORTED_LANGUAGE_IDS, { @@ -93,7 +164,7 @@ function registerHoverProvider(): vscode.Disposable { } const llmDiagnostic = diagnostics.find( - (d) => d.source === 'Sunhat Auditor' && d.range.contains(position), + (d) => d.source === 'Sunhat Auditor' && d.range.contains(position) ); if (!llmDiagnostic || !(llmDiagnostic as any).auditInfo) { @@ -107,10 +178,10 @@ function registerHoverProvider(): vscode.Disposable { contents.supportThemeIcons = true; contents.appendMarkdown( - `### $(zap) Sunhat Auditor: ${llmDiagnostic.message}\n\n`, + `### $(zap) Sunhat Auditor: ${llmDiagnostic.message}\n\n` ); contents.appendMarkdown( - `**Details:** ${auditInfo.detailedDescription}\n\n---\n\n`, + `**Details:** 
${auditInfo.detailedDescription}\n\n---\n\n` ); contents.appendMarkdown(`**Suggestion:**\n`); @@ -130,20 +201,21 @@ export function activate(context: vscode.ExtensionContext): void { context.subscriptions.push( vscode.workspace.onDidOpenTextDocument((doc) => { if (doc) updateDiagnostics(doc); - }), + }) ); context.subscriptions.push( vscode.workspace.onDidSaveTextDocument((doc) => { if (doc) updateDiagnostics(doc); - }), + }) ); const workspaceFolders = vscode.workspace.workspaceFolders; if (workspaceFolders) { + // Watch audit-report.json const reportPath = path.join( workspaceFolders[0].uri.fsPath, - 'audit-report.json', + 'audit-report.json' ); const fileWatcher = vscode.workspace.createFileSystemWatcher(reportPath); @@ -160,6 +232,20 @@ export function activate(context: vscode.ExtensionContext): void { fileWatcher.onDidDelete(updateDiagnosticsForAllOpenFiles); context.subscriptions.push(fileWatcher); + + // Watch compilation-errors.json + const compilationWatcher = vscode.workspace.createFileSystemWatcher( + path.join(workspaceFolders[0].uri.fsPath, 'compilation-errors.json') + ); + compilationWatcher.onDidCreate(() => updateCompilationDiagnostics()); + compilationWatcher.onDidChange(() => updateCompilationDiagnostics()); + compilationWatcher.onDidDelete(() => + compilationDiagnosticCollection.clear() + ); + context.subscriptions.push(compilationWatcher); + + // Initial check + updateCompilationDiagnostics(); } if (vscode.window.activeTextEditor) { @@ -169,4 +255,5 @@ export function activate(context: vscode.ExtensionContext): void { export function deactivate(): void { diagnosticCollection.clear(); + compilationDiagnosticCollection.clear(); }