diff --git a/.github/workflows/_publish-code.yml b/.github/workflows/_publish-code.yml index dfa8d8ee1..4abc5d17f 100644 --- a/.github/workflows/_publish-code.yml +++ b/.github/workflows/_publish-code.yml @@ -1,6 +1,3 @@ -# -# Publish releases to NPM -# name: Publish Code on: diff --git a/.github/workflows/_publish-docs.yml b/.github/workflows/_publish-docs.yml index 3fdb325e5..7ff3aeebc 100644 --- a/.github/workflows/_publish-docs.yml +++ b/.github/workflows/_publish-docs.yml @@ -1,6 +1,3 @@ -# -# Publish Documentation to GitHub pages. -# name: Publish Documentation on: diff --git a/.github/workflows/_static-analysis.yml b/.github/workflows/_static-analysis.yml index 17db8e65e..88f572eff 100644 --- a/.github/workflows/_static-analysis.yml +++ b/.github/workflows/_static-analysis.yml @@ -1,6 +1,3 @@ -# -# Run static code analysis. -# name: Static Analysis on: @@ -18,7 +15,7 @@ jobs: - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version: 22 cache: "npm" - name: Install Node.js dependencies diff --git a/.github/workflows/_test-code-samples.yml b/.github/workflows/_test-code-samples.yml deleted file mode 100644 index 371787e95..000000000 --- a/.github/workflows/_test-code-samples.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Test Code Samples - -on: - workflow_call: - workflow_dispatch: - -jobs: - build: - name: Run Tests - runs-on: ubuntu-latest - strategy: - matrix: - node-version: - - "20" - - "22" - - "24" - - steps: - - name: Check out Git repository - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Set up Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - cache: "npm" - - - name: Install Node.js dependencies - run: npm ci - - - name: Build - run: npm run build-for-dist - - - name: Tests sample code - run: | - ./tests/test_code_samples.sh ${{ secrets.MINDEE_ACCOUNT_SE_TESTS }} ${{ secrets.MINDEE_ENDPOINT_SE_TESTS }} ${{ 
secrets.MINDEE_API_KEY_SE_TESTS }} ${{ secrets.MINDEE_V2_SE_TESTS_API_KEY }} ${{ secrets.MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID }} - diff --git a/.github/workflows/_test-integrations.yml b/.github/workflows/_test-integrations.yml index 3519c18ef..905299d7e 100644 --- a/.github/workflows/_test-integrations.yml +++ b/.github/workflows/_test-integrations.yml @@ -1,15 +1,23 @@ -# -# Run integration tests. -# name: Integration Test on: workflow_call: workflow_dispatch: +env: + MINDEE_API_KEY: ${{ secrets.MINDEE_API_KEY_SE_TESTS }} + WORKFLOW_ID: ${{ secrets.WORKFLOW_ID_SE_TESTS }} + MINDEE_V2_API_KEY: ${{ secrets.MINDEE_V2_SE_TESTS_API_KEY }} + MINDEE_V2_SE_TESTS_BLANK_PDF_URL: ${{ secrets.MINDEE_V2_SE_TESTS_BLANK_PDF_URL }} + MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID }} + MINDEE_V2_SE_TESTS_CLASSIFICATION_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_CLASSIFICATION_MODEL_ID }} + MINDEE_V2_SE_TESTS_CROP_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_CROP_MODEL_ID }} + MINDEE_V2_SE_TESTS_SPLIT_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_SPLIT_MODEL_ID }} + MINDEE_V2_SE_TESTS_OCR_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_OCR_MODEL_ID }} + jobs: - run-tests: - name: Run Integration Tests + run-tests-with-optional-dependencies: + name: Run Integration Tests With Optional Dependencies timeout-minutes: 30 strategy: matrix: @@ -56,10 +64,40 @@ jobs: run: npm run build - name: Test code - env: - MINDEE_API_KEY: ${{ secrets.MINDEE_API_KEY_SE_TESTS }} - WORKFLOW_ID: ${{ secrets.WORKFLOW_ID_SE_TESTS }} - MINDEE_V2_API_KEY: ${{ secrets.MINDEE_V2_SE_TESTS_API_KEY }} - MINDEE_V2_FINDOC_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID }} - MINDEE_V2_SE_TESTS_BLANK_PDF_URL: ${{ secrets.MINDEE_V2_SE_TESTS_BLANK_PDF_URL }} run: npm run test-integration + + + run-tests-without-optional-dependencies: + name: Run Integration Tests Without Optional Dependencies + timeout-minutes: 30 + strategy: + matrix: + os: + - "ubuntu-latest" + - "windows-latest" + - 
"macos-latest" + node-version: + - "18" + - "24" + runs-on: ${{ matrix.os }} + + steps: + - name: Check out Git repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Set up Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: "npm" + + - name: Install Node.js dependencies + run: npm ci --omit=optional + + - name: Compilation + run: npm run build + + - name: Test code + run: npm run test-integration-light diff --git a/.github/workflows/_test-smoke.yml b/.github/workflows/_test-smoke.yml new file mode 100644 index 000000000..6f44cd7b5 --- /dev/null +++ b/.github/workflows/_test-smoke.yml @@ -0,0 +1,55 @@ +name: Smoke Test + +on: + workflow_call: + workflow_dispatch: + +env: + MINDEE_API_KEY: ${{ secrets.MINDEE_API_KEY_SE_TESTS }} + MINDEE_V2_API_KEY: ${{ secrets.MINDEE_V2_SE_TESTS_API_KEY }} + MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID }} + MINDEE_V2_SE_TESTS_CLASSIFICATION_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_CLASSIFICATION_MODEL_ID }} + MINDEE_V2_SE_TESTS_CROP_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_CROP_MODEL_ID }} + MINDEE_V2_SE_TESTS_SPLIT_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_SPLIT_MODEL_ID }} + MINDEE_V2_SE_TESTS_OCR_MODEL_ID: ${{ secrets.MINDEE_V2_SE_TESTS_OCR_MODEL_ID }} + +jobs: + build: + name: Run Tests + runs-on: ubuntu-latest + strategy: + matrix: + node-version: + - "20" + - "22" + - "24" + + steps: + - name: Check out Git repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Set up Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: "npm" + + - name: Install Node.js dependencies + run: npm ci + + - name: Build + run: npm run build-for-dist + + - name: Tests v2 sample code + run: | + ./tests/test_v2_code_samples.sh + + - name: Tests v2 CLI commands + run: | + ./tests/test_v2_cli.sh + + - name: Tests v1 
sample code + run: | + ./tests/test_v1_code_samples.sh ${{ secrets.MINDEE_ACCOUNT_SE_TESTS }} ${{ secrets.MINDEE_ENDPOINT_SE_TESTS }} diff --git a/.github/workflows/_test-units.yml b/.github/workflows/_test-units.yml index 3639ffee3..99e25d53e 100644 --- a/.github/workflows/_test-units.yml +++ b/.github/workflows/_test-units.yml @@ -1,14 +1,11 @@ -# -# Run unit tests. -# name: Test on: workflow_call: jobs: - run-tests: - name: Run Tests + run-tests-with-optional-dependencies: + name: Run Tests With Optional Dependencies strategy: matrix: os: @@ -45,3 +42,36 @@ jobs: - name: Test code run: npm run test + run-tests-without-optional-dependencies: + name: Run Tests Without Optional Dependencies + strategy: + matrix: + os: + - "ubuntu-latest" + node-version: + - "18" + - "20" + - "22" + - "24" + runs-on: ${{ matrix.os }} + + steps: + - name: Check out Git repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Set up Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: "npm" + + - name: Install Node.js dependencies + run: npm ci --omit=optional + + - name: Compilation + run: npm run build + + - name: Test code + run: npm run test-light diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 6527968fb..72fcbf187 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -14,11 +14,11 @@ jobs: uses: ./.github/workflows/_test-units.yml needs: static_analysis secrets: inherit + test_smoke: + uses: ./.github/workflows/_test-smoke.yml + needs: static_analysis + secrets: inherit test_integrations: uses: ./.github/workflows/_test-integrations.yml needs: test_units secrets: inherit - test_code_samples: - uses: ./.github/workflows/_test-code-samples.yml - needs: test_units - secrets: inherit diff --git a/.mocharc.json b/.mocharc.json index 555e7056c..c82ad1d55 100644 --- a/.mocharc.json +++ b/.mocharc.json @@ -1,6 
+1,6 @@ { "$schema": "https://json.schemastore.org/mocharc", "extension": ["ts"], - "require": "ts-node/register", + "node-option": ["import=tsx"], "spec": ["tests"] } diff --git a/bin/mindee.ts b/bin/mindee.ts deleted file mode 100755 index b1d16ae69..000000000 --- a/bin/mindee.ts +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env node - -import { cli } from "../src/cli"; - -cli(); diff --git a/bin/mindeeV1.ts b/bin/mindeeV1.ts new file mode 100755 index 000000000..12fa8168a --- /dev/null +++ b/bin/mindeeV1.ts @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +import { cli } from "@/v1/cli.js"; + +cli(); diff --git a/bin/mindeeV2.ts b/bin/mindeeV2.ts new file mode 100755 index 000000000..ad38cd00d --- /dev/null +++ b/bin/mindeeV2.ts @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +import { cli } from "@/v2/cli.js"; + +cli(); diff --git a/docs/code_samples/bank_account_details_v1.txt b/docs/code_samples/bank_account_details_v1.txt index 2074b2d29..a5edf36a2 100644 --- a/docs/code_samples/bank_account_details_v1.txt +++ b/docs/code_samples/bank_account_details_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.fr.BankAccountDetailsV1, + mindee.v1.product.fr.BankAccountDetailsV1, inputSource ); diff --git a/docs/code_samples/bank_account_details_v2.txt b/docs/code_samples/bank_account_details_v2.txt index e1226a7fc..43f3f2321 100644 --- a/docs/code_samples/bank_account_details_v2.txt +++ b/docs/code_samples/bank_account_details_v2.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: 
"my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.fr.BankAccountDetailsV2, + mindee.v1.product.fr.BankAccountDetailsV2, inputSource ); diff --git a/docs/code_samples/bank_check_v1.txt b/docs/code_samples/bank_check_v1.txt index c10957cb6..76c8d74fd 100644 --- a/docs/code_samples/bank_check_v1.txt +++ b/docs/code_samples/bank_check_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.us.BankCheckV1, + mindee.v1.product.us.BankCheckV1, inputSource ); diff --git a/docs/code_samples/barcode_reader_v1.txt b/docs/code_samples/barcode_reader_v1.txt index 7368e1c45..afea500f0 100644 --- a/docs/code_samples/barcode_reader_v1.txt +++ b/docs/code_samples/barcode_reader_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.BarcodeReaderV1, + mindee.v1.product.BarcodeReaderV1, inputSource ); diff --git a/docs/code_samples/bill_of_lading_v1_async.txt b/docs/code_samples/bill_of_lading_v1_async.txt deleted file mode 100644 index 
dd780dbc0..000000000 --- a/docs/code_samples/bill_of_lading_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.BillOfLadingV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/business_card_v1_async.txt b/docs/code_samples/business_card_v1_async.txt deleted file mode 100644 index 9d043e173..000000000 --- a/docs/code_samples/business_card_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.BusinessCardV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/carte_grise_v1.txt b/docs/code_samples/carte_grise_v1.txt index f597c7274..38b8d04df 100644 --- a/docs/code_samples/carte_grise_v1.txt +++ b/docs/code_samples/carte_grise_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ 
-12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.fr.CarteGriseV1, + mindee.v1.product.fr.CarteGriseV1, inputSource ); diff --git a/docs/code_samples/cropper_v1.txt b/docs/code_samples/cropper_v1.txt index 12afcf3e8..e7513d2b1 100644 --- a/docs/code_samples/cropper_v1.txt +++ b/docs/code_samples/cropper_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.CropperV1, + mindee.v1.product.CropperV1, inputSource ); diff --git a/docs/code_samples/custom_v1.txt b/docs/code_samples/custom_v1.txt deleted file mode 100644 index 13cdbb2b4..000000000 --- a/docs/code_samples/custom_v1.txt +++ /dev/null @@ -1,32 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Create a custom endpoint for your product -const customEndpoint = mindeeClient.createEndpoint( - "my-endpoint", - "my-account", - // "my-version" // Optional: set the version, defaults to "1" -); - -// Parse it -const apiResponse = mindeeClient - .parse( - mindee.product.CustomV1, - inputSource, - { endpoint: customEndpoint } - ); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/default.txt b/docs/code_samples/default.txt index 96b938429..875363d69 100644 --- 
a/docs/code_samples/default.txt +++ b/docs/code_samples/default.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -20,7 +22,7 @@ const customEndpoint = mindeeClient.createEndpoint( // Parse the file. const apiResponse = mindeeClient .parse( - mindee.product.GeneratedV1, + mindee.v1.product.GeneratedV1, inputSource, { endpoint: customEndpoint } ); diff --git a/docs/code_samples/default_async.txt b/docs/code_samples/default_async.txt index 232c51a35..f4260c2f1 100644 --- a/docs/code_samples/default_async.txt +++ b/docs/code_samples/default_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -19,7 +21,7 @@ const customEndpoint = mindeeClient.createEndpoint( // Parse the file asynchronously. 
const asyncApiResponse = mindeeClient.enqueueAndParse( - mindee.product.GeneratedV1, + mindee.v1.product.GeneratedV1, inputSource, { endpoint: customEndpoint } ); diff --git a/docs/code_samples/delivery_notes_v1_async.txt b/docs/code_samples/delivery_notes_v1_async.txt deleted file mode 100644 index 7ee02c9ae..000000000 --- a/docs/code_samples/delivery_notes_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.DeliveryNoteV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/driver_license_v1_async.txt b/docs/code_samples/driver_license_v1_async.txt index 2c081f3c0..c2e4c1b2c 100644 --- a/docs/code_samples/driver_license_v1_async.txt +++ b/docs/code_samples/driver_license_v1_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.DriverLicenseV1, + mindee.v1.product.DriverLicenseV1, inputSource ); diff --git a/docs/code_samples/energy_bill_fra_v1_async.txt b/docs/code_samples/energy_bill_fra_v1_async.txt deleted file mode 100644 index 8d5ba1f5a..000000000 --- a/docs/code_samples/energy_bill_fra_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const 
mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.fr.EnergyBillV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/expense_receipts_v5.txt b/docs/code_samples/expense_receipts_v5.txt index bacb0b850..502cad692 100644 --- a/docs/code_samples/expense_receipts_v5.txt +++ b/docs/code_samples/expense_receipts_v5.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.ReceiptV5, + mindee.v1.product.ReceiptV5, inputSource ); diff --git a/docs/code_samples/expense_receipts_v5_async.txt b/docs/code_samples/expense_receipts_v5_async.txt index ec4162d8f..6f8f0d7dc 100644 --- a/docs/code_samples/expense_receipts_v5_async.txt +++ b/docs/code_samples/expense_receipts_v5_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.enqueueAndParse( - 
mindee.product.ReceiptV5, + mindee.v1.product.ReceiptV5, inputSource ); diff --git a/docs/code_samples/financial_document_v1.txt b/docs/code_samples/financial_document_v1.txt index 065e0bca0..9e2a1ce3f 100644 --- a/docs/code_samples/financial_document_v1.txt +++ b/docs/code_samples/financial_document_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.FinancialDocumentV1, + mindee.v1.product.FinancialDocumentV1, inputSource ); diff --git a/docs/code_samples/financial_document_v1_async.txt b/docs/code_samples/financial_document_v1_async.txt index e856278e5..e5063cf8d 100644 --- a/docs/code_samples/financial_document_v1_async.txt +++ b/docs/code_samples/financial_document_v1_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.FinancialDocumentV1, + mindee.v1.product.FinancialDocumentV1, inputSource ); diff --git a/docs/code_samples/french_healthcard_v1_async.txt b/docs/code_samples/french_healthcard_v1_async.txt deleted file mode 100644 index c8d16616c..000000000 --- a/docs/code_samples/french_healthcard_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a 
new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.fr.HealthCardV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/idcard_fr_v1.txt b/docs/code_samples/idcard_fr_v1.txt deleted file mode 100644 index 07a0973f1..000000000 --- a/docs/code_samples/idcard_fr_v1.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.parse( - mindee.product.fr.IdCardV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/idcard_fr_v2.txt b/docs/code_samples/idcard_fr_v2.txt index 07db4803b..d4e4d0016 100644 --- a/docs/code_samples/idcard_fr_v2.txt +++ b/docs/code_samples/idcard_fr_v2.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.fr.IdCardV2, + mindee.v1.product.fr.IdCardV2, inputSource ); diff --git a/docs/code_samples/ind_passport_v1_async.txt 
b/docs/code_samples/ind_passport_v1_async.txt deleted file mode 100644 index 5daa59138..000000000 --- a/docs/code_samples/ind_passport_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.ind.IndianPassportV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/international_id_v2_async.txt b/docs/code_samples/international_id_v2_async.txt index 107261dc5..3765f8eb8 100644 --- a/docs/code_samples/international_id_v2_async.txt +++ b/docs/code_samples/international_id_v2_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.InternationalIdV2, + mindee.v1.product.InternationalIdV2, inputSource ); diff --git a/docs/code_samples/invoice_splitter_v1_async.txt b/docs/code_samples/invoice_splitter_v1_async.txt index 81c9a5af6..451b6c1f8 100644 --- a/docs/code_samples/invoice_splitter_v1_async.txt +++ b/docs/code_samples/invoice_splitter_v1_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new 
mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.InvoiceSplitterV1, + mindee.v1.product.InvoiceSplitterV1, inputSource ); diff --git a/docs/code_samples/invoices_v4.txt b/docs/code_samples/invoices_v4.txt index cc80b368b..c48565b60 100644 --- a/docs/code_samples/invoices_v4.txt +++ b/docs/code_samples/invoices_v4.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.InvoiceV4, + mindee.v1.product.InvoiceV4, inputSource ); diff --git a/docs/code_samples/invoices_v4_async.txt b/docs/code_samples/invoices_v4_async.txt index 85e4cc938..f11ed9619 100644 --- a/docs/code_samples/invoices_v4_async.txt +++ b/docs/code_samples/invoices_v4_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.InvoiceV4, + mindee.v1.product.InvoiceV4, inputSource ); diff --git a/docs/code_samples/multi_receipts_detector_v1.txt b/docs/code_samples/multi_receipts_detector_v1.txt index 0c89b623f..f17555ca3 100644 --- a/docs/code_samples/multi_receipts_detector_v1.txt +++ 
b/docs/code_samples/multi_receipts_detector_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.parse( - mindee.product.MultiReceiptsDetectorV1, + mindee.v1.product.MultiReceiptsDetectorV1, inputSource ); diff --git a/docs/code_samples/nutrition_facts_v1_async.txt b/docs/code_samples/nutrition_facts_v1_async.txt deleted file mode 100644 index bbdc347e4..000000000 --- a/docs/code_samples/nutrition_facts_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.NutritionFactsLabelV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/passport_v1.txt b/docs/code_samples/passport_v1.txt index 5fd1b9d2a..c60f4ff90 100644 --- a/docs/code_samples/passport_v1.txt +++ b/docs/code_samples/passport_v1.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from "mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file 
const apiResponse = mindeeClient.parse( - mindee.product.PassportV1, + mindee.v1.product.PassportV1, inputSource ); diff --git a/docs/code_samples/payslip_fra_v2_async.txt b/docs/code_samples/payslip_fra_v2_async.txt deleted file mode 100644 index 8a053cd53..000000000 --- a/docs/code_samples/payslip_fra_v2_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.fr.PayslipV2, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/payslip_fra_v3_async.txt b/docs/code_samples/payslip_fra_v3_async.txt deleted file mode 100644 index 8ac5f2530..000000000 --- a/docs/code_samples/payslip_fra_v3_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.fr.PayslipV3, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/resume_v1_async.txt b/docs/code_samples/resume_v1_async.txt index 20f699b0d..f760cc576 100644 --- a/docs/code_samples/resume_v1_async.txt +++ b/docs/code_samples/resume_v1_async.txt @@ -3,7 +3,9 @@ const mindee = require("mindee"); // import * as mindee from 
"mindee"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -12,7 +14,7 @@ const inputSource = new mindee.PathInput( // Parse the file const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.ResumeV1, + mindee.v1.product.ResumeV1, inputSource ); diff --git a/docs/code_samples/us_healthcare_cards_v1_async.txt b/docs/code_samples/us_healthcare_cards_v1_async.txt deleted file mode 100644 index 87e6cac08..000000000 --- a/docs/code_samples/us_healthcare_cards_v1_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.us.HealthcareCardV1, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/us_mail_v3_async.txt b/docs/code_samples/us_mail_v3_async.txt deleted file mode 100644 index 5938069a3..000000000 --- a/docs/code_samples/us_mail_v3_async.txt +++ /dev/null @@ -1,23 +0,0 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; - -// Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); - -// Load a file from disk -const inputSource = new mindee.PathInput( - { inputPath: "/path/to/the/file.ext" } -); - -// Parse the file -const apiResponse = mindeeClient.enqueueAndParse( - mindee.product.us.UsMailV3, - inputSource -); - -// Handle the response Promise -apiResponse.then((resp) => { - // print a string 
summary - console.log(resp.document.toString()); -}); diff --git a/docs/code_samples/v2_classification.txt b/docs/code_samples/v2_classification.txt new file mode 100644 index 000000000..4e16a17ba --- /dev/null +++ b/docs/code_samples/v2_classification.txt @@ -0,0 +1,33 @@ +import * as mindee from "mindee"; +// If you're on CommonJS: +// const mindee = require("mindee"); + +const apiKey = "MY_API_KEY"; +const filePath = "/path/to/the/file.ext"; +const modelId = "MY_MODEL_ID"; + +// Init a new client +const mindeeClient = new mindee.Client( + { apiKey: apiKey } +); + +// Set product parameters +const params = { + modelId: modelId, +}; + +// Load a file from disk +const inputSource = new mindee.PathInput({ inputPath: filePath }); + +// Send for processing +const response = await mindeeClient.enqueueAndGetResult( + mindee.v2.product.Classification, + inputSource, + params, +); + +// print a string summary +console.log(response.inference.toString()); + +// Access the classification result +const classification = response.inference.result.classification; diff --git a/docs/code_samples/v2_crop.txt b/docs/code_samples/v2_crop.txt new file mode 100644 index 000000000..1f4b2c4b7 --- /dev/null +++ b/docs/code_samples/v2_crop.txt @@ -0,0 +1,33 @@ +import * as mindee from "mindee"; +// If you're on CommonJS: +// const mindee = require("mindee"); + +const apiKey = "MY_API_KEY"; +const filePath = "/path/to/the/file.ext"; +const modelId = "MY_MODEL_ID"; + +// Init a new client +const mindeeClient = new mindee.Client( + { apiKey: apiKey } +); + +// Set product parameters +const params = { + modelId: modelId, +}; + +// Load a file from disk +const inputSource = new mindee.PathInput({ inputPath: filePath }); + +// Send for processing +const response = await mindeeClient.enqueueAndGetResult( + mindee.v2.product.Crop, + inputSource, + params, +); + +// print a string summary +console.log(response.inference.toString()); + +// Access the result crops +const crops = 
response.inference.result.crops; diff --git a/docs/code_samples/default_v2.txt b/docs/code_samples/v2_extraction.txt similarity index 61% rename from docs/code_samples/default_v2.txt rename to docs/code_samples/v2_extraction.txt index 4f43d600c..caba4b9b7 100644 --- a/docs/code_samples/default_v2.txt +++ b/docs/code_samples/v2_extraction.txt @@ -1,16 +1,18 @@ -const mindee = require("mindee"); -// for TS or modules: -// import * as mindee from "mindee"; +import * as mindee from "mindee"; +// If you're on CommonJS: +// const mindee = require("mindee"); const apiKey = "MY_API_KEY"; const filePath = "/path/to/the/file.ext"; const modelId = "MY_MODEL_ID"; // Init a new client -const mindeeClient = new mindee.ClientV2({ apiKey: apiKey }); +const mindeeClient = new mindee.Client( + { apiKey: apiKey } +); -// Set inference parameters -const inferenceParams = { +// Set product parameters +const params = { modelId: modelId, // Options: set to `true` or `false` to override defaults @@ -30,13 +32,14 @@ const inferenceParams = { const inputSource = new mindee.PathInput({ inputPath: filePath }); // Send for processing -const response = mindeeClient.enqueueAndGetInference( +const response = await mindeeClient.enqueueAndGetResult( + mindee.v2.product.Extraction, inputSource, - inferenceParams + params, ); -// Handle the response Promise -response.then((resp) => { - // print a string summary - console.log(resp.inference.toString()); -}); +// print a string summary +console.log(response.inference.toString()); + +// Access the result fields +const fields = response.inference.result.fields; diff --git a/docs/code_samples/v2_ocr.txt b/docs/code_samples/v2_ocr.txt new file mode 100644 index 000000000..5399665eb --- /dev/null +++ b/docs/code_samples/v2_ocr.txt @@ -0,0 +1,33 @@ +import * as mindee from "mindee"; +// If you're on CommonJS: +// const mindee = require("mindee"); + +const apiKey = "MY_API_KEY"; +const filePath = "/path/to/the/file.ext"; +const modelId = "MY_MODEL_ID"; + +// 
Init a new client +const mindeeClient = new mindee.Client( + { apiKey: apiKey } +); + +// Set product parameters +const params = { + modelId: modelId, +}; + +// Load a file from disk +const inputSource = new mindee.PathInput({ inputPath: filePath }); + +// Send for processing +const response = await mindeeClient.enqueueAndGetResult( + mindee.v2.product.Ocr, + inputSource, + params, +); + +// print a string summary +console.log(response.inference.toString()); + +// Access the result OCR pages +const crops = response.inference.result.pages; diff --git a/docs/code_samples/v2_split.txt b/docs/code_samples/v2_split.txt new file mode 100644 index 000000000..1cc964bd1 --- /dev/null +++ b/docs/code_samples/v2_split.txt @@ -0,0 +1,33 @@ +import * as mindee from "mindee"; +// If you're on CommonJS: +// const mindee = require("mindee"); + +const apiKey = "MY_API_KEY"; +const filePath = "/path/to/the/file.ext"; +const modelId = "MY_MODEL_ID"; + +// Init a new client +const mindeeClient = new mindee.Client( + { apiKey: apiKey } +); + +// Set product parameters +const params = { + modelId: modelId, +}; + +// Load a file from disk +const inputSource = new mindee.PathInput({ inputPath: filePath }); + +// Send for processing +const response = await mindeeClient.enqueueAndGetResult( + mindee.v2.product.Split, + inputSource, + params, +); + +// print a string summary +console.log(response.inference.toString()); + +// Access the result splits +const crops = response.inference.result.splits; diff --git a/docs/code_samples/workflow_execution.txt b/docs/code_samples/workflow_execution.txt index 8ee38248d..7ab2f67df 100644 --- a/docs/code_samples/workflow_execution.txt +++ b/docs/code_samples/workflow_execution.txt @@ -5,7 +5,9 @@ const mindee = require("mindee"); const workflowId: string = "workflow-id"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from 
disk const inputSource = new mindee.PathInput( diff --git a/docs/code_samples/workflow_ots_rag.txt b/docs/code_samples/workflow_ots_rag.txt index ad50a07a5..0cc72d3e4 100644 --- a/docs/code_samples/workflow_ots_rag.txt +++ b/docs/code_samples/workflow_ots_rag.txt @@ -5,7 +5,9 @@ const mindee = require("mindee"); const workflowId: string = "workflow-id"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -20,7 +22,7 @@ const workflowParams = { // Parse the file asynchronously on a workflow queue const asyncApiResponse = mindeeClient.enqueueAndParse( - mindee.product.FinancialDocumentV1, + mindee.v1.product.FinancialDocumentV1, inputSource, workflowParams ); diff --git a/docs/code_samples/workflow_polling.txt b/docs/code_samples/workflow_polling.txt index eba9a306e..9d6cf2c0b 100644 --- a/docs/code_samples/workflow_polling.txt +++ b/docs/code_samples/workflow_polling.txt @@ -5,7 +5,9 @@ const mindee = require("mindee"); const workflowId: string = "workflow-id"; // Init a new client -const mindeeClient = new mindee.Client({ apiKey: "my-api-key" }); +const mindeeClient = new mindee.v1.Client( + { apiKey: "my-api-key" } +); // Load a file from disk const inputSource = new mindee.PathInput( @@ -27,7 +29,7 @@ const workflowParams = { // Parse the file asynchronously on a workflow queue const asyncApiResponse = mindeeClient.enqueueAndParse( - mindee.product.GeneratedV1, + mindee.v1.product.GeneratedV1, inputSource, workflowParams ); diff --git a/example/invoiceSplitterCustomSplitsTutorial.js b/example/invoiceSplitterCustomSplitsTutorial.js index 0028a6eee..95c2df575 100644 --- a/example/invoiceSplitterCustomSplitsTutorial.js +++ b/example/invoiceSplitterCustomSplitsTutorial.js @@ -1,26 +1,31 @@ -const { Client, product, imageOperations, PathInput } = require("mindee"); const { setTimeout } = 
require("node:timers/promises"); +const mindee = require("mindee"); -async function parseInvoicesWithCustomSplitsThreshold(customSplits) { +async function parseInvoicesWithCustomSplitsThreshold(inputPath, customSplits) { // fill in your API key or add it as an environment variable - const mindeeClient = new Client(); + const mindeeClient = new mindee.v1.Client(); // Load a file from disk - const inputSource = new PathInput( - { inputPath: "/path/to/the/file.ext" } + const inputSource = new mindee.PathInput( + { inputPath: inputPath } ); - let invoices = await imageOperations.extractInvoices(inputSource, customSplits); + let invoices = await mindee.v1.extraction.extractInvoices(inputSource, customSplits); for (const invoice of invoices) { // optional: save the documents locally invoice.saveToFile(`/tmp/invoice_p_${invoice.pageIdMin}-${invoice.pageIdMax}.pdf`); - const respInvoice = await mindeeClient.parse(product.InvoiceV4, invoice.asSource()); + const respInvoice = await mindeeClient.parse( + mindee.v1.product.InvoiceV4, invoice.asSource() + ); console.log(respInvoice.document.toString()); - await setTimeout(1000); // wait some time between requests as to not overload the server + // wait some time between requests as to not overload the server + await setTimeout(1000); } - } const customSplits = [[0, 1], [1, 2]]; -parseInvoicesWithCustomSplitsThreshold(customSplits); +parseInvoicesWithCustomSplitsThreshold( + "/path/to/the/file.ext", + customSplits +); diff --git a/example/invoiceSplitterTutorial.js b/example/invoiceSplitterTutorial.js index 03b69c086..8bdece0ec 100644 --- a/example/invoiceSplitterTutorial.js +++ b/example/invoiceSplitterTutorial.js @@ -1,27 +1,32 @@ -const { Client, product, imageOperations, PathInput } = require("mindee"); const { setTimeout } = require("node:timers/promises"); +const mindee = require("mindee"); -async function parseInvoices() { +async function parseInvoices(inputPath) { // fill in your API key or add it as an environment 
variable - const mindeeClient = new Client(); + const mindeeClient = new mindee.v1.Client(); // Load a file from disk - const inputSource = new PathInput( - { inputPath: "/path/to/the/file.ext" } + const inputSource = new mindee.PathInput( + { inputPath: inputPath } ); const resp = await mindeeClient.enqueueAndParse( - product.InvoiceSplitterV1, inputSource + mindee.v1.product.InvoiceSplitterV1, + inputSource + ); + let invoices = await mindee.v1.extraction.extractInvoices( + inputSource, resp.document.inference ); - let invoices = await imageOperations.extractInvoices(inputSource, resp.document.inference); for (const invoice of invoices) { // optional: save the documents locally invoice.saveToFile(`/tmp/invoice_p_${invoice.pageIdMin}-${invoice.pageIdMax}.pdf`); - const respInvoice = await mindeeClient.parse(product.InvoiceV4, invoice.asSource()); + const respInvoice = await mindeeClient.parse( + mindee.v1.product.InvoiceV4, invoice.asSource() + ); console.log(respInvoice.document.toString()); - await setTimeout(1000); // wait some time between requests as to not overload the server + // wait some time between requests as to not overload the server + await setTimeout(1000); } - } -parseInvoices(); +parseInvoices("/path/to/the/file.ext"); diff --git a/example/multiReceiptsTutorial.js b/example/multiReceiptsTutorial.js index 96f77c6e7..f52654a8c 100644 --- a/example/multiReceiptsTutorial.js +++ b/example/multiReceiptsTutorial.js @@ -1,24 +1,32 @@ -const { Client, product, imageOperations, PathInput } = require("mindee"); const { setTimeout } = require("node:timers/promises"); +const mindee = require("mindee"); -async function parseReceipts() { +async function parseReceipts(inputPath) { // fill in your API key or add it as an environment variable - const mindeeClient = new Client(); + const mindeeClient = new mindee.v1.Client(); // Load a file from disk - const inputSource = new PathInput( - { inputPath: "/path/to/the/file.ext" } + const inputSource = new 
mindee.PathInput( + { inputPath: inputPath } ); - const resp = await mindeeClient.parse(product.MultiReceiptsDetectorV1, inputSource); - let receipts = await imageOperations.extractReceipts(inputSource, resp.document.inference); + const resp = await mindeeClient.parse( + mindee.v1.product.MultiReceiptsDetectorV1, inputSource + ); + let receipts = await mindee.v1.extraction.extractReceipts( + inputSource, resp.document.inference + ); for (const receipt of receipts) { - const respReceipt = await mindeeClient.parse(product.ReceiptV5, receipt.asSource()); + const respReceipt = await mindeeClient.parse( + mindee.v1.product.ReceiptV5, receipt.asSource() + ); console.log(respReceipt.document.toString()); - receipt.saveToFile(`/tmp/receipt_p${receipt.pageId}_${receipt.receiptId}.pdf`); //optional: save to a file - await setTimeout(1000); // wait some time between requests as to not overload the server - } + // optional: save to a file + receipt.saveToFile(`/tmp/receipt_p${receipt.pageId}_${receipt.receiptId}.pdf`); + // wait some time between requests as to not overload the server + await setTimeout(1000); + } } -parseReceipts(); +parseReceipts("/path/to/the/file.ext"); diff --git a/package-lock.json b/package-lock.json index cb60284ea..94d38729c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,15 +9,12 @@ "version": "4.36.3", "license": "MIT", "dependencies": { - "@cantoo/pdf-lib": "^2.3.2", "commander": "~9.4.1", - "file-type": "~16.5.4", - "form-data": "~3.0.1", - "node-poppler": "^7.2.2", - "pdf.js-extract": "^0.2.1", - "sharp": "^0.33.5", + "file-type": "^19.6.0", + "node-poppler": "^7.2.4", "tmp": "^0.2.3", - "tslib": "^2.8.1" + "tslib": "^2.8.1", + "undici": "^6.23.0" }, "bin": { "mindee": "bin/mindee.js" @@ -29,17 +26,32 @@ "@types/tmp": "^0.2.6", "@typescript-eslint/eslint-plugin": "^8.52.0", "@typescript-eslint/parser": "^8.52.0", - "chai": "^4.5.0", - "eslint": "^9.20.1", + "chai": "^6.2.2", + "eslint": "^9.39.2", "eslint-plugin-jsdoc": "^50.6.17", 
"mocha": "^11.7.5", - "nock": "^13.5.6", - "ts-node": "^10.9.2", + "tsc-alias": "^1.8.16", + "tsx": "^4.21.0", "typedoc": "~0.28.15", "typescript": "^5.9.3" }, "engines": { - "node": ">= 18" + "node": ">= 18.17" + }, + "optionalDependencies": { + "@cantoo/pdf-lib": "^2.3.2", + "pdf.js-extract": "^0.2.1", + "sharp": "~0.34.5" + } + }, + "node_modules/@borewit/text-codec": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.2.1.tgz", + "integrity": "sha512-k7vvKPbf7J2fZ5klGRD9AeKfUvojuZIQ3BT5u7Jfv+puwXkUBUT5PVyMDfJZpy30CBDXGMgw7fguK/lpOMBvgw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" } }, "node_modules/@cantoo/pdf-lib": { @@ -47,6 +59,7 @@ "resolved": "https://registry.npmjs.org/@cantoo/pdf-lib/-/pdf-lib-2.5.3.tgz", "integrity": "sha512-SBQp8i/XdWNUhLutn5P67Pwj4X9vU046BRpfOMODJZuYVrgChtsTfgdnlW2O7x8gdXs8j7NoTaWI/b78E2oVmQ==", "license": "MIT", + "optional": true, "dependencies": { "@pdf-lib/standard-fonts": "^1.0.0", "@pdf-lib/upng": "^1.0.1", @@ -57,42 +70,471 @@ "tslib": ">=2" } }, - "node_modules/@cspotcode/source-map-support": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "node_modules/@emnapi/runtime": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@es-joy/jsdoccomment": { + "version": "0.50.2", + "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.50.2.tgz", + "integrity": 
"sha512-YAdE/IJSpwbOTiaURNCKECdAwqrJuFiZhylmesBcIRawtYKnBR2wxPhoIewMg+Yu+QuYvHfJNReWpoxGBKOChA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.6", + "@typescript-eslint/types": "^8.11.0", + "comment-parser": "1.4.1", + "esquery": "^1.6.0", + "jsdoc-type-pratt-parser": "~4.1.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + 
"node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "cpu": [ + "arm" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": 
"sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + 
"node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "0.3.9" - }, + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@emnapi/runtime": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", - "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "cpu": [ + "ia32" + ], + "dev": true, "license": "MIT", "optional": true, - "dependencies": { - "tslib": "^2.4.0" + "os": [ + "win32" + ], + "engines": { + "node": ">=18" } }, - "node_modules/@es-joy/jsdoccomment": { - "version": "0.50.2", - "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.50.2.tgz", - "integrity": "sha512-YAdE/IJSpwbOTiaURNCKECdAwqrJuFiZhylmesBcIRawtYKnBR2wxPhoIewMg+Yu+QuYvHfJNReWpoxGBKOChA==", + "node_modules/@esbuild/win32-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.6", - "@typescript-eslint/types": "^8.11.0", - "comment-parser": "1.4.1", - "esquery": "^1.6.0", - "jsdoc-type-pratt-parser": "~4.1.0" - }, + "optional": true, + 
"os": [ + "win32" + ], "engines": { "node": ">=18" } @@ -287,16 +729,16 @@ } }, "node_modules/@gerrit0/mini-shiki": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/@gerrit0/mini-shiki/-/mini-shiki-3.21.0.tgz", - "integrity": "sha512-9PrsT5DjZA+w3lur/aOIx3FlDeHdyCEFlv9U+fmsVyjPZh61G5SYURQ/1ebe2U63KbDmI2V8IhIUegWb8hjOyg==", + "version": "3.22.0", + "resolved": "https://registry.npmjs.org/@gerrit0/mini-shiki/-/mini-shiki-3.22.0.tgz", + "integrity": "sha512-jMpciqEVUBKE1QwU64S4saNMzpsSza6diNCk4MWAeCxO2+LFi2FIFmL2S0VDLzEJCxuvCbU783xi8Hp/gkM5CQ==", "dev": true, "license": "MIT", "dependencies": { - "@shikijs/engine-oniguruma": "^3.21.0", - "@shikijs/langs": "^3.21.0", - "@shikijs/themes": "^3.21.0", - "@shikijs/types": "^3.21.0", + "@shikijs/engine-oniguruma": "^3.22.0", + "@shikijs/langs": "^3.22.0", + "@shikijs/themes": "^3.22.0", + "@shikijs/types": "^3.22.0", "@shikijs/vscode-textmate": "^10.0.2" } }, @@ -352,10 +794,20 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@img/colour": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.0.0.tgz", + "integrity": "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=18" + } + }, "node_modules/@img/sharp-darwin-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", - "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", + "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", "cpu": [ "arm64" ], @@ -371,13 +823,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-darwin-arm64": 
"1.0.4" + "@img/sharp-libvips-darwin-arm64": "1.2.4" } }, "node_modules/@img/sharp-darwin-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", - "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", + "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", "cpu": [ "x64" ], @@ -393,13 +845,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-darwin-x64": "1.0.4" + "@img/sharp-libvips-darwin-x64": "1.2.4" } }, "node_modules/@img/sharp-libvips-darwin-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", - "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", + "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", "cpu": [ "arm64" ], @@ -413,9 +865,9 @@ } }, "node_modules/@img/sharp-libvips-darwin-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", - "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", + "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", "cpu": [ "x64" ], @@ -429,9 +881,9 @@ } }, "node_modules/@img/sharp-libvips-linux-arm": { - 
"version": "1.0.5", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", - "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", + "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", "cpu": [ "arm" ], @@ -445,9 +897,9 @@ } }, "node_modules/@img/sharp-libvips-linux-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", - "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", + "integrity": "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", "cpu": [ "arm64" ], @@ -460,10 +912,42 @@ "url": "https://opencollective.com/libvips" } }, + "node_modules/@img/sharp-libvips-linux-ppc64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.2.4.tgz", + "integrity": "sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==", + "cpu": [ + "ppc64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-riscv64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-riscv64/-/sharp-libvips-linux-riscv64-1.2.4.tgz", + "integrity": "sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==", + "cpu": [ + "riscv64" + ], + "license": 
"LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, "node_modules/@img/sharp-libvips-linux-s390x": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", - "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.4.tgz", + "integrity": "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==", "cpu": [ "s390x" ], @@ -477,9 +961,9 @@ } }, "node_modules/@img/sharp-libvips-linux-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", - "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", + "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", "cpu": [ "x64" ], @@ -493,9 +977,9 @@ } }, "node_modules/@img/sharp-libvips-linuxmusl-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", - "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", + "integrity": "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", "cpu": [ "arm64" ], @@ -509,9 +993,9 @@ } }, "node_modules/@img/sharp-libvips-linuxmusl-x64": { - "version": "1.0.4", - 
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", - "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", + "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", "cpu": [ "x64" ], @@ -525,9 +1009,9 @@ } }, "node_modules/@img/sharp-linux-arm": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", - "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", + "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", "cpu": [ "arm" ], @@ -543,13 +1027,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-arm": "1.0.5" + "@img/sharp-libvips-linux-arm": "1.2.4" } }, "node_modules/@img/sharp-linux-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", - "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", + "integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", "cpu": [ "arm64" ], @@ -565,13 +1049,57 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-arm64": "1.0.4" + "@img/sharp-libvips-linux-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-ppc64": { + 
"version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-ppc64/-/sharp-linux-ppc64-0.34.5.tgz", + "integrity": "sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==", + "cpu": [ + "ppc64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-ppc64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-riscv64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-riscv64/-/sharp-linux-riscv64-0.34.5.tgz", + "integrity": "sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==", + "cpu": [ + "riscv64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-riscv64": "1.2.4" } }, "node_modules/@img/sharp-linux-s390x": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", - "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.5.tgz", + "integrity": "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==", "cpu": [ "s390x" ], @@ -587,13 +1115,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-s390x": "1.0.4" + "@img/sharp-libvips-linux-s390x": "1.2.4" } }, "node_modules/@img/sharp-linux-x64": { - "version": "0.33.5", - "resolved": 
"https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", - "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", + "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", "cpu": [ "x64" ], @@ -609,13 +1137,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-x64": "1.0.4" + "@img/sharp-libvips-linux-x64": "1.2.4" } }, "node_modules/@img/sharp-linuxmusl-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", - "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", + "integrity": "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", "cpu": [ "arm64" ], @@ -631,13 +1159,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" } }, "node_modules/@img/sharp-linuxmusl-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", - "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", + "integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", "cpu": [ "x64" ], @@ -653,21 +1181,40 @@ "url": "https://opencollective.com/libvips" }, 
"optionalDependencies": { - "@img/sharp-libvips-linuxmusl-x64": "1.0.4" + "@img/sharp-libvips-linuxmusl-x64": "1.2.4" } }, "node_modules/@img/sharp-wasm32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", - "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.5.tgz", + "integrity": "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==", "cpu": [ "wasm32" ], "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", "optional": true, "dependencies": { - "@emnapi/runtime": "^1.2.0" + "@emnapi/runtime": "^1.7.0" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.5.tgz", + "integrity": "sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], "engines": { "node": "^18.17.0 || ^20.3.0 || >=21.0.0" }, @@ -676,9 +1223,9 @@ } }, "node_modules/@img/sharp-win32-ia32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", - "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.5.tgz", + "integrity": "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==", "cpu": [ "ia32" ], @@ -695,9 +1242,9 @@ } }, "node_modules/@img/sharp-win32-x64": { - 
"version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", - "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", + "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", "cpu": [ "x64" ], @@ -731,32 +1278,42 @@ "node": ">=12" } }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "dev": true, "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, "engines": { - "node": ">=6.0.0" + "node": ">= 8" } }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "dev": true, - "license": "MIT" + "license": "MIT", + "engines": { + "node": ">= 8" + } }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" } }, "node_modules/@pdf-lib/standard-fonts": { @@ -764,6 +1321,7 @@ "resolved": "https://registry.npmjs.org/@pdf-lib/standard-fonts/-/standard-fonts-1.0.0.tgz", "integrity": "sha512-hU30BK9IUN/su0Mn9VdlVKsWBS6GyhVfqjwl1FjZN4TxP6cCw0jP2w7V3Hf5uX7M0AZJ16vey9yE0ny7Sa59ZA==", "license": "MIT", + "optional": true, "dependencies": { "pako": "^1.0.6" } @@ -773,6 +1331,7 @@ "resolved": "https://registry.npmjs.org/@pdf-lib/upng/-/upng-1.0.1.tgz", "integrity": "sha512-dQK2FUMQtowVP00mtIksrlZhdFXQZPC+taih1q4CvPZ5vqdxR/LKBaFg0oAfzd1GlHZXXSPdQfzQnt+ViGvEIQ==", "license": "MIT", + "optional": true, "dependencies": { "pako": "^1.0.10" } @@ -788,41 +1347,47 @@ "node": ">=14" } }, + "node_modules/@sec-ant/readable-stream": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", + "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==", + "license": "MIT" + }, "node_modules/@shikijs/engine-oniguruma": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.21.0.tgz", - "integrity": "sha512-OYknTCct6qiwpQDqDdf3iedRdzj6hFlOPv5hMvI+hkWfCKs5mlJ4TXziBG9nyabLwGulrUjHiCq3xCspSzErYQ==", + "version": "3.22.0", + "resolved": 
"https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.22.0.tgz", + "integrity": "sha512-DyXsOG0vGtNtl7ygvabHd7Mt5EY8gCNqR9Y7Lpbbd/PbJvgWrqaKzH1JW6H6qFkuUa8aCxoiYVv8/YfFljiQxA==", "dev": true, "license": "MIT", "dependencies": { - "@shikijs/types": "3.21.0", + "@shikijs/types": "3.22.0", "@shikijs/vscode-textmate": "^10.0.2" } }, "node_modules/@shikijs/langs": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.21.0.tgz", - "integrity": "sha512-g6mn5m+Y6GBJ4wxmBYqalK9Sp0CFkUqfNzUy2pJglUginz6ZpWbaWjDB4fbQ/8SHzFjYbtU6Ddlp1pc+PPNDVA==", + "version": "3.22.0", + "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.22.0.tgz", + "integrity": "sha512-x/42TfhWmp6H00T6uwVrdTJGKgNdFbrEdhaDwSR5fd5zhQ1Q46bHq9EO61SCEWJR0HY7z2HNDMaBZp8JRmKiIA==", "dev": true, "license": "MIT", "dependencies": { - "@shikijs/types": "3.21.0" + "@shikijs/types": "3.22.0" } }, "node_modules/@shikijs/themes": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.21.0.tgz", - "integrity": "sha512-BAE4cr9EDiZyYzwIHEk7JTBJ9CzlPuM4PchfcA5ao1dWXb25nv6hYsoDiBq2aZK9E3dlt3WB78uI96UESD+8Mw==", + "version": "3.22.0", + "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.22.0.tgz", + "integrity": "sha512-o+tlOKqsr6FE4+mYJG08tfCFDS+3CG20HbldXeVoyP+cYSUxDhrFf3GPjE60U55iOkkjbpY2uC3It/eeja35/g==", "dev": true, "license": "MIT", "dependencies": { - "@shikijs/types": "3.21.0" + "@shikijs/types": "3.22.0" } }, "node_modules/@shikijs/types": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.21.0.tgz", - "integrity": "sha512-zGrWOxZ0/+0ovPY7PvBU2gIS9tmhSUUt30jAcNV0Bq0gb2S98gwfjIs1vxlmH5zM7/4YxLamT6ChlqqAJmPPjA==", + "version": "3.22.0", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.22.0.tgz", + "integrity": "sha512-491iAekgKDBFE67z70Ok5a8KBMsQ2IJwOWw3us/7ffQkIBCyOQfm/aNwVMBUriP02QshIfgHCBSIYAl3u2eWjg==", "dev": true, "license": 
"MIT", "dependencies": { @@ -843,34 +1408,6 @@ "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", "license": "MIT" }, - "node_modules/@tsconfig/node10": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz", - "integrity": "sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tsconfig/node12": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", - "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tsconfig/node14": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", - "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", - "dev": true, - "license": "MIT" - }, - "node_modules/@tsconfig/node16": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", - "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/chai": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", @@ -945,17 +1482,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz", - "integrity": "sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.55.0.tgz", + "integrity": 
"sha512-1y/MVSz0NglV1ijHC8OT49mPJ4qhPYjiK08YUQVbIOyu+5k862LKUHFkpKHWu//zmr7hDR2rhwUm6gnCGNmGBQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/type-utils": "8.53.1", - "@typescript-eslint/utils": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/scope-manager": "8.55.0", + "@typescript-eslint/type-utils": "8.55.0", + "@typescript-eslint/utils": "8.55.0", + "@typescript-eslint/visitor-keys": "8.55.0", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -968,22 +1505,22 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.53.1", + "@typescript-eslint/parser": "^8.55.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/parser": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.1.tgz", - "integrity": "sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.55.0.tgz", + "integrity": "sha512-4z2nCSBfVIMnbuu8uinj+f0o4qOeggYJLbjpPHka3KH1om7e+H9yLKTYgksTaHcGco+NClhhY2vyO3HsMH1RGw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/scope-manager": "8.55.0", + "@typescript-eslint/types": "8.55.0", + "@typescript-eslint/typescript-estree": "8.55.0", + "@typescript-eslint/visitor-keys": "8.55.0", "debug": "^4.4.3" }, "engines": { @@ -999,14 +1536,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.53.1", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.1.tgz", - "integrity": "sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.55.0.tgz", + "integrity": "sha512-zRcVVPFUYWa3kNnjaZGXSu3xkKV1zXy8M4nO/pElzQhFweb7PPtluDLQtKArEOGmjXoRjnUZ29NjOiF0eCDkcQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.53.1", - "@typescript-eslint/types": "^8.53.1", + "@typescript-eslint/tsconfig-utils": "^8.55.0", + "@typescript-eslint/types": "^8.55.0", "debug": "^4.4.3" }, "engines": { @@ -1021,14 +1558,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz", - "integrity": "sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.55.0.tgz", + "integrity": "sha512-fVu5Omrd3jeqeQLiB9f1YsuK/iHFOwb04bCtY4BSCLgjNbOD33ZdV6KyEqplHr+IlpgT0QTZ/iJ+wT7hvTx49Q==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1" + "@typescript-eslint/types": "8.55.0", + "@typescript-eslint/visitor-keys": "8.55.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -1039,9 +1576,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz", - "integrity": "sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==", + "version": "8.55.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.55.0.tgz", + "integrity": "sha512-1R9cXqY7RQd7WuqSN47PK9EDpgFUK3VqdmbYrvWJZYDd0cavROGn+74ktWBlmJ13NXUQKlZ/iAEQHI/V0kKe0Q==", "dev": true, "license": "MIT", "engines": { @@ -1056,15 +1593,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz", - "integrity": "sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.55.0.tgz", + "integrity": "sha512-x1iH2unH4qAt6I37I2CGlsNs+B9WGxurP2uyZLRz6UJoZWDBx9cJL1xVN/FiOmHEONEg6RIufdvyT0TEYIgC5g==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1", - "@typescript-eslint/utils": "8.53.1", + "@typescript-eslint/types": "8.55.0", + "@typescript-eslint/typescript-estree": "8.55.0", + "@typescript-eslint/utils": "8.55.0", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -1081,9 +1618,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.1.tgz", - "integrity": "sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.55.0.tgz", + "integrity": "sha512-ujT0Je8GI5BJWi+/mMoR0wxwVEQaxM+pi30xuMiJETlX80OPovb2p9E8ss87gnSVtYXtJoU9U1Cowcr6w2FE0w==", "dev": true, "license": "MIT", "engines": { @@ -1095,16 +1632,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz", - "integrity": 
"sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.55.0.tgz", + "integrity": "sha512-EwrH67bSWdx/3aRQhCoxDaHM+CrZjotc2UCCpEDVqfCE+7OjKAGWNY2HsCSTEVvWH2clYQK8pdeLp42EVs+xQw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.53.1", - "@typescript-eslint/tsconfig-utils": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/project-service": "8.55.0", + "@typescript-eslint/tsconfig-utils": "8.55.0", + "@typescript-eslint/types": "8.55.0", + "@typescript-eslint/visitor-keys": "8.55.0", "debug": "^4.4.3", "minimatch": "^9.0.5", "semver": "^7.7.3", @@ -1123,16 +1660,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.1.tgz", - "integrity": "sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.55.0.tgz", + "integrity": "sha512-BqZEsnPGdYpgyEIkDC1BadNY8oMwckftxBT+C8W0g1iKPdeqKZBtTfnvcq0nf60u7MkjFO8RBvpRGZBPw4L2ow==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1" + "@typescript-eslint/scope-manager": "8.55.0", + "@typescript-eslint/types": "8.55.0", + "@typescript-eslint/typescript-estree": "8.55.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -1147,13 +1684,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz", - 
"integrity": "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.55.0.tgz", + "integrity": "sha512-AxNRwEie8Nn4eFS1FzDMJWIISMGoXMb037sgCBJ3UR6o0fQTzr2tqN9WT+DkWJPhIdQCfV7T6D387566VtnCJA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/types": "8.55.0", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -1177,18 +1714,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/abort-controller": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "license": "MIT", - "dependencies": { - "event-target-shim": "^5.0.0" - }, - "engines": { - "node": ">=6.5" - } - }, "node_modules/acorn": { "version": "8.15.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", @@ -1212,19 +1737,6 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/acorn-walk": { - "version": "8.3.4", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", - "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "acorn": "^8.11.0" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -1271,6 +1783,33 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + 
"dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/anymatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/are-docs-informative": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/are-docs-informative/-/are-docs-informative-0.0.2.tgz", @@ -1281,13 +1820,6 @@ "node": ">=14" } }, - "node_modules/arg": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true, - "license": "MIT" - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -1295,6 +1827,16 @@ "dev": true, "license": "Python-2.0" }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/assertion-error": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", @@ -1305,12 +1847,6 @@ "node": ">=12" } }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "license": "MIT" - }, "node_modules/balanced-match": { "version": "1.0.2", 
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -1318,80 +1854,49 @@ "dev": true, "license": "MIT" }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", "dev": true, "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } - }, - "node_modules/browser-stdout": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", - "dev": true, - "license": "ISC" - }, - "node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": 
"patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, "license": "MIT", "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" + "balanced-match": "^1.0.0" } }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, "license": "MIT", "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" + "fill-range": "^7.1.1" }, "engines": { - "node": ">= 0.4" + "node": ">=8" } }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true, + "license": "ISC" + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -1415,32 +1920,13 @@ } }, "node_modules/chai": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.5.0.tgz", - "integrity": "sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==", - "dev": true, - "license": "MIT", - "dependencies": { - "assertion-error": "^1.1.0", - 
"check-error": "^1.0.3", - "deep-eql": "^4.1.3", - "get-func-name": "^2.0.2", - "loupe": "^2.3.6", - "pathval": "^1.1.1", - "type-detect": "^4.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/chai/node_modules/assertion-error": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", - "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", "dev": true, "license": "MIT", "engines": { - "node": "*" + "node": ">=18" } }, "node_modules/chalk": { @@ -1460,19 +1946,6 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/check-error": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", - "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", - "dev": true, - "license": "MIT", - "dependencies": { - "get-func-name": "^2.0.2" - }, - "engines": { - "node": "*" - } - }, "node_modules/chokidar": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", @@ -1572,6 +2045,7 @@ "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", "license": "MIT", + "optional": true, "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" @@ -1584,6 +2058,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "devOptional": true, "license": "MIT", "dependencies": { "color-name": "~1.1.4" @@ -1596,6 +2071,7 @@ 
"version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "devOptional": true, "license": "MIT" }, "node_modules/color-string": { @@ -1603,23 +2079,12 @@ "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", "license": "MIT", + "optional": true, "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "license": "MIT", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/commander": { "version": "9.4.1", "resolved": "https://registry.npmjs.org/commander/-/commander-9.4.1.tgz", @@ -1646,13 +2111,6 @@ "dev": true, "license": "MIT" }, - "node_modules/create-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", - "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", - "dev": true, - "license": "MIT" - }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -1672,7 +2130,8 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-4.2.0.tgz", "integrity": "sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==", - "license": "MIT" + "license": "MIT", + "optional": true }, "node_modules/debug": { "version": "4.4.3", @@ -1705,19 +2164,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, 
- "node_modules/deep-eql": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", - "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", - "dev": true, - "license": "MIT", - "dependencies": { - "type-detect": "^4.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -1725,20 +2171,12 @@ "dev": true, "license": "MIT" }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/detect-libc": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", "license": "Apache-2.0", + "optional": true, "engines": { "node": ">=8" } @@ -1753,26 +2191,26 @@ "node": ">=0.3.1" } }, - "node_modules/dommatrix": { - "version": "0.0.24", - "resolved": "https://registry.npmjs.org/dommatrix/-/dommatrix-0.0.24.tgz", - "integrity": "sha512-PatEhAW5pIHr28MvFQGV5iiHNloqvecQZlxs7/8s/eulLqZI3uVqPkrO7YDuqsebovr/9mmcWDSWzVG4amEZgQ==", - "license": "MIT" - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, 
"license": "MIT", "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" + "path-type": "^4.0.0" }, "engines": { - "node": ">= 0.4" + "node": ">=8" } }, + "node_modules/dommatrix": { + "version": "0.0.24", + "resolved": "https://registry.npmjs.org/dommatrix/-/dommatrix-0.0.24.tgz", + "integrity": "sha512-PatEhAW5pIHr28MvFQGV5iiHNloqvecQZlxs7/8s/eulLqZI3uVqPkrO7YDuqsebovr/9mmcWDSWzVG4amEZgQ==", + "license": "MIT", + "optional": true + }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -1800,49 +2238,46 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "node_modules/esbuild": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "dev": true, + "hasInstallScript": true, "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" + "bin": { + "esbuild": "bin/esbuild" }, "engines": { 
- "node": ">= 0.4" - } - }, - "node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" + "node": ">=18" }, - "engines": { - "node": ">= 0.4" + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.3", + "@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": "0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + "@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" } }, "node_modules/escalade": { @@ -2107,24 +2542,6 @@ "node": ">=0.10.0" } }, - "node_modules/event-target-shim": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/events": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - 
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "license": "MIT", - "engines": { - "node": ">=0.8.x" - } - }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -2132,6 +2549,36 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", @@ -2146,6 +2593,16 @@ "dev": true, "license": "MIT" }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, "node_modules/fdir": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", @@ -2178,22 +2635,36 @@ } }, "node_modules/file-type": { - "version": "16.5.4", - "resolved": 
"https://registry.npmjs.org/file-type/-/file-type-16.5.4.tgz", - "integrity": "sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw==", + "version": "19.6.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-19.6.0.tgz", + "integrity": "sha512-VZR5I7k5wkD0HgFnMsq5hOsSc710MJMu5Nc5QYsbe38NN5iPV/XTObYLc/cpttRTf6lX538+5uO1ZQRhYibiZQ==", "license": "MIT", "dependencies": { - "readable-web-to-node-stream": "^3.0.0", - "strtok3": "^6.2.4", - "token-types": "^4.1.1" + "get-stream": "^9.0.1", + "strtok3": "^9.0.1", + "token-types": "^6.0.0", + "uint8array-extras": "^1.3.0" }, "engines": { - "node": ">=10" + "node": ">=18" }, "funding": { "url": "https://github.com/sindresorhus/file-type?sponsor=1" } }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -2259,29 +2730,19 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/form-data": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.4.tgz", - "integrity": "sha512-f0cRzm6dkyVYV3nPoooP8XlccPQukegwhAnpoLcXy+X+A8KfpGOoXwDr9FLZd3wzgLaBGQBE3lY93Zm/i1JvIQ==", + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, "license": "MIT", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "hasown": "^2.0.2", - 
"mime-types": "^2.1.35" - }, + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">= 6" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, "node_modules/get-caller-file": { @@ -2294,57 +2755,40 @@ "node": "6.* || 8.* || >= 10.*" } }, - "node_modules/get-func-name": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", - "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "node_modules/get-stream": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-9.0.1.tgz", + "integrity": "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==", "license": "MIT", "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" + "@sec-ant/readable-stream": "^0.4.1", + "is-stream": "^4.0.1" }, "engines": { - "node": ">= 0.4" + "node": ">=18" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/get-proto": 
{ - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "node_modules/get-tsconfig": { + "version": "4.13.6", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz", + "integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==", + "dev": true, "license": "MIT", "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" + "resolve-pkg-maps": "^1.0.0" }, - "engines": { - "node": ">= 0.4" + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } }, "node_modules/glob": { "version": "10.5.0", "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, "license": "ISC", "dependencies": { @@ -2388,16 +2832,35 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, "engines": { - "node": ">= 0.4" + "node": ">=10" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby/node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" } }, "node_modules/has-flag": { @@ -2410,45 +2873,6 @@ "node": ">=8" } }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": 
"sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", @@ -2473,7 +2897,8 @@ "url": "https://patreon.com/mdevils" } ], - "license": "MIT" + "license": "MIT", + "optional": true }, "node_modules/ieee754": { "version": "1.2.1", @@ -2536,7 +2961,21 @@ "version": "0.3.4", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.4.tgz", "integrity": "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==", - "license": "MIT" + "license": "MIT", + "optional": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } }, "node_modules/is-extglob": { "version": "2.1.1", @@ -2571,6 +3010,16 @@ "node": ">=0.10.0" } }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + 
}, "node_modules/is-path-inside": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", @@ -2591,6 +3040,18 @@ "node": ">=8" } }, + "node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-unicode-supported": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", @@ -2671,13 +3132,6 @@ "dev": true, "license": "MIT" }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", - "dev": true, - "license": "ISC" - }, "node_modules/keyv": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", @@ -2752,16 +3206,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/loupe": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", - "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", - "dev": true, - "license": "MIT", - "dependencies": { - "get-func-name": "^2.0.1" - } - }, "node_modules/lru-cache": { "version": "10.4.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", @@ -2776,17 +3220,10 @@ "dev": true, "license": "MIT" }, - "node_modules/make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": 
"sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "dev": true, - "license": "ISC" - }, "node_modules/markdown-it": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", - "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.1.tgz", + "integrity": "sha512-BuU2qnTti9YKgK5N+IeMubp14ZUKUUw7yeJbkjtosvHiP0AZ5c8IAgEMk79D0eC8F23r4Ac/q8cAIFdm2FtyoA==", "dev": true, "license": "MIT", "dependencies": { @@ -2801,15 +3238,6 @@ "markdown-it": "bin/markdown-it.mjs" } }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, "node_modules/mdurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", @@ -2817,25 +3245,41 @@ "dev": true, "license": "MIT" }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, "license": "MIT", "engines": { - "node": ">= 0.6" + "node": ">= 8" } }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": 
"sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, "license": "MIT", "dependencies": { - "mime-db": "1.52.0" + "braces": "^3.0.3", + "picomatch": "^2.3.1" }, "engines": { - "node": ">= 0.6" + "node": ">=8.6" + } + }, + "node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, "node_modules/minimatch": { @@ -2924,6 +3368,20 @@ "dev": true, "license": "MIT" }, + "node_modules/mylas": { + "version": "2.1.14", + "resolved": "https://registry.npmjs.org/mylas/-/mylas-2.1.14.tgz", + "integrity": "sha512-BzQguy9W9NJgoVn2mRWzbFrFWWztGCcng2QI9+41frfk+Athwgx3qhqhvStz7ExeUUu7Kzw427sNzHpEZNINog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/raouldeheer" + } + }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -2931,26 +3389,12 @@ "dev": true, "license": "MIT" }, - "node_modules/nock": { - "version": "13.5.6", - "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz", - "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.1.0", - "json-stringify-safe": "^5.0.1", - "propagate": "^2.0.0" - }, - 
"engines": { - "node": ">= 10.13" - } - }, "node_modules/node-html-better-parser": { "version": "1.5.8", "resolved": "https://registry.npmjs.org/node-html-better-parser/-/node-html-better-parser-1.5.8.tgz", "integrity": "sha512-t/wAKvaTSKco43X+yf9+76RiMt18MtMmzd4wc7rKj+fWav6DV4ajDEKdWlLzSE8USDF5zr/06uGj0Wr/dGAFtw==", "license": "MIT", + "optional": true, "dependencies": { "html-entities": "^2.3.2" } @@ -2971,6 +3415,16 @@ "url": "https://github.com/sponsors/Fdawgs" } }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -3032,7 +3486,8 @@ "version": "1.0.11", "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", - "license": "(MIT AND Zlib)" + "license": "(MIT AND Zlib)", + "optional": true }, "node_modules/parent-module": { "version": "1.0.1", @@ -3101,14 +3556,14 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/pathval": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", - "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "dev": true, "license": "MIT", "engines": { - "node": "*" + "node": ">=8" } }, "node_modules/pdf.js-extract": { @@ -3116,6 +3571,7 @@ "resolved": 
"https://registry.npmjs.org/pdf.js-extract/-/pdf.js-extract-0.2.1.tgz", "integrity": "sha512-oUs5KaTVCelIyiBajCx3zAZKurkN9oVwRdqbSeDqeofddxNuwJRur86fCETvKZ/tX5nZJUSZWq3ie76PsArz7A==", "license": "MIT", + "optional": true, "dependencies": { "dommatrix": "0.0.24", "web-streams-polyfill": "3.2.0" @@ -3125,12 +3581,12 @@ } }, "node_modules/peek-readable": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-4.1.0.tgz", - "integrity": "sha512-ZI3LnwUv5nOGbQzD9c2iDG6toheuXSZP5esSHBjopsXH4dg19soufvpUGA3uohi5anFtGb2lhAVdHzH6R/Evvg==", + "version": "5.4.2", + "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.4.2.tgz", + "integrity": "sha512-peBp3qZyuS6cNIJ2akRNG1uo1WJ1d0wTxg/fxMdZ0BqCVhx242bSFHM9eNqflfJVS9SsgkzgT/1UgnsurBOTMg==", "license": "MIT", "engines": { - "node": ">=8" + "node": ">=14.16" }, "funding": { "type": "github", @@ -3157,33 +3613,27 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "node_modules/plimit-lit": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/plimit-lit/-/plimit-lit-1.6.1.tgz", + "integrity": "sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA==", "dev": true, "license": "MIT", + "dependencies": { + "queue-lit": "^1.5.1" + }, "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "license": "MIT", - "engines": { - "node": ">= 0.6.0" + "node": ">=12" } }, - "node_modules/propagate": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", - "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, "license": "MIT", "engines": { - "node": ">= 8" + "node": ">= 0.8.0" } }, "node_modules/punycode": { @@ -3206,6 +3656,37 @@ "node": ">=6" } }, + "node_modules/queue-lit": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/queue-lit/-/queue-lit-1.5.2.tgz", + "integrity": "sha512-tLc36IOPeMAubu8BkW8YDBV+WyIgKlYU7zUNs0J5Vk9skSZ4JfGlPOqplP0aHdfv7HL0B2Pg6nwiq60Qc6M2Hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -3216,38 +3697,6 @@ "safe-buffer": "^5.1.0" } }, - "node_modules/readable-stream": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", - "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", - "license": "MIT", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": 
"^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/readable-web-to-node-stream": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.4.tgz", - "integrity": "sha512-9nX56alTf5bwXQ3ZDipHJhusu9NTQJ/CVPtb/XHAJCXihZeitfJvIRS4GqQ/mfIoOE3IelHMrpayVrosdHBuLw==", - "license": "MIT", - "dependencies": { - "readable-stream": "^4.7.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Borewit" - } - }, "node_modules/readdirp": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", @@ -3282,10 +3731,56 @@ "node": ">=4" } }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" 
+ } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, "funding": [ { "type": "github", @@ -3303,9 +3798,9 @@ "license": "MIT" }, "node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -3325,15 +3820,16 @@ } }, "node_modules/sharp": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", - "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.5.tgz", + "integrity": "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==", "hasInstallScript": true, "license": "Apache-2.0", + "optional": true, "dependencies": { - "color": "^4.2.3", - "detect-libc": "^2.0.3", - "semver": "^7.6.3" + "@img/colour": "^1.0.0", + "detect-libc": "^2.1.2", + "semver": "^7.7.3" }, "engines": { "node": "^18.17.0 || ^20.3.0 || >=21.0.0" @@ -3342,25 +3838,30 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-darwin-arm64": "0.33.5", - "@img/sharp-darwin-x64": "0.33.5", - "@img/sharp-libvips-darwin-arm64": "1.0.4", - "@img/sharp-libvips-darwin-x64": "1.0.4", - "@img/sharp-libvips-linux-arm": "1.0.5", - 
"@img/sharp-libvips-linux-arm64": "1.0.4", - "@img/sharp-libvips-linux-s390x": "1.0.4", - "@img/sharp-libvips-linux-x64": "1.0.4", - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", - "@img/sharp-libvips-linuxmusl-x64": "1.0.4", - "@img/sharp-linux-arm": "0.33.5", - "@img/sharp-linux-arm64": "0.33.5", - "@img/sharp-linux-s390x": "0.33.5", - "@img/sharp-linux-x64": "0.33.5", - "@img/sharp-linuxmusl-arm64": "0.33.5", - "@img/sharp-linuxmusl-x64": "0.33.5", - "@img/sharp-wasm32": "0.33.5", - "@img/sharp-win32-ia32": "0.33.5", - "@img/sharp-win32-x64": "0.33.5" + "@img/sharp-darwin-arm64": "0.34.5", + "@img/sharp-darwin-x64": "0.34.5", + "@img/sharp-libvips-darwin-arm64": "1.2.4", + "@img/sharp-libvips-darwin-x64": "1.2.4", + "@img/sharp-libvips-linux-arm": "1.2.4", + "@img/sharp-libvips-linux-arm64": "1.2.4", + "@img/sharp-libvips-linux-ppc64": "1.2.4", + "@img/sharp-libvips-linux-riscv64": "1.2.4", + "@img/sharp-libvips-linux-s390x": "1.2.4", + "@img/sharp-libvips-linux-x64": "1.2.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", + "@img/sharp-libvips-linuxmusl-x64": "1.2.4", + "@img/sharp-linux-arm": "0.34.5", + "@img/sharp-linux-arm64": "0.34.5", + "@img/sharp-linux-ppc64": "0.34.5", + "@img/sharp-linux-riscv64": "0.34.5", + "@img/sharp-linux-s390x": "0.34.5", + "@img/sharp-linux-x64": "0.34.5", + "@img/sharp-linuxmusl-arm64": "0.34.5", + "@img/sharp-linuxmusl-x64": "0.34.5", + "@img/sharp-wasm32": "0.34.5", + "@img/sharp-win32-arm64": "0.34.5", + "@img/sharp-win32-ia32": "0.34.5", + "@img/sharp-win32-x64": "0.34.5" } }, "node_modules/shebang-command": { @@ -3404,10 +3905,21 @@ "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.4.tgz", "integrity": "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==", "license": "MIT", + "optional": true, "dependencies": { "is-arrayish": "^0.3.1" } }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/spdx-exceptions": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", @@ -3433,15 +3945,6 @@ "dev": true, "license": "CC0-1.0" }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, "node_modules/string-width": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", @@ -3560,16 +4063,16 @@ } }, "node_modules/strtok3": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-6.3.0.tgz", - "integrity": "sha512-fZtbhtvI9I48xDSywd/somNqgUHl2L2cstmXCCif0itOf96jeW18MBSyrLuNicYQVkvpOxkZtkzujiTJ9LW5Jw==", + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-9.1.1.tgz", + "integrity": "sha512-FhwotcEqjr241ZbjFzjlIYg6c5/L/s4yBGWSMvJ9UoExiSqL+FnFA/CaeZx17WGaZMS/4SOZp8wH18jSS4R4lw==", "license": "MIT", "dependencies": { "@tokenizer/token": "^0.3.0", - "peek-readable": "^4.1.0" + "peek-readable": "^5.3.1" }, "engines": { - "node": ">=10" + "node": ">=16" }, "funding": { "type": "github", @@ -3615,17 +4118,31 @@ "node": ">=14.14" } }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": 
">=8.0" + } + }, "node_modules/token-types": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/token-types/-/token-types-4.2.1.tgz", - "integrity": "sha512-6udB24Q737UD/SDsKAHI9FCRP7Bqc9D/MQUV02ORQg5iskjtLJlZJNdN4kKtcdtwCeWIwIHDGaUsTsCCAa8sFQ==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.2.tgz", + "integrity": "sha512-dRXchy+C0IgK8WPC6xvCHFRIWYUbqqdEIKPaKo/AcTUNzwLTK6AH7RjdLWsEZcAN/TBdtfUw3PYEgPr5VPr6ww==", "license": "MIT", "dependencies": { + "@borewit/text-codec": "^0.2.1", "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" }, "engines": { - "node": ">=10" + "node": ">=14.16" }, "funding": { "type": "github", @@ -3645,58 +4162,90 @@ "typescript": ">=4.8.4" } }, - "node_modules/ts-node": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", - "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "node_modules/tsc-alias": { + "version": "1.8.16", + "resolved": "https://registry.npmjs.org/tsc-alias/-/tsc-alias-1.8.16.tgz", + "integrity": "sha512-QjCyu55NFyRSBAl6+MTFwplpFcnm2Pq01rR/uxfqJoLMm6X3O14KEGtaSDZpJYaE1bJBGDjD0eSuiIWPe2T58g==", "dev": true, "license": "MIT", "dependencies": { - "@cspotcode/source-map-support": "^0.8.0", - "@tsconfig/node10": "^1.0.7", - "@tsconfig/node12": "^1.0.7", - "@tsconfig/node14": "^1.0.0", - "@tsconfig/node16": "^1.0.2", - "acorn": "^8.4.1", - "acorn-walk": "^8.1.1", - "arg": "^4.1.0", - "create-require": "^1.1.0", - "diff": "^4.0.1", - "make-error": "^1.1.1", - "v8-compile-cache-lib": "^3.0.1", - "yn": "3.1.1" + "chokidar": "^3.5.3", + "commander": "^9.0.0", + "get-tsconfig": "^4.10.0", + "globby": "^11.0.4", + "mylas": "^2.1.9", + "normalize-path": "^3.0.0", + "plimit-lit": "^1.2.6" }, "bin": { - "ts-node": "dist/bin.js", - "ts-node-cwd": "dist/bin-cwd.js", - "ts-node-esm": "dist/bin-esm.js", - "ts-node-script": "dist/bin-script.js", - "ts-node-transpile-only": 
"dist/bin-transpile.js", - "ts-script": "dist/bin-script-deprecated.js" + "tsc-alias": "dist/bin/index.js" }, - "peerDependencies": { - "@swc/core": ">=1.2.50", - "@swc/wasm": ">=1.2.50", - "@types/node": "*", - "typescript": ">=2.7" + "engines": { + "node": ">=16.20.2" + } + }, + "node_modules/tsc-alias/node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" }, - "peerDependenciesMeta": { - "@swc/core": { - "optional": true - }, - "@swc/wasm": { - "optional": true - } + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" } }, - "node_modules/ts-node/node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "node_modules/tsc-alias/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, - "license": "BSD-3-Clause", + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, "engines": { - "node": ">=0.3.1" + "node": ">= 6" + } + }, + "node_modules/tsc-alias/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", 
+ "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/tsc-alias/node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" } }, "node_modules/tslib": { @@ -3705,6 +4254,26 @@ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, + "node_modules/tsx": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz", + "integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.27.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -3718,16 +4287,6 @@ "node": ">= 0.8.0" } }, - "node_modules/type-detect": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz", - "integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/typedoc": { "version": "0.28.16", "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.28.16.tgz", @@ -3773,6 +4332,27 @@ "dev": true, "license": "MIT" }, + "node_modules/uint8array-extras": { + "version": "1.5.0", + "resolved": 
"https://registry.npmjs.org/uint8array-extras/-/uint8array-extras-1.5.0.tgz", + "integrity": "sha512-rvKSBiC5zqCCiDZ9kAOszZcDvdAHwwIKJG33Ykj43OKcWsnmcBRL09YTU4nOeHZ8Y2a7l1MgTd08SBe9A8Qj6A==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/undici": { + "version": "6.23.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.23.0.tgz", + "integrity": "sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g==", + "license": "MIT", + "engines": { + "node": ">=18.17" + } + }, "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", @@ -3790,18 +4370,12 @@ "punycode": "^2.1.0" } }, - "node_modules/v8-compile-cache-lib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", - "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", - "dev": true, - "license": "MIT" - }, "node_modules/web-streams-polyfill": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.0.tgz", "integrity": "sha512-EqPmREeOzttaLRm5HS7io98goBgZ7IVz79aDvqjD0kYXLtFZTc0T/U6wHTPKyIjb+MdN7DFIIX6hgdBEpWmfPA==", "license": "MIT", + "optional": true, "engines": { "node": ">= 8" } @@ -4050,16 +4624,6 @@ "node": ">=8" } }, - "node_modules/yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/package.json b/package.json index 800da86fa..7250ec09c 100644 
--- a/package.json +++ b/package.json @@ -5,13 +5,20 @@ "main": "src/index.js", "bin": "bin/mindee.js", "license": "MIT", + "type": "module", "scripts": { - "build": "tsc --build", - "build-for-dist": "tsc --build && cp LICENSE README.md CHANGELOG.md ./dist", + "build": "tsc --build && tsc-alias", + "build-for-dist": "tsc --build && tsc-alias && cp LICENSE README.md CHANGELOG.md ./dist", "clean": "rm -rf ./dist ./docs/_build", - "test": "mocha \"tests/**/*.spec.ts\" --config .mocharc.json", - "test-integration": "mocha \"tests/**/*.integration.ts\"", - "lint": "eslint './src/**/*.ts' --report-unused-disable-directives && echo 'Your .ts files look good.'", + "test": "mocha --grep \"#omitOptionalDeps\" --invert \"tests/**/*.spec.ts\"", + "test-light": "mocha --grep \"#includeOptionalDeps\" --invert \"tests/**/*.spec.ts\"", + "test-integration": "mocha --grep \"#omitOptionalDeps\" --invert \"tests/**/*.integration.ts\"", + "test-integration-light": "mocha --grep \"#includeOptionalDeps\" --invert \"tests/**/*.integration.ts\"", + "test-v1": "mocha --grep \"#omitOptionalDeps\" --invert \"tests/v1/**/*.spec.ts\"", + "test-v1-light": "mocha --grep \"#includeOptionalDeps\" --invert \"tests/v1/**/*.spec.ts\"", + "test-v2": "mocha --grep \"#omitOptionalDeps\" --invert \"tests/v2/**/*.spec.ts\"", + "test-v2-light": "mocha --grep \"#includeOptionalDeps\" --invert \"tests/v2/**/*.spec.ts\"", + "lint": "tsc --noEmit && eslint './src/**/*.ts' --report-unused-disable-directives && echo 'Your .ts files look good.'", "lint-fix": "eslint './src/**/*.ts' --fix", "docs": "typedoc --out docs/_build ./src/index.ts", "docs-for-dist": "typedoc --out docs/_build ./src/index.ts && cp -r ./docs/code_samples ./docs/_build/" @@ -24,7 +31,7 @@ "CHANGELOG.md" ], "engines": { - "node": ">= 18" + "node": ">= 18.17" }, "repository": { "type": "git", @@ -46,25 +53,27 @@ "@types/tmp": "^0.2.6", "@typescript-eslint/eslint-plugin": "^8.52.0", "@typescript-eslint/parser": "^8.52.0", - "chai": "^4.5.0", - 
"eslint": "^9.20.1", + "chai": "^6.2.2", + "eslint": "^9.39.2", "eslint-plugin-jsdoc": "^50.6.17", "mocha": "^11.7.5", - "nock": "^13.5.6", - "ts-node": "^10.9.2", + "tsc-alias": "^1.8.16", + "tsx": "^4.21.0", "typedoc": "~0.28.15", "typescript": "^5.9.3" }, "dependencies": { - "@cantoo/pdf-lib": "^2.3.2", "commander": "~9.4.1", - "file-type": "~16.5.4", - "form-data": "~3.0.1", - "node-poppler": "^7.2.2", - "pdf.js-extract": "^0.2.1", - "sharp": "^0.33.5", + "file-type": "^19.6.0", + "node-poppler": "^7.2.4", "tmp": "^0.2.3", - "tslib": "^2.8.1" + "tslib": "^2.8.1", + "undici": "^6.23.0" + }, + "optionalDependencies": { + "sharp": "~0.34.5", + "@cantoo/pdf-lib": "^2.3.2", + "pdf.js-extract": "^0.2.1" }, "keywords": [ "typescript", @@ -72,6 +81,7 @@ "api", "client", "client library", - "nodejs" + "nodejs", + "sdk" ] } diff --git a/src/clientV2.ts b/src/clientV2.ts deleted file mode 100644 index 9255b2191..000000000 --- a/src/clientV2.ts +++ /dev/null @@ -1,313 +0,0 @@ -import { DataSchema, InputSource } from "./input"; -import { errorHandler } from "./errors/handler"; -import { LOG_LEVELS, logger } from "./logger"; - -import { setTimeout } from "node:timers/promises"; -import { ErrorResponse, InferenceResponse, JobResponse } from "./parsing/v2"; -import { MindeeApiV2 } from "./http/mindeeApiV2"; -import { MindeeHttpErrorV2 } from "./errors/mindeeError"; -import { StringDict } from "./parsing/common"; - -/** - * Parameters for the internal polling loop in {@link ClientV2.enqueueAndGetInference | enqueueAndGetInference()} . - * - * Default behavior: - * - `initialDelaySec` = 2s - * - `delaySec` = 1.5s - * - `maxRetries` = 80 - * - * Validation rules: - * - `initialDelaySec` >= 1 - * - `delaySec` >= 1 - * - `maxRetries` >= 2 - * - * The `initialTimerOptions` and `recurringTimerOptions` objects let you pass an - * `AbortSignal` or make the timer `unref`-ed to the `setTimeout()`. 
- * - * @category ClientV2 - * @example - * const params = { - * initialDelaySec: 4, - * delaySec: 2, - * maxRetries: 50 - * }; - * - * const inference = await client.enqueueAndGetInference(inputDoc, params); - */ - -export interface PollingOptions { - /** Number of seconds to wait *before the first poll*. */ - initialDelaySec?: number; - /** Interval in seconds between two consecutive polls. */ - delaySec?: number; - /** Maximum number of polling attempts (including the first one). */ - maxRetries?: number; - /** Options passed to the initial `setTimeout()`. */ - initialTimerOptions?: { - ref?: boolean, - signal?: AbortSignal - }; - /** Options passed to every recurring `setTimeout()`. */ - recurringTimerOptions?: { - ref?: boolean, - signal?: AbortSignal - } -} - -interface ValidatedPollingOptions extends PollingOptions { - initialDelaySec: number; - delaySec: number; - maxRetries: number; -} - -/** - * Parameters accepted by the asynchronous **inference** v2 endpoint. - * - * All fields are optional except `modelId`. - * - * @category ClientV2 - * @example - * const params = { - * modelId: "YOUR_MODEL_ID", - * rag: true, - * alias: "YOUR_ALIAS", - * webhookIds: ["YOUR_WEBHOOK_ID_1", "YOUR_WEBHOOK_ID_2"], - * pollingOptions: { - * initialDelaySec: 2, - * delaySec: 1.5, - * } - * }; - */ -export interface InferenceParameters { - /** Model ID to use for the inference. **Required** */ - modelId: string; - /** Use Retrieval-Augmented Generation during inference. */ - rag?: boolean; - /** Extract the entire text from the document as strings, and fill the `rawText` attribute. */ - rawText?: boolean; - /** Calculate bounding box polygons for values, and fill the `locations` attribute of fields. */ - polygon?: boolean; - /** Calculate confidence scores for values, and fill the `confidence` attribute of fields. - * Useful for automation.*/ - confidence?: boolean; - /** Use an alias to link the file to your own DB. If empty, no alias will be used. 
*/ - alias?: string; - /** Additional text context used by the model during inference. - * *Not recommended*, for specific use only. */ - textContext?: string; - /** Webhook IDs to call after all processing is finished. - * If empty, no webhooks will be used. */ - webhookIds?: string[]; - /** Client-side polling configuration (see {@link PollingOptions}). */ - pollingOptions?: PollingOptions; - /** By default, the file is closed once the upload is finished. - * Set to `false` to keep it open. */ - closeFile?: boolean; - /** - * Dynamic changes to the data schema of the model for this inference. - * Not recommended, for specific use only. - */ - dataSchema?: DataSchema|StringDict|string; -} - -/** - * Options for the V2 Mindee Client. - * - * @category ClientV2 - * @example - * const client = new MindeeClientV2({ - * apiKey: "YOUR_API_KEY", - * throwOnError: true, - * debug: false - * }); - */ -export interface ClientOptions { - /** Your API key for all endpoints. */ - apiKey?: string; - /** Raise an `Error` on errors. */ - throwOnError?: boolean; - /** Log debug messages. */ - debug?: boolean; -} - -/** - * Mindee Client V2 class that centralizes most basic operations. - * - * @category ClientV2 - */ -export class ClientV2 { - /** Mindee API handler. */ - protected mindeeApi: MindeeApiV2; - - /** - * @param {ClientOptions} options options for the initialization of a client. - */ - constructor( - { apiKey, throwOnError, debug }: ClientOptions = { - apiKey: "", - throwOnError: true, - debug: false, - } - ) { - this.mindeeApi = new MindeeApiV2(apiKey); - errorHandler.throwOnError = throwOnError ?? true; - logger.level = - debug ?? process.env.MINDEE_DEBUG - ? LOG_LEVELS["debug"] - : LOG_LEVELS["warn"]; - logger.debug("ClientV2 initialized"); - } - - /** - * Checks the Data Schema. - * @param params Input Inference parameters. 
- */ - validateDataSchema(params: InferenceParameters): void { - if (params.dataSchema !== undefined && params.dataSchema !== null){ - if (!(params.dataSchema instanceof DataSchema)){ - params.dataSchema = new DataSchema(params.dataSchema); - } - } - } - - /** - * Send the document to an asynchronous endpoint and return its ID in the queue. - * @param inputSource file or URL to parse. - * @param params parameters relating to prediction options. - * @category Asynchronous - * @returns a `Promise` containing the job (queue) corresponding to a document. - */ - async enqueueInference( - inputSource: InputSource, - params: InferenceParameters - ): Promise { - if (inputSource === undefined) { - throw new Error("The 'enqueue' function requires an input document."); - } - this.validateDataSchema(params); - await inputSource.init(); - - return await this.mindeeApi.reqPostInferenceEnqueue(inputSource, params); - } - - /** - * Retrieves an inference. - * - * @param inferenceId id of the queue to poll. - * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. - * @category Asynchronous - * @returns a `Promise` containing a `Job`, which also contains a `Document` if the - * parsing is complete. - */ - async getInference(inferenceId: string): Promise { - return await this.mindeeApi.reqGetInference(inferenceId); - } - - /** - * Get the status of an inference that was previously enqueued. - * Can be used for polling. - * - * @param jobId id of the queue to poll. - * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. - * @category Asynchronous - * @returns a `Promise` containing a `Job`, which also contains a `Document` if the - * parsing is complete. - */ - async getJob(jobId: string): Promise { - return await this.mindeeApi.reqGetJob(jobId); - } - - /** - * Checks the values for asynchronous parsing. Returns their corrected value if they are undefined. 
- * @param asyncParams parameters related to asynchronous parsing - * @returns A valid `AsyncOptions`. - */ - #setAsyncParams(asyncParams: PollingOptions | undefined = undefined): ValidatedPollingOptions { - const minDelaySec = 1; - const minInitialDelay = 1; - const minRetries = 2; - let newAsyncParams: PollingOptions; - if (asyncParams === undefined) { - newAsyncParams = { - delaySec: 1.5, - initialDelaySec: 2, - maxRetries: 80 - }; - } else { - newAsyncParams = { ...asyncParams }; - if ( - !newAsyncParams.delaySec || - !newAsyncParams.initialDelaySec || - !newAsyncParams.maxRetries - ) { - throw Error("Invalid polling options."); - } - if (newAsyncParams.delaySec < minDelaySec) { - throw Error(`Cannot set auto-parsing delay to less than ${minDelaySec} second(s).`); - } - if (newAsyncParams.initialDelaySec < minInitialDelay) { - throw Error(`Cannot set initial parsing delay to less than ${minInitialDelay} second(s).`); - } - if (newAsyncParams.maxRetries < minRetries) { - throw Error(`Cannot set retry to less than ${minRetries}.`); - } - } - return newAsyncParams as ValidatedPollingOptions; - } - - /** - * Send a document to an endpoint and poll the server until the result is sent or - * until the maximum number of tries is reached. - * - * @param inputSource file or URL to parse. - * @param params parameters relating to prediction options. - * - * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. - * @category Synchronous - * @returns a `Promise` containing parsing results. 
- */ - async enqueueAndGetInference( - inputSource: InputSource, - params: InferenceParameters - ): Promise { - const validatedAsyncParams = this.#setAsyncParams(params.pollingOptions); - const enqueueResponse: JobResponse = await this.enqueueInference(inputSource, params); - if (enqueueResponse.job.id === undefined || enqueueResponse.job.id.length === 0) { - logger.error(`Failed enqueueing:\n${enqueueResponse.getRawHttp()}`); - throw Error("Enqueueing of the document failed."); - } - const queueId: string = enqueueResponse.job.id; - logger.debug( - `Successfully enqueued document with job id: ${queueId}.` - ); - - await setTimeout(validatedAsyncParams.initialDelaySec * 1000, undefined, validatedAsyncParams.initialTimerOptions); - let retryCounter: number = 1; - let pollResults: JobResponse = await this.getJob(queueId); - while (retryCounter < validatedAsyncParams.maxRetries) { - if (pollResults.job.status === "Failed") { - break; - } - if (pollResults.job.status === "Processed") { - return this.getInference(pollResults.job.id); - } - logger.debug( - `Polling server for parsing result with queueId: ${queueId}. -Attempt no. ${retryCounter} of ${validatedAsyncParams.maxRetries}. 
-Job status: ${pollResults.job.status}.` - ); - await setTimeout(validatedAsyncParams.delaySec * 1000, undefined, validatedAsyncParams.recurringTimerOptions); - pollResults = await this.getJob(queueId); - retryCounter++; - } - const error: ErrorResponse | undefined = pollResults.job.error; - if (error) { - throw new MindeeHttpErrorV2(error); - } - throw Error( - "Asynchronous parsing request timed out after " + - validatedAsyncParams.delaySec * retryCounter + - " seconds" - ); - } -} diff --git a/src/dependency/index.ts b/src/dependency/index.ts new file mode 100644 index 000000000..0c1c54b57 --- /dev/null +++ b/src/dependency/index.ts @@ -0,0 +1 @@ +export { loadOptionalDependency } from "./optionalLoader.js"; diff --git a/src/dependency/optionalLoader.ts b/src/dependency/optionalLoader.ts new file mode 100644 index 000000000..1d3a831f6 --- /dev/null +++ b/src/dependency/optionalLoader.ts @@ -0,0 +1,23 @@ +/** + * Helper to load optional peer dependencies. + * @param packageName The name of the npm package to load + * @param featureName A human-readable name of the feature requiring this package + */ +export async function loadOptionalDependency(packageName: string, featureName: string): Promise { + try { + return await (new Function("specifier", "return import(specifier)")(packageName)); + } catch (error: any) { + if ( + error.code === "MODULE_NOT_FOUND" || + error.code === "ERR_MODULE_NOT_FOUND" || + error.message?.includes("Cannot find module") || + error.message?.includes("Cannot find package") + ) { + throw new Error( + `The feature '${featureName}' requires the optional dependency '${packageName}'. 
` + + "Please install optional dependencies: `npm install --include=optional`" + ); + } + throw error; + } +} diff --git a/src/errors/handler.ts b/src/errors/handler.ts index 1f37deaca..3141adde8 100644 --- a/src/errors/handler.ts +++ b/src/errors/handler.ts @@ -1,4 +1,4 @@ -import { logger } from "../logger"; +import { logger } from "@/logger.js"; /** * Custom Error handling class. diff --git a/src/errors/index.ts b/src/errors/index.ts index 7cc7eeb3a..726e67f78 100644 --- a/src/errors/index.ts +++ b/src/errors/index.ts @@ -1 +1,8 @@ -export { MindeeError, MindeeMimeTypeError } from "./mindeeError"; +export { + MindeeError, + MindeeImageError, + MindeePdfError, + MindeeDeserializationError, + MindeeConfigurationError, + MindeeInputSourceError, +} from "./mindeeError.js"; diff --git a/src/errors/mindeeError.ts b/src/errors/mindeeError.ts index 53c90ff67..b3e65a2d1 100644 --- a/src/errors/mindeeError.ts +++ b/src/errors/mindeeError.ts @@ -1,5 +1,3 @@ -import { ErrorDetails, ErrorResponse, ErrorItem } from "../parsing/v2"; - /** * Main Mindee Error custom class. */ @@ -10,52 +8,37 @@ export class MindeeError extends Error { } } -/** - * Custom Mindee error relating to improper mimetypes in inputs. 
- */ -export class MindeeMimeTypeError extends MindeeError { +export class MindeeImageError extends MindeeError { constructor(message: string) { super(message); - this.name = "MindeeMimeTypeError"; + this.name = "MindeeImageError"; } } - -export class MindeeImageError extends MindeeError { +export class MindeeDeserializationError extends MindeeError { constructor(message: string) { super(message); - this.name = "MindeeImageError"; + this.name = "MindeeDeserializationError"; } } -export class MindeePdfError extends MindeeError { +export class MindeeConfigurationError extends MindeeError { constructor(message: string) { super(message); - this.name = "MindeePdfError"; + this.name = "MindeeConfigurationError"; } } -export class MindeeApiV2Error extends MindeeError { +export class MindeePdfError extends MindeeError { constructor(message: string) { super(message); - this.name = "MindeeApiV2Error"; + this.name = "MindeePdfError"; } } -export class MindeeHttpErrorV2 extends MindeeError implements ErrorDetails { - public status: number; - public detail: string; - public title: string; - public code: string; - public errors: ErrorItem[]; - - constructor(error: ErrorResponse) { - super(`HTTP ${error.status} - ${error.title} :: ${error.code} - ${error.detail}`); - this.status = error.status; - this.detail = error.detail; - this.title = error.title; - this.code = error.code; - this.errors = error.errors; - this.name = "MindeeHttpErrorV2"; +export class MindeeInputSourceError extends MindeeError { + constructor(message: string) { + super(message); + this.name = "MindeeInputSourceError"; } } diff --git a/src/geometry/boundingBox.ts b/src/geometry/boundingBox.ts index 717716dde..759fe6859 100644 --- a/src/geometry/boundingBox.ts +++ b/src/geometry/boundingBox.ts @@ -1,5 +1,5 @@ -import { Point } from "./point"; -import { Polygon } from "./polygon"; +import { Point } from "./point.js"; +import { Polygon } from "./polygon.js"; /** A simple bounding box defined by 4 coordinates: 
xMin, yMin, xMax, yMax */ export class BBox { diff --git a/src/geometry/boundingBoxUtils.ts b/src/geometry/boundingBoxUtils.ts index 4e8e89763..81d16522a 100644 --- a/src/geometry/boundingBoxUtils.ts +++ b/src/geometry/boundingBoxUtils.ts @@ -1,5 +1,5 @@ -import { BoundingBox, BBox } from "./boundingBox"; -import { Polygon } from "./polygon"; +import { BoundingBox, BBox } from "./boundingBox.js"; +import { Polygon } from "./polygon.js"; /** * Given a Polygon, calculate a polygon that encompasses all points. diff --git a/src/geometry/index.ts b/src/geometry/index.ts index a9d8af2bc..fb299af84 100644 --- a/src/geometry/index.ts +++ b/src/geometry/index.ts @@ -1,4 +1,4 @@ -export { Polygon } from "./polygon"; +export { Polygon } from "./polygon.js"; export { compareOnX, compareOnY, @@ -9,14 +9,14 @@ export { relativeY, getMinMaxX, getMinMaxY, -} from "./polygonUtils"; -export { BoundingBox, BBox } from "./boundingBox"; +} from "./polygonUtils.js"; +export { BoundingBox, BBox } from "./boundingBox.js"; export { getBbox, getBBoxForPolygons, getBoundingBox, getBoundingBoxFromBBox, mergeBbox, -} from "./boundingBoxUtils"; -export { MinMax } from "./minMax"; -export { Point } from "./point"; +} from "./boundingBoxUtils.js"; +export type { MinMax } from "./minMax.js"; +export type { Point } from "./point.js"; diff --git a/src/geometry/polygon.ts b/src/geometry/polygon.ts index f79c65946..002757f19 100644 --- a/src/geometry/polygon.ts +++ b/src/geometry/polygon.ts @@ -1,6 +1,6 @@ -import { Point } from "./point"; -import { getCentroid, getMinMaxX, getMinMaxY, isPointInX, isPointInY } from "./polygonUtils"; -import { MinMax } from "./minMax"; +import { Point } from "./point.js"; +import { getCentroid, getMinMaxX, getMinMaxY, isPointInX, isPointInY } from "./polygonUtils.js"; +import { MinMax } from "./minMax.js"; /** A polygon, composed of several Points. 
*/ export class Polygon extends Array { diff --git a/src/geometry/polygonUtils.ts b/src/geometry/polygonUtils.ts index e1a3c38fe..062df1517 100644 --- a/src/geometry/polygonUtils.ts +++ b/src/geometry/polygonUtils.ts @@ -1,5 +1,5 @@ -import { MinMax } from "./minMax"; -import { Point } from "./point"; +import { MinMax } from "./minMax.js"; +import { Point } from "./point.js"; /** * Get the central point (centroid) given a list of points. diff --git a/src/http/apiCore.ts b/src/http/apiCore.ts new file mode 100644 index 000000000..98819997e --- /dev/null +++ b/src/http/apiCore.ts @@ -0,0 +1,77 @@ +import { logger } from "@/logger.js"; +import { request, Dispatcher } from "undici"; +import { InputSource, PageOptions, LocalInputSource } from "@/input/index.js"; + +export const TIMEOUT_DEFAULT: number = 120; + +export interface RequestOptions { + hostname: string; + path: string; + method: any; + timeout: number; + headers: any; + body?: any; +} + +export interface BaseHttpResponse { + messageObj: any; + data: { [key: string]: any }; +} + +/** + * Cuts a document's pages according to the given options. + * @param inputDoc input document. + * @param pageOptions page cutting options. + */ +export async function cutDocPages(inputDoc: InputSource, pageOptions: PageOptions) { + if (inputDoc instanceof LocalInputSource && inputDoc.isPdf()) { + await inputDoc.applyPageOptions(pageOptions); + } +} + +/** + * Reads a response from the API and processes it. + * @param dispatcher custom dispatcher to use for the request. + * @param options options related to the request itself. + * @returns the processed request. 
+ */ +export async function sendRequestAndReadResponse( + dispatcher: Dispatcher, + options: RequestOptions, +): Promise { + const url: string = `https://${options.hostname}${options.path}`; + logger.debug(`${options.method}: ${url}`); + + const response = await request( + url, + { + method: options.method, + headers: options.headers, + bodyTimeout: options.timeout, + body: options.body, + throwOnError: false, + dispatcher: dispatcher + } + ); + logger.debug("Parsing the response ..."); + + let responseBody: string = await response.body.text(); + // handle empty responses from server, for example, in the case of redirects + if (!responseBody) { + responseBody = "{}"; + } + try { + const parsedResponse = JSON.parse(responseBody); + logger.debug("JSON parsed successfully, returning plain object."); + return { + messageObj: response, + data: parsedResponse, + }; + } catch { + logger.error("Could not parse the return as JSON."); + return { + messageObj: response, + data: { reconstructedResponse: responseBody }, + }; + } +} diff --git a/src/http/apiSettings.ts b/src/http/apiSettings.ts deleted file mode 100644 index ecd9c506f..000000000 --- a/src/http/apiSettings.ts +++ /dev/null @@ -1,56 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { logger } from "../logger"; -import { BaseSettings, MindeeApiConstructorProps } from "./baseSettings"; - -export const API_KEY_ENVVAR_NAME: string = "MINDEE_API_KEY"; -export const API_HOST_ENVVAR_NAME: string = "MINDEE_API_HOST"; -export const STANDARD_API_OWNER: string = "mindee"; -export const TIMEOUT_DEFAULT: number = 120; -const DEFAULT_MINDEE_API_HOST: string = "api.mindee.net"; - -export class ApiSettings extends BaseSettings { - apiKey: string; - baseHeaders: Record; - - constructor({ - apiKey = "", - }: MindeeApiConstructorProps) { - super(); - if (!apiKey || apiKey.length === 0) { - this.apiKey = this.apiKeyFromEnv(); - } else { - this.apiKey = apiKey; - } - if (!this.apiKey || this.apiKey.length === 
0) { - throw new Error( - "Your API key could not be set, check your Client Configuration\n." - + `You can set this using the ${API_KEY_ENVVAR_NAME} environment variable.` - ); - } - this.baseHeaders = { - "User-Agent": this.getUserAgent(), - Authorization: `Token ${this.apiKey}`, - }; - } - - - protected apiKeyFromEnv(): string { - const envVarValue = process.env[API_KEY_ENVVAR_NAME]; - if (envVarValue) { - logger.debug( - `Set API key from environment: ${API_KEY_ENVVAR_NAME}` - ); - return envVarValue; - } - return ""; - } - - protected hostnameFromEnv(): string { - const envVarValue = process.env[API_HOST_ENVVAR_NAME]; - if (envVarValue) { - logger.debug(`Set the API hostname to ${envVarValue}`); - return envVarValue; - } - return DEFAULT_MINDEE_API_HOST; - } -} diff --git a/src/http/baseEndpoint.ts b/src/http/baseEndpoint.ts deleted file mode 100644 index 37b99ed12..000000000 --- a/src/http/baseEndpoint.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { ApiSettings } from "./apiSettings"; -import { logger } from "../logger"; -import { IncomingMessage, ClientRequest } from "http"; -import { request, RequestOptions } from "https"; -import { InputSource, PageOptions } from "../input"; -import { LocalInputSource } from "../input"; - - -export interface EndpointResponse { - messageObj: IncomingMessage; - data: { [key: string]: any }; -} - -/** - * Base endpoint for the Mindee API. - */ -export abstract class BaseEndpoint { - /** Settings relating to the API. */ - settings: ApiSettings; - - /** Entire root of the URL for API calls. */ - urlRoot: string; - - protected constructor( - settings: ApiSettings, - urlRoot: string - ) { - this.settings = settings; - this.urlRoot = urlRoot; - } - - /** - * Cuts a document's pages according to the given options. - * @param inputDoc input document. - * @param pageOptions page cutting options. 
- */ - public static async cutDocPages(inputDoc: InputSource, pageOptions: PageOptions) { - if (inputDoc instanceof LocalInputSource && inputDoc.isPdf()) { - await inputDoc.applyPageOptions(pageOptions); - } - } - - /** - * Reads a response from the API and processes it. - * @param options options related to the request itself. - * @param resolve the resolved response - * @param reject promise rejection reason. - * @returns the processed request. - */ - public static readResponse( - options: RequestOptions, - resolve: (value: EndpointResponse | PromiseLike) => void, - reject: (reason?: any) => void - ): ClientRequest { - logger.debug( - `${options.method}: https://${options.hostname}${options.path}` - ); - - const req = request(options, function (res: IncomingMessage) { - // when the encoding is set, data chunks will be strings - res.setEncoding("utf-8"); - - let responseBody = ""; - res.on("data", function (chunk: string) { - logger.debug("Receiving data ..."); - responseBody += chunk; - }); - res.on("end", function () { - logger.debug("Parsing the response ..."); - // handle empty responses from server, for example in the case of redirects - if (!responseBody) { - responseBody = "{}"; - } - try { - const parsedResponse = JSON.parse(responseBody); - try { - resolve({ - messageObj: res, - data: parsedResponse, - }); - } catch (error) { - logger.error("Could not construct the return object."); - reject(error); - } - } catch { - logger.error("Could not parse the return as JSON."); - logger.debug(responseBody); - resolve({ - messageObj: res, - data: { reconstructedResponse: responseBody }, - }); - } - }); - }); - req.on("error", (err: any) => { - reject(err); - }); - return req; - } -} diff --git a/src/http/baseSettings.ts b/src/http/baseSettings.ts index 90be69302..ba275b36b 100644 --- a/src/http/baseSettings.ts +++ b/src/http/baseSettings.ts @@ -1,16 +1,26 @@ -import { version as sdkVersion } from "../../package.json"; +import { Dispatcher, getGlobalDispatcher } 
from "undici"; +import packageJson from "../../package.json" with { type: "json" }; import * as os from "os"; -import { TIMEOUT_DEFAULT } from "./apiSettings"; +import { TIMEOUT_DEFAULT } from "./apiCore.js"; export interface MindeeApiConstructorProps { apiKey?: string; + dispatcher?: Dispatcher; } export abstract class BaseSettings { + apiKey: string; hostname: string; timeout: number; + dispatcher: Dispatcher; - protected constructor() { + protected constructor(apiKey?: string, dispatcher?: Dispatcher) { + if (apiKey === undefined || !apiKey || apiKey.length === 0) { + this.apiKey = this.apiKeyFromEnv(); + } else { + this.apiKey = apiKey; + } + this.dispatcher = dispatcher ?? getGlobalDispatcher(); this.hostname = this.hostnameFromEnv(); this.timeout = process.env.MINDEE_REQUEST_TIMEOUT ? parseInt(process.env.MINDEE_REQUEST_TIMEOUT) : TIMEOUT_DEFAULT; } @@ -26,7 +36,7 @@ export abstract class BaseSettings { else if (platform.includes("bsd")) { platform = "bsd"; } - return `mindee-api-nodejs@v${sdkVersion} nodejs-${ + return `mindee-api-nodejs@v${packageJson.version} nodejs-${ process.version } ${platform}`; } diff --git a/src/http/index.ts b/src/http/index.ts index cb44dd566..d740b7854 100644 --- a/src/http/index.ts +++ b/src/http/index.ts @@ -1,25 +1,7 @@ -export { Endpoint } from "./endpoint"; -export { EndpointResponse } from "./baseEndpoint"; -export { - STANDARD_API_OWNER, - API_KEY_ENVVAR_NAME, - ApiSettings, -} from "./apiSettings"; -export { - MindeeHttpError, - MindeeHttp400Error, - MindeeHttp401Error, - MindeeHttp403Error, - MindeeHttp404Error, - MindeeHttp413Error, - MindeeHttp429Error, - MindeeHttp500Error, - MindeeHttp504Error, - handleError, -} from "./error"; +export type { BaseHttpResponse } from "./apiCore.js"; export { isValidSyncResponse, isValidAsyncResponse, cleanRequestData, -} from "./responseValidation"; -export { PredictParams, WorkflowParams } from "./httpParams"; +} from "../v1/http/responseValidation.js"; +export { BaseSettings } from 
"./baseSettings.js"; diff --git a/src/http/mindeeApiV2.ts b/src/http/mindeeApiV2.ts deleted file mode 100644 index b918b9f2d..000000000 --- a/src/http/mindeeApiV2.ts +++ /dev/null @@ -1,166 +0,0 @@ -import { ApiSettingsV2 } from "./apiSettingsV2"; -import { InferenceParameters } from "../clientV2"; -import { ErrorResponse, InferenceResponse, JobResponse } from "../parsing/v2"; -import FormData from "form-data"; -import { RequestOptions } from "https"; -import { BaseEndpoint, EndpointResponse } from "./baseEndpoint"; -import { InputSource, LocalInputSource, UrlInput } from "../input"; -import { MindeeApiV2Error, MindeeHttpErrorV2 } from "../errors/mindeeError"; -import { logger } from "../logger"; - -export class MindeeApiV2 { - settings: ApiSettingsV2; - - constructor(apiKey?: string) { - this.settings = new ApiSettingsV2({ apiKey: apiKey }); - } - - /** - * Sends a file to the inference queue. - * @param inputSource Local file loaded as an input. - * @param params {InferenceParameters} parameters relating to the enqueueing options. - * @category V2 - * @throws Error if the server's response contains one. - * @returns a `Promise` containing a job response. - */ - async reqPostInferenceEnqueue(inputSource: InputSource, params: InferenceParameters): Promise { - await inputSource.init(); - if (params.modelId === undefined || params.modelId === null || params.modelId === "") { - throw new Error("Model ID must be provided"); - } - const result: EndpointResponse = await this.#documentEnqueuePost(inputSource, params); - if (result.data.error !== undefined) { - throw new MindeeHttpErrorV2(result.data.error); - } - return this.#processResponse(result, JobResponse); - } - - - /** - * Requests the job of a queued document from the API. - * Throws an error if the server's response contains one. - * @param inferenceId The document's ID in the queue. - * @category Asynchronous - * @returns a `Promise` containing either the parsed result, or information on the queue. 
- */ - async reqGetInference(inferenceId: string): Promise { - const queueResponse: EndpointResponse = await this.#inferenceResultReqGet(inferenceId, "inferences"); - return this.#processResponse(queueResponse, InferenceResponse); - } - - /** - * Requests the results of a queued document from the API. - * Throws an error if the server's response contains one. - * @param jobId The document's ID in the queue. - * @category Asynchronous - * @returns a `Promise` containing information on the queue. - */ - async reqGetJob(jobId: string): Promise { - const queueResponse: EndpointResponse = await this.#inferenceResultReqGet(jobId, "jobs"); - return this.#processResponse(queueResponse, JobResponse); - } - - #processResponse - (result: EndpointResponse, responseType: new (data: { [key: string]: any; }) => T): T { - if (result.messageObj?.statusCode && (result.messageObj?.statusCode > 399 || result.messageObj?.statusCode < 200)) { - if (result.data?.status !== null) { - throw new MindeeHttpErrorV2(new ErrorResponse(result.data)); - } - throw new MindeeHttpErrorV2( - new ErrorResponse( - { - status: result.messageObj?.statusCode ?? -1, - title: "Unknown Error", - detail: result.data?.detail ?? "The server returned an Unknown error.", - code: `${result.messageObj?.statusCode ?? -1}-000`, - } - ) - ); - } - try { - return new responseType(result.data); - } catch (e) { - logger.error(`Raised '${e}' Couldn't deserialize response object:\n${JSON.stringify(result.data)}`); - throw new MindeeApiV2Error("Couldn't deserialize response object."); - } - } - - /** - * Sends a document to the inference queue. - * - * @param inputSource Local or remote file as an input. - * @param params {InferenceParameters} parameters relating to the enqueueing options. 
- */ - #documentEnqueuePost( - inputSource: InputSource, - params: InferenceParameters - ): Promise { - const form = new FormData(); - - form.append("model_id", params.modelId); - if (params.rag !== undefined && params.rag !== null) { - form.append("rag", params.rag.toString()); - } - if (params.polygon !== undefined && params.polygon !== null) { - form.append("polygon", params.polygon.toString().toLowerCase()); - } - if (params.confidence !== undefined && params.confidence !== null) { - form.append("confidence", params.confidence.toString().toLowerCase()); - } - if (params.rawText !== undefined && params.rawText !== null) { - form.append("raw_text", params.rawText.toString().toLowerCase()); - } - if (params.textContext !== undefined && params.textContext !== null) { - form.append("text_context", params.textContext); - } - if (params.dataSchema !== undefined && params.dataSchema !== null) { - form.append("data_schema", params.dataSchema.toString()); - } - if (params.webhookIds && params.webhookIds.length > 0) { - form.append("webhook_ids", params.webhookIds.join(",")); - } - if (inputSource instanceof LocalInputSource) { - form.append("file", inputSource.fileObject, { - filename: inputSource.filename, - }); - } else { - form.append("url", (inputSource as UrlInput).url); - } - const path = "/v2/inferences/enqueue"; - const headers = { ...this.settings.baseHeaders, ...form.getHeaders() }; - const options: RequestOptions = { - method: "POST", - headers: headers, - hostname: this.settings.hostname, - path: path, - timeout: this.settings.timeout, - }; - return new Promise((resolve, reject) => { - const req = BaseEndpoint.readResponse(options, resolve, reject); - form.pipe(req); - // potential ECONNRESET if we don't end the request. - req.end(); - }); - } - - /** - * Make a request to GET the status of a document in the queue. - * @param queueId ID of either the job or the inference. - * @param slug "jobs" or "inferences"... 
- * @category Asynchronous - * @returns a `Promise` containing either the parsed result, or information on the queue. - */ - #inferenceResultReqGet(queueId: string, slug: string): Promise { - return new Promise((resolve, reject) => { - const options = { - method: "GET", - headers: this.settings.baseHeaders, - hostname: this.settings.hostname, - path: `/v2/${slug}/${queueId}`, - }; - const req = BaseEndpoint.readResponse(options, resolve, reject); - // potential ECONNRESET if we don't end the request. - req.end(); - }); - } -} diff --git a/src/http/workflowEndpoint.ts b/src/http/workflowEndpoint.ts deleted file mode 100644 index 44e1a6cba..000000000 --- a/src/http/workflowEndpoint.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { BaseEndpoint, EndpointResponse } from "./baseEndpoint"; -import { ApiSettings } from "./apiSettings"; -import { InputSource, LocalInputSource } from "../input"; -import { URLSearchParams } from "url"; -import FormData from "form-data"; -import { RequestOptions } from "https"; -import { isValidSyncResponse } from "./responseValidation"; -import { handleError } from "./error"; - -import { WorkflowParams } from "./httpParams"; -import { ExecutionPriority } from "../parsing/common"; - -/** - * Endpoint for a workflow. - */ -export class WorkflowEndpoint extends BaseEndpoint { - constructor( - settings: ApiSettings, - workflowId: string - ) { - super(settings, `/v1/workflows/${workflowId}/executions`); - } - - - /** - * Sends a document to a workflow execution. - * Throws an error if the server's response contains one. - * @param {WorkflowParams} params parameters relating to prediction options. - * @category Synchronous - * @returns a `Promise` containing parsing results. 
- */ - async executeWorkflow(params: WorkflowParams): Promise { - await params.inputDoc.init(); - if (params.pageOptions !== undefined) { - await BaseEndpoint.cutDocPages(params.inputDoc, params.pageOptions); - } - const response = await this.#workflowReqPost(params); - if (!isValidSyncResponse(response)) { - handleError(this.urlRoot, response, response.messageObj?.statusMessage); - } - - return response; - } - - /** - * Make a request to POST a document for workflow. - * - * @param {WorkflowParams} params parameters relating to prediction options. - */ - #workflowReqPost(params: WorkflowParams): Promise { - return this.sendFileForPrediction( - params.inputDoc, - params.alias, - params.priority, - params.fullText, - params.publicUrl, - params.rag - ); - } - - /** - * Send a file to a prediction API. - * @param input - * @param alias - * @param priority - * @param fullText - * @param publicUrl - * @param rag - */ - protected sendFileForPrediction( - input: InputSource, - alias: string | null = null, - priority: ExecutionPriority | null = null, - fullText: boolean = false, - publicUrl: string | null = null, - rag: boolean | null = null, - ): Promise { - return new Promise((resolve, reject) => { - const searchParams = new URLSearchParams(); - - if (fullText) { - searchParams.append("full_text_ocr", "true"); - } - - if (rag) { - searchParams.append("rag", "true"); - } - - const form = new FormData(); - if (input instanceof LocalInputSource && input.fileObject instanceof Buffer) { - form.append("document", input.fileObject, { - filename: input.filename, - }); - } else { - form.append("document", input.fileObject); - } - - if (alias) { - form.append("alias", alias); - } - if (publicUrl) { - form.append("public_url", publicUrl); - } - if (priority) { - form.append("priority", priority.toString()); - } - const headers = { ...this.settings.baseHeaders, ...form.getHeaders() }; - - let path = this.urlRoot; - if (searchParams.toString().length > 0) { - path += 
`?${searchParams}`; - } - const options: RequestOptions = { - method: "POST", - headers: headers, - hostname: this.settings.hostname, - path: path, - timeout: this.settings.timeout, - }; - const req = BaseEndpoint.readResponse(options, resolve, reject); - form.pipe(req); - // potential ECONNRESET if we don't end the request. - req.end(); - }); - } -} diff --git a/src/imageOperations/common/extractedImage.ts b/src/image/extractedImage.ts similarity index 82% rename from src/imageOperations/common/extractedImage.ts rename to src/image/extractedImage.ts index 175dde35c..f2a267c35 100644 --- a/src/imageOperations/common/extractedImage.ts +++ b/src/image/extractedImage.ts @@ -1,12 +1,12 @@ import { Buffer } from "node:buffer"; -import { MindeeError } from "../../errors"; +import { MindeeError } from "@/errors/index.js"; import { writeFileSync } from "node:fs"; import path from "node:path"; -import { logger } from "../../logger"; -import { BufferInput } from "../../input"; -import { MIMETYPES } from "../../input/sources/localInputSource"; -import { Poppler } from "node-poppler"; +import { logger } from "@/logger.js"; +import { BufferInput, MIMETYPES } from "@/input/index.js"; +import type * as popplerTypes from "node-poppler"; import { writeFile } from "fs/promises"; +import { loadOptionalDependency } from "@/dependency/index.js"; /** * Generic class for image extraction @@ -15,7 +15,6 @@ export class ExtractedImage { public buffer: Buffer; protected internalFileName: string; - protected constructor(buffer: Uint8Array, fileName: string) { this.buffer = Buffer.from(buffer); this.internalFileName = fileName; @@ -35,7 +34,9 @@ export class ExtractedImage { try { let outputBuffer: Buffer = this.buffer; if (fileExt !== ".pdf") { - const poppler = new Poppler(); + const popplerImport = await loadOptionalDependency("node-poppler", "Image Processing"); + const poppler = (popplerImport as any).default || popplerImport; + const popplerInstance = new poppler.Poppler(); const 
options: Record = { firstPageToConvert: 1, lastPageToConvert: 1, @@ -50,7 +51,7 @@ export class ExtractedImage { options.tiffFile = true; } - const result = await poppler.pdfToCairo(this.buffer, undefined, options); + const result = await popplerInstance.pdfToCairo(this.buffer, undefined, options); outputBuffer = Buffer.from(result, "latin1"); } @@ -65,7 +66,6 @@ export class ExtractedImage { } } - /** * Attempts to saves the document to a file synchronously. * Throws an error if the file extension is not supported or if the file could not be saved to disk for some reason. @@ -92,7 +92,6 @@ export class ExtractedImage { } } - /** * Return the file as a Mindee-compatible BufferInput source. * diff --git a/src/imageOperations/imageCompressor.ts b/src/image/imageCompressor.ts similarity index 62% rename from src/imageOperations/imageCompressor.ts rename to src/image/imageCompressor.ts index df23e82c5..729a542ff 100644 --- a/src/imageOperations/imageCompressor.ts +++ b/src/image/imageCompressor.ts @@ -1,6 +1,10 @@ -import sharp from "sharp"; -import { Sharp, Metadata } from "sharp"; -import { MindeeImageError } from "../errors/mindeeError"; +import { loadOptionalDependency } from "@/dependency/index.js"; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import type * as SharpTypes from "sharp"; + +import { MindeeImageError } from "@/errors/index.js"; + /** * Compresses an image with the given parameters. 
@@ -16,9 +20,11 @@ export async function compressImage( maxWidth:number|null = null, maxHeight:number|null = null, ) { - let sharpImage: Sharp = sharp(imageBuffer); + const sharpLoaded = await loadOptionalDependency("sharp", "Image compression"); + const sharp = (sharpLoaded as any).default || sharpLoaded; + let sharpImage: SharpTypes.Sharp = sharp(imageBuffer); - const metadata: Metadata = await sharpImage.metadata(); + const metadata: SharpTypes.Metadata = await sharpImage.metadata(); if (metadata.width === undefined || metadata.height === undefined){ throw new MindeeImageError("Source image has invalid dimensions."); } diff --git a/src/imageOperations/common/imageExtractor.ts b/src/image/imageExtractor.ts similarity index 74% rename from src/imageOperations/common/imageExtractor.ts rename to src/image/imageExtractor.ts index a0dea5057..b6d9d3a34 100644 --- a/src/imageOperations/common/imageExtractor.ts +++ b/src/image/imageExtractor.ts @@ -1,6 +1,19 @@ -import { PDFDocument, PDFPage, degrees } from "@cantoo/pdf-lib"; -import { getMinMaxX, getMinMaxY, Polygon } from "../../geometry"; -import { adjustForRotation } from "../../geometry/polygonUtils"; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import type * as pdfLibTypes from "@cantoo/pdf-lib"; +import { getMinMaxX, getMinMaxY, Polygon } from "@/geometry/index.js"; +import { adjustForRotation } from "@/geometry/polygonUtils.js"; +import { loadOptionalDependency } from "@/dependency/index.js"; + +let pdfLib: typeof pdfLibTypes | null = null; + +async function getPdfLib(): Promise { + if (!pdfLib) { + const pdfLibImport = await loadOptionalDependency("@cantoo/pdf-lib", "Text Embedding"); + pdfLib = (pdfLibImport as any).default || pdfLibImport; + } + return pdfLib!; +} /** * Extracts elements from a page based off of a list of bounding boxes. 
@@ -9,8 +22,9 @@ import { adjustForRotation } from "../../geometry/polygonUtils"; * @param polygons List of coordinates to pull the elements from. */ export async function extractFromPage( - pdfPage: PDFPage, + pdfPage: pdfLibTypes.PDFPage, polygons: Polygon[]) { + const pdfLib = await getPdfLib(); const { width, height } = pdfPage.getSize(); const extractedElements :Uint8Array[] = []; // Manual upscale. @@ -21,7 +35,7 @@ export async function extractFromPage( for (const origPolygon of polygons) { const polygon = adjustForRotation(origPolygon, orientation); - const tempPdf = await PDFDocument.create(); + const tempPdf = await pdfLib.PDFDocument.create(); const newWidth = width * (getMinMaxX(polygon).max - getMinMaxX(polygon).min); const newHeight = height * (getMinMaxY(polygon).max - getMinMaxY(polygon).min); @@ -65,7 +79,7 @@ export async function extractFromPage( y: finalHeight, width: newWidth * qualityScale, height: newHeight * qualityScale, - rotate: degrees(270), + rotate: pdfLib.degrees(270), }); } else if (orientation === 180) { samplePage.drawPage(cropped, { @@ -73,7 +87,7 @@ export async function extractFromPage( y: finalHeight, width: newWidth * qualityScale, height: newHeight * qualityScale, - rotate: degrees(180), + rotate: pdfLib.degrees(180), }); } else if (orientation === 270) { samplePage.drawPage(cropped, { @@ -81,7 +95,7 @@ export async function extractFromPage( y: 0, width: newWidth * qualityScale, height: newHeight * qualityScale, - rotate: degrees(90), + rotate: pdfLib.degrees(90), }); } diff --git a/src/image/index.ts b/src/image/index.ts new file mode 100644 index 000000000..5ace9a21c --- /dev/null +++ b/src/image/index.ts @@ -0,0 +1,3 @@ +export { compressImage } from "./imageCompressor.js"; +export { ExtractedImage } from "./extractedImage.js"; +export { extractFromPage } from "./imageExtractor.js"; diff --git a/src/imageOperations/common/index.ts b/src/imageOperations/common/index.ts deleted file mode 100644 index ef207f84f..000000000 --- 
a/src/imageOperations/common/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { ExtractedImage } from "./extractedImage"; -export { extractFromPage } from "./imageExtractor"; diff --git a/src/imageOperations/index.ts b/src/imageOperations/index.ts deleted file mode 100644 index 573ec2ad7..000000000 --- a/src/imageOperations/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { extractReceipts, ExtractedMultiReceiptImage } from "./multiReceiptsExtractor"; -export { extractInvoices, ExtractedInvoiceSplitterImage } from "./invoiceSplitterExtractor"; -export { compressImage } from "./imageCompressor"; diff --git a/src/imageOperations/invoiceSplitterExtractor/index.ts b/src/imageOperations/invoiceSplitterExtractor/index.ts deleted file mode 100644 index ec4cf2140..000000000 --- a/src/imageOperations/invoiceSplitterExtractor/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { extractInvoices } from "./invoiceSplitterExtractor"; -export { ExtractedInvoiceSplitterImage } from "./extractedInvoiceSplitterImage"; diff --git a/src/imageOperations/multiReceiptsExtractor/index.ts b/src/imageOperations/multiReceiptsExtractor/index.ts deleted file mode 100644 index 9800ec307..000000000 --- a/src/imageOperations/multiReceiptsExtractor/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { extractReceipts } from "./multiReceiptsExtractor"; -export { ExtractedMultiReceiptImage } from "./extractedMultiReceiptImage"; diff --git a/src/index.ts b/src/index.ts index e1db59180..f39c7949b 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,21 +1,3 @@ -export * as product from "./product"; -export { Client, PredictOptions, WorkflowOptions } from "./client"; -export { ClientV2, InferenceParameters, PollingOptions } from "./clientV2"; -export { - AsyncPredictResponse, - PredictResponse, - Inference, - Prediction, - Document, - Page, -} from "./parsing/common"; -export { - InferenceFile, - InferenceResponse, - JobResponse, - RawText, - RagMetadata, -} from "./parsing/v2"; export { InputSource, Base64Input, @@ 
-24,9 +6,21 @@ export { PathInput, StreamInput, UrlInput, - PageOptions, PageOptionsOperation, - LocalResponse -} from "./input"; -export * as internal from "./internal"; -export * as imageOperations from "./imageOperations"; +} from "./input/index.js"; +export type { PageOptions } from "./input/index.js"; +export * as image from "./image/index.js"; + +// V1 +export * as v1 from "./v1/index.js"; + +// V2 +export * as v2 from "./v2/index.js"; +export { + Client, + InferenceFile, + InferenceModel, + JobResponse, + ErrorResponse, +} from "./v2/index.js"; +export type { PollingOptions } from "./v2/index.js"; diff --git a/src/input/sources/base64Input.ts b/src/input/base64Input.ts similarity index 82% rename from src/input/sources/base64Input.ts rename to src/input/base64Input.ts index c4ce6c110..d8c67fc3f 100644 --- a/src/input/sources/base64Input.ts +++ b/src/input/base64Input.ts @@ -1,6 +1,6 @@ -import { LocalInputSource } from "./localInputSource"; -import { INPUT_TYPE_BASE64 } from "./inputSource"; -import { logger } from "../../logger"; +import { LocalInputSource } from "./localInputSource.js"; +import { INPUT_TYPE_BASE64 } from "./inputSource.js"; +import { logger } from "@/logger.js"; interface Base64InputProps { inputString: string; diff --git a/src/input/sources/bufferInput.ts b/src/input/bufferInput.ts similarity index 77% rename from src/input/sources/bufferInput.ts rename to src/input/bufferInput.ts index aed8908ec..f3cd71354 100644 --- a/src/input/sources/bufferInput.ts +++ b/src/input/bufferInput.ts @@ -1,6 +1,6 @@ -import { LocalInputSource } from "./localInputSource"; -import { INPUT_TYPE_BUFFER } from "./inputSource"; -import { logger } from "../../logger"; +import { LocalInputSource } from "./localInputSource.js"; +import { INPUT_TYPE_BUFFER } from "./inputSource.js"; +import { logger } from "@/logger.js"; interface BufferInputProps { buffer: Buffer; diff --git a/src/input/sources/bytesInput.ts b/src/input/bytesInput.ts similarity index 81% rename 
from src/input/sources/bytesInput.ts rename to src/input/bytesInput.ts index d61eab469..3387b34cd 100644 --- a/src/input/sources/bytesInput.ts +++ b/src/input/bytesInput.ts @@ -1,6 +1,6 @@ -import { INPUT_TYPE_BYTES } from "./inputSource"; -import { LocalInputSource } from "./localInputSource"; -import { logger } from "../../logger"; +import { INPUT_TYPE_BYTES } from "./inputSource.js"; +import { LocalInputSource } from "./localInputSource.js"; +import { logger } from "@/logger.js"; interface BytesInputProps { inputBytes: Uint8Array; diff --git a/src/input/index.ts b/src/input/index.ts index 3b8e1e66b..00baaa60d 100644 --- a/src/input/index.ts +++ b/src/input/index.ts @@ -1,4 +1,18 @@ -export { DataSchema, DataSchemaField, DataSchemaReplace } from "./dataSchema"; -export * from "./sources"; -export { LocalResponse } from "./localResponse"; -export { PageOptions, PageOptionsOperation } from "./pageOptions"; +export { Base64Input } from "./base64Input.js"; +export { BufferInput } from "./bufferInput.js"; +export { BytesInput } from "./bytesInput.js"; +export { + InputSource, + INPUT_TYPE_PATH, + INPUT_TYPE_STREAM, + INPUT_TYPE_BUFFER, + INPUT_TYPE_BASE64, + INPUT_TYPE_BYTES +} from "./inputSource.js"; +export type { InputConstructor } from "./inputSource.js"; +export { LocalInputSource, MIMETYPES } from "./localInputSource.js"; +export { PathInput } from "./pathInput.js"; +export { StreamInput } from "./streamInput.js"; +export { UrlInput } from "./urlInput.js"; +export { PageOptionsOperation } from "./pageOptions.js"; +export type { PageOptions } from "./pageOptions.js"; diff --git a/src/input/sources/inputSource.ts b/src/input/inputSource.ts similarity index 84% rename from src/input/sources/inputSource.ts rename to src/input/inputSource.ts index 5513e38d4..8ad78358e 100644 --- a/src/input/sources/inputSource.ts +++ b/src/input/inputSource.ts @@ -1,3 +1,5 @@ +import { MindeeInputSourceError } from "@/errors/index.js"; + /** * @param {string} inputType - the type of 
input used in file ("base64", "path", "dummy"). * NB: dummy is only used for tests purposes @@ -17,7 +19,7 @@ export abstract class InputSource { protected initialized: boolean = false; async init() { - throw new Error("not Implemented"); + throw new MindeeInputSourceError("not Implemented"); } public isInitialized() { diff --git a/src/input/sources/localInputSource.ts b/src/input/localInputSource.ts similarity index 83% rename from src/input/sources/localInputSource.ts rename to src/input/localInputSource.ts index 203b6c39e..2a0e0dea9 100644 --- a/src/input/sources/localInputSource.ts +++ b/src/input/localInputSource.ts @@ -1,11 +1,11 @@ -import { errorHandler } from "../../errors/handler"; -import { logger } from "../../logger"; -import { compressImage } from "../../imageOperations"; -import { compressPdf, countPages } from "../../pdf"; import path from "path"; -import * as fileType from "file-type"; -import { PageOptions } from "../pageOptions"; -import { extractPages, hasSourceText } from "../../pdf"; +import { MindeeInputSourceError } from "@/errors/index.js"; +import { errorHandler } from "@/errors/handler.js"; +import { logger } from "@/logger.js"; +import { compressImage } from "@/image/index.js"; +import { compressPdf, countPages, extractPages, hasSourceText } from "@/pdf/index.js"; +import { fileTypeFromBuffer } from "file-type"; +import { PageOptions } from "../input/pageOptions.js"; import { InputSource, InputConstructor, @@ -13,7 +13,7 @@ import { INPUT_TYPE_BASE64, INPUT_TYPE_BYTES, INPUT_TYPE_PATH, INPUT_TYPE_BUFFER -} from "./inputSource"; +} from "./inputSource.js"; export const MIMETYPES = new Map([ [".pdf", "application/pdf"], @@ -25,6 +25,7 @@ export const MIMETYPES = new Map([ [".tiff", "image/tiff"], [".webp", "image/webp"], ]); + const ALLOWED_INPUT_TYPES = [ INPUT_TYPE_STREAM, INPUT_TYPE_BASE64, @@ -49,16 +50,16 @@ export abstract class LocalInputSource extends InputSource { if (!ALLOWED_INPUT_TYPES.includes(inputType)) { const allowed = 
Array.from(ALLOWED_INPUT_TYPES.keys()).join(", "); errorHandler.throw( - new Error(`Invalid input type, must be one of ${allowed}.`) + new MindeeInputSourceError(`Invalid input type, must be one of ${allowed}.`) ); } this.inputType = inputType; - logger.debug(`Loading file from: ${inputType}`); + logger.debug(`Initialized local input source of type: ${inputType}`); } protected async checkMimetype(): Promise { if (!(this.fileObject instanceof Buffer)) { - throw new Error( + throw new MindeeInputSourceError( `MIME type cannot be verified on input source of type ${this.inputType}.` ); } @@ -67,7 +68,7 @@ export abstract class LocalInputSource extends InputSource { if (fileExt) { mimeType = MIMETYPES.get(fileExt.toLowerCase()) || ""; } else { - const guess = await fileType.fromBuffer(this.fileObject); + const guess = await fileTypeFromBuffer(this.fileObject); if (guess !== undefined) { mimeType = guess.mime; } else { @@ -76,7 +77,9 @@ export abstract class LocalInputSource extends InputSource { } if (!mimeType) { const allowed = Array.from(MIMETYPES.keys()).join(", "); - const err = new Error(`Invalid file type, must be one of ${allowed}.`); + const err = new MindeeInputSourceError( + `Invalid file type, must be one of ${allowed}.` + ); errorHandler.throw(err); } logger.debug(`File is of type: ${mimeType}`); @@ -101,7 +104,7 @@ export abstract class LocalInputSource extends InputSource { */ isPdf(): boolean { if (!this.initialized) { - throw new Error( + throw new MindeeInputSourceError( "The `init()` method must be called before calling `isPdf()`." ); } @@ -119,15 +122,6 @@ export abstract class LocalInputSource extends InputSource { this.fileObject = processedPdf.file; } - /** - * Cut PDF pages. - * @param pageOptions - * @deprecated Deprecated in favor of {@link LocalInputSource.applyPageOptions}. - */ - async cutPdf(pageOptions: PageOptions) { - return this.applyPageOptions(pageOptions); - } - /** * Compresses the file object, either as a PDF or an image. 
* diff --git a/src/input/pageOptions.ts b/src/input/pageOptions.ts index 172774677..1781c9369 100644 --- a/src/input/pageOptions.ts +++ b/src/input/pageOptions.ts @@ -1,4 +1,3 @@ -/* eslint-disable @typescript-eslint/naming-convention */ /** * Options to pass to the `parse` method for cutting multi-page documents. */ @@ -8,13 +7,13 @@ export interface PageOptions { * * A negative index can be used, indicating an offset from the end of the document. * - * [0, -1] represents the fist and last pages of the document. + * [0, -1] represents the first and last pages of the document. */ pageIndexes: number[]; /** Operation to apply on the document, given the `pageIndexes` specified. */ operation: PageOptionsOperation; /** - * Apply the operation only if document has at least this many pages. + * Apply the operation only if the document has at least this many pages. */ onMinPages: number; } @@ -22,6 +21,7 @@ export interface PageOptions { /** * Operation to apply on the document, given the page indexes specified. */ +/* eslint-disable @typescript-eslint/naming-convention */ export enum PageOptionsOperation { /** Only keep pages matching the provided indexes. 
*/ KeepOnly = "KEEP_ONLY", diff --git a/src/input/sources/pathInput.ts b/src/input/pathInput.ts similarity index 82% rename from src/input/sources/pathInput.ts rename to src/input/pathInput.ts index 7c39244d1..c6f058510 100644 --- a/src/input/sources/pathInput.ts +++ b/src/input/pathInput.ts @@ -1,7 +1,7 @@ -import { INPUT_TYPE_PATH } from "./inputSource"; -import { LocalInputSource } from "./localInputSource"; +import { INPUT_TYPE_PATH } from "./inputSource.js"; +import { LocalInputSource } from "./localInputSource.js"; import path from "path"; -import { logger } from "../../logger"; +import { logger } from "@/logger.js"; import { promises as fs } from "fs"; interface PathInputProps { diff --git a/src/input/sources/index.ts b/src/input/sources/index.ts deleted file mode 100644 index 1f9b59b7a..000000000 --- a/src/input/sources/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -export { Base64Input } from "./base64Input"; -export { BufferInput } from "./bufferInput"; -export { BytesInput } from "./bytesInput"; -export { - InputSource, - INPUT_TYPE_PATH, - INPUT_TYPE_STREAM, - InputConstructor, - INPUT_TYPE_BUFFER, - INPUT_TYPE_BASE64, - INPUT_TYPE_BYTES -} from "./inputSource"; -export { LocalInputSource } from "./localInputSource"; -export { PathInput } from "./pathInput"; -export { StreamInput } from "./streamInput"; -export { UrlInput } from "./urlInput"; diff --git a/src/input/sources/streamInput.ts b/src/input/sources/streamInput.ts deleted file mode 100644 index 71037612c..000000000 --- a/src/input/sources/streamInput.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { Readable } from "stream"; -import { LocalInputSource } from "./localInputSource"; -import { INPUT_TYPE_STREAM } from "./inputSource"; -import { logger } from "../../logger"; -import { MindeeError } from "../../errors"; - -interface StreamInputProps { - inputStream: Readable; - filename: string; -} - -export class StreamInput extends LocalInputSource { - private readonly inputStream: Readable; - fileObject: 
Buffer = Buffer.alloc(0); - - constructor({ inputStream, filename }: StreamInputProps) { - super({ - inputType: INPUT_TYPE_STREAM, - }); - this.filename = filename; - this.inputStream = inputStream; - } - - async init() { - if (this.initialized) { - return; - } - logger.debug("Loading from stream"); - this.fileObject = await this.stream2buffer(this.inputStream); - this.mimeType = await this.checkMimetype(); - this.initialized = true; - } - - async stream2buffer(stream: Readable): Promise { - return new Promise((resolve, reject) => { - if (stream.closed || stream.destroyed) { - return reject(new MindeeError("Stream is already closed")); - } - - const _buf: Buffer[] = []; - stream.pause(); - stream.on("data", (chunk) => _buf.push(chunk)); - stream.on("end", () => resolve(Buffer.concat(_buf))); - stream.on("error", (err) => reject(new Error(`Error converting stream - ${err}`))); - stream.resume(); - }); - } -} diff --git a/src/input/streamInput.ts b/src/input/streamInput.ts new file mode 100644 index 000000000..5d5fea40c --- /dev/null +++ b/src/input/streamInput.ts @@ -0,0 +1,68 @@ +import { Readable } from "stream"; +import { LocalInputSource } from "./localInputSource.js"; +import { INPUT_TYPE_STREAM } from "./inputSource.js"; +import { logger } from "@/logger.js"; +import { MindeeInputSourceError } from "@/errors/index.js"; + +interface StreamInputProps { + inputStream: Readable; + filename: string; +} + +export class StreamInput extends LocalInputSource { + private readonly inputStream: Readable; + fileObject: Buffer = Buffer.alloc(0); + + constructor({ inputStream, filename }: StreamInputProps) { + super({ + inputType: INPUT_TYPE_STREAM, + }); + this.filename = filename; + this.inputStream = inputStream; + } + + async init() { + if (this.initialized) { + return; + } + logger.debug("Loading from stream"); + this.fileObject = await this.stream2buffer(this.inputStream); + this.mimeType = await this.checkMimetype(); + this.initialized = true; + } + + async 
stream2buffer(stream: Readable, signal?: AbortSignal): Promise { + return new Promise((resolve, reject) => { + if (stream.closed || stream.destroyed) { + return reject(new MindeeInputSourceError("Stream is already closed")); + } + + if (signal?.aborted) { + return reject(new MindeeInputSourceError("Operation aborted")); + } + const onAbort = () => { + stream.destroy(); + reject(new MindeeInputSourceError("Operation aborted")); + }; + if (signal) { + signal.addEventListener("abort", onAbort, { once: true }); + } + const cleanup = () => { + signal?.removeEventListener("abort", onAbort); + }; + + const _buf: Buffer[] = []; + stream.pause(); + stream.on("data", (chunk) => _buf.push(chunk)); + stream.on("end", () => { + cleanup(); + resolve(Buffer.concat(_buf)); + }); + stream.on("error", (err) => { + cleanup(); + reject(new MindeeInputSourceError(`Error converting stream - ${err}`)); + }); + stream.resume(); + }); + } +} diff --git a/src/input/sources/urlInput.ts b/src/input/urlInput.ts similarity index 74% rename from src/input/sources/urlInput.ts rename to src/input/urlInput.ts index 708fcd16c..b205d7860 100644 --- a/src/input/sources/urlInput.ts +++ b/src/input/urlInput.ts @@ -1,19 +1,24 @@ -import { InputSource } from "./inputSource"; +import { InputSource } from "./inputSource.js"; import { URL } from "url"; import { basename, extname } from "path"; import { randomBytes } from "crypto"; import { writeFile } from "fs/promises"; -import { request as httpsRequest } from "https"; -import { IncomingMessage } from "http"; -import { BytesInput } from "./bytesInput"; -import { logger } from "../../logger"; +import { request, Dispatcher, getGlobalDispatcher } from "undici"; +import { logger } from "@/logger.js"; +import { MindeeInputSourceError } from "@/errors/index.js"; +import { BytesInput } from "./bytesInput.js"; export class UrlInput extends InputSource { public readonly url: string; + public readonly dispatcher; - constructor({ url }: { url: string }) { + 
constructor( + { url, dispatcher }: { url: string, dispatcher?: Dispatcher } + ) { super(); this.url = url; + this.dispatcher = dispatcher ?? getGlobalDispatcher(); + logger.debug("Initialized URL input source."); } async init() { @@ -22,7 +27,7 @@ export class UrlInput extends InputSource { } logger.debug(`source URL: ${this.url}`); if (!this.url.toLowerCase().startsWith("https")) { - throw new Error("URL must be HTTPS"); + throw new MindeeInputSourceError("URL must be HTTPS"); } this.fileObject = this.url; this.initialized = true; @@ -102,7 +107,6 @@ export class UrlInput extends InputSource { UrlInput.getFileExtension(filename || "") || undefined ); } - return filename; } @@ -114,38 +118,36 @@ export class UrlInput extends InputSource { maxRedirects: number ): Promise<{ content: Buffer; finalUrl: string }> { const parsedUrl = new URL(url); - const options = { - hostname: parsedUrl.hostname, - path: parsedUrl.pathname + parsedUrl.search, - method: "GET", - headers: headers, - auth: auth, - }; - - const response = await new Promise((resolve, reject) => { - const req = httpsRequest(options, resolve); - req.on("error", reject); - req.end(); - }); + + const response = await request( + parsedUrl, + { + method: "GET", + headers: headers, + throwOnError: false, + dispatcher: this.dispatcher, + } + ); if (response.statusCode && response.statusCode >= 300 && response.statusCode < 400) { + logger.debug(`Redirecting to: ${response.headers.location}`); if (redirects === maxRedirects) { - throw new Error(`Can't reach URL after ${redirects} out of ${maxRedirects} redirects, aborting operation.`); + throw new MindeeInputSourceError( + `Can't reach URL after ${redirects} out of ${maxRedirects} redirects, aborting operation.` + ); } if (response.headers.location) { - return await this.makeRequest(response.headers.location, auth, headers, redirects + 1, maxRedirects); + return await this.makeRequest( + response.headers.location.toString(), auth, headers, redirects + 1, 
maxRedirects + ); } - throw new Error("Redirect location not found"); + throw new MindeeInputSourceError("Redirect location not found"); } if (!response.statusCode || response.statusCode >= 400 || response.statusCode < 200) { throw new Error(`Couldn't retrieve file from server, error code ${response.statusCode}.`); } - - const chunks: Buffer[] = []; - for await (const chunk of response) { - chunks.push(chunk); - } - return { content: Buffer.concat(chunks), finalUrl: url }; + const arrayBuffer = await response.body.arrayBuffer(); + return { content: Buffer.from(arrayBuffer), finalUrl: url }; } } diff --git a/src/internal.ts b/src/internal.ts deleted file mode 100644 index bb7ffc12f..000000000 --- a/src/internal.ts +++ /dev/null @@ -1,7 +0,0 @@ -export * as input from "./input"; -export * as geometry from "./geometry"; -export * as http from "./http"; -export * as imageOperations from "./imageOperations/internal"; -export * as parsing from "./parsing"; -export * as pdf from "./pdf"; -export * as product from "./product/internal"; diff --git a/src/parsing/common/extras/index.ts b/src/parsing/common/extras/index.ts deleted file mode 100644 index c2485ff04..000000000 --- a/src/parsing/common/extras/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { CropperExtra } from "./cropperExtra"; -export { Extras } from "./extras"; -export { FullTextOcrExtra } from "./fullTextOcrExtra"; diff --git a/src/parsing/common/index.ts b/src/parsing/common/index.ts deleted file mode 100644 index 914cf4ccd..000000000 --- a/src/parsing/common/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -export { Document } from "./document"; -export { Execution } from "./execution"; -export { ExecutionFile } from "./executionFile"; -export { ExecutionPriority } from "./executionPriority"; -export { Inference } from "./inference"; -export { FeedbackResponse } from "./feedback/feedbackResponse"; -export { OrientationField } from "./orientation"; -export { StringDict } from "./stringDict"; -export { 
AsyncPredictResponse } from "./asyncPredictResponse"; -export { PredictResponse } from "./predictResponse"; -export { Prediction } from "./prediction"; -export { Page } from "./page"; -export { cleanOutString, lineSeparator } from "./summaryHelper"; -export * as extras from "./extras"; -export { floatToString, cleanSpecialChars } from "./summaryHelper"; -export { parseDate } from "./dateParser"; diff --git a/src/parsing/custom/classificationField.ts b/src/parsing/custom/classificationField.ts deleted file mode 100644 index a4452b5f2..000000000 --- a/src/parsing/custom/classificationField.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { StringDict } from "../common"; - -export class ClassificationField { - /** The value for the classification. */ - value: string; - /** - * The confidence score of the prediction. - * Note: Score is calculated on **word selection**, not its textual content (OCR). - */ - confidence: number; - pageId?: number; - constructor({ - prediction, - pageId, - }: { - prediction: StringDict; - pageId?: number; - }) { - this.value = prediction["value"]; - this.confidence = prediction["confidence"]; - this.pageId ??= pageId; - } - - /** - * Default string representation. 
- */ - toString(): string { - return `${this.value}`; - } -} diff --git a/src/parsing/custom/index.ts b/src/parsing/custom/index.ts deleted file mode 100644 index 21f655374..000000000 --- a/src/parsing/custom/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { CustomLine, CustomLines, getLineItems } from "./lineItems"; -export { ClassificationField } from "./classificationField"; -export { ListField, ListFieldValue } from "./listField"; diff --git a/src/parsing/custom/lineItems.ts b/src/parsing/custom/lineItems.ts deleted file mode 100644 index 89a9121b4..000000000 --- a/src/parsing/custom/lineItems.ts +++ /dev/null @@ -1,198 +0,0 @@ -import { errorHandler } from "../../errors/handler"; -import { MindeeError } from "../../errors"; -import { - BBox, - Polygon, - getBbox, - getBBoxForPolygons, - getMinMaxY, - mergeBbox, - getBoundingBoxFromBBox, - MinMax, -} from "../../geometry"; -import { ListField, ListFieldValue } from "./listField"; - -export class CustomLine { - /** - * Number of the current line. - * Starts at 1. - */ - rowNumber: number; - /** - * List of the fields associated to the current line, identified by their column name. - */ - fields: Map; - /** - * The BBox of the entire line, all fields included. - */ - bbox: BBox; - - constructor(rowNumber: number) { - this.rowNumber = rowNumber; - this.bbox = new BBox(1, 1, 0, 0); - this.fields = new Map(); - } - - /** - * Extends the current bbox of the line with the bbox. - */ - extendWithBbox(bbox: BBox): void { - this.bbox = mergeBbox(this.bbox, bbox); - } - - /** - * Extends the current bbox of the line with the polygon. 
- */ - extendWith(polygon: Polygon): void { - this.bbox = mergeBbox(this.bbox, getBbox(polygon)); - } - - updateField(name: string, fieldValue: ListFieldValue): void { - if (!this.fields.has(name)) { - this.fields.set(name, fieldValue); - } else { - const existingField = this.fields.get(name); - - if (existingField === undefined) { - errorHandler.throw( - new MindeeError(`The field '${name}' should exist but was not found.`) - ); - return; - } - - const mergedContent = - existingField?.content === undefined - ? fieldValue.content - : existingField.content + " " + fieldValue.content; - - const mergedBbox = getBBoxForPolygons([ - existingField.polygon, - fieldValue.polygon, - ]); - - this.fields.set( - name, - new ListFieldValue({ - content: mergedContent, - confidence: existingField.confidence * fieldValue.confidence, - polygon: getBoundingBoxFromBBox(mergedBbox), - }) - ); - } - } -} - -export class CustomLines extends Array {} - -/** - * Get line items from fields. - */ -export function getLineItems( - anchorNames: string[], - fieldNames: string[], - fields: Map, - heightLineTolerance: number, -): CustomLines { - const fieldsToTransformIntoLines: Map = new Map( - [...fields].filter(([k]) => fieldNames.includes(k)) - ); - - const anchorName: string = findBestAnchor(anchorNames, fieldsToTransformIntoLines); - const linesPrepared: CustomLine[] = prepare( - anchorName, - fieldsToTransformIntoLines, - heightLineTolerance - ); - - linesPrepared.forEach((currentLine) => { - fieldsToTransformIntoLines.forEach((field, fieldName) => { - field.values.forEach((listFieldValue) => { - const minMaxY: MinMax = getMinMaxY(listFieldValue.polygon); - if ( - Math.abs(minMaxY.max - currentLine.bbox.yMax) <= heightLineTolerance && - Math.abs(minMaxY.min - currentLine.bbox.yMin) <= heightLineTolerance - ) { - currentLine.updateField(fieldName, listFieldValue); - } - }); - }); - }); - return linesPrepared; -} - -/** - * Loop through the possible anchor fields and find the one with the 
most values. - */ -function findBestAnchor( - possibleAnchorNames: string[], - fields: Map -): string { - let anchorName = ""; - let anchorRows = 0; - - possibleAnchorNames.forEach((fieldName) => { - const fieldValues: ListFieldValue[]|undefined = fields.get(fieldName)?.values; - if (fieldValues !== undefined && fieldValues.length > anchorRows) { - anchorRows = fieldValues.length; - anchorName = fieldName; - } - }); - - if (anchorName === "") { - errorHandler.throw(new MindeeError("No anchor was found.")); - } - - return anchorName; -} - -/** - * Check if the bbox fits inside the line. - */ -function isBboxInLine(line: CustomLine, bbox: BBox, heightTolerance: number): boolean { - if (Math.abs(bbox.yMin - line.bbox.yMin) <= heightTolerance) { - return true; - } - return Math.abs(line.bbox.yMin - bbox.yMin) <= heightTolerance; -} - -function prepare( - anchorName: string, - fields: Map, - heightLineTolerance: number -): CustomLine[] { - const linesPrepared: CustomLine[] = []; - - const anchorField = fields.get(anchorName); - if (anchorField === undefined || anchorField.values.length === 0) { - errorHandler.throw(new MindeeError("No lines have been detected.")); - } - - let currentLineNumber: number = 1; - let currentLine: CustomLine = new CustomLine(currentLineNumber); - - if (anchorField !== undefined) { - let currentValue: ListFieldValue = anchorField.values[0]; - currentLine.extendWith(currentValue.polygon); - - for (let index = 1; index < anchorField.values.length; index++) { - currentValue = anchorField.values[index]; - const currentFieldBbox = getBbox(currentValue.polygon); - - if (!isBboxInLine(currentLine, currentFieldBbox, heightLineTolerance)) { - linesPrepared.push(currentLine); - currentLineNumber++; - currentLine = new CustomLine(currentLineNumber); - } - currentLine.extendWithBbox(currentFieldBbox); - } - - if ( - linesPrepared.filter((line) => line.rowNumber === currentLineNumber) - .length === 0 - ) { - linesPrepared.push(currentLine); - } - } - - 
return linesPrepared; -} diff --git a/src/parsing/custom/listField.ts b/src/parsing/custom/listField.ts deleted file mode 100644 index f961fa21a..000000000 --- a/src/parsing/custom/listField.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { BaseFieldConstructor } from "../standard"; -import { Polygon, getBoundingBox, BoundingBox } from "../../geometry"; -import { StringDict } from "../common"; - -export class ListFieldValue { - /** Extracted content of the prediction */ - content: string; - /** - * The confidence score of the prediction. - * Note: Score is calculated on **word selection**, not its textual content (OCR). - */ - confidence: number; - /** - * Contains exactly 4 relative vertices coordinates (points) of a right - * rectangle containing the word in the document. - */ - bbox?: BoundingBox; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the word in the document. - */ - polygon: Polygon = new Polygon(); - /** The document page on which the information was found. */ - pageId?: number; - - constructor(prediction: StringDict, pageId?: number) { - this.content = prediction["content"]; - this.confidence = prediction["confidence"]; - if (prediction["polygon"]) { - this.polygon = prediction["polygon"]; - this.bbox = getBoundingBox(prediction["polygon"]); - } - if (pageId !== undefined) { - this.pageId = pageId; - } - } - - /** - * Default string representation. - */ - toString(): string { - return `${this.content}`; - } -} - -export class ListField { - readonly values: ListFieldValue[]; - confidence: number; - /** True if the field was reconstructed or computed using other fields. */ - reconstructed: boolean; - - /** - * @param {BaseFieldConstructor} constructor Constructor parameters. 
- */ - constructor({ - prediction = {}, - reconstructed = false, - pageId, - }: BaseFieldConstructor) { - this.values = []; - this.confidence = prediction["confidence"]; - this.reconstructed = reconstructed; - - if (prediction["values"] !== undefined) { - prediction["values"].forEach((field: StringDict) => { - if (pageId === undefined) { - pageId = field["page_id"]; - } - this.values.push(new ListFieldValue(field, pageId)); - }); - } - } - - contentsList(): Array { - return this.values.map((item) => item.content); - } - - contentsString(separator: string = " "): string { - return this.values.map((item) => `${item.content}`).join(separator); - } - - /** - * Default string representation. - */ - toString(): string { - return this.contentsString(); - } -} diff --git a/src/parsing/common/dateParser.ts b/src/parsing/dateParser.ts similarity index 100% rename from src/parsing/common/dateParser.ts rename to src/parsing/dateParser.ts diff --git a/src/parsing/generated/index.ts b/src/parsing/generated/index.ts deleted file mode 100644 index 258cd3028..000000000 --- a/src/parsing/generated/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { GeneratedObjectField, isGeneratedObject } from "./generatedObject"; -export { GeneratedListField } from "./generatedList"; diff --git a/src/parsing/index.ts b/src/parsing/index.ts index 6960d97be..a7add5bcf 100644 --- a/src/parsing/index.ts +++ b/src/parsing/index.ts @@ -1,5 +1,2 @@ -export * as common from "./common"; -export * as custom from "./custom"; -export * as standard from "./standard"; -export * as generated from "./generated"; -export * as v2 from "./v2"; +export type { StringDict } from "./stringDict.js"; +export { parseDate } from "./dateParser.js"; diff --git a/src/input/localResponse.ts b/src/parsing/localResponseBase.ts similarity index 72% rename from src/input/localResponse.ts rename to src/parsing/localResponseBase.ts index c13ca8038..d1f6cfa10 100644 --- a/src/input/localResponse.ts +++ b/src/parsing/localResponseBase.ts 
@@ -1,15 +1,14 @@ import * as crypto from "crypto"; import * as fs from "node:fs/promises"; -import { StringDict } from "../parsing/common"; -import { MindeeError } from "../errors"; +import { StringDict } from "@/parsing/stringDict.js"; +import { MindeeError } from "../errors/index.js"; import { Buffer } from "buffer"; -import { CommonResponse } from "../parsing/v2"; /** * Local response loaded from a file. * Note: Has to be initialized through init() before use. */ -export class LocalResponse { +export abstract class LocalResponseBase { private file: Buffer; private readonly inputHandle: Buffer | string; protected initialized = false; @@ -26,6 +25,9 @@ export class LocalResponse { /** * @param inputFile - The input file, which can be a Buffer, string, or PathLike. */ + if (this.initialized) { + return; + } if (Buffer.isBuffer(this.inputHandle)) { this.file = this.inputHandle; } else if (typeof this.inputHandle === "string") { @@ -60,7 +62,7 @@ export class LocalResponse { } /** - * Returns the HMAC signature of the local response, from the secret key provided. + * Returns the HMAC signature of the local response from the secret key provided. * @param secretKey - Secret key, either a string or a byte/byte array. * @returns The HMAC signature of the local response. */ @@ -94,24 +96,4 @@ export class LocalResponse { } return signature === this.getHmacSignature(secretKey); } - - /** - * Deserialize the loaded local response into the requested CommonResponse-derived class. - * - * Typically used when dealing with V2 webhook callbacks. - * - * @typeParam ResponseT - A class that extends `CommonResponse`. - * @param responseClass - The constructor of the class into which the payload should be deserialized. - * @returns An instance of `responseClass` populated with the file content. - * @throws MindeeError If the provided class cannot be instantiated. 
- */ - public async deserializeResponse( - responseClass: new (serverResponse: StringDict) => ResponseT - ): Promise { - try { - return new responseClass(await this.asDict()); - } catch { - throw new MindeeError("Invalid response provided."); - } - } } diff --git a/src/parsing/standard/index.ts b/src/parsing/standard/index.ts deleted file mode 100644 index a04b929d1..000000000 --- a/src/parsing/standard/index.ts +++ /dev/null @@ -1,14 +0,0 @@ -export { AddressField } from "./addressField"; -export { AmountField } from "./amount"; -export { BaseField, BaseFieldConstructor } from "./base"; -export { BooleanField } from "./boolean"; -export { ClassificationField } from "./classification"; -export { CompanyRegistrationField } from "./companyRegistration"; -export { DateField } from "./date"; -export { LocaleField } from "./locale"; -export { Field } from "./field"; -export { Taxes, TaxField } from "./tax"; -export { StringField } from "./text"; -export { PaymentDetailsField } from "./paymentDetails"; -export { PositionField } from "./position"; -export { Word } from "./word"; diff --git a/src/parsing/common/stringDict.ts b/src/parsing/stringDict.ts similarity index 100% rename from src/parsing/common/stringDict.ts rename to src/parsing/stringDict.ts diff --git a/src/parsing/v2/field/index.ts b/src/parsing/v2/field/index.ts deleted file mode 100644 index bfbd648fa..000000000 --- a/src/parsing/v2/field/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -export { InferenceFields } from "./inferenceFields"; -export { FieldConfidence } from "./fieldConfidence"; -export { FieldLocation } from "./fieldLocation"; -export { ListField } from "./listField"; -export { ObjectField } from "./objectField"; -export { SimpleField } from "./simpleField"; diff --git a/src/parsing/v2/index.ts b/src/parsing/v2/index.ts deleted file mode 100644 index b459c00ce..000000000 --- a/src/parsing/v2/index.ts +++ /dev/null @@ -1,14 +0,0 @@ -export { CommonResponse } from "./commonResponse"; -export { 
ErrorResponse, ErrorDetails } from "./errorResponse"; -export { ErrorItem } from "./errorItem"; -export { Inference } from "./inference"; -export { InferenceActiveOptions } from "./inferenceActiveOptions"; -export { InferenceFile } from "./inferenceFile"; -export { InferenceModel } from "./inferenceModel"; -export { InferenceResponse } from "./inferenceResponse"; -export { InferenceResult } from "./inferenceResult"; -export { Job } from "./job"; -export { JobResponse } from "./jobResponse"; -export { RawText } from "./rawText"; -export { JobWebhook } from "./jobWebhook"; -export { RagMetadata } from "./ragMetadata"; diff --git a/src/parsing/v2/inference.ts b/src/parsing/v2/inference.ts deleted file mode 100644 index 34006575d..000000000 --- a/src/parsing/v2/inference.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { StringDict } from "../common"; -import { InferenceModel } from "./inferenceModel"; -import { InferenceResult } from "./inferenceResult"; -import { InferenceFile } from "./inferenceFile"; -import { InferenceActiveOptions } from "./inferenceActiveOptions"; - -export class Inference { - /** - * Model info for the inference. - */ - public model: InferenceModel; - /** - * File info for the inference. - */ - public file: InferenceFile; - /** - * Result of the inference. - */ - public result: InferenceResult; - /** - * ID of the inference. - */ - public id?: string; - /** - * Active options for the inference. 
- */ - public activeOptions: InferenceActiveOptions; - - constructor(serverResponse: StringDict) { - this.model = new InferenceModel(serverResponse["model"]); - this.file = new InferenceFile(serverResponse["file"]); - this.result = new InferenceResult(serverResponse["result"]); - this.activeOptions = new InferenceActiveOptions(serverResponse["active_options"]); - } - - toString(): string { - return ( - "Inference\n" + - "#########\n" + - this.model.toString() + "\n" + - this.file.toString() + "\n" + - this.activeOptions.toString() + "\n" + - this.result + "\n" - ); - } -} diff --git a/src/parsing/v2/inferenceResponse.ts b/src/parsing/v2/inferenceResponse.ts deleted file mode 100644 index 3e6a55e8b..000000000 --- a/src/parsing/v2/inferenceResponse.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { CommonResponse } from "./commonResponse"; -import { Inference } from "./inference"; -import { StringDict } from "../common"; - -export class InferenceResponse extends CommonResponse { - /** - * Inference result. - */ - public inference: Inference; - - constructor(serverResponse: StringDict) { - super(serverResponse); - this.inference = new Inference(serverResponse["inference"]); - } -} diff --git a/src/parsing/v2/jobResponse.ts b/src/parsing/v2/jobResponse.ts deleted file mode 100644 index f8405ae19..000000000 --- a/src/parsing/v2/jobResponse.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { CommonResponse } from "./commonResponse"; -import { StringDict } from "../common"; -import { Job } from "./job"; - -export class JobResponse extends CommonResponse { - /** - * Job for the polling. 
- */ - public job: Job; - - constructor(serverResponse: StringDict) { - super(serverResponse); - this.job = new Job(serverResponse["job"]); - } -} diff --git a/src/pdf/index.ts b/src/pdf/index.ts index 9c57e895c..752c5ffd0 100644 --- a/src/pdf/index.ts +++ b/src/pdf/index.ts @@ -1,3 +1,4 @@ -export { extractPages, countPages, SplitPdf } from "./pdfOperation"; -export { compressPdf } from "./pdfCompressor"; -export { hasSourceText } from "./pdfUtils"; +export { extractPages, countPages } from "./pdfOperation.js"; +export type { SplitPdf } from "./pdfOperation.js"; +export { compressPdf } from "./pdfCompressor.js"; +export { hasSourceText } from "./pdfUtils.js"; diff --git a/src/pdf/pdfCompressor.ts b/src/pdf/pdfCompressor.ts index d9dae77a8..645cd6295 100644 --- a/src/pdf/pdfCompressor.ts +++ b/src/pdf/pdfCompressor.ts @@ -1,10 +1,23 @@ -import { logger } from "../logger"; +import { logger } from "@/logger.js"; import tmp from "tmp"; -import { ExtractedPdfInfo, extractTextFromPdf, hasSourceText } from "./pdfUtils"; +import { ExtractedPdfInfo, extractTextFromPdf, hasSourceText } from "./pdfUtils.js"; import * as fs from "node:fs"; -import { Poppler } from "node-poppler"; -import { PDFDocument, PDFFont, PDFPage, rgb, StandardFonts } from "@cantoo/pdf-lib"; -import { compressImage } from "../imageOperations"; +import type * as popplerTypes from "node-poppler"; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import type * as pdfLibTypes from "@cantoo/pdf-lib"; +import { compressImage } from "@/image/index.js"; +import { loadOptionalDependency } from "@/dependency/index.js"; + +let pdfLib: typeof pdfLibTypes | null = null; + +async function getPdfLib(): Promise { + if (!pdfLib) { + const pdfLibImport = await loadOptionalDependency("@cantoo/pdf-lib", "Text Embedding"); + pdfLib = (pdfLibImport as any).default || pdfLibImport; + } + return pdfLib!; +} /** * Compresses each page of a provided PDF buffer. 
@@ -128,7 +141,8 @@ async function compressPagesWithQuality( disableSourceText: boolean, extractedText: ExtractedPdfInfo | null ): Promise { - const pdfDoc = await PDFDocument.load(pdfData, { + const pdfLib = await getPdfLib(); + const pdfDoc = await pdfLib.PDFDocument.load(pdfData, { ignoreEncryption: true, password: "" }); @@ -180,7 +194,8 @@ function isCompressionSuccessful(totalCompressedSize: number, originalSize: numb * @returns A Promise resolving to the new PDF as a Buffer. */ async function createNewPdfFromCompressedPages(compressedPages: Buffer[]): Promise { - const newPdfDoc = await PDFDocument.create(); + const pdfLib = await getPdfLib(); + const newPdfDoc = await pdfLib.PDFDocument.create(); for (const compressedPage of compressedPages) { const image = await newPdfDoc.embedJpg(compressedPage); @@ -198,32 +213,36 @@ async function createNewPdfFromCompressedPages(compressedPages: Buffer[]): Promi } async function addTextToPdfPage( - page: PDFPage, + page: pdfLibTypes.PDFPage, textInfo: ExtractedPdfInfo | null ): Promise { if (textInfo === null) { return; } + const pdfLib = await getPdfLib(); for (const textPages of textInfo.pages) { for (const textPage of textPages.content) { page.drawText(textPage.str, { x: textPage.x, y: textPage.y, size: textPage.height, - color: rgb(0, 0, 0), + color: pdfLib.rgb(0, 0, 0), font: await getFontFromName(textPage.fontName) }); } } } -async function getFontFromName(fontName: string): Promise { - const pdfDoc = await PDFDocument.create(); - let font: PDFFont; - if (Object.values(StandardFonts).map(value => value.toString()).includes(fontName)) { +async function getFontFromName(fontName: string): Promise { + const pdfLib = await getPdfLib(); + const pdfDoc = await pdfLib.PDFDocument.create(); + let font: pdfLibTypes.PDFFont; + const standardFontValues = Object.values(pdfLib.StandardFonts) as string[]; + + if (standardFontValues.includes(fontName)) { font = await pdfDoc.embedFont(fontName); } else { - font = await 
pdfDoc.embedFont(StandardFonts.Helvetica); + font = await pdfDoc.embedFont(pdfLib.StandardFonts.Helvetica); } return font; @@ -237,7 +256,10 @@ async function getFontFromName(fontName: string): Promise { * @param quality Quality to apply during rasterization. */ async function rasterizePage(pdfData: Buffer, index: number, quality = 85): Promise { - const poppler = new Poppler(); + + const popplerImport = await loadOptionalDependency("node-poppler", "Image Processing"); + const poppler = (popplerImport as any).default || popplerImport; + const popplerInstance = new poppler.Poppler(); const tmpPdf = tmp.fileSync(); const tempPdfPath = tmpPdf.name; const antialiasOption: "fast" | "best" | "default" | "good" | "gray" | "none" | "subpixel" = "best"; @@ -252,7 +274,7 @@ async function rasterizePage(pdfData: Buffer, index: number, quality = 85): Prom singleFile: true }; - const jpegBuffer = await poppler.pdfToCairo(tempPdfPath, undefined, options); + const jpegBuffer = await popplerInstance.pdfToCairo(tempPdfPath, undefined, options); await fs.promises.unlink(tempPdfPath); diff --git a/src/pdf/pdfOperation.ts b/src/pdf/pdfOperation.ts index 50999812e..ebc22bb42 100644 --- a/src/pdf/pdfOperation.ts +++ b/src/pdf/pdfOperation.ts @@ -1,8 +1,21 @@ -import { errorHandler } from "../errors/handler"; -import { PDFDocument } from "@cantoo/pdf-lib"; -import { PageOptions, PageOptionsOperation } from "../input"; -import { MindeeError } from "../errors"; -import { logger } from "../logger"; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import type * as pdfLibTypes from "@cantoo/pdf-lib"; +import { errorHandler } from "@/errors/handler.js"; +import { PageOptions, PageOptionsOperation } from "@/input/pageOptions.js"; +import { MindeeError } from "@/errors/index.js"; +import { logger } from "@/logger.js"; +import { loadOptionalDependency } from "@/dependency/index.js"; + +let pdfLib: typeof pdfLibTypes | null = null; + +async function getPdfLib(): 
Promise { + if (!pdfLib) { + const pdfLibImport = await loadOptionalDependency("@cantoo/pdf-lib", "Text Embedding"); + pdfLib = (pdfLibImport as any).default || pdfLibImport; + } + return pdfLib!; +} export interface SplitPdf { file: Buffer; @@ -10,21 +23,22 @@ export interface SplitPdf { } /** - * Cut pages from a pdf file. If pages index are out of bound, it will throw an error. + * Cut pages from a PDF file. If pages indexes are out of bounds, it will throw an error. * @param file * @param pageOptions - * @returns the new cut pdf file. + * @returns the new cut PDF file. */ export async function extractPages( file: Buffer, pageOptions: PageOptions ): Promise { - const currentPdf = await PDFDocument.load(file, { + const pdfLib = await getPdfLib(); + const currentPdf = await pdfLib.PDFDocument.load(file, { ignoreEncryption: true, password: "" }); - const newPdf = await PDFDocument.create(); + const newPdf = await pdfLib.PDFDocument.create(); const pageCount = currentPdf.getPageCount(); @@ -65,15 +79,15 @@ export async function extractPages( if (pageOptions.operation === PageOptionsOperation.KeepOnly) { const keptPages = await newPdf.copyPages(currentPdf, pageIndexes); - keptPages.forEach((keptPage) => { + keptPages.forEach((keptPage: pdfLibTypes.PDFPage) => { newPdf.addPage(keptPage); }); } else if (pageOptions.operation === PageOptionsOperation.Remove) { const pagesToKeep = currentPdf .getPageIndices() - .filter((v) => !pageIndexes.includes(v)); + .filter((v:number) => !pageIndexes.includes(v)); const keptPages = await newPdf.copyPages(currentPdf, pagesToKeep); - keptPages.forEach((keptPage) => { + keptPages.forEach((keptPage: pdfLibTypes.PDFPage) => { newPdf.addPage(keptPage); }); } else { @@ -85,12 +99,13 @@ export async function extractPages( } /** - * Count the number of pages in a pdf file. + * Count the number of pages in a PDF file. * @param file * @returns the number of pages in the file. 
*/ export async function countPages(file: Buffer): Promise { - const currentPdf = await PDFDocument.load(file, { + const pdfLib = await getPdfLib(); + const currentPdf = await pdfLib.PDFDocument.load(file, { ignoreEncryption: true, password: "" }); diff --git a/src/pdf/pdfUtils.ts b/src/pdf/pdfUtils.ts index 3618d9f9a..ea32f595c 100644 --- a/src/pdf/pdfUtils.ts +++ b/src/pdf/pdfUtils.ts @@ -1,5 +1,8 @@ -import { PDFExtract, PDFExtractOptions, PDFExtractResult } from "pdf.js-extract"; -import { MindeePdfError } from "../errors/mindeeError"; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import type * as pdfJsExtractTypes from "pdf.js-extract"; +import { MindeePdfError } from "@/errors/index.js"; +import { loadOptionalDependency } from "@/dependency/index.js"; export interface PageTextInfo { @@ -34,21 +37,28 @@ function getConcatenatedText(pages: PageTextInfo[]): string { * @param pdfBuffer PDF handle, as a buffer. */ export async function extractTextFromPdf(pdfBuffer: Buffer): Promise { - const pdfExtract = new PDFExtract(); - const options: PDFExtractOptions = {}; + const pdfJsExtract = await loadOptionalDependency( + "pdf.js-extract", "PDF text extraction" + ); + const pdfJs = (pdfJsExtract as any).pdfjs || pdfJsExtract; + const pdfExtract: pdfJsExtractTypes.PDFExtract = new pdfJs.PDFExtract(); + const options: pdfJsExtractTypes.PDFExtractOptions = {}; - const pdf = await new Promise((resolve, reject) => { - pdfExtract.extractBuffer(pdfBuffer, options, (err, result) => { - if (err) reject(err); - if (result === undefined) - reject(new MindeePdfError("Couldn't process result.")); - else resolve(result); - }); + const pdf = await new Promise((resolve, reject) => { + pdfExtract.extractBuffer( + pdfBuffer, options, ( + err: Error | null, result: pdfJsExtractTypes.PDFExtractResult | undefined + ) => { + if (err) reject(err); + if (result === undefined) + reject(new MindeePdfError("Couldn't process result.")); + else resolve(result); + 
}); }); - const pages = pdf.pages.map((page, index) => ({ + const pages = pdf.pages.map((page: pdfJsExtractTypes.PDFExtractPage, index: number) => ({ pageNumber: index + 1, - content: page.content.map(item => ({ + content: page.content.map((item: pdfJsExtractTypes.PDFExtractText) => ({ str: item.str, x: item.x, y: item.y, diff --git a/src/product/barcodeReader/index.ts b/src/product/barcodeReader/index.ts deleted file mode 100644 index 58042c9a5..000000000 --- a/src/product/barcodeReader/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { BarcodeReaderV1 } from "./barcodeReaderV1"; diff --git a/src/product/billOfLading/billOfLadingV1.ts b/src/product/billOfLading/billOfLadingV1.ts deleted file mode 100644 index 7cecccd19..000000000 --- a/src/product/billOfLading/billOfLadingV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { BillOfLadingV1Document } from "./billOfLadingV1Document"; - -/** - * Bill of Lading API version 1 inference prediction. - */ -export class BillOfLadingV1 extends Inference { - /** The endpoint's name. */ - endpointName = "bill_of_lading"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: BillOfLadingV1Document; - /** The document's pages. 
*/ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new BillOfLadingV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - BillOfLadingV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/billOfLading/billOfLadingV1Carrier.ts b/src/product/billOfLading/billOfLadingV1Carrier.ts deleted file mode 100644 index 4506be109..000000000 --- a/src/product/billOfLading/billOfLadingV1Carrier.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The shipping company responsible for transporting the goods. - */ -export class BillOfLadingV1Carrier { - /** The name of the carrier. */ - name: string | null; - /** The professional number of the carrier. */ - professionalNumber: string | null; - /** The Standard Carrier Alpha Code (SCAC) of the carrier. */ - scac: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.name = prediction["name"]; - this.professionalNumber = prediction["professional_number"]; - this.scac = prediction["scac"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - name: this.name ?? "", - professionalNumber: this.professionalNumber ?? 
"", - scac: this.scac ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Name: " + - printable.name + - ", Professional Number: " + - printable.professionalNumber + - ", SCAC: " + - printable.scac - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Name: ${printable.name} - :Professional Number: ${printable.professionalNumber} - :SCAC: ${printable.scac}`.trimEnd(); - } -} diff --git a/src/product/billOfLading/billOfLadingV1CarrierItem.ts b/src/product/billOfLading/billOfLadingV1CarrierItem.ts deleted file mode 100644 index 045d63ded..000000000 --- a/src/product/billOfLading/billOfLadingV1CarrierItem.ts +++ /dev/null @@ -1,139 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The goods being shipped. - */ -export class BillOfLadingV1CarrierItem { - /** A description of the item. */ - description: string | null; - /** The gross weight of the item. */ - grossWeight: number | null; - /** The measurement of the item. */ - measurement: number | null; - /** The unit of measurement for the measurement. */ - measurementUnit: string | null; - /** The quantity of the item being shipped. */ - quantity: number | null; - /** The unit of measurement for weights. */ - weightUnit: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.description = prediction["description"]; - if ( - prediction["gross_weight"] !== undefined && - prediction["gross_weight"] !== null && - !isNaN(prediction["gross_weight"]) - ) { - this.grossWeight = +parseFloat(prediction["gross_weight"]); - } else { - this.grossWeight = null; - } - if ( - prediction["measurement"] !== undefined && - prediction["measurement"] !== null && - !isNaN(prediction["measurement"]) - ) { - this.measurement = +parseFloat(prediction["measurement"]); - } else { - this.measurement = null; - } - this.measurementUnit = prediction["measurement_unit"]; - if ( - prediction["quantity"] !== undefined && - prediction["quantity"] !== null && - !isNaN(prediction["quantity"]) - ) { - this.quantity = +parseFloat(prediction["quantity"]); - } else { - this.quantity = null; - } - this.weightUnit = prediction["weight_unit"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - description: this.description ? - this.description.length <= 36 ? - cleanSpecialChars(this.description) : - cleanSpecialChars(this.description).slice(0, 33) + "..." : - "", - grossWeight: - this.grossWeight !== undefined ? floatToString(this.grossWeight) : "", - measurement: - this.measurement !== undefined ? floatToString(this.measurement) : "", - measurementUnit: this.measurementUnit ? - this.measurementUnit.length <= 16 ? - cleanSpecialChars(this.measurementUnit) : - cleanSpecialChars(this.measurementUnit).slice(0, 13) + "..." : - "", - quantity: this.quantity !== undefined ? floatToString(this.quantity) : "", - weightUnit: this.weightUnit ? - this.weightUnit.length <= 11 ? 
- cleanSpecialChars(this.weightUnit) : - cleanSpecialChars(this.weightUnit).slice(0, 8) + "..." : - "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Description: " + - printable.description + - ", Gross Weight: " + - printable.grossWeight + - ", Measurement: " + - printable.measurement + - ", Measurement Unit: " + - printable.measurementUnit + - ", Quantity: " + - printable.quantity + - ", Weight Unit: " + - printable.weightUnit - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.description.padEnd(36) + - " | " + - printable.grossWeight.padEnd(12) + - " | " + - printable.measurement.padEnd(11) + - " | " + - printable.measurementUnit.padEnd(16) + - " | " + - printable.quantity.padEnd(8) + - " | " + - printable.weightUnit.padEnd(11) + - " |" - ); - } -} diff --git a/src/product/billOfLading/billOfLadingV1Consignee.ts b/src/product/billOfLading/billOfLadingV1Consignee.ts deleted file mode 100644 index 5807ca799..000000000 --- a/src/product/billOfLading/billOfLadingV1Consignee.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The party to whom the goods are being shipped. - */ -export class BillOfLadingV1Consignee { - /** The address of the consignee. */ - address: string | null; - /** The email of the shipper. */ - email: string | null; - /** The name of the consignee. */ - name: string | null; - /** The phone number of the consignee. */ - phone: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.email = prediction["email"]; - this.name = prediction["name"]; - this.phone = prediction["phone"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - email: this.email ?? "", - name: this.name ?? "", - phone: this.phone ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Email: " + - printable.email + - ", Name: " + - printable.name + - ", Phone: " + - printable.phone - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Email: ${printable.email} - :Name: ${printable.name} - :Phone: ${printable.phone}`.trimEnd(); - } -} diff --git a/src/product/billOfLading/billOfLadingV1Document.ts b/src/product/billOfLading/billOfLadingV1Document.ts deleted file mode 100644 index 954001608..000000000 --- a/src/product/billOfLading/billOfLadingV1Document.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString,lineSeparator, -} from "../../parsing/common"; -import { BillOfLadingV1Shipper } from "./billOfLadingV1Shipper"; -import { BillOfLadingV1Consignee } from "./billOfLadingV1Consignee"; -import { BillOfLadingV1NotifyParty } from "./billOfLadingV1NotifyParty"; -import { BillOfLadingV1Carrier } from "./billOfLadingV1Carrier"; -import { BillOfLadingV1CarrierItem } from "./billOfLadingV1CarrierItem"; -import { DateField, StringField } from "../../parsing/standard"; - 
-/** - * Bill of Lading API version 1.1 document data. - */ -export class BillOfLadingV1Document implements Prediction { - /** A unique identifier assigned to a Bill of Lading document. */ - billOfLadingNumber: StringField; - /** The shipping company responsible for transporting the goods. */ - carrier: BillOfLadingV1Carrier; - /** The goods being shipped. */ - carrierItems: BillOfLadingV1CarrierItem[] = []; - /** The party to whom the goods are being shipped. */ - consignee: BillOfLadingV1Consignee; - /** The date when the bill of lading is issued. */ - dateOfIssue: DateField; - /** The date when the vessel departs from the port of loading. */ - departureDate: DateField; - /** The party to be notified of the arrival of the goods. */ - notifyParty: BillOfLadingV1NotifyParty; - /** The place where the goods are to be delivered. */ - placeOfDelivery: StringField; - /** The port where the goods are unloaded from the vessel. */ - portOfDischarge: StringField; - /** The port where the goods are loaded onto the vessel. */ - portOfLoading: StringField; - /** The party responsible for shipping the goods. 
*/ - shipper: BillOfLadingV1Shipper; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.billOfLadingNumber = new StringField({ - prediction: rawPrediction["bill_of_lading_number"], - pageId: pageId, - }); - this.carrier = new BillOfLadingV1Carrier({ - prediction: rawPrediction["carrier"], - pageId: pageId, - }); - rawPrediction["carrier_items"] && - rawPrediction["carrier_items"].map( - (itemPrediction: StringDict) => - this.carrierItems.push( - new BillOfLadingV1CarrierItem({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.consignee = new BillOfLadingV1Consignee({ - prediction: rawPrediction["consignee"], - pageId: pageId, - }); - this.dateOfIssue = new DateField({ - prediction: rawPrediction["date_of_issue"], - pageId: pageId, - }); - this.departureDate = new DateField({ - prediction: rawPrediction["departure_date"], - pageId: pageId, - }); - this.notifyParty = new BillOfLadingV1NotifyParty({ - prediction: rawPrediction["notify_party"], - pageId: pageId, - }); - this.placeOfDelivery = new StringField({ - prediction: rawPrediction["place_of_delivery"], - pageId: pageId, - }); - this.portOfDischarge = new StringField({ - prediction: rawPrediction["port_of_discharge"], - pageId: pageId, - }); - this.portOfLoading = new StringField({ - prediction: rawPrediction["port_of_loading"], - pageId: pageId, - }); - this.shipper = new BillOfLadingV1Shipper({ - prediction: rawPrediction["shipper"], - pageId: pageId, - }); - } - - /** - * Default string representation. 
- */ - toString(): string { - let carrierItemsSummary:string = ""; - if (this.carrierItems && this.carrierItems.length > 0) { - const carrierItemsColSizes:number[] = [38, 14, 13, 18, 10, 13]; - carrierItemsSummary += "\n" + lineSeparator(carrierItemsColSizes, "-") + "\n "; - carrierItemsSummary += "| Description "; - carrierItemsSummary += "| Gross Weight "; - carrierItemsSummary += "| Measurement "; - carrierItemsSummary += "| Measurement Unit "; - carrierItemsSummary += "| Quantity "; - carrierItemsSummary += "| Weight Unit "; - carrierItemsSummary += "|\n" + lineSeparator(carrierItemsColSizes, "="); - carrierItemsSummary += this.carrierItems.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(carrierItemsColSizes, "-") - ).join(""); - } - const outStr = `:Bill of Lading Number: ${this.billOfLadingNumber} -:Shipper: ${this.shipper.toFieldList()} -:Consignee: ${this.consignee.toFieldList()} -:Notify Party: ${this.notifyParty.toFieldList()} -:Carrier: ${this.carrier.toFieldList()} -:Items: ${carrierItemsSummary} -:Port of Loading: ${this.portOfLoading} -:Port of Discharge: ${this.portOfDischarge} -:Place of Delivery: ${this.placeOfDelivery} -:Date of issue: ${this.dateOfIssue} -:Departure Date: ${this.departureDate}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/billOfLading/billOfLadingV1NotifyParty.ts b/src/product/billOfLading/billOfLadingV1NotifyParty.ts deleted file mode 100644 index f1dd05d3d..000000000 --- a/src/product/billOfLading/billOfLadingV1NotifyParty.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The party to be notified of the arrival of the goods. - */ -export class BillOfLadingV1NotifyParty { - /** The address of the notify party. */ - address: string | null; - /** The email of the shipper. */ - email: string | null; - /** The name of the notify party. 
*/ - name: string | null; - /** The phone number of the notify party. */ - phone: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.email = prediction["email"]; - this.name = prediction["name"]; - this.phone = prediction["phone"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - email: this.email ?? "", - name: this.name ?? "", - phone: this.phone ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Email: " + - printable.email + - ", Name: " + - printable.name + - ", Phone: " + - printable.phone - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Email: ${printable.email} - :Name: ${printable.name} - :Phone: ${printable.phone}`.trimEnd(); - } -} diff --git a/src/product/billOfLading/billOfLadingV1Shipper.ts b/src/product/billOfLading/billOfLadingV1Shipper.ts deleted file mode 100644 index de70a1c80..000000000 --- a/src/product/billOfLading/billOfLadingV1Shipper.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The party responsible for shipping the goods. 
- */ -export class BillOfLadingV1Shipper { - /** The address of the shipper. */ - address: string | null; - /** The email of the shipper. */ - email: string | null; - /** The name of the shipper. */ - name: string | null; - /** The phone number of the shipper. */ - phone: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.email = prediction["email"]; - this.name = prediction["name"]; - this.phone = prediction["phone"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - email: this.email ?? "", - name: this.name ?? "", - phone: this.phone ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Email: " + - printable.email + - ", Name: " + - printable.name + - ", Phone: " + - printable.phone - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Email: ${printable.email} - :Name: ${printable.name} - :Phone: ${printable.phone}`.trimEnd(); - } -} diff --git a/src/product/billOfLading/index.ts b/src/product/billOfLading/index.ts deleted file mode 100644 index c68593aeb..000000000 --- a/src/product/billOfLading/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { BillOfLadingV1 } from "./billOfLadingV1"; diff --git a/src/product/billOfLading/internal.ts b/src/product/billOfLading/internal.ts deleted file mode 100644 index 3fe053d07..000000000 --- a/src/product/billOfLading/internal.ts +++ /dev/null @@ -1,7 +0,0 @@ -export { BillOfLadingV1 } from "./billOfLadingV1"; -export { BillOfLadingV1Carrier } from "./billOfLadingV1Carrier"; -export { BillOfLadingV1CarrierItem } from "./billOfLadingV1CarrierItem"; -export { BillOfLadingV1Consignee } from "./billOfLadingV1Consignee"; -export { BillOfLadingV1Document } from "./billOfLadingV1Document"; -export { BillOfLadingV1NotifyParty } from "./billOfLadingV1NotifyParty"; -export { BillOfLadingV1Shipper } from "./billOfLadingV1Shipper"; diff --git a/src/product/businessCard/businessCardV1.ts b/src/product/businessCard/businessCardV1.ts deleted file mode 100644 index 10e0772f3..000000000 --- a/src/product/businessCard/businessCardV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { BusinessCardV1Document } from "./businessCardV1Document"; - -/** - * Business Card API version 1 inference prediction. - */ -export class BusinessCardV1 extends Inference { - /** The endpoint's name. */ - endpointName = "business_card"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: BusinessCardV1Document; - /** The document's pages. 
*/ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new BusinessCardV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - BusinessCardV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/businessCard/businessCardV1Document.ts b/src/product/businessCard/businessCardV1Document.ts deleted file mode 100644 index bce693797..000000000 --- a/src/product/businessCard/businessCardV1Document.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString, -} from "../../parsing/common"; -import { StringField } from "../../parsing/standard"; - -/** - * Business Card API version 1.0 document data. - */ -export class BusinessCardV1Document implements Prediction { - /** The address of the person. */ - address: StringField; - /** The company the person works for. */ - company: StringField; - /** The email address of the person. */ - email: StringField; - /** The Fax number of the person. */ - faxNumber: StringField; - /** The given name of the person. */ - firstname: StringField; - /** The job title of the person. */ - jobTitle: StringField; - /** The lastname of the person. */ - lastname: StringField; - /** The mobile number of the person. */ - mobileNumber: StringField; - /** The phone number of the person. */ - phoneNumber: StringField; - /** The social media profiles of the person or company. */ - socialMedia: StringField[] = []; - /** The website of the person or company. 
*/ - website: StringField; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.address = new StringField({ - prediction: rawPrediction["address"], - pageId: pageId, - }); - this.company = new StringField({ - prediction: rawPrediction["company"], - pageId: pageId, - }); - this.email = new StringField({ - prediction: rawPrediction["email"], - pageId: pageId, - }); - this.faxNumber = new StringField({ - prediction: rawPrediction["fax_number"], - pageId: pageId, - }); - this.firstname = new StringField({ - prediction: rawPrediction["firstname"], - pageId: pageId, - }); - this.jobTitle = new StringField({ - prediction: rawPrediction["job_title"], - pageId: pageId, - }); - this.lastname = new StringField({ - prediction: rawPrediction["lastname"], - pageId: pageId, - }); - this.mobileNumber = new StringField({ - prediction: rawPrediction["mobile_number"], - pageId: pageId, - }); - this.phoneNumber = new StringField({ - prediction: rawPrediction["phone_number"], - pageId: pageId, - }); - rawPrediction["social_media"] && - rawPrediction["social_media"].map( - (itemPrediction: StringDict) => - this.socialMedia.push( - new StringField({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.website = new StringField({ - prediction: rawPrediction["website"], - pageId: pageId, - }); - } - - /** - * Default string representation. 
- */ - toString(): string { - const socialMedia = this.socialMedia.join("\n "); - const outStr = `:Firstname: ${this.firstname} -:Lastname: ${this.lastname} -:Job Title: ${this.jobTitle} -:Company: ${this.company} -:Email: ${this.email} -:Phone Number: ${this.phoneNumber} -:Mobile Number: ${this.mobileNumber} -:Fax Number: ${this.faxNumber} -:Address: ${this.address} -:Website: ${this.website} -:Social Media: ${socialMedia}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/businessCard/index.ts b/src/product/businessCard/index.ts deleted file mode 100644 index 3fabf887f..000000000 --- a/src/product/businessCard/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { BusinessCardV1 } from "./businessCardV1"; diff --git a/src/product/businessCard/internal.ts b/src/product/businessCard/internal.ts deleted file mode 100644 index 410740986..000000000 --- a/src/product/businessCard/internal.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { BusinessCardV1 } from "./businessCardV1"; -export { BusinessCardV1Document } from "./businessCardV1Document"; diff --git a/src/product/cropper/index.ts b/src/product/cropper/index.ts deleted file mode 100644 index 53be02fcd..000000000 --- a/src/product/cropper/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { CropperV1 } from "./cropperV1"; diff --git a/src/product/cropper/internal.ts b/src/product/cropper/internal.ts deleted file mode 100644 index 898b1d4ef..000000000 --- a/src/product/cropper/internal.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { CropperV1 } from "./cropperV1"; -export { CropperV1Document } from "./cropperV1Document"; -export { CropperV1Page } from "./cropperV1Page"; diff --git a/src/product/custom/customV1.ts b/src/product/custom/customV1.ts deleted file mode 100644 index a77f72a4d..000000000 --- a/src/product/custom/customV1.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Inference, Page, StringDict } from "../../parsing/common"; -import { CustomV1Document } from "./customV1Document"; -import { CustomV1Page } from 
"./customV1Page"; - -/** - * Inference prediction for Custom builds. - */ -export class CustomV1 extends Inference { - /** The endpoint's name. Note: placeholder for custom APIs. */ - endpointName = "custom"; - /** The endpoint's version. Note: placeholder for custom APIs. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: CustomV1Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new CustomV1Document(rawPrediction["prediction"]); - this.pages = rawPrediction["pages"].map( - (page: StringDict) => - new Page(CustomV1Page, page, page["id"], page["orientation"]) - ); - } -} diff --git a/src/product/custom/customV1Document.ts b/src/product/custom/customV1Document.ts deleted file mode 100644 index 55c781a5f..000000000 --- a/src/product/custom/customV1Document.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { cleanOutString } from "../../parsing/common"; -import { StringDict, Prediction } from "../../parsing/common"; -import { ClassificationField, ListField, getLineItems, CustomLines } from "../../parsing/custom"; - -/** - * Document data for Custom builds. - */ -export class CustomV1Document implements Prediction { - /** Map of fields for a Custom build. */ - fields: Map = new Map(); - /** Map of classification fields for a Custom build. */ - classifications: Map = new Map(); - - constructor(rawPrediction: StringDict, pageId?: number) { - Object.entries(rawPrediction).forEach( - ([fieldName, fieldValue]: [string, any]) => { - this.setField(fieldName, fieldValue, pageId); - } - ); - } - - /** - * Sorts and sets fields between classification fields and regular fields. - * Note: Currently, two types of fields possible in a custom API response: - * fields having a list of values, and classification fields. - * @param fieldName name of the field. - * @param fieldValue value of the field. - * @param pageId page the field was found on. 
- */ - protected setField(fieldName: string, fieldValue: any, pageId?: number) { - if (fieldValue && fieldValue["values"] !== undefined) { - // Only value lists have the 'values' attribute. - this.fields.set( - fieldName, - new ListField({ - prediction: fieldValue as StringDict, - pageId: pageId, - }) - ); - } else if (fieldValue && fieldValue["value"] !== undefined) { - // Only classifications have the 'value' attribute. - this.classifications.set( - fieldName, - new ClassificationField({ prediction: fieldValue }) - ); - } else { - throw new Error( - `Unknown API field type for field ${fieldName} : ${fieldValue}` - ); - } - } - - /** - * Order column fields into line items. - * @param anchorNames list of possible anchor fields. - * @param fieldNames list of all column fields. - * @param heightTolerance height tolerance to apply to lines. - */ - columnsToLineItems(anchorNames: string[], fieldNames: string[], heightTolerance: number = 0.01): CustomLines { - return getLineItems( - anchorNames, - fieldNames, - this.fields, - heightTolerance - ); - } - - /** - * Default string representation. - */ - toString(): string { - let outStr = ""; - this.classifications.forEach((fieldData, name) => { - outStr += `:${name}: ${fieldData}\n`; - }); - this.fields.forEach((fieldData, name) => { - outStr += `:${name}: ${fieldData}\n`; - }); - return cleanOutString(outStr).trimEnd(); - } -} diff --git a/src/product/custom/customV1Page.ts b/src/product/custom/customV1Page.ts deleted file mode 100644 index 3f432ae24..000000000 --- a/src/product/custom/customV1Page.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { cleanOutString } from "../../parsing/common"; -import { StringDict, Prediction } from "../../parsing/common"; -import { ListField, CustomLines, getLineItems } from "../../parsing/custom"; - -/** - * Page data for Custom builds. - */ -export class CustomV1Page implements Prediction { - /** Map of page-specific fields for a Custom build. Cannot include Classification fields. 
*/ - fields: Map = new Map(); - - constructor(rawPrediction: StringDict, pageId?: number) { - Object.entries(rawPrediction).forEach( - ([fieldName, fieldValue]: [string, any]) => { - this.fields.set( - fieldName, - new ListField({ - prediction: fieldValue as StringDict, - pageId: pageId, - }) - ); - } - ); - } - - /** - * Order column fields into line items. - * @param anchorNames list of possible anchor fields. - * @param fieldNames list of all column fields. - * @param heightTolerance height tolerance to apply to lines. - */ - columnsToLineItems(anchorNames: string[], fieldNames: string[], heightTolerance: number = 0.01): CustomLines { - return getLineItems( - anchorNames, - fieldNames, - this.fields, - heightTolerance - ); - } - - /** - * Default string representation. - */ - toString(): string { - let outStr = ""; - this.fields.forEach((fieldData, name) => { - outStr += `:${name}: ${fieldData}\n`; - }); - return cleanOutString(outStr).trimEnd(); - } -} diff --git a/src/product/custom/internal.ts b/src/product/custom/internal.ts deleted file mode 100644 index daf01bdd8..000000000 --- a/src/product/custom/internal.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { CustomV1 } from "./customV1"; -export { CustomV1Document } from "./customV1Document"; -export { CustomV1Page } from "./customV1Page"; diff --git a/src/product/deliveryNote/deliveryNoteV1.ts b/src/product/deliveryNote/deliveryNoteV1.ts deleted file mode 100644 index c2e253017..000000000 --- a/src/product/deliveryNote/deliveryNoteV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { DeliveryNoteV1Document } from "./deliveryNoteV1Document"; - -/** - * Delivery note API version 1 inference prediction. - */ -export class DeliveryNoteV1 extends Inference { - /** The endpoint's name. */ - endpointName = "delivery_notes"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. 
*/ - prediction: DeliveryNoteV1Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new DeliveryNoteV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - DeliveryNoteV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/deliveryNote/deliveryNoteV1Document.ts b/src/product/deliveryNote/deliveryNoteV1Document.ts deleted file mode 100644 index e3b2febf7..000000000 --- a/src/product/deliveryNote/deliveryNoteV1Document.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString, -} from "../../parsing/common"; -import { - AmountField, - DateField, - StringField, -} from "../../parsing/standard"; - -/** - * Delivery note API version 1.2 document data. - */ -export class DeliveryNoteV1Document implements Prediction { - /** The address of the customer receiving the goods. */ - customerAddress: StringField; - /** The name of the customer receiving the goods. */ - customerName: StringField; - /** The date on which the delivery is scheduled to arrive. */ - deliveryDate: DateField; - /** A unique identifier for the delivery note. */ - deliveryNumber: StringField; - /** The address of the supplier providing the goods. */ - supplierAddress: StringField; - /** The name of the supplier providing the goods. */ - supplierName: StringField; - /** The total monetary value of the goods being delivered. 
*/ - totalAmount: AmountField; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.customerAddress = new StringField({ - prediction: rawPrediction["customer_address"], - pageId: pageId, - }); - this.customerName = new StringField({ - prediction: rawPrediction["customer_name"], - pageId: pageId, - }); - this.deliveryDate = new DateField({ - prediction: rawPrediction["delivery_date"], - pageId: pageId, - }); - this.deliveryNumber = new StringField({ - prediction: rawPrediction["delivery_number"], - pageId: pageId, - }); - this.supplierAddress = new StringField({ - prediction: rawPrediction["supplier_address"], - pageId: pageId, - }); - this.supplierName = new StringField({ - prediction: rawPrediction["supplier_name"], - pageId: pageId, - }); - this.totalAmount = new AmountField({ - prediction: rawPrediction["total_amount"], - pageId: pageId, - }); - } - - /** - * Default string representation. - */ - toString(): string { - const outStr = `:Delivery Date: ${this.deliveryDate} -:Delivery Number: ${this.deliveryNumber} -:Supplier Name: ${this.supplierName} -:Supplier Address: ${this.supplierAddress} -:Customer Name: ${this.customerName} -:Customer Address: ${this.customerAddress} -:Total Amount: ${this.totalAmount}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/deliveryNote/index.ts b/src/product/deliveryNote/index.ts deleted file mode 100644 index 46266a468..000000000 --- a/src/product/deliveryNote/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { DeliveryNoteV1 } from "./deliveryNoteV1"; diff --git a/src/product/deliveryNote/internal.ts b/src/product/deliveryNote/internal.ts deleted file mode 100644 index 887d45d6e..000000000 --- a/src/product/deliveryNote/internal.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { DeliveryNoteV1 } from "./deliveryNoteV1"; -export { DeliveryNoteV1Document } from "./deliveryNoteV1Document"; diff --git a/src/product/driverLicense/index.ts b/src/product/driverLicense/index.ts deleted file mode 100644 
index 8213a78cf..000000000 --- a/src/product/driverLicense/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { DriverLicenseV1 } from "./driverLicenseV1"; diff --git a/src/product/financialDocument/index.ts b/src/product/financialDocument/index.ts deleted file mode 100644 index e431c1b26..000000000 --- a/src/product/financialDocument/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { FinancialDocumentV1 } from "./financialDocumentV1"; diff --git a/src/product/fr/bankAccountDetails/index.ts b/src/product/fr/bankAccountDetails/index.ts deleted file mode 100644 index bc20b9c1f..000000000 --- a/src/product/fr/bankAccountDetails/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { BankAccountDetailsV1 } from "./bankAccountDetailsV1"; -export { BankAccountDetailsV2 } from "./bankAccountDetailsV2"; diff --git a/src/product/fr/carteGrise/index.ts b/src/product/fr/carteGrise/index.ts deleted file mode 100644 index f60c57238..000000000 --- a/src/product/fr/carteGrise/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { CarteGriseV1 } from "./carteGriseV1"; diff --git a/src/product/fr/carteGrise/internal.ts b/src/product/fr/carteGrise/internal.ts deleted file mode 100644 index c9f0cc465..000000000 --- a/src/product/fr/carteGrise/internal.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { CarteGriseV1 } from "./carteGriseV1"; -export { CarteGriseV1Document } from "./carteGriseV1Document"; diff --git a/src/product/fr/energyBill/energyBillV1.ts b/src/product/fr/energyBill/energyBillV1.ts deleted file mode 100644 index a24f0d42d..000000000 --- a/src/product/fr/energyBill/energyBillV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { EnergyBillV1Document } from "./energyBillV1Document"; - -/** - * Energy Bill API version 1 inference prediction. - */ -export class EnergyBillV1 extends Inference { - /** The endpoint's name. */ - endpointName = "energy_bill_fra"; - /** The endpoint's version. 
*/ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: EnergyBillV1Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new EnergyBillV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - EnergyBillV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/fr/energyBill/energyBillV1Document.ts b/src/product/fr/energyBill/energyBillV1Document.ts deleted file mode 100644 index 87a9f281a..000000000 --- a/src/product/fr/energyBill/energyBillV1Document.ts +++ /dev/null @@ -1,201 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString,lineSeparator, -} from "../../../parsing/common"; -import { EnergyBillV1EnergySupplier } from "./energyBillV1EnergySupplier"; -import { EnergyBillV1EnergyConsumer } from "./energyBillV1EnergyConsumer"; -import { EnergyBillV1Subscription } from "./energyBillV1Subscription"; -import { EnergyBillV1EnergyUsage } from "./energyBillV1EnergyUsage"; -import { EnergyBillV1TaxesAndContribution } from "./energyBillV1TaxesAndContribution"; -import { EnergyBillV1MeterDetail } from "./energyBillV1MeterDetail"; -import { - AmountField, - DateField, - StringField, -} from "../../../parsing/standard"; - -/** - * Energy Bill API version 1.2 document data. - */ -export class EnergyBillV1Document implements Prediction { - /** The unique identifier associated with a specific contract. */ - contractId: StringField; - /** - * The unique identifier assigned to each electricity or gas consumption point. It specifies the exact location where - * the energy is delivered. - */ - deliveryPoint: StringField; - /** The date by which the payment for the energy invoice is due. 
*/ - dueDate: DateField; - /** The entity that consumes the energy. */ - energyConsumer: EnergyBillV1EnergyConsumer; - /** The company that supplies the energy. */ - energySupplier: EnergyBillV1EnergySupplier; - /** Details of energy consumption. */ - energyUsage: EnergyBillV1EnergyUsage[] = []; - /** The date when the energy invoice was issued. */ - invoiceDate: DateField; - /** The unique identifier of the energy invoice. */ - invoiceNumber: StringField; - /** Information about the energy meter. */ - meterDetails: EnergyBillV1MeterDetail; - /** The subscription details fee for the energy service. */ - subscription: EnergyBillV1Subscription[] = []; - /** Details of Taxes and Contributions. */ - taxesAndContributions: EnergyBillV1TaxesAndContribution[] = []; - /** The total amount to be paid for the energy invoice. */ - totalAmount: AmountField; - /** The total amount to be paid for the energy invoice before taxes. */ - totalBeforeTaxes: AmountField; - /** Total of taxes applied to the invoice. 
*/ - totalTaxes: AmountField; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.contractId = new StringField({ - prediction: rawPrediction["contract_id"], - pageId: pageId, - }); - this.deliveryPoint = new StringField({ - prediction: rawPrediction["delivery_point"], - pageId: pageId, - }); - this.dueDate = new DateField({ - prediction: rawPrediction["due_date"], - pageId: pageId, - }); - this.energyConsumer = new EnergyBillV1EnergyConsumer({ - prediction: rawPrediction["energy_consumer"], - pageId: pageId, - }); - this.energySupplier = new EnergyBillV1EnergySupplier({ - prediction: rawPrediction["energy_supplier"], - pageId: pageId, - }); - rawPrediction["energy_usage"] && - rawPrediction["energy_usage"].map( - (itemPrediction: StringDict) => - this.energyUsage.push( - new EnergyBillV1EnergyUsage({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.invoiceDate = new DateField({ - prediction: rawPrediction["invoice_date"], - pageId: pageId, - }); - this.invoiceNumber = new StringField({ - prediction: rawPrediction["invoice_number"], - pageId: pageId, - }); - this.meterDetails = new EnergyBillV1MeterDetail({ - prediction: rawPrediction["meter_details"], - pageId: pageId, - }); - rawPrediction["subscription"] && - rawPrediction["subscription"].map( - (itemPrediction: StringDict) => - this.subscription.push( - new EnergyBillV1Subscription({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - rawPrediction["taxes_and_contributions"] && - rawPrediction["taxes_and_contributions"].map( - (itemPrediction: StringDict) => - this.taxesAndContributions.push( - new EnergyBillV1TaxesAndContribution({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.totalAmount = new AmountField({ - prediction: rawPrediction["total_amount"], - pageId: pageId, - }); - this.totalBeforeTaxes = new AmountField({ - prediction: rawPrediction["total_before_taxes"], - pageId: pageId, - }); - this.totalTaxes = new AmountField({ - 
prediction: rawPrediction["total_taxes"], - pageId: pageId, - }); - } - - /** - * Default string representation. - */ - toString(): string { - let subscriptionSummary:string = ""; - if (this.subscription && this.subscription.length > 0) { - const subscriptionColSizes:number[] = [38, 12, 12, 10, 11, 12]; - subscriptionSummary += "\n" + lineSeparator(subscriptionColSizes, "-") + "\n "; - subscriptionSummary += "| Description "; - subscriptionSummary += "| End Date "; - subscriptionSummary += "| Start Date "; - subscriptionSummary += "| Tax Rate "; - subscriptionSummary += "| Total "; - subscriptionSummary += "| Unit Price "; - subscriptionSummary += "|\n" + lineSeparator(subscriptionColSizes, "="); - subscriptionSummary += this.subscription.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(subscriptionColSizes, "-") - ).join(""); - } - let energyUsageSummary:string = ""; - if (this.energyUsage && this.energyUsage.length > 0) { - const energyUsageColSizes:number[] = [13, 38, 12, 12, 10, 11, 17, 12]; - energyUsageSummary += "\n" + lineSeparator(energyUsageColSizes, "-") + "\n "; - energyUsageSummary += "| Consumption "; - energyUsageSummary += "| Description "; - energyUsageSummary += "| End Date "; - energyUsageSummary += "| Start Date "; - energyUsageSummary += "| Tax Rate "; - energyUsageSummary += "| Total "; - energyUsageSummary += "| Unit of Measure "; - energyUsageSummary += "| Unit Price "; - energyUsageSummary += "|\n" + lineSeparator(energyUsageColSizes, "="); - energyUsageSummary += this.energyUsage.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(energyUsageColSizes, "-") - ).join(""); - } - let taxesAndContributionsSummary:string = ""; - if (this.taxesAndContributions && this.taxesAndContributions.length > 0) { - const taxesAndContributionsColSizes:number[] = [38, 12, 12, 10, 11, 12]; - taxesAndContributionsSummary += "\n" + lineSeparator(taxesAndContributionsColSizes, "-") + "\n "; - taxesAndContributionsSummary += 
"| Description "; - taxesAndContributionsSummary += "| End Date "; - taxesAndContributionsSummary += "| Start Date "; - taxesAndContributionsSummary += "| Tax Rate "; - taxesAndContributionsSummary += "| Total "; - taxesAndContributionsSummary += "| Unit Price "; - taxesAndContributionsSummary += "|\n" + lineSeparator(taxesAndContributionsColSizes, "="); - taxesAndContributionsSummary += this.taxesAndContributions.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(taxesAndContributionsColSizes, "-") - ).join(""); - } - const outStr = `:Invoice Number: ${this.invoiceNumber} -:Contract ID: ${this.contractId} -:Delivery Point: ${this.deliveryPoint} -:Invoice Date: ${this.invoiceDate} -:Due Date: ${this.dueDate} -:Total Before Taxes: ${this.totalBeforeTaxes} -:Total Taxes: ${this.totalTaxes} -:Total Amount: ${this.totalAmount} -:Energy Supplier: ${this.energySupplier.toFieldList()} -:Energy Consumer: ${this.energyConsumer.toFieldList()} -:Subscription: ${subscriptionSummary} -:Energy Usage: ${energyUsageSummary} -:Taxes and Contributions: ${taxesAndContributionsSummary} -:Meter Details: ${this.meterDetails.toFieldList()}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/fr/energyBill/energyBillV1EnergyConsumer.ts b/src/product/fr/energyBill/energyBillV1EnergyConsumer.ts deleted file mode 100644 index ceb202eed..000000000 --- a/src/product/fr/energyBill/energyBillV1EnergyConsumer.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * The entity that consumes the energy. - */ -export class EnergyBillV1EnergyConsumer { - /** The address of the energy consumer. */ - address: string | null; - /** The name of the energy consumer. */ - name: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. 
*/ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.name = prediction["name"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - name: this.name ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Name: " + - printable.name - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Name: ${printable.name}`.trimEnd(); - } -} diff --git a/src/product/fr/energyBill/energyBillV1EnergySupplier.ts b/src/product/fr/energyBill/energyBillV1EnergySupplier.ts deleted file mode 100644 index badfdf7e8..000000000 --- a/src/product/fr/energyBill/energyBillV1EnergySupplier.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * The company that supplies the energy. - */ -export class EnergyBillV1EnergySupplier { - /** The address of the energy supplier. */ - address: string | null; - /** The name of the energy supplier. */ - name: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.name = prediction["name"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - name: this.name ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Name: " + - printable.name - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Name: ${printable.name}`.trimEnd(); - } -} diff --git a/src/product/fr/energyBill/energyBillV1EnergyUsage.ts b/src/product/fr/energyBill/energyBillV1EnergyUsage.ts deleted file mode 100644 index d2e90d2af..000000000 --- a/src/product/fr/energyBill/energyBillV1EnergyUsage.ts +++ /dev/null @@ -1,166 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Details of energy consumption. - */ -export class EnergyBillV1EnergyUsage { - /** The price per unit of energy consumed. */ - consumption: number | null; - /** Description or details of the energy usage. */ - description: string | null; - /** The end date of the energy usage. */ - endDate: string | null; - /** The start date of the energy usage. */ - startDate: string | null; - /** The rate of tax applied to the total cost. */ - taxRate: number | null; - /** The total cost of energy consumed. */ - total: number | null; - /** The unit of measurement for energy consumption. 
*/ - unit: string | null; - /** The price per unit of energy consumed. */ - unitPrice: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["consumption"] !== undefined && - prediction["consumption"] !== null && - !isNaN(prediction["consumption"]) - ) { - this.consumption = +parseFloat(prediction["consumption"]); - } else { - this.consumption = null; - } - this.description = prediction["description"]; - this.endDate = prediction["end_date"]; - this.startDate = prediction["start_date"]; - if ( - prediction["tax_rate"] !== undefined && - prediction["tax_rate"] !== null && - !isNaN(prediction["tax_rate"]) - ) { - this.taxRate = +parseFloat(prediction["tax_rate"]); - } else { - this.taxRate = null; - } - if ( - prediction["total"] !== undefined && - prediction["total"] !== null && - !isNaN(prediction["total"]) - ) { - this.total = +parseFloat(prediction["total"]); - } else { - this.total = null; - } - this.unit = prediction["unit"]; - if ( - prediction["unit_price"] !== undefined && - prediction["unit_price"] !== null && - !isNaN(prediction["unit_price"]) - ) { - this.unitPrice = +parseFloat(prediction["unit_price"]); - } else { - this.unitPrice = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - consumption: - this.consumption !== undefined ? floatToString(this.consumption) : "", - description: this.description ? - this.description.length <= 36 ? 
- cleanSpecialChars(this.description) : - cleanSpecialChars(this.description).slice(0, 33) + "..." : - "", - endDate: this.endDate ? - this.endDate.length <= 10 ? - cleanSpecialChars(this.endDate) : - cleanSpecialChars(this.endDate).slice(0, 7) + "..." : - "", - startDate: this.startDate ? - this.startDate.length <= 10 ? - cleanSpecialChars(this.startDate) : - cleanSpecialChars(this.startDate).slice(0, 7) + "..." : - "", - taxRate: this.taxRate !== undefined ? floatToString(this.taxRate) : "", - total: this.total !== undefined ? floatToString(this.total) : "", - unit: this.unit ? - this.unit.length <= 15 ? - cleanSpecialChars(this.unit) : - cleanSpecialChars(this.unit).slice(0, 12) + "..." : - "", - unitPrice: this.unitPrice !== undefined ? floatToString(this.unitPrice) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Consumption: " + - printable.consumption + - ", Description: " + - printable.description + - ", End Date: " + - printable.endDate + - ", Start Date: " + - printable.startDate + - ", Tax Rate: " + - printable.taxRate + - ", Total: " + - printable.total + - ", Unit of Measure: " + - printable.unit + - ", Unit Price: " + - printable.unitPrice - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. 
- */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.consumption.padEnd(11) + - " | " + - printable.description.padEnd(36) + - " | " + - printable.endDate.padEnd(10) + - " | " + - printable.startDate.padEnd(10) + - " | " + - printable.taxRate.padEnd(8) + - " | " + - printable.total.padEnd(9) + - " | " + - printable.unit.padEnd(15) + - " | " + - printable.unitPrice.padEnd(10) + - " |" - ); - } -} diff --git a/src/product/fr/energyBill/energyBillV1MeterDetail.ts b/src/product/fr/energyBill/energyBillV1MeterDetail.ts deleted file mode 100644 index db04ab240..000000000 --- a/src/product/fr/energyBill/energyBillV1MeterDetail.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the energy meter. - */ -export class EnergyBillV1MeterDetail { - /** The unique identifier of the energy meter. */ - meterNumber: string | null; - /** The type of energy meter. */ - meterType: string | null; - /** The unit of power for energy consumption. */ - unit: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.meterNumber = prediction["meter_number"]; - this.meterType = prediction["meter_type"]; - this.unit = prediction["unit"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - meterNumber: this.meterNumber ?? "", - meterType: this.meterType ?? "", - unit: this.unit ?? 
"", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Meter Number: " + - printable.meterNumber + - ", Meter Type: " + - printable.meterType + - ", Unit of Power: " + - printable.unit - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Meter Number: ${printable.meterNumber} - :Meter Type: ${printable.meterType} - :Unit of Power: ${printable.unit}`.trimEnd(); - } -} diff --git a/src/product/fr/energyBill/energyBillV1Subscription.ts b/src/product/fr/energyBill/energyBillV1Subscription.ts deleted file mode 100644 index 43c470ddc..000000000 --- a/src/product/fr/energyBill/energyBillV1Subscription.ts +++ /dev/null @@ -1,137 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * The subscription details fee for the energy service. - */ -export class EnergyBillV1Subscription { - /** Description or details of the subscription. */ - description: string | null; - /** The end date of the subscription. */ - endDate: string | null; - /** The start date of the subscription. */ - startDate: string | null; - /** The rate of tax applied to the total cost. */ - taxRate: number | null; - /** The total cost of subscription. */ - total: number | null; - /** The price per unit of subscription. */ - unitPrice: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.description = prediction["description"]; - this.endDate = prediction["end_date"]; - this.startDate = prediction["start_date"]; - if ( - prediction["tax_rate"] !== undefined && - prediction["tax_rate"] !== null && - !isNaN(prediction["tax_rate"]) - ) { - this.taxRate = +parseFloat(prediction["tax_rate"]); - } else { - this.taxRate = null; - } - if ( - prediction["total"] !== undefined && - prediction["total"] !== null && - !isNaN(prediction["total"]) - ) { - this.total = +parseFloat(prediction["total"]); - } else { - this.total = null; - } - if ( - prediction["unit_price"] !== undefined && - prediction["unit_price"] !== null && - !isNaN(prediction["unit_price"]) - ) { - this.unitPrice = +parseFloat(prediction["unit_price"]); - } else { - this.unitPrice = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - description: this.description ? - this.description.length <= 36 ? - cleanSpecialChars(this.description) : - cleanSpecialChars(this.description).slice(0, 33) + "..." : - "", - endDate: this.endDate ? - this.endDate.length <= 10 ? - cleanSpecialChars(this.endDate) : - cleanSpecialChars(this.endDate).slice(0, 7) + "..." : - "", - startDate: this.startDate ? - this.startDate.length <= 10 ? - cleanSpecialChars(this.startDate) : - cleanSpecialChars(this.startDate).slice(0, 7) + "..." : - "", - taxRate: this.taxRate !== undefined ? floatToString(this.taxRate) : "", - total: this.total !== undefined ? floatToString(this.total) : "", - unitPrice: this.unitPrice !== undefined ? floatToString(this.unitPrice) : "", - }; - } - - /** - * Default string representation. 
- */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Description: " + - printable.description + - ", End Date: " + - printable.endDate + - ", Start Date: " + - printable.startDate + - ", Tax Rate: " + - printable.taxRate + - ", Total: " + - printable.total + - ", Unit Price: " + - printable.unitPrice - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.description.padEnd(36) + - " | " + - printable.endDate.padEnd(10) + - " | " + - printable.startDate.padEnd(10) + - " | " + - printable.taxRate.padEnd(8) + - " | " + - printable.total.padEnd(9) + - " | " + - printable.unitPrice.padEnd(10) + - " |" - ); - } -} diff --git a/src/product/fr/energyBill/energyBillV1TaxesAndContribution.ts b/src/product/fr/energyBill/energyBillV1TaxesAndContribution.ts deleted file mode 100644 index 97b377abf..000000000 --- a/src/product/fr/energyBill/energyBillV1TaxesAndContribution.ts +++ /dev/null @@ -1,137 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Details of Taxes and Contributions. - */ -export class EnergyBillV1TaxesAndContribution { - /** Description or details of the Taxes and Contributions. */ - description: string | null; - /** The end date of the Taxes and Contributions. */ - endDate: string | null; - /** The start date of the Taxes and Contributions. */ - startDate: string | null; - /** The rate of tax applied to the total cost. */ - taxRate: number | null; - /** The total cost of Taxes and Contributions. */ - total: number | null; - /** The price per unit of Taxes and Contributions. */ - unitPrice: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. 
*/ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.description = prediction["description"]; - this.endDate = prediction["end_date"]; - this.startDate = prediction["start_date"]; - if ( - prediction["tax_rate"] !== undefined && - prediction["tax_rate"] !== null && - !isNaN(prediction["tax_rate"]) - ) { - this.taxRate = +parseFloat(prediction["tax_rate"]); - } else { - this.taxRate = null; - } - if ( - prediction["total"] !== undefined && - prediction["total"] !== null && - !isNaN(prediction["total"]) - ) { - this.total = +parseFloat(prediction["total"]); - } else { - this.total = null; - } - if ( - prediction["unit_price"] !== undefined && - prediction["unit_price"] !== null && - !isNaN(prediction["unit_price"]) - ) { - this.unitPrice = +parseFloat(prediction["unit_price"]); - } else { - this.unitPrice = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - description: this.description ? - this.description.length <= 36 ? - cleanSpecialChars(this.description) : - cleanSpecialChars(this.description).slice(0, 33) + "..." : - "", - endDate: this.endDate ? - this.endDate.length <= 10 ? - cleanSpecialChars(this.endDate) : - cleanSpecialChars(this.endDate).slice(0, 7) + "..." : - "", - startDate: this.startDate ? - this.startDate.length <= 10 ? - cleanSpecialChars(this.startDate) : - cleanSpecialChars(this.startDate).slice(0, 7) + "..." : - "", - taxRate: this.taxRate !== undefined ? floatToString(this.taxRate) : "", - total: this.total !== undefined ? floatToString(this.total) : "", - unitPrice: this.unitPrice !== undefined ? 
floatToString(this.unitPrice) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Description: " + - printable.description + - ", End Date: " + - printable.endDate + - ", Start Date: " + - printable.startDate + - ", Tax Rate: " + - printable.taxRate + - ", Total: " + - printable.total + - ", Unit Price: " + - printable.unitPrice - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.description.padEnd(36) + - " | " + - printable.endDate.padEnd(10) + - " | " + - printable.startDate.padEnd(10) + - " | " + - printable.taxRate.padEnd(8) + - " | " + - printable.total.padEnd(9) + - " | " + - printable.unitPrice.padEnd(10) + - " |" - ); - } -} diff --git a/src/product/fr/energyBill/index.ts b/src/product/fr/energyBill/index.ts deleted file mode 100644 index 291672c2b..000000000 --- a/src/product/fr/energyBill/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { EnergyBillV1 } from "./energyBillV1"; diff --git a/src/product/fr/energyBill/internal.ts b/src/product/fr/energyBill/internal.ts deleted file mode 100644 index 734b7e74b..000000000 --- a/src/product/fr/energyBill/internal.ts +++ /dev/null @@ -1,8 +0,0 @@ -export { EnergyBillV1 } from "./energyBillV1"; -export { EnergyBillV1Document } from "./energyBillV1Document"; -export { EnergyBillV1EnergyConsumer } from "./energyBillV1EnergyConsumer"; -export { EnergyBillV1EnergySupplier } from "./energyBillV1EnergySupplier"; -export { EnergyBillV1EnergyUsage } from "./energyBillV1EnergyUsage"; -export { EnergyBillV1MeterDetail } from "./energyBillV1MeterDetail"; -export { EnergyBillV1Subscription } from "./energyBillV1Subscription"; -export { EnergyBillV1TaxesAndContribution } from "./energyBillV1TaxesAndContribution"; diff --git a/src/product/fr/healthCard/healthCardV1.ts 
b/src/product/fr/healthCard/healthCardV1.ts deleted file mode 100644 index 59c4ed200..000000000 --- a/src/product/fr/healthCard/healthCardV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { HealthCardV1Document } from "./healthCardV1Document"; - -/** - * Health Card API version 1 inference prediction. - */ -export class HealthCardV1 extends Inference { - /** The endpoint's name. */ - endpointName = "french_healthcard"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: HealthCardV1Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new HealthCardV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - HealthCardV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/fr/healthCard/healthCardV1Document.ts b/src/product/fr/healthCard/healthCardV1Document.ts deleted file mode 100644 index 2c50e641d..000000000 --- a/src/product/fr/healthCard/healthCardV1Document.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString, -} from "../../../parsing/common"; -import { DateField, StringField } from "../../../parsing/standard"; - -/** - * Health Card API version 1.0 document data. - */ -export class HealthCardV1Document implements Prediction { - /** The given names of the card holder. */ - givenNames: StringField[] = []; - /** The date when the carte vitale document was issued. */ - issuanceDate: DateField; - /** The social security number of the card holder. */ - socialSecurity: StringField; - /** The surname of the card holder. 
*/ - surname: StringField; - - constructor(rawPrediction: StringDict, pageId?: number) { - rawPrediction["given_names"] && - rawPrediction["given_names"].map( - (itemPrediction: StringDict) => - this.givenNames.push( - new StringField({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.issuanceDate = new DateField({ - prediction: rawPrediction["issuance_date"], - pageId: pageId, - }); - this.socialSecurity = new StringField({ - prediction: rawPrediction["social_security"], - pageId: pageId, - }); - this.surname = new StringField({ - prediction: rawPrediction["surname"], - pageId: pageId, - }); - } - - /** - * Default string representation. - */ - toString(): string { - const givenNames = this.givenNames.join("\n "); - const outStr = `:Given Name(s): ${givenNames} -:Surname: ${this.surname} -:Social Security Number: ${this.socialSecurity} -:Issuance Date: ${this.issuanceDate}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/fr/healthCard/index.ts b/src/product/fr/healthCard/index.ts deleted file mode 100644 index dd23fa4ec..000000000 --- a/src/product/fr/healthCard/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { HealthCardV1 } from "./healthCardV1"; diff --git a/src/product/fr/healthCard/internal.ts b/src/product/fr/healthCard/internal.ts deleted file mode 100644 index 22d5f9e1c..000000000 --- a/src/product/fr/healthCard/internal.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { HealthCardV1 } from "./healthCardV1"; -export { HealthCardV1Document } from "./healthCardV1Document"; diff --git a/src/product/fr/idCard/idCardV1.ts b/src/product/fr/idCard/idCardV1.ts deleted file mode 100644 index 452e7c664..000000000 --- a/src/product/fr/idCard/idCardV1.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { IdCardV1Document } from "./idCardV1Document"; -import { IdCardV1Page } from "./idCardV1Page"; - -/** - * Carte Nationale d'Identité API version 1 inference prediction. 
- */ -export class IdCardV1 extends Inference { - /** The endpoint's name. */ - endpointName = "idcard_fr"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: IdCardV1Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new IdCardV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - IdCardV1Page, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/fr/idCard/idCardV1Document.ts b/src/product/fr/idCard/idCardV1Document.ts deleted file mode 100644 index b5c282565..000000000 --- a/src/product/fr/idCard/idCardV1Document.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString, -} from "../../../parsing/common"; -import { DateField, StringField } from "../../../parsing/standard"; - -/** - * Carte Nationale d'Identité API version 1.1 document data. - */ -export class IdCardV1Document implements Prediction { - /** The name of the issuing authority. */ - authority: StringField; - /** The date of birth of the card holder. */ - birthDate: DateField; - /** The place of birth of the card holder. */ - birthPlace: StringField; - /** The expiry date of the identification card. */ - expiryDate: DateField; - /** The gender of the card holder. */ - gender: StringField; - /** The given name(s) of the card holder. */ - givenNames: StringField[] = []; - /** The identification card number. */ - idNumber: StringField; - /** Machine Readable Zone, first line */ - mrz1: StringField; - /** Machine Readable Zone, second line */ - mrz2: StringField; - /** The surname of the card holder. 
*/ - surname: StringField; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.authority = new StringField({ - prediction: rawPrediction["authority"], - pageId: pageId, - }); - this.birthDate = new DateField({ - prediction: rawPrediction["birth_date"], - pageId: pageId, - }); - this.birthPlace = new StringField({ - prediction: rawPrediction["birth_place"], - pageId: pageId, - }); - this.expiryDate = new DateField({ - prediction: rawPrediction["expiry_date"], - pageId: pageId, - }); - this.gender = new StringField({ - prediction: rawPrediction["gender"], - pageId: pageId, - }); - rawPrediction["given_names"] && - rawPrediction["given_names"].map( - (itemPrediction: StringDict) => - this.givenNames.push( - new StringField({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.idNumber = new StringField({ - prediction: rawPrediction["id_number"], - pageId: pageId, - }); - this.mrz1 = new StringField({ - prediction: rawPrediction["mrz1"], - pageId: pageId, - }); - this.mrz2 = new StringField({ - prediction: rawPrediction["mrz2"], - pageId: pageId, - }); - this.surname = new StringField({ - prediction: rawPrediction["surname"], - pageId: pageId, - }); - } - - /** - * Default string representation. 
- */ - toString(): string { - const givenNames = this.givenNames.join("\n "); - const outStr = `:Identity Number: ${this.idNumber} -:Given Name(s): ${givenNames} -:Surname: ${this.surname} -:Date of Birth: ${this.birthDate} -:Place of Birth: ${this.birthPlace} -:Expiry Date: ${this.expiryDate} -:Issuing Authority: ${this.authority} -:Gender: ${this.gender} -:MRZ Line 1: ${this.mrz1} -:MRZ Line 2: ${this.mrz2}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/fr/idCard/idCardV1Page.ts b/src/product/fr/idCard/idCardV1Page.ts deleted file mode 100644 index 89580313a..000000000 --- a/src/product/fr/idCard/idCardV1Page.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { StringDict, cleanOutString } from "../../../parsing/common"; -import { ClassificationField } from "../../../parsing/standard"; - -import { IdCardV1Document } from "./idCardV1Document"; - -/** - * Carte Nationale d'Identité API version 1.1 page data. - */ -export class IdCardV1Page extends IdCardV1Document { - /** The side of the document which is visible. 
*/ - documentSide: ClassificationField; - - constructor(rawPrediction: StringDict, pageId?: number) { - super(rawPrediction, pageId); - - this.documentSide = new ClassificationField({ - prediction: rawPrediction["document_side"], - }); - } - - toString(): string { - let outStr = `:Document Side: ${this.documentSide}`.trimEnd(); - outStr += "\n" + super.toString(); - return cleanOutString(outStr); - } -} diff --git a/src/product/fr/idCard/index.ts b/src/product/fr/idCard/index.ts deleted file mode 100644 index d3798f37e..000000000 --- a/src/product/fr/idCard/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { IdCardV1 } from "./idCardV1"; -export { IdCardV2 } from "./idCardV2"; diff --git a/src/product/fr/idCard/internal.ts b/src/product/fr/idCard/internal.ts deleted file mode 100644 index d7e8ab6e9..000000000 --- a/src/product/fr/idCard/internal.ts +++ /dev/null @@ -1,6 +0,0 @@ -export { IdCardV1 } from "./idCardV1"; -export { IdCardV1Document } from "./idCardV1Document"; -export { IdCardV1Page } from "./idCardV1Page"; -export { IdCardV2 } from "./idCardV2"; -export { IdCardV2Document } from "./idCardV2Document"; -export { IdCardV2Page } from "./idCardV2Page"; diff --git a/src/product/fr/index.ts b/src/product/fr/index.ts deleted file mode 100644 index 451ec1612..000000000 --- a/src/product/fr/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -export { BankAccountDetailsV1 } from "./bankAccountDetails/bankAccountDetailsV1"; -export { BankAccountDetailsV2 } from "./bankAccountDetails/bankAccountDetailsV2"; -export { CarteGriseV1 } from "./carteGrise/carteGriseV1"; -export { EnergyBillV1 } from "./energyBill/energyBillV1"; -export { HealthCardV1 } from "./healthCard/healthCardV1"; -export { IdCardV1 } from "./idCard/idCardV1"; -export { IdCardV2 } from "./idCard/idCardV2"; -export { PayslipV2 } from "./payslip/payslipV2"; -export { PayslipV3 } from "./payslip/payslipV3"; diff --git a/src/product/fr/internal.ts b/src/product/fr/internal.ts deleted file mode 100644 index 
33f2441ad..000000000 --- a/src/product/fr/internal.ts +++ /dev/null @@ -1,6 +0,0 @@ -export * as bankAccountDetails from "./bankAccountDetails/internal"; -export * as carteGrise from "./carteGrise/internal"; -export * as energyBill from "./energyBill/internal"; -export * as healthCard from "./healthCard/internal"; -export * as idCard from "./idCard/internal"; -export * as payslip from "./payslip/internal"; diff --git a/src/product/fr/payslip/index.ts b/src/product/fr/payslip/index.ts deleted file mode 100644 index 12c775ba8..000000000 --- a/src/product/fr/payslip/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { PayslipV2 } from "./payslipV2"; -export { PayslipV3 } from "./payslipV3"; diff --git a/src/product/fr/payslip/internal.ts b/src/product/fr/payslip/internal.ts deleted file mode 100644 index cefd5848e..000000000 --- a/src/product/fr/payslip/internal.ts +++ /dev/null @@ -1,20 +0,0 @@ -export { PayslipV2 } from "./payslipV2"; -export { PayslipV2BankAccountDetail } from "./payslipV2BankAccountDetail"; -export { PayslipV2Document } from "./payslipV2Document"; -export { PayslipV2Employee } from "./payslipV2Employee"; -export { PayslipV2Employer } from "./payslipV2Employer"; -export { PayslipV2Employment } from "./payslipV2Employment"; -export { PayslipV2PayDetail } from "./payslipV2PayDetail"; -export { PayslipV2PayPeriod } from "./payslipV2PayPeriod"; -export { PayslipV2Pto } from "./payslipV2Pto"; -export { PayslipV2SalaryDetail } from "./payslipV2SalaryDetail"; -export { PayslipV3 } from "./payslipV3"; -export { PayslipV3BankAccountDetail } from "./payslipV3BankAccountDetail"; -export { PayslipV3Document } from "./payslipV3Document"; -export { PayslipV3Employee } from "./payslipV3Employee"; -export { PayslipV3Employer } from "./payslipV3Employer"; -export { PayslipV3Employment } from "./payslipV3Employment"; -export { PayslipV3PaidTimeOff } from "./payslipV3PaidTimeOff"; -export { PayslipV3PayDetail } from "./payslipV3PayDetail"; -export { PayslipV3PayPeriod } 
from "./payslipV3PayPeriod"; -export { PayslipV3SalaryDetail } from "./payslipV3SalaryDetail"; diff --git a/src/product/fr/payslip/payslipV2.ts b/src/product/fr/payslip/payslipV2.ts deleted file mode 100644 index cb1e12ea5..000000000 --- a/src/product/fr/payslip/payslipV2.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { PayslipV2Document } from "./payslipV2Document"; - -/** - * Payslip API version 2 inference prediction. - */ -export class PayslipV2 extends Inference { - /** The endpoint's name. */ - endpointName = "payslip_fra"; - /** The endpoint's version. */ - endpointVersion = "2"; - /** The document-level prediction. */ - prediction: PayslipV2Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new PayslipV2Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - PayslipV2Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/fr/payslip/payslipV2BankAccountDetail.ts b/src/product/fr/payslip/payslipV2BankAccountDetail.ts deleted file mode 100644 index 033f531be..000000000 --- a/src/product/fr/payslip/payslipV2BankAccountDetail.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employee's bank account. - */ -export class PayslipV2BankAccountDetail { - /** The name of the bank. */ - bankName: string | null; - /** The IBAN of the bank account. */ - iban: string | null; - /** The SWIFT code of the bank. */ - swift: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. 
*/ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.bankName = prediction["bank_name"]; - this.iban = prediction["iban"]; - this.swift = prediction["swift"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - bankName: this.bankName ?? "", - iban: this.iban ?? "", - swift: this.swift ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Bank Name: " + - printable.bankName + - ", IBAN: " + - printable.iban + - ", SWIFT: " + - printable.swift - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Bank Name: ${printable.bankName} - :IBAN: ${printable.iban} - :SWIFT: ${printable.swift}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV2Document.ts b/src/product/fr/payslip/payslipV2Document.ts deleted file mode 100644 index 83126ef04..000000000 --- a/src/product/fr/payslip/payslipV2Document.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString,lineSeparator, -} from "../../../parsing/common"; -import { PayslipV2Employee } from "./payslipV2Employee"; -import { PayslipV2Employer } from "./payslipV2Employer"; -import { PayslipV2BankAccountDetail } from "./payslipV2BankAccountDetail"; -import { PayslipV2Employment } from "./payslipV2Employment"; -import { PayslipV2SalaryDetail } from "./payslipV2SalaryDetail"; -import { PayslipV2PayDetail } from "./payslipV2PayDetail"; -import { PayslipV2Pto } from "./payslipV2Pto"; -import { PayslipV2PayPeriod } from "./payslipV2PayPeriod"; - - -/** - * Payslip API version 2.0 document data. - */ -export class PayslipV2Document implements Prediction { - /** Information about the employee's bank account. */ - bankAccountDetails: PayslipV2BankAccountDetail; - /** Information about the employee. */ - employee: PayslipV2Employee; - /** Information about the employer. */ - employer: PayslipV2Employer; - /** Information about the employment. */ - employment: PayslipV2Employment; - /** Detailed information about the pay. */ - payDetail: PayslipV2PayDetail; - /** Information about the pay period. */ - payPeriod: PayslipV2PayPeriod; - /** Information about paid time off. */ - pto: PayslipV2Pto; - /** Detailed information about the earnings. 
*/ - salaryDetails: PayslipV2SalaryDetail[] = []; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.bankAccountDetails = new PayslipV2BankAccountDetail({ - prediction: rawPrediction["bank_account_details"], - pageId: pageId, - }); - this.employee = new PayslipV2Employee({ - prediction: rawPrediction["employee"], - pageId: pageId, - }); - this.employer = new PayslipV2Employer({ - prediction: rawPrediction["employer"], - pageId: pageId, - }); - this.employment = new PayslipV2Employment({ - prediction: rawPrediction["employment"], - pageId: pageId, - }); - this.payDetail = new PayslipV2PayDetail({ - prediction: rawPrediction["pay_detail"], - pageId: pageId, - }); - this.payPeriod = new PayslipV2PayPeriod({ - prediction: rawPrediction["pay_period"], - pageId: pageId, - }); - this.pto = new PayslipV2Pto({ - prediction: rawPrediction["pto"], - pageId: pageId, - }); - rawPrediction["salary_details"] && - rawPrediction["salary_details"].map( - (itemPrediction: StringDict) => - this.salaryDetails.push( - new PayslipV2SalaryDetail({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - } - - /** - * Default string representation. 
- */ - toString(): string { - let salaryDetailsSummary:string = ""; - if (this.salaryDetails && this.salaryDetails.length > 0) { - const salaryDetailsColSizes:number[] = [14, 11, 38, 11]; - salaryDetailsSummary += "\n" + lineSeparator(salaryDetailsColSizes, "-") + "\n "; - salaryDetailsSummary += "| Amount "; - salaryDetailsSummary += "| Base "; - salaryDetailsSummary += "| Description "; - salaryDetailsSummary += "| Rate "; - salaryDetailsSummary += "|\n" + lineSeparator(salaryDetailsColSizes, "="); - salaryDetailsSummary += this.salaryDetails.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(salaryDetailsColSizes, "-") - ).join(""); - } - const outStr = `:Employee: ${this.employee.toFieldList()} -:Employer: ${this.employer.toFieldList()} -:Bank Account Details: ${this.bankAccountDetails.toFieldList()} -:Employment: ${this.employment.toFieldList()} -:Salary Details: ${salaryDetailsSummary} -:Pay Detail: ${this.payDetail.toFieldList()} -:PTO: ${this.pto.toFieldList()} -:Pay Period: ${this.payPeriod.toFieldList()}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/fr/payslip/payslipV2Employee.ts b/src/product/fr/payslip/payslipV2Employee.ts deleted file mode 100644 index 3c0faf613..000000000 --- a/src/product/fr/payslip/payslipV2Employee.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employee. - */ -export class PayslipV2Employee { - /** The address of the employee. */ - address: string | null; - /** The date of birth of the employee. */ - dateOfBirth: string | null; - /** The first name of the employee. */ - firstName: string | null; - /** The last name of the employee. */ - lastName: string | null; - /** The phone number of the employee. */ - phoneNumber: string | null; - /** The registration number of the employee. 
*/ - registrationNumber: string | null; - /** The social security number of the employee. */ - socialSecurityNumber: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.dateOfBirth = prediction["date_of_birth"]; - this.firstName = prediction["first_name"]; - this.lastName = prediction["last_name"]; - this.phoneNumber = prediction["phone_number"]; - this.registrationNumber = prediction["registration_number"]; - this.socialSecurityNumber = prediction["social_security_number"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - dateOfBirth: this.dateOfBirth ?? "", - firstName: this.firstName ?? "", - lastName: this.lastName ?? "", - phoneNumber: this.phoneNumber ?? "", - registrationNumber: this.registrationNumber ?? "", - socialSecurityNumber: this.socialSecurityNumber ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Date of Birth: " + - printable.dateOfBirth + - ", First Name: " + - printable.firstName + - ", Last Name: " + - printable.lastName + - ", Phone Number: " + - printable.phoneNumber + - ", Registration Number: " + - printable.registrationNumber + - ", Social Security Number: " + - printable.socialSecurityNumber - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Date of Birth: ${printable.dateOfBirth} - :First Name: ${printable.firstName} - :Last Name: ${printable.lastName} - :Phone Number: ${printable.phoneNumber} - :Registration Number: ${printable.registrationNumber} - :Social Security Number: ${printable.socialSecurityNumber}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV2Employer.ts b/src/product/fr/payslip/payslipV2Employer.ts deleted file mode 100644 index a499595b8..000000000 --- a/src/product/fr/payslip/payslipV2Employer.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employer. - */ -export class PayslipV2Employer { - /** The address of the employer. */ - address: string | null; - /** The company ID of the employer. */ - companyId: string | null; - /** The site of the company. */ - companySite: string | null; - /** The NAF code of the employer. */ - nafCode: string | null; - /** The name of the employer. */ - name: string | null; - /** The phone number of the employer. */ - phoneNumber: string | null; - /** The URSSAF number of the employer. */ - urssafNumber: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.companyId = prediction["company_id"]; - this.companySite = prediction["company_site"]; - this.nafCode = prediction["naf_code"]; - this.name = prediction["name"]; - this.phoneNumber = prediction["phone_number"]; - this.urssafNumber = prediction["urssaf_number"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - companyId: this.companyId ?? "", - companySite: this.companySite ?? "", - nafCode: this.nafCode ?? "", - name: this.name ?? "", - phoneNumber: this.phoneNumber ?? "", - urssafNumber: this.urssafNumber ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Company ID: " + - printable.companyId + - ", Company Site: " + - printable.companySite + - ", NAF Code: " + - printable.nafCode + - ", Name: " + - printable.name + - ", Phone Number: " + - printable.phoneNumber + - ", URSSAF Number: " + - printable.urssafNumber - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Company ID: ${printable.companyId} - :Company Site: ${printable.companySite} - :NAF Code: ${printable.nafCode} - :Name: ${printable.name} - :Phone Number: ${printable.phoneNumber} - :URSSAF Number: ${printable.urssafNumber}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV2Employment.ts b/src/product/fr/payslip/payslipV2Employment.ts deleted file mode 100644 index 14dacdc32..000000000 --- a/src/product/fr/payslip/payslipV2Employment.ts +++ /dev/null @@ -1,103 +0,0 @@ - -import { floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employment. - */ -export class PayslipV2Employment { - /** The category of the employment. */ - category: string | null; - /** The coefficient of the employment. */ - coefficient: number | null; - /** The collective agreement of the employment. */ - collectiveAgreement: string | null; - /** The job title of the employee. */ - jobTitle: string | null; - /** The position level of the employment. */ - positionLevel: string | null; - /** The start date of the employment. */ - startDate: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.category = prediction["category"]; - if ( - prediction["coefficient"] !== undefined && - prediction["coefficient"] !== null && - !isNaN(prediction["coefficient"]) - ) { - this.coefficient = +parseFloat(prediction["coefficient"]); - } else { - this.coefficient = null; - } - this.collectiveAgreement = prediction["collective_agreement"]; - this.jobTitle = prediction["job_title"]; - this.positionLevel = prediction["position_level"]; - this.startDate = prediction["start_date"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - category: this.category ?? "", - coefficient: - this.coefficient !== undefined ? floatToString(this.coefficient) : "", - collectiveAgreement: this.collectiveAgreement ?? "", - jobTitle: this.jobTitle ?? "", - positionLevel: this.positionLevel ?? "", - startDate: this.startDate ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Category: " + - printable.category + - ", Coefficient: " + - printable.coefficient + - ", Collective Agreement: " + - printable.collectiveAgreement + - ", Job Title: " + - printable.jobTitle + - ", Position Level: " + - printable.positionLevel + - ", Start Date: " + - printable.startDate - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Category: ${printable.category} - :Coefficient: ${printable.coefficient} - :Collective Agreement: ${printable.collectiveAgreement} - :Job Title: ${printable.jobTitle} - :Position Level: ${printable.positionLevel} - :Start Date: ${printable.startDate}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV2PayDetail.ts b/src/product/fr/payslip/payslipV2PayDetail.ts deleted file mode 100644 index c5486b945..000000000 --- a/src/product/fr/payslip/payslipV2PayDetail.ts +++ /dev/null @@ -1,211 +0,0 @@ - -import { floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Detailed information about the pay. - */ -export class PayslipV2PayDetail { - /** The gross salary of the employee. */ - grossSalary: number | null; - /** The year-to-date gross salary of the employee. */ - grossSalaryYtd: number | null; - /** The income tax rate of the employee. */ - incomeTaxRate: number | null; - /** The income tax withheld from the employee's pay. */ - incomeTaxWithheld: number | null; - /** The net paid amount of the employee. */ - netPaid: number | null; - /** The net paid amount before tax of the employee. */ - netPaidBeforeTax: number | null; - /** The net taxable amount of the employee. */ - netTaxable: number | null; - /** The year-to-date net taxable amount of the employee. */ - netTaxableYtd: number | null; - /** The total cost to the employer. */ - totalCostEmployer: number | null; - /** The total taxes and deductions of the employee. */ - totalTaxesAndDeductions: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["gross_salary"] !== undefined && - prediction["gross_salary"] !== null && - !isNaN(prediction["gross_salary"]) - ) { - this.grossSalary = +parseFloat(prediction["gross_salary"]); - } else { - this.grossSalary = null; - } - if ( - prediction["gross_salary_ytd"] !== undefined && - prediction["gross_salary_ytd"] !== null && - !isNaN(prediction["gross_salary_ytd"]) - ) { - this.grossSalaryYtd = +parseFloat(prediction["gross_salary_ytd"]); - } else { - this.grossSalaryYtd = null; - } - if ( - prediction["income_tax_rate"] !== undefined && - prediction["income_tax_rate"] !== null && - !isNaN(prediction["income_tax_rate"]) - ) { - this.incomeTaxRate = +parseFloat(prediction["income_tax_rate"]); - } else { - this.incomeTaxRate = null; - } - if ( - prediction["income_tax_withheld"] !== undefined && - prediction["income_tax_withheld"] !== null && - !isNaN(prediction["income_tax_withheld"]) - ) { - this.incomeTaxWithheld = +parseFloat(prediction["income_tax_withheld"]); - } else { - this.incomeTaxWithheld = null; - } - if ( - prediction["net_paid"] !== undefined && - prediction["net_paid"] !== null && - !isNaN(prediction["net_paid"]) - ) { - this.netPaid = +parseFloat(prediction["net_paid"]); - } else { - this.netPaid = null; - } - if ( - prediction["net_paid_before_tax"] !== undefined && - prediction["net_paid_before_tax"] !== null && - !isNaN(prediction["net_paid_before_tax"]) - ) { - this.netPaidBeforeTax = +parseFloat(prediction["net_paid_before_tax"]); - } else { - this.netPaidBeforeTax = null; - } - if ( - prediction["net_taxable"] !== undefined && - prediction["net_taxable"] !== null && - !isNaN(prediction["net_taxable"]) - ) { - this.netTaxable = +parseFloat(prediction["net_taxable"]); - } else { - this.netTaxable = null; - } - if ( - prediction["net_taxable_ytd"] !== undefined && - prediction["net_taxable_ytd"] !== null && - 
!isNaN(prediction["net_taxable_ytd"]) - ) { - this.netTaxableYtd = +parseFloat(prediction["net_taxable_ytd"]); - } else { - this.netTaxableYtd = null; - } - if ( - prediction["total_cost_employer"] !== undefined && - prediction["total_cost_employer"] !== null && - !isNaN(prediction["total_cost_employer"]) - ) { - this.totalCostEmployer = +parseFloat(prediction["total_cost_employer"]); - } else { - this.totalCostEmployer = null; - } - if ( - prediction["total_taxes_and_deductions"] !== undefined && - prediction["total_taxes_and_deductions"] !== null && - !isNaN(prediction["total_taxes_and_deductions"]) - ) { - this.totalTaxesAndDeductions = +parseFloat(prediction["total_taxes_and_deductions"]); - } else { - this.totalTaxesAndDeductions = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - grossSalary: - this.grossSalary !== undefined ? floatToString(this.grossSalary) : "", - grossSalaryYtd: - this.grossSalaryYtd !== undefined ? floatToString(this.grossSalaryYtd) : "", - incomeTaxRate: - this.incomeTaxRate !== undefined ? floatToString(this.incomeTaxRate) : "", - incomeTaxWithheld: - this.incomeTaxWithheld !== undefined ? floatToString(this.incomeTaxWithheld) : "", - netPaid: this.netPaid !== undefined ? floatToString(this.netPaid) : "", - netPaidBeforeTax: - this.netPaidBeforeTax !== undefined ? floatToString(this.netPaidBeforeTax) : "", - netTaxable: - this.netTaxable !== undefined ? floatToString(this.netTaxable) : "", - netTaxableYtd: - this.netTaxableYtd !== undefined ? floatToString(this.netTaxableYtd) : "", - totalCostEmployer: - this.totalCostEmployer !== undefined ? floatToString(this.totalCostEmployer) : "", - totalTaxesAndDeductions: - this.totalTaxesAndDeductions !== undefined ? 
floatToString(this.totalTaxesAndDeductions) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Gross Salary: " + - printable.grossSalary + - ", Gross Salary YTD: " + - printable.grossSalaryYtd + - ", Income Tax Rate: " + - printable.incomeTaxRate + - ", Income Tax Withheld: " + - printable.incomeTaxWithheld + - ", Net Paid: " + - printable.netPaid + - ", Net Paid Before Tax: " + - printable.netPaidBeforeTax + - ", Net Taxable: " + - printable.netTaxable + - ", Net Taxable YTD: " + - printable.netTaxableYtd + - ", Total Cost Employer: " + - printable.totalCostEmployer + - ", Total Taxes and Deductions: " + - printable.totalTaxesAndDeductions - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Gross Salary: ${printable.grossSalary} - :Gross Salary YTD: ${printable.grossSalaryYtd} - :Income Tax Rate: ${printable.incomeTaxRate} - :Income Tax Withheld: ${printable.incomeTaxWithheld} - :Net Paid: ${printable.netPaid} - :Net Paid Before Tax: ${printable.netPaidBeforeTax} - :Net Taxable: ${printable.netTaxable} - :Net Taxable YTD: ${printable.netTaxableYtd} - :Total Cost Employer: ${printable.totalCostEmployer} - :Total Taxes and Deductions: ${printable.totalTaxesAndDeductions}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV2PayPeriod.ts b/src/product/fr/payslip/payslipV2PayPeriod.ts deleted file mode 100644 index a1d82acff..000000000 --- a/src/product/fr/payslip/payslipV2PayPeriod.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the pay period. - */ -export class PayslipV2PayPeriod { - /** The end date of the pay period. */ - endDate: string | null; - /** The month of the pay period. 
*/ - month: string | null; - /** The date of payment for the pay period. */ - paymentDate: string | null; - /** The start date of the pay period. */ - startDate: string | null; - /** The year of the pay period. */ - year: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.endDate = prediction["end_date"]; - this.month = prediction["month"]; - this.paymentDate = prediction["payment_date"]; - this.startDate = prediction["start_date"]; - this.year = prediction["year"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - endDate: this.endDate ?? "", - month: this.month ?? "", - paymentDate: this.paymentDate ?? "", - startDate: this.startDate ?? "", - year: this.year ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "End Date: " + - printable.endDate + - ", Month: " + - printable.month + - ", Payment Date: " + - printable.paymentDate + - ", Start Date: " + - printable.startDate + - ", Year: " + - printable.year - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :End Date: ${printable.endDate} - :Month: ${printable.month} - :Payment Date: ${printable.paymentDate} - :Start Date: ${printable.startDate} - :Year: ${printable.year}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV2Pto.ts b/src/product/fr/payslip/payslipV2Pto.ts deleted file mode 100644 index bd604f977..000000000 --- a/src/product/fr/payslip/payslipV2Pto.ts +++ /dev/null @@ -1,100 +0,0 @@ - -import { floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about paid time off. - */ -export class PayslipV2Pto { - /** The amount of paid time off accrued in this period. */ - accruedThisPeriod: number | null; - /** The balance of paid time off at the end of the period. */ - balanceEndOfPeriod: number | null; - /** The amount of paid time off used in this period. */ - usedThisPeriod: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["accrued_this_period"] !== undefined && - prediction["accrued_this_period"] !== null && - !isNaN(prediction["accrued_this_period"]) - ) { - this.accruedThisPeriod = +parseFloat(prediction["accrued_this_period"]); - } else { - this.accruedThisPeriod = null; - } - if ( - prediction["balance_end_of_period"] !== undefined && - prediction["balance_end_of_period"] !== null && - !isNaN(prediction["balance_end_of_period"]) - ) { - this.balanceEndOfPeriod = +parseFloat(prediction["balance_end_of_period"]); - } else { - this.balanceEndOfPeriod = null; - } - if ( - prediction["used_this_period"] !== undefined && - prediction["used_this_period"] !== null && - !isNaN(prediction["used_this_period"]) - ) { - this.usedThisPeriod = +parseFloat(prediction["used_this_period"]); - } else { - this.usedThisPeriod = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - accruedThisPeriod: - this.accruedThisPeriod !== undefined ? floatToString(this.accruedThisPeriod) : "", - balanceEndOfPeriod: - this.balanceEndOfPeriod !== undefined ? floatToString(this.balanceEndOfPeriod) : "", - usedThisPeriod: - this.usedThisPeriod !== undefined ? floatToString(this.usedThisPeriod) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Accrued This Period: " + - printable.accruedThisPeriod + - ", Balance End of Period: " + - printable.balanceEndOfPeriod + - ", Used This Period: " + - printable.usedThisPeriod - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Accrued This Period: ${printable.accruedThisPeriod} - :Balance End of Period: ${printable.balanceEndOfPeriod} - :Used This Period: ${printable.usedThisPeriod}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV2SalaryDetail.ts b/src/product/fr/payslip/payslipV2SalaryDetail.ts deleted file mode 100644 index 630bca80b..000000000 --- a/src/product/fr/payslip/payslipV2SalaryDetail.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Detailed information about the earnings. - */ -export class PayslipV2SalaryDetail { - /** The amount of the earnings. */ - amount: number | null; - /** The base value of the earnings. */ - base: number | null; - /** The description of the earnings. */ - description: string | null; - /** The rate of the earnings. */ - rate: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["amount"] !== undefined && - prediction["amount"] !== null && - !isNaN(prediction["amount"]) - ) { - this.amount = +parseFloat(prediction["amount"]); - } else { - this.amount = null; - } - if ( - prediction["base"] !== undefined && - prediction["base"] !== null && - !isNaN(prediction["base"]) - ) { - this.base = +parseFloat(prediction["base"]); - } else { - this.base = null; - } - this.description = prediction["description"]; - if ( - prediction["rate"] !== undefined && - prediction["rate"] !== null && - !isNaN(prediction["rate"]) - ) { - this.rate = +parseFloat(prediction["rate"]); - } else { - this.rate = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - amount: this.amount !== undefined ? floatToString(this.amount) : "", - base: this.base !== undefined ? floatToString(this.base) : "", - description: this.description ? - this.description.length <= 36 ? - cleanSpecialChars(this.description) : - cleanSpecialChars(this.description).slice(0, 33) + "..." : - "", - rate: this.rate !== undefined ? floatToString(this.rate) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Amount: " + - printable.amount + - ", Base: " + - printable.base + - ", Description: " + - printable.description + - ", Rate: " + - printable.rate - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. 
- */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.amount.padEnd(12) + - " | " + - printable.base.padEnd(9) + - " | " + - printable.description.padEnd(36) + - " | " + - printable.rate.padEnd(9) + - " |" - ); - } -} diff --git a/src/product/fr/payslip/payslipV3.ts b/src/product/fr/payslip/payslipV3.ts deleted file mode 100644 index 17f317b63..000000000 --- a/src/product/fr/payslip/payslipV3.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { PayslipV3Document } from "./payslipV3Document"; - -/** - * Payslip API version 3 inference prediction. - */ -export class PayslipV3 extends Inference { - /** The endpoint's name. */ - endpointName = "payslip_fra"; - /** The endpoint's version. */ - endpointVersion = "3"; - /** The document-level prediction. */ - prediction: PayslipV3Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new PayslipV3Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - PayslipV3Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/fr/payslip/payslipV3BankAccountDetail.ts b/src/product/fr/payslip/payslipV3BankAccountDetail.ts deleted file mode 100644 index 3ec0f2b1c..000000000 --- a/src/product/fr/payslip/payslipV3BankAccountDetail.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employee's bank account. - */ -export class PayslipV3BankAccountDetail { - /** The name of the bank. */ - bankName: string | null; - /** The IBAN of the bank account. 
*/ - iban: string | null; - /** The SWIFT code of the bank. */ - swift: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.bankName = prediction["bank_name"]; - this.iban = prediction["iban"]; - this.swift = prediction["swift"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - bankName: this.bankName ?? "", - iban: this.iban ?? "", - swift: this.swift ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Bank Name: " + - printable.bankName + - ", IBAN: " + - printable.iban + - ", SWIFT: " + - printable.swift - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Bank Name: ${printable.bankName} - :IBAN: ${printable.iban} - :SWIFT: ${printable.swift}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV3Document.ts b/src/product/fr/payslip/payslipV3Document.ts deleted file mode 100644 index 671da64fe..000000000 --- a/src/product/fr/payslip/payslipV3Document.ts +++ /dev/null @@ -1,128 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString,lineSeparator, -} from "../../../parsing/common"; -import { PayslipV3PayPeriod } from "./payslipV3PayPeriod"; -import { PayslipV3Employee } from "./payslipV3Employee"; -import { PayslipV3Employer } from "./payslipV3Employer"; -import { PayslipV3BankAccountDetail } from "./payslipV3BankAccountDetail"; -import { PayslipV3Employment } from "./payslipV3Employment"; -import { PayslipV3SalaryDetail } from "./payslipV3SalaryDetail"; -import { PayslipV3PayDetail } from "./payslipV3PayDetail"; -import { PayslipV3PaidTimeOff } from "./payslipV3PaidTimeOff"; - - -/** - * Payslip API version 3.0 document data. - */ -export class PayslipV3Document implements Prediction { - /** Information about the employee's bank account. */ - bankAccountDetails: PayslipV3BankAccountDetail; - /** Information about the employee. */ - employee: PayslipV3Employee; - /** Information about the employer. */ - employer: PayslipV3Employer; - /** Information about the employment. */ - employment: PayslipV3Employment; - /** Information about paid time off. */ - paidTimeOff: PayslipV3PaidTimeOff[] = []; - /** Detailed information about the pay. */ - payDetail: PayslipV3PayDetail; - /** Information about the pay period. */ - payPeriod: PayslipV3PayPeriod; - /** Detailed information about the earnings. 
*/ - salaryDetails: PayslipV3SalaryDetail[] = []; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.bankAccountDetails = new PayslipV3BankAccountDetail({ - prediction: rawPrediction["bank_account_details"], - pageId: pageId, - }); - this.employee = new PayslipV3Employee({ - prediction: rawPrediction["employee"], - pageId: pageId, - }); - this.employer = new PayslipV3Employer({ - prediction: rawPrediction["employer"], - pageId: pageId, - }); - this.employment = new PayslipV3Employment({ - prediction: rawPrediction["employment"], - pageId: pageId, - }); - rawPrediction["paid_time_off"] && - rawPrediction["paid_time_off"].map( - (itemPrediction: StringDict) => - this.paidTimeOff.push( - new PayslipV3PaidTimeOff({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.payDetail = new PayslipV3PayDetail({ - prediction: rawPrediction["pay_detail"], - pageId: pageId, - }); - this.payPeriod = new PayslipV3PayPeriod({ - prediction: rawPrediction["pay_period"], - pageId: pageId, - }); - rawPrediction["salary_details"] && - rawPrediction["salary_details"].map( - (itemPrediction: StringDict) => - this.salaryDetails.push( - new PayslipV3SalaryDetail({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - } - - /** - * Default string representation. 
- */ - toString(): string { - let salaryDetailsSummary:string = ""; - if (this.salaryDetails && this.salaryDetails.length > 0) { - const salaryDetailsColSizes:number[] = [14, 11, 38, 8, 11]; - salaryDetailsSummary += "\n" + lineSeparator(salaryDetailsColSizes, "-") + "\n "; - salaryDetailsSummary += "| Amount "; - salaryDetailsSummary += "| Base "; - salaryDetailsSummary += "| Description "; - salaryDetailsSummary += "| Number "; - salaryDetailsSummary += "| Rate "; - salaryDetailsSummary += "|\n" + lineSeparator(salaryDetailsColSizes, "="); - salaryDetailsSummary += this.salaryDetails.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(salaryDetailsColSizes, "-") - ).join(""); - } - let paidTimeOffSummary:string = ""; - if (this.paidTimeOff && this.paidTimeOff.length > 0) { - const paidTimeOffColSizes:number[] = [11, 8, 13, 11, 11]; - paidTimeOffSummary += "\n" + lineSeparator(paidTimeOffColSizes, "-") + "\n "; - paidTimeOffSummary += "| Accrued "; - paidTimeOffSummary += "| Period "; - paidTimeOffSummary += "| Type "; - paidTimeOffSummary += "| Remaining "; - paidTimeOffSummary += "| Used "; - paidTimeOffSummary += "|\n" + lineSeparator(paidTimeOffColSizes, "="); - paidTimeOffSummary += this.paidTimeOff.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(paidTimeOffColSizes, "-") - ).join(""); - } - const outStr = `:Pay Period: ${this.payPeriod.toFieldList()} -:Employee: ${this.employee.toFieldList()} -:Employer: ${this.employer.toFieldList()} -:Bank Account Details: ${this.bankAccountDetails.toFieldList()} -:Employment: ${this.employment.toFieldList()} -:Salary Details: ${salaryDetailsSummary} -:Pay Detail: ${this.payDetail.toFieldList()} -:Paid Time Off: ${paidTimeOffSummary}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/fr/payslip/payslipV3Employee.ts b/src/product/fr/payslip/payslipV3Employee.ts deleted file mode 100644 index b93f3c7ab..000000000 --- a/src/product/fr/payslip/payslipV3Employee.ts 
+++ /dev/null @@ -1,99 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employee. - */ -export class PayslipV3Employee { - /** The address of the employee. */ - address: string | null; - /** The date of birth of the employee. */ - dateOfBirth: string | null; - /** The first name of the employee. */ - firstName: string | null; - /** The last name of the employee. */ - lastName: string | null; - /** The phone number of the employee. */ - phoneNumber: string | null; - /** The registration number of the employee. */ - registrationNumber: string | null; - /** The social security number of the employee. */ - socialSecurityNumber: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.dateOfBirth = prediction["date_of_birth"]; - this.firstName = prediction["first_name"]; - this.lastName = prediction["last_name"]; - this.phoneNumber = prediction["phone_number"]; - this.registrationNumber = prediction["registration_number"]; - this.socialSecurityNumber = prediction["social_security_number"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - dateOfBirth: this.dateOfBirth ?? "", - firstName: this.firstName ?? "", - lastName: this.lastName ?? "", - phoneNumber: this.phoneNumber ?? "", - registrationNumber: this.registrationNumber ?? 
"", - socialSecurityNumber: this.socialSecurityNumber ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Date of Birth: " + - printable.dateOfBirth + - ", First Name: " + - printable.firstName + - ", Last Name: " + - printable.lastName + - ", Phone Number: " + - printable.phoneNumber + - ", Registration Number: " + - printable.registrationNumber + - ", Social Security Number: " + - printable.socialSecurityNumber - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Date of Birth: ${printable.dateOfBirth} - :First Name: ${printable.firstName} - :Last Name: ${printable.lastName} - :Phone Number: ${printable.phoneNumber} - :Registration Number: ${printable.registrationNumber} - :Social Security Number: ${printable.socialSecurityNumber}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV3Employer.ts b/src/product/fr/payslip/payslipV3Employer.ts deleted file mode 100644 index 118dc30c7..000000000 --- a/src/product/fr/payslip/payslipV3Employer.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employer. - */ -export class PayslipV3Employer { - /** The address of the employer. */ - address: string | null; - /** The company ID of the employer. */ - companyId: string | null; - /** The site of the company. */ - companySite: string | null; - /** The NAF code of the employer. */ - nafCode: string | null; - /** The name of the employer. */ - name: string | null; - /** The phone number of the employer. */ - phoneNumber: string | null; - /** The URSSAF number of the employer. 
*/ - urssafNumber: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.address = prediction["address"]; - this.companyId = prediction["company_id"]; - this.companySite = prediction["company_site"]; - this.nafCode = prediction["naf_code"]; - this.name = prediction["name"]; - this.phoneNumber = prediction["phone_number"]; - this.urssafNumber = prediction["urssaf_number"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - address: this.address ?? "", - companyId: this.companyId ?? "", - companySite: this.companySite ?? "", - nafCode: this.nafCode ?? "", - name: this.name ?? "", - phoneNumber: this.phoneNumber ?? "", - urssafNumber: this.urssafNumber ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Address: " + - printable.address + - ", Company ID: " + - printable.companyId + - ", Company Site: " + - printable.companySite + - ", NAF Code: " + - printable.nafCode + - ", Name: " + - printable.name + - ", Phone Number: " + - printable.phoneNumber + - ", URSSAF Number: " + - printable.urssafNumber - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Address: ${printable.address} - :Company ID: ${printable.companyId} - :Company Site: ${printable.companySite} - :NAF Code: ${printable.nafCode} - :Name: ${printable.name} - :Phone Number: ${printable.phoneNumber} - :URSSAF Number: ${printable.urssafNumber}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV3Employment.ts b/src/product/fr/payslip/payslipV3Employment.ts deleted file mode 100644 index 7b2bf817e..000000000 --- a/src/product/fr/payslip/payslipV3Employment.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the employment. - */ -export class PayslipV3Employment { - /** The category of the employment. */ - category: string | null; - /** The coefficient of the employment. */ - coefficient: string | null; - /** The collective agreement of the employment. */ - collectiveAgreement: string | null; - /** The job title of the employee. */ - jobTitle: string | null; - /** The position level of the employment. */ - positionLevel: string | null; - /** The seniority date of the employment. */ - seniorityDate: string | null; - /** The start date of the employment. */ - startDate: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.category = prediction["category"]; - this.coefficient = prediction["coefficient"]; - this.collectiveAgreement = prediction["collective_agreement"]; - this.jobTitle = prediction["job_title"]; - this.positionLevel = prediction["position_level"]; - this.seniorityDate = prediction["seniority_date"]; - this.startDate = prediction["start_date"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - category: this.category ?? "", - coefficient: this.coefficient ?? "", - collectiveAgreement: this.collectiveAgreement ?? "", - jobTitle: this.jobTitle ?? "", - positionLevel: this.positionLevel ?? "", - seniorityDate: this.seniorityDate ?? "", - startDate: this.startDate ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Category: " + - printable.category + - ", Coefficient: " + - printable.coefficient + - ", Collective Agreement: " + - printable.collectiveAgreement + - ", Job Title: " + - printable.jobTitle + - ", Position Level: " + - printable.positionLevel + - ", Seniority Date: " + - printable.seniorityDate + - ", Start Date: " + - printable.startDate - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Category: ${printable.category} - :Coefficient: ${printable.coefficient} - :Collective Agreement: ${printable.collectiveAgreement} - :Job Title: ${printable.jobTitle} - :Position Level: ${printable.positionLevel} - :Seniority Date: ${printable.seniorityDate} - :Start Date: ${printable.startDate}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV3PaidTimeOff.ts b/src/product/fr/payslip/payslipV3PaidTimeOff.ts deleted file mode 100644 index c8fd1bc28..000000000 --- a/src/product/fr/payslip/payslipV3PaidTimeOff.ts +++ /dev/null @@ -1,125 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about paid time off. - */ -export class PayslipV3PaidTimeOff { - /** The amount of paid time off accrued in the period. */ - accrued: number | null; - /** The paid time off period. */ - period: string | null; - /** The type of paid time off. */ - ptoType: string | null; - /** The remaining amount of paid time off at the end of the period. */ - remaining: number | null; - /** The amount of paid time off used in the period. */ - used: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["accrued"] !== undefined && - prediction["accrued"] !== null && - !isNaN(prediction["accrued"]) - ) { - this.accrued = +parseFloat(prediction["accrued"]); - } else { - this.accrued = null; - } - this.period = prediction["period"]; - this.ptoType = prediction["pto_type"]; - if ( - prediction["remaining"] !== undefined && - prediction["remaining"] !== null && - !isNaN(prediction["remaining"]) - ) { - this.remaining = +parseFloat(prediction["remaining"]); - } else { - this.remaining = null; - } - if ( - prediction["used"] !== undefined && - prediction["used"] !== null && - !isNaN(prediction["used"]) - ) { - this.used = +parseFloat(prediction["used"]); - } else { - this.used = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - accrued: this.accrued !== undefined ? floatToString(this.accrued) : "", - period: this.period ? - this.period.length <= 6 ? - cleanSpecialChars(this.period) : - cleanSpecialChars(this.period).slice(0, 3) + "..." : - "", - ptoType: this.ptoType ? - this.ptoType.length <= 11 ? - cleanSpecialChars(this.ptoType) : - cleanSpecialChars(this.ptoType).slice(0, 8) + "..." : - "", - remaining: this.remaining !== undefined ? floatToString(this.remaining) : "", - used: this.used !== undefined ? floatToString(this.used) : "", - }; - } - - /** - * Default string representation. 
- */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Accrued: " + - printable.accrued + - ", Period: " + - printable.period + - ", Type: " + - printable.ptoType + - ", Remaining: " + - printable.remaining + - ", Used: " + - printable.used - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.accrued.padEnd(9) + - " | " + - printable.period.padEnd(6) + - " | " + - printable.ptoType.padEnd(11) + - " | " + - printable.remaining.padEnd(9) + - " | " + - printable.used.padEnd(9) + - " |" - ); - } -} diff --git a/src/product/fr/payslip/payslipV3PayDetail.ts b/src/product/fr/payslip/payslipV3PayDetail.ts deleted file mode 100644 index 1dfb45f86..000000000 --- a/src/product/fr/payslip/payslipV3PayDetail.ts +++ /dev/null @@ -1,211 +0,0 @@ - -import { floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Detailed information about the pay. - */ -export class PayslipV3PayDetail { - /** The gross salary of the employee. */ - grossSalary: number | null; - /** The year-to-date gross salary of the employee. */ - grossSalaryYtd: number | null; - /** The income tax rate of the employee. */ - incomeTaxRate: number | null; - /** The income tax withheld from the employee's pay. */ - incomeTaxWithheld: number | null; - /** The net paid amount of the employee. */ - netPaid: number | null; - /** The net paid amount before tax of the employee. */ - netPaidBeforeTax: number | null; - /** The net taxable amount of the employee. */ - netTaxable: number | null; - /** The year-to-date net taxable amount of the employee. */ - netTaxableYtd: number | null; - /** The total cost to the employer. */ - totalCostEmployer: number | null; - /** The total taxes and deductions of the employee. 
*/ - totalTaxesAndDeductions: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["gross_salary"] !== undefined && - prediction["gross_salary"] !== null && - !isNaN(prediction["gross_salary"]) - ) { - this.grossSalary = +parseFloat(prediction["gross_salary"]); - } else { - this.grossSalary = null; - } - if ( - prediction["gross_salary_ytd"] !== undefined && - prediction["gross_salary_ytd"] !== null && - !isNaN(prediction["gross_salary_ytd"]) - ) { - this.grossSalaryYtd = +parseFloat(prediction["gross_salary_ytd"]); - } else { - this.grossSalaryYtd = null; - } - if ( - prediction["income_tax_rate"] !== undefined && - prediction["income_tax_rate"] !== null && - !isNaN(prediction["income_tax_rate"]) - ) { - this.incomeTaxRate = +parseFloat(prediction["income_tax_rate"]); - } else { - this.incomeTaxRate = null; - } - if ( - prediction["income_tax_withheld"] !== undefined && - prediction["income_tax_withheld"] !== null && - !isNaN(prediction["income_tax_withheld"]) - ) { - this.incomeTaxWithheld = +parseFloat(prediction["income_tax_withheld"]); - } else { - this.incomeTaxWithheld = null; - } - if ( - prediction["net_paid"] !== undefined && - prediction["net_paid"] !== null && - !isNaN(prediction["net_paid"]) - ) { - this.netPaid = +parseFloat(prediction["net_paid"]); - } else { - this.netPaid = null; - } - if ( - prediction["net_paid_before_tax"] !== undefined && - prediction["net_paid_before_tax"] !== null && - !isNaN(prediction["net_paid_before_tax"]) - ) { - this.netPaidBeforeTax = +parseFloat(prediction["net_paid_before_tax"]); - } else { - this.netPaidBeforeTax = null; - } - if ( - prediction["net_taxable"] !== undefined && - 
prediction["net_taxable"] !== null && - !isNaN(prediction["net_taxable"]) - ) { - this.netTaxable = +parseFloat(prediction["net_taxable"]); - } else { - this.netTaxable = null; - } - if ( - prediction["net_taxable_ytd"] !== undefined && - prediction["net_taxable_ytd"] !== null && - !isNaN(prediction["net_taxable_ytd"]) - ) { - this.netTaxableYtd = +parseFloat(prediction["net_taxable_ytd"]); - } else { - this.netTaxableYtd = null; - } - if ( - prediction["total_cost_employer"] !== undefined && - prediction["total_cost_employer"] !== null && - !isNaN(prediction["total_cost_employer"]) - ) { - this.totalCostEmployer = +parseFloat(prediction["total_cost_employer"]); - } else { - this.totalCostEmployer = null; - } - if ( - prediction["total_taxes_and_deductions"] !== undefined && - prediction["total_taxes_and_deductions"] !== null && - !isNaN(prediction["total_taxes_and_deductions"]) - ) { - this.totalTaxesAndDeductions = +parseFloat(prediction["total_taxes_and_deductions"]); - } else { - this.totalTaxesAndDeductions = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - grossSalary: - this.grossSalary !== undefined ? floatToString(this.grossSalary) : "", - grossSalaryYtd: - this.grossSalaryYtd !== undefined ? floatToString(this.grossSalaryYtd) : "", - incomeTaxRate: - this.incomeTaxRate !== undefined ? floatToString(this.incomeTaxRate) : "", - incomeTaxWithheld: - this.incomeTaxWithheld !== undefined ? floatToString(this.incomeTaxWithheld) : "", - netPaid: this.netPaid !== undefined ? floatToString(this.netPaid) : "", - netPaidBeforeTax: - this.netPaidBeforeTax !== undefined ? floatToString(this.netPaidBeforeTax) : "", - netTaxable: - this.netTaxable !== undefined ? 
floatToString(this.netTaxable) : "", - netTaxableYtd: - this.netTaxableYtd !== undefined ? floatToString(this.netTaxableYtd) : "", - totalCostEmployer: - this.totalCostEmployer !== undefined ? floatToString(this.totalCostEmployer) : "", - totalTaxesAndDeductions: - this.totalTaxesAndDeductions !== undefined ? floatToString(this.totalTaxesAndDeductions) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Gross Salary: " + - printable.grossSalary + - ", Gross Salary YTD: " + - printable.grossSalaryYtd + - ", Income Tax Rate: " + - printable.incomeTaxRate + - ", Income Tax Withheld: " + - printable.incomeTaxWithheld + - ", Net Paid: " + - printable.netPaid + - ", Net Paid Before Tax: " + - printable.netPaidBeforeTax + - ", Net Taxable: " + - printable.netTaxable + - ", Net Taxable YTD: " + - printable.netTaxableYtd + - ", Total Cost Employer: " + - printable.totalCostEmployer + - ", Total Taxes and Deductions: " + - printable.totalTaxesAndDeductions - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Gross Salary: ${printable.grossSalary} - :Gross Salary YTD: ${printable.grossSalaryYtd} - :Income Tax Rate: ${printable.incomeTaxRate} - :Income Tax Withheld: ${printable.incomeTaxWithheld} - :Net Paid: ${printable.netPaid} - :Net Paid Before Tax: ${printable.netPaidBeforeTax} - :Net Taxable: ${printable.netTaxable} - :Net Taxable YTD: ${printable.netTaxableYtd} - :Total Cost Employer: ${printable.totalCostEmployer} - :Total Taxes and Deductions: ${printable.totalTaxesAndDeductions}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV3PayPeriod.ts b/src/product/fr/payslip/payslipV3PayPeriod.ts deleted file mode 100644 index 418308d2f..000000000 --- a/src/product/fr/payslip/payslipV3PayPeriod.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Information about the pay period. - */ -export class PayslipV3PayPeriod { - /** The end date of the pay period. */ - endDate: string | null; - /** The month of the pay period. */ - month: string | null; - /** The date of payment for the pay period. */ - paymentDate: string | null; - /** The start date of the pay period. */ - startDate: string | null; - /** The year of the pay period. */ - year: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.endDate = prediction["end_date"]; - this.month = prediction["month"]; - this.paymentDate = prediction["payment_date"]; - this.startDate = prediction["start_date"]; - this.year = prediction["year"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? 
prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - endDate: this.endDate ?? "", - month: this.month ?? "", - paymentDate: this.paymentDate ?? "", - startDate: this.startDate ?? "", - year: this.year ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "End Date: " + - printable.endDate + - ", Month: " + - printable.month + - ", Payment Date: " + - printable.paymentDate + - ", Start Date: " + - printable.startDate + - ", Year: " + - printable.year - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :End Date: ${printable.endDate} - :Month: ${printable.month} - :Payment Date: ${printable.paymentDate} - :Start Date: ${printable.startDate} - :Year: ${printable.year}`.trimEnd(); - } -} diff --git a/src/product/fr/payslip/payslipV3SalaryDetail.ts b/src/product/fr/payslip/payslipV3SalaryDetail.ts deleted file mode 100644 index 2194467d6..000000000 --- a/src/product/fr/payslip/payslipV3SalaryDetail.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Detailed information about the earnings. - */ -export class PayslipV3SalaryDetail { - /** The amount of the earning. */ - amount: number | null; - /** The base rate value of the earning. */ - base: number | null; - /** The description of the earnings. */ - description: string | null; - /** The number of units in the earning. */ - number: number | null; - /** The rate of the earning. 
*/ - rate: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["amount"] !== undefined && - prediction["amount"] !== null && - !isNaN(prediction["amount"]) - ) { - this.amount = +parseFloat(prediction["amount"]); - } else { - this.amount = null; - } - if ( - prediction["base"] !== undefined && - prediction["base"] !== null && - !isNaN(prediction["base"]) - ) { - this.base = +parseFloat(prediction["base"]); - } else { - this.base = null; - } - this.description = prediction["description"]; - if ( - prediction["number"] !== undefined && - prediction["number"] !== null && - !isNaN(prediction["number"]) - ) { - this.number = +parseFloat(prediction["number"]); - } else { - this.number = null; - } - if ( - prediction["rate"] !== undefined && - prediction["rate"] !== null && - !isNaN(prediction["rate"]) - ) { - this.rate = +parseFloat(prediction["rate"]); - } else { - this.rate = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - amount: this.amount !== undefined ? floatToString(this.amount) : "", - base: this.base !== undefined ? floatToString(this.base) : "", - description: this.description ? - this.description.length <= 36 ? - cleanSpecialChars(this.description) : - cleanSpecialChars(this.description).slice(0, 33) + "..." : - "", - number: this.number !== undefined ? floatToString(this.number) : "", - rate: this.rate !== undefined ? 
floatToString(this.rate) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Amount: " + - printable.amount + - ", Base: " + - printable.base + - ", Description: " + - printable.description + - ", Number: " + - printable.number + - ", Rate: " + - printable.rate - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.amount.padEnd(12) + - " | " + - printable.base.padEnd(9) + - " | " + - printable.description.padEnd(36) + - " | " + - printable.number.padEnd(6) + - " | " + - printable.rate.padEnd(9) + - " |" - ); - } -} diff --git a/src/product/generated/internal.ts b/src/product/generated/internal.ts deleted file mode 100644 index cacc7d5ab..000000000 --- a/src/product/generated/internal.ts +++ /dev/null @@ -1,4 +0,0 @@ -export { GeneratedV1 } from "./generatedV1"; -export { GeneratedV1Document } from "./generatedV1Document"; -export { GeneratedV1Page } from "./generatedV1Page"; -export { GeneratedV1Prediction } from "./generatedV1Prediction"; diff --git a/src/product/ind/index.ts b/src/product/ind/index.ts deleted file mode 100644 index 0be5c6329..000000000 --- a/src/product/ind/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { IndianPassportV1 } from "./indianPassport/indianPassportV1"; diff --git a/src/product/ind/indianPassport/index.ts b/src/product/ind/indianPassport/index.ts deleted file mode 100644 index 5267a54bf..000000000 --- a/src/product/ind/indianPassport/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { IndianPassportV1 } from "./indianPassportV1"; diff --git a/src/product/ind/indianPassport/indianPassportV1.ts b/src/product/ind/indianPassport/indianPassportV1.ts deleted file mode 100644 index fe377f8c2..000000000 --- a/src/product/ind/indianPassport/indianPassportV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page 
} from "../../../parsing/common"; -import { IndianPassportV1Document } from "./indianPassportV1Document"; - -/** - * Passport - India API version 1 inference prediction. - */ -export class IndianPassportV1 extends Inference { - /** The endpoint's name. */ - endpointName = "ind_passport"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: IndianPassportV1Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new IndianPassportV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - IndianPassportV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/ind/indianPassport/indianPassportV1Document.ts b/src/product/ind/indianPassport/indianPassportV1Document.ts deleted file mode 100644 index c71ac7a60..000000000 --- a/src/product/ind/indianPassport/indianPassportV1Document.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString, -} from "../../../parsing/common"; -import { - ClassificationField, - DateField, - StringField, -} from "../../../parsing/standard"; - -/** - * Passport - India API version 1.2 document data. - */ -export class IndianPassportV1Document implements Prediction { - /** The first line of the address of the passport holder. */ - address1: StringField; - /** The second line of the address of the passport holder. */ - address2: StringField; - /** The third line of the address of the passport holder. */ - address3: StringField; - /** The birth date of the passport holder, ISO format: YYYY-MM-DD. */ - birthDate: DateField; - /** The birth place of the passport holder. 
*/ - birthPlace: StringField; - /** ISO 3166-1 alpha-3 country code (3 letters format). */ - country: StringField; - /** The date when the passport will expire, ISO format: YYYY-MM-DD. */ - expiryDate: DateField; - /** The file number of the passport document. */ - fileNumber: StringField; - /** The gender of the passport holder. */ - gender: ClassificationField; - /** The given names of the passport holder. */ - givenNames: StringField; - /** The identification number of the passport document. */ - idNumber: StringField; - /** The date when the passport was issued, ISO format: YYYY-MM-DD. */ - issuanceDate: DateField; - /** The place where the passport was issued. */ - issuancePlace: StringField; - /** The name of the legal guardian of the passport holder (if applicable). */ - legalGuardian: StringField; - /** The first line of the machine-readable zone (MRZ) of the passport document. */ - mrz1: StringField; - /** The second line of the machine-readable zone (MRZ) of the passport document. */ - mrz2: StringField; - /** The name of the mother of the passport holder. */ - nameOfMother: StringField; - /** The name of the spouse of the passport holder (if applicable). */ - nameOfSpouse: StringField; - /** The date of issue of the old passport (if applicable), ISO format: YYYY-MM-DD. */ - oldPassportDateOfIssue: DateField; - /** The number of the old passport (if applicable). */ - oldPassportNumber: StringField; - /** The place of issue of the old passport (if applicable). */ - oldPassportPlaceOfIssue: StringField; - /** The page number of the passport document. */ - pageNumber: ClassificationField; - /** The surname of the passport holder. 
*/ - surname: StringField; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.address1 = new StringField({ - prediction: rawPrediction["address1"], - pageId: pageId, - }); - this.address2 = new StringField({ - prediction: rawPrediction["address2"], - pageId: pageId, - }); - this.address3 = new StringField({ - prediction: rawPrediction["address3"], - pageId: pageId, - }); - this.birthDate = new DateField({ - prediction: rawPrediction["birth_date"], - pageId: pageId, - }); - this.birthPlace = new StringField({ - prediction: rawPrediction["birth_place"], - pageId: pageId, - }); - this.country = new StringField({ - prediction: rawPrediction["country"], - pageId: pageId, - }); - this.expiryDate = new DateField({ - prediction: rawPrediction["expiry_date"], - pageId: pageId, - }); - this.fileNumber = new StringField({ - prediction: rawPrediction["file_number"], - pageId: pageId, - }); - this.gender = new ClassificationField({ - prediction: rawPrediction["gender"], - }); - this.givenNames = new StringField({ - prediction: rawPrediction["given_names"], - pageId: pageId, - }); - this.idNumber = new StringField({ - prediction: rawPrediction["id_number"], - pageId: pageId, - }); - this.issuanceDate = new DateField({ - prediction: rawPrediction["issuance_date"], - pageId: pageId, - }); - this.issuancePlace = new StringField({ - prediction: rawPrediction["issuance_place"], - pageId: pageId, - }); - this.legalGuardian = new StringField({ - prediction: rawPrediction["legal_guardian"], - pageId: pageId, - }); - this.mrz1 = new StringField({ - prediction: rawPrediction["mrz1"], - pageId: pageId, - }); - this.mrz2 = new StringField({ - prediction: rawPrediction["mrz2"], - pageId: pageId, - }); - this.nameOfMother = new StringField({ - prediction: rawPrediction["name_of_mother"], - pageId: pageId, - }); - this.nameOfSpouse = new StringField({ - prediction: rawPrediction["name_of_spouse"], - pageId: pageId, - }); - this.oldPassportDateOfIssue = new DateField({ - 
prediction: rawPrediction["old_passport_date_of_issue"], - pageId: pageId, - }); - this.oldPassportNumber = new StringField({ - prediction: rawPrediction["old_passport_number"], - pageId: pageId, - }); - this.oldPassportPlaceOfIssue = new StringField({ - prediction: rawPrediction["old_passport_place_of_issue"], - pageId: pageId, - }); - this.pageNumber = new ClassificationField({ - prediction: rawPrediction["page_number"], - }); - this.surname = new StringField({ - prediction: rawPrediction["surname"], - pageId: pageId, - }); - } - - /** - * Default string representation. - */ - toString(): string { - const outStr = `:Page Number: ${this.pageNumber} -:Country: ${this.country} -:ID Number: ${this.idNumber} -:Given Names: ${this.givenNames} -:Surname: ${this.surname} -:Birth Date: ${this.birthDate} -:Birth Place: ${this.birthPlace} -:Issuance Place: ${this.issuancePlace} -:Gender: ${this.gender} -:Issuance Date: ${this.issuanceDate} -:Expiry Date: ${this.expiryDate} -:MRZ Line 1: ${this.mrz1} -:MRZ Line 2: ${this.mrz2} -:Legal Guardian: ${this.legalGuardian} -:Name of Spouse: ${this.nameOfSpouse} -:Name of Mother: ${this.nameOfMother} -:Old Passport Date of Issue: ${this.oldPassportDateOfIssue} -:Old Passport Number: ${this.oldPassportNumber} -:Old Passport Place of Issue: ${this.oldPassportPlaceOfIssue} -:Address Line 1: ${this.address1} -:Address Line 2: ${this.address2} -:Address Line 3: ${this.address3} -:File Number: ${this.fileNumber}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/ind/indianPassport/internal.ts b/src/product/ind/indianPassport/internal.ts deleted file mode 100644 index f94735992..000000000 --- a/src/product/ind/indianPassport/internal.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { IndianPassportV1 } from "./indianPassportV1"; -export { IndianPassportV1Document } from "./indianPassportV1Document"; diff --git a/src/product/ind/internal.ts b/src/product/ind/internal.ts deleted file mode 100644 index fb8c2c7e7..000000000 
--- a/src/product/ind/internal.ts +++ /dev/null @@ -1 +0,0 @@ -export * as indianPassport from "./indianPassport/internal"; diff --git a/src/product/index.ts b/src/product/index.ts deleted file mode 100644 index 286822f0e..000000000 --- a/src/product/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -export * as fr from "./fr"; -export * as ind from "./ind"; -export * as us from "./us"; -export { BarcodeReaderV1 } from "./barcodeReader/barcodeReaderV1"; -export { BillOfLadingV1 } from "./billOfLading/billOfLadingV1"; -export { BusinessCardV1 } from "./businessCard/businessCardV1"; -export { CropperV1 } from "./cropper/cropperV1"; -export { CustomV1 } from "./custom/customV1"; -export { DeliveryNoteV1 } from "./deliveryNote/deliveryNoteV1"; -export { DriverLicenseV1 } from "./driverLicense/driverLicenseV1"; -export { FinancialDocumentV1 } from "./financialDocument/financialDocumentV1"; -export { GeneratedV1 } from "./generated/generatedV1"; -export { InternationalIdV2 } from "./internationalId/internationalIdV2"; -export { InvoiceSplitterV1 } from "./invoiceSplitter/invoiceSplitterV1"; -export { InvoiceV4 } from "./invoice/invoiceV4"; -export { MultiReceiptsDetectorV1 } from "./multiReceiptsDetector/multiReceiptsDetectorV1"; -export { NutritionFactsLabelV1 } from "./nutritionFactsLabel/nutritionFactsLabelV1"; -export { PassportV1 } from "./passport/passportV1"; -export { ReceiptV5 } from "./receipt/receiptV5"; -export { ResumeV1 } from "./resume/resumeV1"; diff --git a/src/product/internal.ts b/src/product/internal.ts deleted file mode 100644 index 2e181bc32..000000000 --- a/src/product/internal.ts +++ /dev/null @@ -1,20 +0,0 @@ -export * as barcodeReader from "./barcodeReader/internal"; -export * as billOfLading from "./billOfLading/internal"; -export * as businessCard from "./businessCard/internal"; -export * as cropper from "./cropper/internal"; -export * as custom from "./custom/internal"; -export * as deliveryNote from "./deliveryNote/internal"; -export * as 
driverLicense from "./driverLicense/internal"; -export * as financialDocument from "./financialDocument/internal"; -export * as fr from "./fr/internal"; -export * as generated from "./generated/internal"; -export * as ind from "./ind/internal"; -export * as internationalId from "./internationalId/internal"; -export * as invoice from "./invoice/internal"; -export * as invoiceSplitter from "./invoiceSplitter/internal"; -export * as multiReceiptsDetector from "./multiReceiptsDetector/internal"; -export * as nutritionFactsLabel from "./nutritionFactsLabel/internal"; -export * as passport from "./passport/internal"; -export * as receipt from "./receipt/internal"; -export * as resume from "./resume/internal"; -export * as us from "./us/internal"; diff --git a/src/product/internationalId/index.ts b/src/product/internationalId/index.ts deleted file mode 100644 index 40a70d3bb..000000000 --- a/src/product/internationalId/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { InternationalIdV2 } from "./internationalIdV2"; diff --git a/src/product/internationalId/internal.ts b/src/product/internationalId/internal.ts deleted file mode 100644 index 0e8a70eec..000000000 --- a/src/product/internationalId/internal.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { InternationalIdV2 } from "./internationalIdV2"; -export { InternationalIdV2Document } from "./internationalIdV2Document"; diff --git a/src/product/invoice/index.ts b/src/product/invoice/index.ts deleted file mode 100644 index 69cff0b0c..000000000 --- a/src/product/invoice/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { InvoiceV4 } from "./invoiceV4"; diff --git a/src/product/invoice/internal.ts b/src/product/invoice/internal.ts deleted file mode 100644 index 3138e93f8..000000000 --- a/src/product/invoice/internal.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { InvoiceV4 } from "./invoiceV4"; -export { InvoiceV4Document } from "./invoiceV4Document"; -export { InvoiceV4LineItem } from "./invoiceV4LineItem"; diff --git 
a/src/product/invoiceSplitter/index.ts b/src/product/invoiceSplitter/index.ts deleted file mode 100644 index e7e1db9f3..000000000 --- a/src/product/invoiceSplitter/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { InvoiceSplitterV1 } from "./invoiceSplitterV1"; diff --git a/src/product/multiReceiptsDetector/index.ts b/src/product/multiReceiptsDetector/index.ts deleted file mode 100644 index 9f3e8d862..000000000 --- a/src/product/multiReceiptsDetector/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { MultiReceiptsDetectorV1 } from "./multiReceiptsDetectorV1"; diff --git a/src/product/nutritionFactsLabel/index.ts b/src/product/nutritionFactsLabel/index.ts deleted file mode 100644 index 7be4db39d..000000000 --- a/src/product/nutritionFactsLabel/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { NutritionFactsLabelV1 } from "./nutritionFactsLabelV1"; diff --git a/src/product/nutritionFactsLabel/internal.ts b/src/product/nutritionFactsLabel/internal.ts deleted file mode 100644 index 1c18c00f1..000000000 --- a/src/product/nutritionFactsLabel/internal.ts +++ /dev/null @@ -1,15 +0,0 @@ -export { NutritionFactsLabelV1 } from "./nutritionFactsLabelV1"; -export { NutritionFactsLabelV1AddedSugar } from "./nutritionFactsLabelV1AddedSugar"; -export { NutritionFactsLabelV1Calorie } from "./nutritionFactsLabelV1Calorie"; -export { NutritionFactsLabelV1Cholesterol } from "./nutritionFactsLabelV1Cholesterol"; -export { NutritionFactsLabelV1DietaryFiber } from "./nutritionFactsLabelV1DietaryFiber"; -export { NutritionFactsLabelV1Document } from "./nutritionFactsLabelV1Document"; -export { NutritionFactsLabelV1Nutrient } from "./nutritionFactsLabelV1Nutrient"; -export { NutritionFactsLabelV1Protein } from "./nutritionFactsLabelV1Protein"; -export { NutritionFactsLabelV1SaturatedFat } from "./nutritionFactsLabelV1SaturatedFat"; -export { NutritionFactsLabelV1ServingSize } from "./nutritionFactsLabelV1ServingSize"; -export { NutritionFactsLabelV1Sodium } from "./nutritionFactsLabelV1Sodium"; 
-export { NutritionFactsLabelV1TotalCarbohydrate } from "./nutritionFactsLabelV1TotalCarbohydrate"; -export { NutritionFactsLabelV1TotalFat } from "./nutritionFactsLabelV1TotalFat"; -export { NutritionFactsLabelV1TotalSugar } from "./nutritionFactsLabelV1TotalSugar"; -export { NutritionFactsLabelV1TransFat } from "./nutritionFactsLabelV1TransFat"; diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1.ts deleted file mode 100644 index 38b20fc5c..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { NutritionFactsLabelV1Document } from "./nutritionFactsLabelV1Document"; - -/** - * Nutrition Facts Label API version 1 inference prediction. - */ -export class NutritionFactsLabelV1 extends Inference { - /** The endpoint's name. */ - endpointName = "nutrition_facts"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: NutritionFactsLabelV1Document; - /** The document's pages. 
*/ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new NutritionFactsLabelV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - NutritionFactsLabelV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1AddedSugar.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1AddedSugar.ts deleted file mode 100644 index 1a915c83b..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1AddedSugar.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of added sugars in the product. - */ -export class NutritionFactsLabelV1AddedSugar { - /** DVs are the recommended amounts of added sugars to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of added sugars per 100g of the product. */ - per100G: number | null; - /** The amount of added sugars per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Calorie.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1Calorie.ts deleted file mode 100644 index e9d800c74..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Calorie.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of calories in the product. - */ -export class NutritionFactsLabelV1Calorie { - /** DVs are the recommended amounts of calories to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of calories per 100g of the product. */ - per100G: number | null; - /** The amount of calories per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Cholesterol.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1Cholesterol.ts deleted file mode 100644 index 3dac26312..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Cholesterol.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of cholesterol in the product. - */ -export class NutritionFactsLabelV1Cholesterol { - /** DVs are the recommended amounts of cholesterol to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of cholesterol per 100g of the product. */ - per100G: number | null; - /** The amount of cholesterol per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1DietaryFiber.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1DietaryFiber.ts deleted file mode 100644 index 0c2318bec..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1DietaryFiber.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of dietary fiber in the product. - */ -export class NutritionFactsLabelV1DietaryFiber { - /** DVs are the recommended amounts of dietary fiber to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of dietary fiber per 100g of the product. */ - per100G: number | null; - /** The amount of dietary fiber per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Document.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1Document.ts deleted file mode 100644 index d6a042047..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Document.ts +++ /dev/null @@ -1,154 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString,lineSeparator, -} from "../../parsing/common"; -import { NutritionFactsLabelV1ServingSize } from "./nutritionFactsLabelV1ServingSize"; -import { NutritionFactsLabelV1Calorie } from "./nutritionFactsLabelV1Calorie"; -import { NutritionFactsLabelV1TotalFat } from "./nutritionFactsLabelV1TotalFat"; -import { NutritionFactsLabelV1SaturatedFat } from "./nutritionFactsLabelV1SaturatedFat"; -import { NutritionFactsLabelV1TransFat } from "./nutritionFactsLabelV1TransFat"; -import { NutritionFactsLabelV1Cholesterol } from "./nutritionFactsLabelV1Cholesterol"; -import { NutritionFactsLabelV1TotalCarbohydrate } from "./nutritionFactsLabelV1TotalCarbohydrate"; -import { NutritionFactsLabelV1DietaryFiber } from "./nutritionFactsLabelV1DietaryFiber"; -import { NutritionFactsLabelV1TotalSugar } from "./nutritionFactsLabelV1TotalSugar"; -import { NutritionFactsLabelV1AddedSugar } from "./nutritionFactsLabelV1AddedSugar"; -import { NutritionFactsLabelV1Protein } from "./nutritionFactsLabelV1Protein"; -import { NutritionFactsLabelV1Sodium } from "./nutritionFactsLabelV1Sodium"; -import { NutritionFactsLabelV1Nutrient } from "./nutritionFactsLabelV1Nutrient"; -import { AmountField } from "../../parsing/standard"; - -/** - * Nutrition Facts Label API version 1.0 document data. - */ -export class NutritionFactsLabelV1Document implements Prediction { - /** The amount of added sugars in the product. 
*/ - addedSugars: NutritionFactsLabelV1AddedSugar; - /** The amount of calories in the product. */ - calories: NutritionFactsLabelV1Calorie; - /** The amount of cholesterol in the product. */ - cholesterol: NutritionFactsLabelV1Cholesterol; - /** The amount of dietary fiber in the product. */ - dietaryFiber: NutritionFactsLabelV1DietaryFiber; - /** The amount of nutrients in the product. */ - nutrients: NutritionFactsLabelV1Nutrient[] = []; - /** The amount of protein in the product. */ - protein: NutritionFactsLabelV1Protein; - /** The amount of saturated fat in the product. */ - saturatedFat: NutritionFactsLabelV1SaturatedFat; - /** The number of servings in each box of the product. */ - servingPerBox: AmountField; - /** The size of a single serving of the product. */ - servingSize: NutritionFactsLabelV1ServingSize; - /** The amount of sodium in the product. */ - sodium: NutritionFactsLabelV1Sodium; - /** The total amount of carbohydrates in the product. */ - totalCarbohydrate: NutritionFactsLabelV1TotalCarbohydrate; - /** The total amount of fat in the product. */ - totalFat: NutritionFactsLabelV1TotalFat; - /** The total amount of sugars in the product. */ - totalSugars: NutritionFactsLabelV1TotalSugar; - /** The amount of trans fat in the product. 
*/ - transFat: NutritionFactsLabelV1TransFat; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.addedSugars = new NutritionFactsLabelV1AddedSugar({ - prediction: rawPrediction["added_sugars"], - pageId: pageId, - }); - this.calories = new NutritionFactsLabelV1Calorie({ - prediction: rawPrediction["calories"], - pageId: pageId, - }); - this.cholesterol = new NutritionFactsLabelV1Cholesterol({ - prediction: rawPrediction["cholesterol"], - pageId: pageId, - }); - this.dietaryFiber = new NutritionFactsLabelV1DietaryFiber({ - prediction: rawPrediction["dietary_fiber"], - pageId: pageId, - }); - rawPrediction["nutrients"] && - rawPrediction["nutrients"].map( - (itemPrediction: StringDict) => - this.nutrients.push( - new NutritionFactsLabelV1Nutrient({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.protein = new NutritionFactsLabelV1Protein({ - prediction: rawPrediction["protein"], - pageId: pageId, - }); - this.saturatedFat = new NutritionFactsLabelV1SaturatedFat({ - prediction: rawPrediction["saturated_fat"], - pageId: pageId, - }); - this.servingPerBox = new AmountField({ - prediction: rawPrediction["serving_per_box"], - pageId: pageId, - }); - this.servingSize = new NutritionFactsLabelV1ServingSize({ - prediction: rawPrediction["serving_size"], - pageId: pageId, - }); - this.sodium = new NutritionFactsLabelV1Sodium({ - prediction: rawPrediction["sodium"], - pageId: pageId, - }); - this.totalCarbohydrate = new NutritionFactsLabelV1TotalCarbohydrate({ - prediction: rawPrediction["total_carbohydrate"], - pageId: pageId, - }); - this.totalFat = new NutritionFactsLabelV1TotalFat({ - prediction: rawPrediction["total_fat"], - pageId: pageId, - }); - this.totalSugars = new NutritionFactsLabelV1TotalSugar({ - prediction: rawPrediction["total_sugars"], - pageId: pageId, - }); - this.transFat = new NutritionFactsLabelV1TransFat({ - prediction: rawPrediction["trans_fat"], - pageId: pageId, - }); - } - - /** - * Default string 
representation. - */ - toString(): string { - let nutrientsSummary:string = ""; - if (this.nutrients && this.nutrients.length > 0) { - const nutrientsColSizes:number[] = [13, 22, 10, 13, 6]; - nutrientsSummary += "\n" + lineSeparator(nutrientsColSizes, "-") + "\n "; - nutrientsSummary += "| Daily Value "; - nutrientsSummary += "| Name "; - nutrientsSummary += "| Per 100g "; - nutrientsSummary += "| Per Serving "; - nutrientsSummary += "| Unit "; - nutrientsSummary += "|\n" + lineSeparator(nutrientsColSizes, "="); - nutrientsSummary += this.nutrients.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(nutrientsColSizes, "-") - ).join(""); - } - const outStr = `:Serving per Box: ${this.servingPerBox} -:Serving Size: ${this.servingSize.toFieldList()} -:Calories: ${this.calories.toFieldList()} -:Total Fat: ${this.totalFat.toFieldList()} -:Saturated Fat: ${this.saturatedFat.toFieldList()} -:Trans Fat: ${this.transFat.toFieldList()} -:Cholesterol: ${this.cholesterol.toFieldList()} -:Total Carbohydrate: ${this.totalCarbohydrate.toFieldList()} -:Dietary Fiber: ${this.dietaryFiber.toFieldList()} -:Total Sugars: ${this.totalSugars.toFieldList()} -:Added Sugars: ${this.addedSugars.toFieldList()} -:Protein: ${this.protein.toFieldList()} -:sodium: ${this.sodium.toFieldList()} -:nutrients: ${nutrientsSummary}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Nutrient.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1Nutrient.ts deleted file mode 100644 index fb0522e8e..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Nutrient.ts +++ /dev/null @@ -1,127 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of nutrients in the product. 
- */ -export class NutritionFactsLabelV1Nutrient { - /** DVs are the recommended amounts of nutrients to consume or not to exceed each day. */ - dailyValue: number | null; - /** The name of nutrients of the product. */ - name: string | null; - /** The amount of nutrients per 100g of the product. */ - per100G: number | null; - /** The amount of nutrients per serving of the product. */ - perServing: number | null; - /** The unit of measurement for the amount of nutrients. */ - unit: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - this.name = prediction["name"]; - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.unit = prediction["unit"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - name: this.name ? 
- this.name.length <= 20 ? - cleanSpecialChars(this.name) : - cleanSpecialChars(this.name).slice(0, 17) + "..." : - "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - unit: this.unit ? - this.unit.length <= 4 ? - cleanSpecialChars(this.unit) : - cleanSpecialChars(this.unit).slice(0, 1) + "..." : - "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Name: " + - printable.name + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing + - ", Unit: " + - printable.unit - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.dailyValue.padEnd(11) + - " | " + - printable.name.padEnd(20) + - " | " + - printable.per100G.padEnd(8) + - " | " + - printable.perServing.padEnd(11) + - " | " + - printable.unit.padEnd(4) + - " |" - ); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Protein.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1Protein.ts deleted file mode 100644 index 4420d7b3c..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Protein.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of protein in the product. - */ -export class NutritionFactsLabelV1Protein { - /** DVs are the recommended amounts of protein to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of protein per 100g of the product. */ - per100G: number | null; - /** The amount of protein per serving of the product. 
*/ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1SaturatedFat.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1SaturatedFat.ts deleted file mode 100644 index c2e18d05e..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1SaturatedFat.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of saturated fat in the product. - */ -export class NutritionFactsLabelV1SaturatedFat { - /** DVs are the recommended amounts of saturated fat to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of saturated fat per 100g of the product. */ - per100G: number | null; - /** The amount of saturated fat per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1ServingSize.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1ServingSize.ts deleted file mode 100644 index bcd7dbc64..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1ServingSize.ts +++ /dev/null @@ -1,74 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The size of a single serving of the product. - */ -export class NutritionFactsLabelV1ServingSize { - /** The amount of a single serving. */ - amount: number | null; - /** The unit for the amount of a single serving. */ - unit: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["amount"] !== undefined && - prediction["amount"] !== null && - !isNaN(prediction["amount"]) - ) { - this.amount = +parseFloat(prediction["amount"]); - } else { - this.amount = null; - } - this.unit = prediction["unit"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - amount: this.amount !== undefined ? floatToString(this.amount) : "", - unit: this.unit ?? "", - }; - } - - /** - * Default string representation. 
- */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Amount: " + - printable.amount + - ", Unit: " + - printable.unit - ); - } - - /** - * Output in a format suitable for inclusion in a field list. - */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Amount: ${printable.amount} - :Unit: ${printable.unit}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Sodium.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1Sodium.ts deleted file mode 100644 index 81416e35f..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1Sodium.ts +++ /dev/null @@ -1,106 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of sodium in the product. - */ -export class NutritionFactsLabelV1Sodium { - /** DVs are the recommended amounts of sodium to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of sodium per 100g of the product. */ - per100G: number | null; - /** The amount of sodium per serving of the product. */ - perServing: number | null; - /** The unit of measurement for the amount of sodium. */ - unit: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.unit = prediction["unit"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - unit: this.unit ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing + - ", Unit: " + - printable.unit - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing} - :Unit: ${printable.unit}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalCarbohydrate.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalCarbohydrate.ts deleted file mode 100644 index cbf2358fa..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalCarbohydrate.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The total amount of carbohydrates in the product. - */ -export class NutritionFactsLabelV1TotalCarbohydrate { - /** DVs are the recommended amounts of total carbohydrates to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of total carbohydrates per 100g of the product. */ - per100G: number | null; - /** The amount of total carbohydrates per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalFat.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalFat.ts deleted file mode 100644 index e805f5943..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalFat.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The total amount of fat in the product. - */ -export class NutritionFactsLabelV1TotalFat { - /** DVs are the recommended amounts of total fat to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of total fat per 100g of the product. */ - per100G: number | null; - /** The amount of total fat per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalSugar.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalSugar.ts deleted file mode 100644 index 9aacdf540..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TotalSugar.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The total amount of sugars in the product. - */ -export class NutritionFactsLabelV1TotalSugar { - /** DVs are the recommended amounts of total sugars to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of total sugars per 100g of the product. */ - per100G: number | null; - /** The amount of total sugars per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TransFat.ts b/src/product/nutritionFactsLabel/nutritionFactsLabelV1TransFat.ts deleted file mode 100644 index ddbcc2974..000000000 --- a/src/product/nutritionFactsLabel/nutritionFactsLabelV1TransFat.ts +++ /dev/null @@ -1,99 +0,0 @@ - -import { floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; - -/** - * The amount of trans fat in the product. - */ -export class NutritionFactsLabelV1TransFat { - /** DVs are the recommended amounts of trans fat to consume or not to exceed each day. */ - dailyValue: number | null; - /** The amount of trans fat per 100g of the product. */ - per100G: number | null; - /** The amount of trans fat per serving of the product. */ - perServing: number | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["daily_value"] !== undefined && - prediction["daily_value"] !== null && - !isNaN(prediction["daily_value"]) - ) { - this.dailyValue = +parseFloat(prediction["daily_value"]); - } else { - this.dailyValue = null; - } - if ( - prediction["per_100g"] !== undefined && - prediction["per_100g"] !== null && - !isNaN(prediction["per_100g"]) - ) { - this.per100G = +parseFloat(prediction["per_100g"]); - } else { - this.per100G = null; - } - if ( - prediction["per_serving"] !== undefined && - prediction["per_serving"] !== null && - !isNaN(prediction["per_serving"]) - ) { - this.perServing = +parseFloat(prediction["per_serving"]); - } else { - this.perServing = null; - } - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - dailyValue: - this.dailyValue !== undefined ? floatToString(this.dailyValue) : "", - per100G: this.per100G !== undefined ? floatToString(this.per100G) : "", - perServing: - this.perServing !== undefined ? floatToString(this.perServing) : "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Daily Value: " + - printable.dailyValue + - ", Per 100g: " + - printable.per100G + - ", Per Serving: " + - printable.perServing - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :Daily Value: ${printable.dailyValue} - :Per 100g: ${printable.per100G} - :Per Serving: ${printable.perServing}`.trimEnd(); - } -} diff --git a/src/product/passport/index.ts b/src/product/passport/index.ts deleted file mode 100644 index b31048233..000000000 --- a/src/product/passport/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PassportV1 } from "./passportV1"; diff --git a/src/product/passport/internal.ts b/src/product/passport/internal.ts deleted file mode 100644 index 8b8e4c6e9..000000000 --- a/src/product/passport/internal.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { PassportV1 } from "./passportV1"; -export { PassportV1Document } from "./passportV1Document"; diff --git a/src/product/receipt/index.ts b/src/product/receipt/index.ts deleted file mode 100644 index c4b30f525..000000000 --- a/src/product/receipt/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { ReceiptV5 } from "./receiptV5"; diff --git a/src/product/receipt/internal.ts b/src/product/receipt/internal.ts deleted file mode 100644 index 4b94bbdae..000000000 --- a/src/product/receipt/internal.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { ReceiptV5 } from "./receiptV5"; -export { ReceiptV5Document } from "./receiptV5Document"; -export { ReceiptV5LineItem } from "./receiptV5LineItem"; diff --git a/src/product/resume/index.ts b/src/product/resume/index.ts deleted file mode 100644 index 6f8b1d4d5..000000000 --- a/src/product/resume/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { ResumeV1 } from "./resumeV1"; diff --git a/src/product/resume/internal.ts b/src/product/resume/internal.ts deleted file mode 100644 index ee2809539..000000000 --- a/src/product/resume/internal.ts +++ /dev/null @@ -1,7 +0,0 @@ -export { ResumeV1 } from "./resumeV1"; -export { ResumeV1Certificate } from "./resumeV1Certificate"; -export { ResumeV1Document } from "./resumeV1Document"; -export { ResumeV1Education } from "./resumeV1Education"; -export { 
ResumeV1Language } from "./resumeV1Language"; -export { ResumeV1ProfessionalExperience } from "./resumeV1ProfessionalExperience"; -export { ResumeV1SocialNetworksUrl } from "./resumeV1SocialNetworksUrl"; diff --git a/src/product/us/bankCheck/index.ts b/src/product/us/bankCheck/index.ts deleted file mode 100644 index c865a399b..000000000 --- a/src/product/us/bankCheck/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { BankCheckV1 } from "./bankCheckV1"; diff --git a/src/product/us/bankCheck/internal.ts b/src/product/us/bankCheck/internal.ts deleted file mode 100644 index 834f393d5..000000000 --- a/src/product/us/bankCheck/internal.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { BankCheckV1 } from "./bankCheckV1"; -export { BankCheckV1Document } from "./bankCheckV1Document"; -export { BankCheckV1Page } from "./bankCheckV1Page"; diff --git a/src/product/us/healthcareCard/healthcareCardV1.ts b/src/product/us/healthcareCard/healthcareCardV1.ts deleted file mode 100644 index 0fb72755a..000000000 --- a/src/product/us/healthcareCard/healthcareCardV1.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { HealthcareCardV1Document } from "./healthcareCardV1Document"; - -/** - * Healthcare Card API version 1 inference prediction. - */ -export class HealthcareCardV1 extends Inference { - /** The endpoint's name. */ - endpointName = "us_healthcare_cards"; - /** The endpoint's version. */ - endpointVersion = "1"; - /** The document-level prediction. */ - prediction: HealthcareCardV1Document; - /** The document's pages. 
*/ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new HealthcareCardV1Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - HealthcareCardV1Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/us/healthcareCard/healthcareCardV1Copay.ts b/src/product/us/healthcareCard/healthcareCardV1Copay.ts deleted file mode 100644 index b2492a0ee..000000000 --- a/src/product/us/healthcareCard/healthcareCardV1Copay.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { cleanSpecialChars, floatToString } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * Copayments for covered services. - */ -export class HealthcareCardV1Copay { - /** The price of the service. */ - serviceFees: number | null; - /** The name of the service. */ - serviceName: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - if ( - prediction["service_fees"] !== undefined && - prediction["service_fees"] !== null && - !isNaN(prediction["service_fees"]) - ) { - this.serviceFees = +parseFloat(prediction["service_fees"]); - } else { - this.serviceFees = null; - } - this.serviceName = prediction["service_name"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? 
prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - serviceFees: - this.serviceFees !== undefined ? floatToString(this.serviceFees) : "", - serviceName: this.serviceName ? - this.serviceName.length <= 20 ? - cleanSpecialChars(this.serviceName) : - cleanSpecialChars(this.serviceName).slice(0, 17) + "..." : - "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "Service Fees: " + - printable.serviceFees + - ", Service Name: " + - printable.serviceName - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.serviceFees.padEnd(12) + - " | " + - printable.serviceName.padEnd(20) + - " |" - ); - } -} diff --git a/src/product/us/healthcareCard/healthcareCardV1Document.ts b/src/product/us/healthcareCard/healthcareCardV1Document.ts deleted file mode 100644 index 919a8cd55..000000000 --- a/src/product/us/healthcareCard/healthcareCardV1Document.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString,lineSeparator, -} from "../../../parsing/common"; -import { HealthcareCardV1Copay } from "./healthcareCardV1Copay"; -import { DateField, StringField } from "../../../parsing/standard"; - -/** - * Healthcare Card API version 1.3 document data. - */ -export class HealthcareCardV1Document implements Prediction { - /** The name of the company that provides the healthcare plan. */ - companyName: StringField; - /** Copayments for covered services. */ - copays: HealthcareCardV1Copay[] = []; - /** The list of dependents covered by the healthcare plan. */ - dependents: StringField[] = []; - /** The date when the member enrolled in the healthcare plan. 
*/ - enrollmentDate: DateField; - /** The group number associated with the healthcare plan. */ - groupNumber: StringField; - /** The organization that issued the healthcare plan. */ - issuer80840: StringField; - /** The unique identifier for the member in the healthcare system. */ - memberId: StringField; - /** The name of the member covered by the healthcare plan. */ - memberName: StringField; - /** The unique identifier for the payer in the healthcare system. */ - payerId: StringField; - /** The name of the healthcare plan. */ - planName: StringField; - /** The BIN number for prescription drug coverage. */ - rxBin: StringField; - /** The group number for prescription drug coverage. */ - rxGrp: StringField; - /** The ID number for prescription drug coverage. */ - rxId: StringField; - /** The PCN number for prescription drug coverage. */ - rxPcn: StringField; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.companyName = new StringField({ - prediction: rawPrediction["company_name"], - pageId: pageId, - }); - rawPrediction["copays"] && - rawPrediction["copays"].map( - (itemPrediction: StringDict) => - this.copays.push( - new HealthcareCardV1Copay({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - rawPrediction["dependents"] && - rawPrediction["dependents"].map( - (itemPrediction: StringDict) => - this.dependents.push( - new StringField({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.enrollmentDate = new DateField({ - prediction: rawPrediction["enrollment_date"], - pageId: pageId, - }); - this.groupNumber = new StringField({ - prediction: rawPrediction["group_number"], - pageId: pageId, - }); - this.issuer80840 = new StringField({ - prediction: rawPrediction["issuer_80840"], - pageId: pageId, - }); - this.memberId = new StringField({ - prediction: rawPrediction["member_id"], - pageId: pageId, - }); - this.memberName = new StringField({ - prediction: rawPrediction["member_name"], - pageId: pageId, - }); - 
this.payerId = new StringField({ - prediction: rawPrediction["payer_id"], - pageId: pageId, - }); - this.planName = new StringField({ - prediction: rawPrediction["plan_name"], - pageId: pageId, - }); - this.rxBin = new StringField({ - prediction: rawPrediction["rx_bin"], - pageId: pageId, - }); - this.rxGrp = new StringField({ - prediction: rawPrediction["rx_grp"], - pageId: pageId, - }); - this.rxId = new StringField({ - prediction: rawPrediction["rx_id"], - pageId: pageId, - }); - this.rxPcn = new StringField({ - prediction: rawPrediction["rx_pcn"], - pageId: pageId, - }); - } - - /** - * Default string representation. - */ - toString(): string { - const dependents = this.dependents.join("\n "); - let copaysSummary:string = ""; - if (this.copays && this.copays.length > 0) { - const copaysColSizes:number[] = [14, 22]; - copaysSummary += "\n" + lineSeparator(copaysColSizes, "-") + "\n "; - copaysSummary += "| Service Fees "; - copaysSummary += "| Service Name "; - copaysSummary += "|\n" + lineSeparator(copaysColSizes, "="); - copaysSummary += this.copays.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(copaysColSizes, "-") - ).join(""); - } - const outStr = `:Company Name: ${this.companyName} -:Plan Name: ${this.planName} -:Member Name: ${this.memberName} -:Member ID: ${this.memberId} -:Issuer 80840: ${this.issuer80840} -:Dependents: ${dependents} -:Group Number: ${this.groupNumber} -:Payer ID: ${this.payerId} -:RX BIN: ${this.rxBin} -:RX ID: ${this.rxId} -:RX GRP: ${this.rxGrp} -:RX PCN: ${this.rxPcn} -:Copays: ${copaysSummary} -:Enrollment Date: ${this.enrollmentDate}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/us/healthcareCard/index.ts b/src/product/us/healthcareCard/index.ts deleted file mode 100644 index 56b18900a..000000000 --- a/src/product/us/healthcareCard/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { HealthcareCardV1 } from "./healthcareCardV1"; diff --git a/src/product/us/healthcareCard/internal.ts 
b/src/product/us/healthcareCard/internal.ts deleted file mode 100644 index a3be3a17e..000000000 --- a/src/product/us/healthcareCard/internal.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { HealthcareCardV1 } from "./healthcareCardV1"; -export { HealthcareCardV1Copay } from "./healthcareCardV1Copay"; -export { HealthcareCardV1Document } from "./healthcareCardV1Document"; diff --git a/src/product/us/index.ts b/src/product/us/index.ts deleted file mode 100644 index 82ca91070..000000000 --- a/src/product/us/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { BankCheckV1 } from "./bankCheck/bankCheckV1"; -export { HealthcareCardV1 } from "./healthcareCard/healthcareCardV1"; -export { UsMailV3 } from "./usMail/usMailV3"; diff --git a/src/product/us/internal.ts b/src/product/us/internal.ts deleted file mode 100644 index 849fc79cd..000000000 --- a/src/product/us/internal.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * as bankCheck from "./bankCheck/internal"; -export * as healthcareCard from "./healthcareCard/internal"; -export * as usMail from "./usMail/internal"; diff --git a/src/product/us/usMail/index.ts b/src/product/us/usMail/index.ts deleted file mode 100644 index 0917b8ea3..000000000 --- a/src/product/us/usMail/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { UsMailV3 } from "./usMailV3"; diff --git a/src/product/us/usMail/internal.ts b/src/product/us/usMail/internal.ts deleted file mode 100644 index 9bc5cfe0c..000000000 --- a/src/product/us/usMail/internal.ts +++ /dev/null @@ -1,4 +0,0 @@ -export { UsMailV3 } from "./usMailV3"; -export { UsMailV3Document } from "./usMailV3Document"; -export { UsMailV3RecipientAddress } from "./usMailV3RecipientAddress"; -export { UsMailV3SenderAddress } from "./usMailV3SenderAddress"; diff --git a/src/product/us/usMail/usMailV3.ts b/src/product/us/usMail/usMailV3.ts deleted file mode 100644 index 3e5d35c96..000000000 --- a/src/product/us/usMail/usMailV3.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Inference, StringDict, Page } from 
"../../../parsing/common"; -import { UsMailV3Document } from "./usMailV3Document"; - -/** - * US Mail API version 3 inference prediction. - */ -export class UsMailV3 extends Inference { - /** The endpoint's name. */ - endpointName = "us_mail"; - /** The endpoint's version. */ - endpointVersion = "3"; - /** The document-level prediction. */ - prediction: UsMailV3Document; - /** The document's pages. */ - pages: Page[] = []; - - constructor(rawPrediction: StringDict) { - super(rawPrediction); - this.prediction = new UsMailV3Document(rawPrediction["prediction"]); - rawPrediction["pages"].forEach( - (page: StringDict) => { - if (page.prediction !== undefined && page.prediction !== null && - Object.keys(page.prediction).length > 0) { - this.pages.push(new Page( - UsMailV3Document, - page, - page["id"], - page["orientation"] - )); - } - } - ); - } -} diff --git a/src/product/us/usMail/usMailV3Document.ts b/src/product/us/usMail/usMailV3Document.ts deleted file mode 100644 index bf552637b..000000000 --- a/src/product/us/usMail/usMailV3Document.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { - Prediction, - StringDict, - cleanOutString,lineSeparator, -} from "../../../parsing/common"; -import { UsMailV3SenderAddress } from "./usMailV3SenderAddress"; -import { UsMailV3RecipientAddress } from "./usMailV3RecipientAddress"; -import { BooleanField, StringField } from "../../../parsing/standard"; - -/** - * US Mail API version 3.0 document data. - */ -export class UsMailV3Document implements Prediction { - /** Whether the mailing is marked as return to sender. */ - isReturnToSender: BooleanField; - /** The addresses of the recipients. */ - recipientAddresses: UsMailV3RecipientAddress[] = []; - /** The names of the recipients. */ - recipientNames: StringField[] = []; - /** The address of the sender. */ - senderAddress: UsMailV3SenderAddress; - /** The name of the sender. 
*/ - senderName: StringField; - - constructor(rawPrediction: StringDict, pageId?: number) { - this.isReturnToSender = new BooleanField({ - prediction: rawPrediction["is_return_to_sender"], - pageId: pageId, - }); - rawPrediction["recipient_addresses"] && - rawPrediction["recipient_addresses"].map( - (itemPrediction: StringDict) => - this.recipientAddresses.push( - new UsMailV3RecipientAddress({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - rawPrediction["recipient_names"] && - rawPrediction["recipient_names"].map( - (itemPrediction: StringDict) => - this.recipientNames.push( - new StringField({ - prediction: itemPrediction, - pageId: pageId, - }) - ) - ); - this.senderAddress = new UsMailV3SenderAddress({ - prediction: rawPrediction["sender_address"], - pageId: pageId, - }); - this.senderName = new StringField({ - prediction: rawPrediction["sender_name"], - pageId: pageId, - }); - } - - /** - * Default string representation. - */ - toString(): string { - const recipientNames = this.recipientNames.join("\n "); - let recipientAddressesSummary:string = ""; - if (this.recipientAddresses && this.recipientAddresses.length > 0) { - const recipientAddressesColSizes:number[] = [17, 37, 19, 13, 24, 7, 27, 17]; - recipientAddressesSummary += "\n" + lineSeparator(recipientAddressesColSizes, "-") + "\n "; - recipientAddressesSummary += "| City "; - recipientAddressesSummary += "| Complete Address "; - recipientAddressesSummary += "| Is Address Change "; - recipientAddressesSummary += "| Postal Code "; - recipientAddressesSummary += "| Private Mailbox Number "; - recipientAddressesSummary += "| State "; - recipientAddressesSummary += "| Street "; - recipientAddressesSummary += "| Unit "; - recipientAddressesSummary += "|\n" + lineSeparator(recipientAddressesColSizes, "="); - recipientAddressesSummary += this.recipientAddresses.map( - (item) => - "\n " + item.toTableLine() + "\n" + lineSeparator(recipientAddressesColSizes, "-") - ).join(""); - } - const outStr = 
`:Sender Name: ${this.senderName} -:Sender Address: ${this.senderAddress.toFieldList()} -:Recipient Names: ${recipientNames} -:Recipient Addresses: ${recipientAddressesSummary} -:Return to Sender: ${this.isReturnToSender}`.trimEnd(); - return cleanOutString(outStr); - } -} diff --git a/src/product/us/usMail/usMailV3RecipientAddress.ts b/src/product/us/usMail/usMailV3RecipientAddress.ts deleted file mode 100644 index 32b6aee0c..000000000 --- a/src/product/us/usMail/usMailV3RecipientAddress.ts +++ /dev/null @@ -1,150 +0,0 @@ - -import { cleanSpecialChars } from "../../../parsing/common"; -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * The addresses of the recipients. - */ -export class UsMailV3RecipientAddress { - /** The city of the recipient's address. */ - city: string | null; - /** The complete address of the recipient. */ - complete: string | null; - /** Indicates if the recipient's address is a change of address. */ - isAddressChange: boolean | null; - /** The postal code of the recipient's address. */ - postalCode: string | null; - /** The private mailbox number of the recipient's address. */ - privateMailboxNumber: string | null; - /** Second part of the ISO 3166-2 code, consisting of two letters indicating the US State. */ - state: string | null; - /** The street of the recipient's address. */ - street: string | null; - /** The unit number of the recipient's address. */ - unit: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. */ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. 
- */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.city = prediction["city"]; - this.complete = prediction["complete"]; - this.isAddressChange = prediction["is_address_change"]; - this.postalCode = prediction["postal_code"]; - this.privateMailboxNumber = prediction["private_mailbox_number"]; - this.state = prediction["state"]; - this.street = prediction["street"]; - this.unit = prediction["unit"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - city: this.city ? - this.city.length <= 15 ? - cleanSpecialChars(this.city) : - cleanSpecialChars(this.city).slice(0, 12) + "..." : - "", - complete: this.complete ? - this.complete.length <= 35 ? - cleanSpecialChars(this.complete) : - cleanSpecialChars(this.complete).slice(0, 32) + "..." : - "", - isAddressChange: this.isAddressChange === true ? - "True" : - this.isAddressChange === false ? - "False" : - "", - postalCode: this.postalCode ? - this.postalCode.length <= 11 ? - cleanSpecialChars(this.postalCode) : - cleanSpecialChars(this.postalCode).slice(0, 8) + "..." : - "", - privateMailboxNumber: this.privateMailboxNumber ? - this.privateMailboxNumber.length <= 22 ? - cleanSpecialChars(this.privateMailboxNumber) : - cleanSpecialChars(this.privateMailboxNumber).slice(0, 19) + "..." : - "", - state: this.state ? - this.state.length <= 5 ? - cleanSpecialChars(this.state) : - cleanSpecialChars(this.state).slice(0, 2) + "..." : - "", - street: this.street ? - this.street.length <= 25 ? - cleanSpecialChars(this.street) : - cleanSpecialChars(this.street).slice(0, 22) + "..." : - "", - unit: this.unit ? - this.unit.length <= 15 ? - cleanSpecialChars(this.unit) : - cleanSpecialChars(this.unit).slice(0, 12) + "..." 
: - "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "City: " + - printable.city + - ", Complete Address: " + - printable.complete + - ", Is Address Change: " + - printable.isAddressChange + - ", Postal Code: " + - printable.postalCode + - ", Private Mailbox Number: " + - printable.privateMailboxNumber + - ", State: " + - printable.state + - ", Street: " + - printable.street + - ", Unit: " + - printable.unit - ); - } - - /** - * Output in a format suitable for inclusion in an rST table. - */ - toTableLine(): string { - const printable = this.#printableValues(); - return ( - "| " + - printable.city.padEnd(15) + - " | " + - printable.complete.padEnd(35) + - " | " + - printable.isAddressChange.padEnd(17) + - " | " + - printable.postalCode.padEnd(11) + - " | " + - printable.privateMailboxNumber.padEnd(22) + - " | " + - printable.state.padEnd(5) + - " | " + - printable.street.padEnd(25) + - " | " + - printable.unit.padEnd(15) + - " |" - ); - } -} diff --git a/src/product/us/usMail/usMailV3SenderAddress.ts b/src/product/us/usMail/usMailV3SenderAddress.ts deleted file mode 100644 index 9567a1eba..000000000 --- a/src/product/us/usMail/usMailV3SenderAddress.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; - -/** - * The address of the sender. - */ -export class UsMailV3SenderAddress { - /** The city of the sender's address. */ - city: string | null; - /** The complete address of the sender. */ - complete: string | null; - /** The postal code of the sender's address. */ - postalCode: string | null; - /** Second part of the ISO 3166-2 code, consisting of two letters indicating the US State. */ - state: string | null; - /** The street of the sender's address. */ - street: string | null; - /** Confidence score */ - confidence: number = 0.0; - /** The document page on which the information was found. 
*/ - pageId: number; - /** - * Contains the relative vertices coordinates (points) of a polygon containing - * the field in the document. - */ - polygon: Polygon = new Polygon(); - - constructor({ prediction = {} }: StringDict) { - this.city = prediction["city"]; - this.complete = prediction["complete"]; - this.postalCode = prediction["postal_code"]; - this.state = prediction["state"]; - this.street = prediction["street"]; - this.pageId = prediction["page_id"]; - this.confidence = prediction["confidence"] ? prediction.confidence : 0.0; - if (prediction["polygon"]) { - this.polygon = prediction.polygon; - } - } - - /** - * Collection of fields as representable strings. - */ - #printableValues() { - return { - city: this.city ?? "", - complete: this.complete ?? "", - postalCode: this.postalCode ?? "", - state: this.state ?? "", - street: this.street ?? "", - }; - } - - /** - * Default string representation. - */ - toString(): string { - const printable = this.#printableValues(); - return ( - "City: " + - printable.city + - ", Complete Address: " + - printable.complete + - ", Postal Code: " + - printable.postalCode + - ", State: " + - printable.state + - ", Street: " + - printable.street - ); - } - - /** - * Output in a format suitable for inclusion in a field list. 
- */ - toFieldList(): string { - const printable = this.#printableValues(); - return ` - :City: ${printable.city} - :Complete Address: ${printable.complete} - :Postal Code: ${printable.postalCode} - :State: ${printable.state} - :Street: ${printable.street}`.trimEnd(); - } -} diff --git a/src/cli.ts b/src/v1/cli.ts similarity index 73% rename from src/cli.ts rename to src/v1/cli.ts index 703a9cfd9..6a628dcd4 100644 --- a/src/cli.ts +++ b/src/v1/cli.ts @@ -1,14 +1,24 @@ -import { Command, OptionValues, Option } from "commander"; -import { AsyncPredictResponse, Document, Inference, StringDict } from "./parsing/common"; -import { Client, PredictOptions } from "./client"; -import { PageOptions, PageOptionsOperation, PathInput } from "./input"; +import { + Command, OptionValues, Option, +} from "commander"; +import { + Document, Inference, StringDict, +} from "@/v1/parsing/common/index.js"; +import { + Client, PredictOptions, +} from "./client.js"; +import { + PageOptions, PageOptionsOperation, PathInput, +} from "@/input/index.js"; import * as console from "console"; -import { CLI_COMMAND_CONFIG, COMMAND_CUSTOM, COMMAND_GENERATED, ProductConfig } from "./cliProducts"; +import { + CLI_COMMAND_CONFIG, COMMAND_GENERATED, ProductConfig, +} from "./product/cliProducts.js"; +import { Endpoint } from "./http/index.js"; const program = new Command(); - // // EXECUTE THE COMMANDS // @@ -58,7 +68,7 @@ async function callParse( const pageOptions = getPageOptions(options); const inputSource = new PathInput({ inputPath: inputPath }); let response; - if (command === COMMAND_CUSTOM || command === COMMAND_GENERATED) { + if (command === COMMAND_GENERATED) { const customEndpoint = mindeeClient.createEndpoint( options.endpoint, options.account, @@ -90,36 +100,27 @@ async function callEnqueueAndParse( const predictParams = getPredictParams(options); const pageOptions = getPageOptions(options); const inputSource = new PathInput({ inputPath: inputPath }); - let response: 
AsyncPredictResponse; - if (command === COMMAND_CUSTOM || command === COMMAND_GENERATED) { - const customEndpoint = mindeeClient.createEndpoint( + let customEndpoint: Endpoint | undefined = undefined; + if (command === COMMAND_GENERATED) { + customEndpoint = mindeeClient.createEndpoint( options.endpoint, options.account, options.version ); - response = await mindeeClient.enqueueAndParse(productClass, inputSource, { - endpoint: customEndpoint, - pageOptions: pageOptions, - allWords: predictParams.allWords, - cropper: predictParams.cropper, - initialDelaySec: 2, - delaySec: 1.5, - maxRetries: 80, - }); - } else { - response = await mindeeClient.enqueueAndParse(productClass, inputSource, { - pageOptions: pageOptions, - allWords: predictParams.allWords, - cropper: predictParams.cropper, - initialDelaySec: 2, - delaySec: 1.5, - maxRetries: 80, - }); - if (!response.document) { - throw Error("Document could not be retrieved"); - } - printResponse(response.document, options); } + const response = await mindeeClient.enqueueAndParse(productClass, inputSource, { + endpoint: customEndpoint, + pageOptions: pageOptions, + allWords: predictParams.allWords, + cropper: predictParams.cropper, + initialDelaySec: 2, + delaySec: 1.5, + maxRetries: 80, + }); + if (!response.document) { + throw Error("Document could not be retrieved"); + } + printResponse(response.document, options); } async function callGetDocument( @@ -131,24 +132,6 @@ async function callGetDocument( printResponse(response.document, options); } -async function callSendFeedback( - productClass: new (httpResponse: StringDict) => T, - documentId: string, - feedbackStr: string, - options: any -): Promise { - const mindeeClient = initClient(options); - const feedback = { - feedback: JSON.parse(feedbackStr), - }; - const response = await mindeeClient.sendFeedback( - productClass, - documentId, - feedback - ); - console.log(response.feedback); -} - function printResponse( document: Document, options: any @@ -219,7 +202,7 @@ 
function routeSwitchboard( } function addPredictAction(prog: Command) { - if (prog.name() === COMMAND_CUSTOM || prog.name() === COMMAND_GENERATED) { + if (prog.name() === COMMAND_GENERATED) { prog.action(function ( inputPath: string, options: OptionValues, @@ -274,21 +257,6 @@ export function cli() { addMainOptions(getDocProductCmd); } - const feedbackProductCmd: Command = productCmd.command("feedback") - .description("Send feedback for a document.") - .argument("", "Unique ID of the document.") - .argument("", "Feedback to send, ex '{\"key\": \"value\"}'.") - .action(async (documentId, feedback, options) => { - const docClass = getConfig(name).docClass; - await callSendFeedback( - docClass, - documentId, - feedback, - { ...options, ...productCmd.opts(), ...program.opts() } - ); - }); - addMainOptions(feedbackProductCmd); - const predictProductCmd: Command = productCmd.command("parse") .description("Send a file for parsing."); @@ -303,7 +271,7 @@ export function cli() { predictProductCmd.addOption(asyncOpt); } - if (name === COMMAND_CUSTOM || name === COMMAND_GENERATED) { + if (name === COMMAND_GENERATED) { addCustomPostOptions(predictProductCmd); } addMainOptions(predictProductCmd); diff --git a/src/client.ts b/src/v1/client.ts similarity index 71% rename from src/client.ts rename to src/v1/client.ts index 435996482..51364859d 100644 --- a/src/client.ts +++ b/src/v1/client.ts @@ -1,31 +1,32 @@ +import { setTimeout } from "node:timers/promises"; +import { Dispatcher } from "undici"; import { InputSource, - LocalResponse, PageOptions, -} from "./input"; -import { ApiSettings, Endpoint, EndpointResponse, STANDARD_API_OWNER } from "./http"; +} from "@/input/index.js"; +import { BaseHttpResponse } from "@/http/index.js"; +import { errorHandler } from "@/errors/handler.js"; +import { LOG_LEVELS, logger } from "@/logger.js"; +import { + ApiSettingsV1, + Endpoint, + STANDARD_API_OWNER, +} from "./http/index.js"; import { AsyncPredictResponse, ExecutionPriority, 
FeedbackResponse, Inference, PredictResponse, - StringDict -} from "./parsing/common"; -import { errorHandler } from "./errors/handler"; -import { LOG_LEVELS, logger } from "./logger"; -import { InferenceFactory } from "./parsing/common/inference"; -import { CustomV1, GeneratedV1 } from "./product"; - -import { setTimeout } from "node:timers/promises"; -import { MindeeError } from "./errors"; -import { WorkflowResponse } from "./parsing/common/workflowResponse"; -import { WorkflowEndpoint } from "./http/workflowEndpoint"; -import { Base64Input, BufferInput, BytesInput, PathInput, StreamInput, UrlInput } from "./input"; -import { Readable } from "stream"; + StringDict, + WorkflowResponse, +} from "./parsing/common/index.js"; +import { InferenceFactory } from "./parsing/common/inference.js"; +import { GeneratedV1 } from "./product/index.js"; +import { WorkflowEndpoint } from "./http/index.js"; /** - * Common options for workflows & predictions. + * Common options for workflows and predictions. */ interface BaseOptions { /** @@ -37,7 +38,6 @@ interface BaseOptions { * This is done before sending the file to the server and is useful to avoid page limitations. */ pageOptions?: PageOptions; - /** * If set, will enable Retrieval-Augmented Generation (only works if a valid workflowId is set). */ @@ -123,6 +123,8 @@ export interface ClientOptions { throwOnError?: boolean; /** Log debug messages. */ debug?: boolean; + /** Custom dispatcher for HTTP requests. */ + dispatcher?: Dispatcher; } /** @@ -131,26 +133,29 @@ export interface ClientOptions { * @category Client */ export class Client { - /** Key of the API. */ - protected apiKey: string; + /** Mindee V1 API settings. */ + protected apiSettings: ApiSettingsV1; /** * @param {ClientOptions} options options for the initialization of a client. 
*/ constructor( - { apiKey, throwOnError, debug }: ClientOptions = { - apiKey: "", + { apiKey, throwOnError, debug, dispatcher }: ClientOptions = { + apiKey: undefined, throwOnError: true, debug: false, + dispatcher: undefined, } ) { - this.apiKey = apiKey ? apiKey : ""; + this.apiSettings = new ApiSettingsV1({ + apiKey: apiKey, + dispatcher: dispatcher, + }); errorHandler.throwOnError = throwOnError ?? true; - logger.level = - debug ?? process.env.MINDEE_DEBUG + logger.level = debug ?? process.env.MINDEE_DEBUG ? LOG_LEVELS["debug"] : LOG_LEVELS["warn"]; - logger.debug("Client initialized"); + logger.debug("Client V1 Initialized"); } /** @@ -174,8 +179,7 @@ export class Client { pageOptions: undefined, } ): Promise> { - const endpoint: Endpoint = - params?.endpoint ?? this.#initializeOTSEndpoint(productClass); + const endpoint: Endpoint = params?.endpoint ?? this.#initializeOTSEndpoint(productClass); if (inputSource === undefined) { throw new Error("The 'parse' function requires an input document."); } @@ -191,7 +195,7 @@ export class Client { /** * Send the document to an asynchronous endpoint and return its ID in the queue. - * @param productClass product class to use for calling the API and parsing the response. + * @param productClass product class to use for calling the API and parsing the response. * @param inputSource file to parse. * @param params parameters relating to prediction options. * @category Asynchronous @@ -223,7 +227,7 @@ export class Client { /** * Polls a queue and returns its status as well as the prediction results if the parsing is done. * - * @param productClass product class to use for calling the API and parsing the response. + * @param productClass product class to use for calling the API and parsing the response. * @param queueId id of the queue to poll. * @param params parameters relating to prediction options. * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. 
@@ -242,30 +246,6 @@ export class Client { return new AsyncPredictResponse(productClass, docResponse.data); } - async loadPrediction( - productClass: new (httpResponse: StringDict) => T, - localResponse: LocalResponse - ) { - /** - * Load a prediction. - * - * @param productClass Product class to use for calling the API and parsing the response. - * @param localResponse Local response to load. - * @category Asynchronous - * @returns A valid prediction - */ - try { - const asDict = await localResponse.asDict(); - if (Object.prototype.hasOwnProperty.call(asDict, "job")) { - return new AsyncPredictResponse(productClass, asDict); - } - return new PredictResponse(productClass, asDict); - } catch { - throw new MindeeError("No prediction found in local response."); - } - } - - /** * Send the document to an asynchronous endpoint and return its ID in the queue. * @param inputSource file to send to the API. @@ -279,7 +259,7 @@ export class Client { workflowId: string, params: WorkflowOptions = {} ): Promise> { - const workflowEndpoint = new WorkflowEndpoint(this.#buildApiSettings(), workflowId); + const workflowEndpoint = new WorkflowEndpoint(this.apiSettings, workflowId); if (inputSource === undefined) { throw new Error("The 'executeWorkflow' function requires an input document."); } @@ -297,7 +277,7 @@ export class Client { /** * Fetch prediction results from a document already processed. * - * @param productClass product class to use for calling the API and parsing the response. + * @param productClass product class to use for calling the API and parsing the response. * @param documentId id of the document to fetch. * @param params optional parameters. * @param params.endpoint Endpoint, only specify if using a custom product. @@ -312,14 +292,14 @@ export class Client { ): Promise> { const endpoint: Endpoint = params?.endpoint ?? 
this.#initializeOTSEndpoint(productClass); - const response: EndpointResponse = await endpoint.getDocument(documentId); + const response: BaseHttpResponse = await endpoint.getDocument(documentId); return new PredictResponse(productClass, response.data); } /** - * Send a feedback for a document. + * Send feedback for a document. * - * @param productClass product class to use for calling the API and parsing the response. + * @param productClass product class to use for calling the API and parsing the response. * @param documentId id of the document to send feedback for. * @param feedback the feedback to send. * @param params optional parameters. @@ -336,7 +316,7 @@ export class Client { ): Promise { const endpoint: Endpoint = params?.endpoint ?? this.#initializeOTSEndpoint(productClass); - const response: EndpointResponse = await endpoint.sendFeedback(documentId, feedback); + const response: BaseHttpResponse = await endpoint.sendFeedback(documentId, feedback); return new FeedbackResponse(response.data); } @@ -368,7 +348,7 @@ export class Client { /** * Send a document to an asynchronous endpoint and poll the server until the result is sent or - * until the maximum amount of tries is reached. + * until the maximum number of tries is reached. * * @param productClass product class to use for calling the API and parsing the response. * @param inputSource document to parse. @@ -459,20 +439,10 @@ Job status: ${pollResults.job.status}.` endpointName, accountName, endpointVersion, - this.#buildApiSettings() + this.apiSettings ); } - /** - * Builds a document endpoint. - * @returns a custom `Endpoint` object. - */ - #buildApiSettings(): ApiSettings { - return new ApiSettings({ - apiKey: this.apiKey, - }); - } - /** * Creates a custom endpoint with the given values. Raises an error if the endpoint is invalid. * @param endpointName Name of the custom Endpoint. 
@@ -487,17 +457,13 @@ Job status: ${pollResults.job.status}.` accountName: string, endpointVersion?: string ): Endpoint { - const cleanAccountName: string = this.#cleanAccountName( - CustomV1, - accountName - ); if (!endpointName || endpointName.length === 0) { throw new Error("Missing parameter 'endpointName' for custom build!"); } let cleanEndpointVersion: string; if (!endpointVersion || endpointVersion.length === 0) { logger.debug( - "Warning: No version provided for a custom build, will attempt to poll version 1 by default." + "No version provided for a custom build, will poll using version 1 by default." ); cleanEndpointVersion = "1"; } else { @@ -505,7 +471,7 @@ Job status: ${pollResults.job.status}.` } return this.#buildProductEndpoint( endpointName, - cleanAccountName, + accountName, cleanEndpointVersion ); } @@ -516,9 +482,6 @@ Job status: ${pollResults.job.status}.` #initializeOTSEndpoint( productClass: new (httpResponse: StringDict) => T ): Endpoint { - if (productClass.name === "CustomV1") { - throw new Error("Incorrect parameters for Custom build."); - } const [endpointName, endpointVersion] = this.#getOtsEndpoint(productClass); return this.#buildProductEndpoint( endpointName, @@ -527,33 +490,9 @@ Job status: ${pollResults.job.status}.` ); } - /** - * Checks that an account name is provided for custom builds, and sets the default one otherwise. - * @param productClass product class to use for calling the API and parsing the response. - * @param accountName name of the account's holder. Only required on custom builds. - * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. - * - * @returns the name of the account. Sends an error if one isn't provided for a custom build. 
- */ - #cleanAccountName( - productClass: new (httpResponse: StringDict) => T, - accountName?: string - ): string { - if (productClass.name === "CustomV1") { - if (!accountName || accountName.length === 0) { - logger.debug( - `Warning: no account name provided for custom build, ${STANDARD_API_OWNER} will be used by default` - ); - return STANDARD_API_OWNER; - } - return accountName; - } - return STANDARD_API_OWNER; - } - /** * Get the name and version of an OTS endpoint. - * @param productClass product class to use for calling the API and parsing the response. + * @param productClass product class to use for calling the API and parsing the response. * Mandatory to retrieve default OTS endpoint data. * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. * @@ -566,78 +505,4 @@ Job status: ${pollResults.job.status}.` InferenceFactory.getEndpoint(productClass); return [endpointName, endpointVersion]; } - - /** - * Load an input document from a local path. - * @param inputPath - * @deprecated Use `new mindee.PathInput()` instead. - */ - docFromPath(inputPath: string): PathInput { - return new PathInput({ - inputPath: inputPath, - }); - } - - /** - * Load an input document from a base64 encoded string. - * @param inputString input content, as a string. - * @param filename file name. - * @deprecated Use `new mindee.Base64Input()` instead. - */ - docFromBase64(inputString: string, filename: string): Base64Input { - return new Base64Input({ - inputString: inputString, - filename: filename, - }); - } - - /** - * Load an input document from a `stream.Readable` object. - * @param inputStream input content, as a readable stream. - * @param filename file name. - * @deprecated Use `new mindee.StreamInput()` instead. - */ - docFromStream(inputStream: Readable, filename: string): StreamInput { - return new StreamInput({ - inputStream: inputStream, - filename: filename, - }); - } - - /** - * Load an input document from bytes. 
- * @param inputBytes input content, as a Uint8Array or Buffer. - * @param filename file name. - * @deprecated Use `new mindee.BytesInput()` instead. - */ - docFromBytes(inputBytes: Uint8Array, filename: string): BytesInput { - return new BytesInput({ - inputBytes: inputBytes, - filename: filename, - }); - } - - /** - * Load an input document from a URL. - * @param url input url. Must be HTTPS. - * @deprecated Use `new mindee.UrlInput()` instead. - */ - docFromUrl(url: string): UrlInput { - return new UrlInput({ - url: url, - }); - } - - /** - * Load an input document from a Buffer. - * @param buffer input content, as a buffer. - * @param filename file name. - * @deprecated Use `new mindee.BufferInput()` instead. - */ - docFromBuffer(buffer: Buffer, filename: string): BufferInput { - return new BufferInput({ - buffer: buffer, - filename: filename, - }); - } } diff --git a/src/imageOperations/internal.ts b/src/v1/extraction/index.ts similarity index 57% rename from src/imageOperations/internal.ts rename to src/v1/extraction/index.ts index 0c15d6534..cf7cd9ea4 100644 --- a/src/imageOperations/internal.ts +++ b/src/v1/extraction/index.ts @@ -1,3 +1,2 @@ -export { extractReceipts, ExtractedMultiReceiptImage } from "./multiReceiptsExtractor"; -export { extractInvoices, ExtractedInvoiceSplitterImage } from "./invoiceSplitterExtractor"; -export { ExtractedImage } from "./common"; +export { extractReceipts, ExtractedMultiReceiptImage } from "./multiReceiptsExtractor/index.js"; +export { extractInvoices, ExtractedInvoiceSplitterImage } from "./invoiceSplitterExtractor/index.js"; diff --git a/src/imageOperations/invoiceSplitterExtractor/extractedInvoiceSplitterImage.ts b/src/v1/extraction/invoiceSplitterExtractor/extractedInvoiceSplitterImage.ts similarity index 87% rename from src/imageOperations/invoiceSplitterExtractor/extractedInvoiceSplitterImage.ts rename to src/v1/extraction/invoiceSplitterExtractor/extractedInvoiceSplitterImage.ts index 36f8fdb17..21f3c0c39 100644 
--- a/src/imageOperations/invoiceSplitterExtractor/extractedInvoiceSplitterImage.ts +++ b/src/v1/extraction/invoiceSplitterExtractor/extractedInvoiceSplitterImage.ts @@ -1,4 +1,4 @@ -import { ExtractedImage } from "../common/extractedImage"; +import { ExtractedImage } from "@/image/extractedImage.js"; /** * Wrapper class for extracted invoice pages. diff --git a/src/v1/extraction/invoiceSplitterExtractor/index.ts b/src/v1/extraction/invoiceSplitterExtractor/index.ts new file mode 100644 index 000000000..376c31138 --- /dev/null +++ b/src/v1/extraction/invoiceSplitterExtractor/index.ts @@ -0,0 +1,2 @@ +export { extractInvoices } from "./invoiceSplitterExtractor.js"; +export { ExtractedInvoiceSplitterImage } from "./extractedInvoiceSplitterImage.js"; diff --git a/src/imageOperations/invoiceSplitterExtractor/invoiceSplitterExtractor.ts b/src/v1/extraction/invoiceSplitterExtractor/invoiceSplitterExtractor.ts similarity index 61% rename from src/imageOperations/invoiceSplitterExtractor/invoiceSplitterExtractor.ts rename to src/v1/extraction/invoiceSplitterExtractor/invoiceSplitterExtractor.ts index 83c4303af..b6de0479e 100644 --- a/src/imageOperations/invoiceSplitterExtractor/invoiceSplitterExtractor.ts +++ b/src/v1/extraction/invoiceSplitterExtractor/invoiceSplitterExtractor.ts @@ -1,22 +1,38 @@ -import { PDFDocument } from "@cantoo/pdf-lib"; -import { MindeeError, MindeeMimeTypeError } from "../../errors"; -import { InvoiceSplitterV1 } from "../../product"; -import { LocalInputSource } from "../../input"; -import { ExtractedInvoiceSplitterImage } from "./extractedInvoiceSplitterImage"; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import type * as pdfLibTypes from "@cantoo/pdf-lib"; +import { MindeeError, MindeeInputSourceError } from "@/errors/index.js"; +import { InvoiceSplitterV1 } from "@/v1/product/index.js"; +import { LocalInputSource } from "@/input/index.js"; +import { ExtractedInvoiceSplitterImage } from 
"@/v1/extraction/index.js"; +import { loadOptionalDependency } from "@/dependency/index.js"; -async function splitPdf(pdfDoc: PDFDocument, invoicePageGroups: number[][]): Promise { +let pdfLib: typeof pdfLibTypes | null = null; + +async function getPdfLib(): Promise { + if (!pdfLib) { + const pdfLibImport = await loadOptionalDependency("@cantoo/pdf-lib", "Text Embedding"); + pdfLib = (pdfLibImport as any).default || pdfLibImport; + } + return pdfLib!; +} + +async function splitPdf( + pdfDoc: pdfLibTypes.PDFDocument, + invoicePageGroups: number[][]): Promise { + const pdfLib = await getPdfLib(); if (invoicePageGroups.length === 0) { return []; } const generatedPdfs: ExtractedInvoiceSplitterImage[] = []; for (let i = 0; i < invoicePageGroups.length; i++) { - const subdocument = await PDFDocument.create(); + const subdocument = await pdfLib.PDFDocument.create(); const fullIndexes = []; for (let j = invoicePageGroups[i][0]; j <= invoicePageGroups[i][invoicePageGroups[i].length - 1]; j++) { fullIndexes.push(j); } const copiedPages = await subdocument.copyPages(pdfDoc, fullIndexes); - copiedPages.map((page) => { + copiedPages.map((page: pdfLibTypes.PDFPage) => { subdocument.addPage(page); }); const subdocumentBytes = await subdocument.save(); @@ -29,18 +45,23 @@ async function splitPdf(pdfDoc: PDFDocument, invoicePageGroups: number[][]): Pro return generatedPdfs; } -async function getPdfDoc(inputFile: LocalInputSource): Promise { +async function getPdfDoc(inputFile: LocalInputSource): Promise { + const pdfLib = await getPdfLib(); await inputFile.init(); if (!inputFile.isPdf()) { - throw new MindeeMimeTypeError("Invoice Splitter is only compatible with pdf documents."); + throw new MindeeInputSourceError( + "Invoice Splitter is only compatible with PDF documents." 
+ ); } - const pdfDoc = await PDFDocument.load(inputFile.fileObject, { + const pdfDoc = await pdfLib.PDFDocument.load(inputFile.fileObject, { ignoreEncryption: true, password: "" }); if (pdfDoc.getPageCount() < 2) { - throw new MindeeError("Invoice Splitter is only compatible with multi-page-pdf documents."); + throw new MindeeInputSourceError( + "Invoice Splitter is only compatible with multi-page PDF documents." + ); } return pdfDoc; } diff --git a/src/imageOperations/multiReceiptsExtractor/extractedMultiReceiptImage.ts b/src/v1/extraction/multiReceiptsExtractor/extractedMultiReceiptImage.ts similarity index 88% rename from src/imageOperations/multiReceiptsExtractor/extractedMultiReceiptImage.ts rename to src/v1/extraction/multiReceiptsExtractor/extractedMultiReceiptImage.ts index 83ce558f1..075cc6335 100644 --- a/src/imageOperations/multiReceiptsExtractor/extractedMultiReceiptImage.ts +++ b/src/v1/extraction/multiReceiptsExtractor/extractedMultiReceiptImage.ts @@ -1,4 +1,4 @@ -import { ExtractedImage } from "../common"; +import { ExtractedImage } from "@/image/index.js"; /** * Wrapper class for extracted multiple-receipts images. 
diff --git a/src/v1/extraction/multiReceiptsExtractor/index.ts b/src/v1/extraction/multiReceiptsExtractor/index.ts new file mode 100644 index 000000000..b889e434f --- /dev/null +++ b/src/v1/extraction/multiReceiptsExtractor/index.ts @@ -0,0 +1,2 @@ +export { extractReceipts } from "./multiReceiptsExtractor.js"; +export { ExtractedMultiReceiptImage } from "./extractedMultiReceiptImage.js"; diff --git a/src/imageOperations/multiReceiptsExtractor/multiReceiptsExtractor.ts b/src/v1/extraction/multiReceiptsExtractor/multiReceiptsExtractor.ts similarity index 65% rename from src/imageOperations/multiReceiptsExtractor/multiReceiptsExtractor.ts rename to src/v1/extraction/multiReceiptsExtractor/multiReceiptsExtractor.ts index 086fe405c..c49e37b58 100644 --- a/src/imageOperations/multiReceiptsExtractor/multiReceiptsExtractor.ts +++ b/src/v1/extraction/multiReceiptsExtractor/multiReceiptsExtractor.ts @@ -1,11 +1,24 @@ -import { PDFDocument, PDFImage, PDFPage, degrees } from "@cantoo/pdf-lib"; -import { MindeeError, MindeeMimeTypeError } from "../../errors"; -import { Polygon } from "../../geometry"; -import { MultiReceiptsDetectorV1 } from "../../product"; -import { ExtractedMultiReceiptImage } from "./extractedMultiReceiptImage"; -import { LocalInputSource } from "../../input"; -import { extractFromPage } from "../common"; -import { PositionField } from "../../parsing/standard"; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import type * as pdfLibTypes from "@cantoo/pdf-lib"; +import { MindeeError, MindeeInputSourceError } from "@/errors/index.js"; +import { Polygon } from "@/geometry/index.js"; +import { MultiReceiptsDetectorV1 } from "@/v1/product/index.js"; +import { ExtractedMultiReceiptImage } from "@/v1/extraction/index.js"; +import { LocalInputSource } from "@/input/index.js"; +import { extractFromPage } from "@/image/index.js"; +import { PositionField } from "@/v1/parsing/standard/index.js"; +import { loadOptionalDependency } from 
"@/dependency/index.js"; + +let pdfLib: typeof pdfLibTypes | null = null; + +async function getPdfLib(): Promise { + if (!pdfLib) { + const pdfLibImport = await loadOptionalDependency("@cantoo/pdf-lib", "Text Embedding"); + pdfLib = (pdfLibImport as any).default || pdfLibImport; + } + return pdfLib!; +} /** * Given a page and a set of coordinates, extracts & assigns individual receipts to an ExtractedMultiReceiptImage @@ -17,7 +30,7 @@ import { PositionField } from "../../parsing/standard"; * pages. */ async function extractReceiptsFromPage( - pdfPage: PDFPage, + pdfPage: pdfLibTypes.PDFPage, boundingBoxes: Polygon[], pageId: number) { const extractedReceiptsRaw = await extractFromPage(pdfPage, boundingBoxes); @@ -29,21 +42,22 @@ async function extractReceiptsFromPage( } async function loadPdfDoc(inputFile: LocalInputSource) { - let pdfDoc: PDFDocument; + const pdfLib = await getPdfLib(); + let pdfDoc: pdfLibTypes.PDFDocument; if (!["image/jpeg", "image/jpg", "image/png", "application/pdf"].includes(inputFile.mimeType)) { - throw new MindeeMimeTypeError( + throw new MindeeInputSourceError( 'Unsupported file type "' + inputFile.mimeType + '" Currently supported types are .png, .jpg and .pdf' ); } else if (inputFile.isPdf()) { - pdfDoc = await PDFDocument.load(inputFile.fileObject, { + pdfDoc = await pdfLib.PDFDocument.load(inputFile.fileObject, { ignoreEncryption: true, password: "" }); } else { - pdfDoc = await PDFDocument.create(); - let image: PDFImage; + pdfDoc = await pdfLib.PDFDocument.create(); + let image: pdfLibTypes.PDFImage; if (inputFile.mimeType === "image/png") { image = await pdfDoc.embedPng(inputFile.fileObject); } else { @@ -67,6 +81,7 @@ export async function extractReceipts( inputFile: LocalInputSource, inference: MultiReceiptsDetectorV1 ): Promise { + const pdfLib = await getPdfLib(); const images: ExtractedMultiReceiptImage[] = []; if (!inference.prediction.receipts) { throw new MindeeError("No possible receipts candidates found for 
MultiReceipts extraction."); @@ -74,7 +89,7 @@ export async function extractReceipts( const pdfDoc = await loadPdfDoc(inputFile); for (let pageId = 0; pageId < pdfDoc.getPageCount(); pageId++) { const [page] = await pdfDoc.copyPages(pdfDoc, [pageId]); - page.setRotation(degrees(inference.pages[pageId].orientation?.value ?? 0)); + page.setRotation(pdfLib.degrees(inference.pages[pageId].orientation?.value ?? 0)); const receiptPositions = inference.pages[pageId].prediction.receipts.map( (receipt: PositionField) => receipt.boundingBox ); diff --git a/src/v1/http/apiSettingsV1.ts b/src/v1/http/apiSettingsV1.ts new file mode 100644 index 000000000..ec4213d09 --- /dev/null +++ b/src/v1/http/apiSettingsV1.ts @@ -0,0 +1,51 @@ + +import { logger } from "@/logger.js"; +import { BaseSettings, MindeeApiConstructorProps } from "../../http/baseSettings.js"; +import { MindeeConfigurationError } from "@/errors/index.js"; + +export const STANDARD_API_OWNER: string = "mindee"; +const API_V1_KEY_ENVVAR_NAME: string = "MINDEE_API_KEY"; +const API_V1_HOST_ENVVAR_NAME: string = "MINDEE_API_HOST"; +const DEFAULT_MINDEE_API_HOST: string = "api.mindee.net"; + +export class ApiSettingsV1 extends BaseSettings { + baseHeaders: Record; + + constructor({ + apiKey, + dispatcher, + }: MindeeApiConstructorProps) { + super(apiKey, dispatcher); + if (!this.apiKey || this.apiKey.length === 0) { + throw new MindeeConfigurationError( + "Your V1 API key could not be set, check your Client Configuration\n." 
+ + `You can set this using the ${API_V1_KEY_ENVVAR_NAME} environment variable.` + ); + } + /* eslint-disable @typescript-eslint/naming-convention */ + this.baseHeaders = { + "User-Agent": this.getUserAgent(), + Authorization: `Token ${this.apiKey}`, + }; + } + + protected apiKeyFromEnv(): string { + const envVarValue = process.env[API_V1_KEY_ENVVAR_NAME]; + if (envVarValue) { + logger.debug( + "Set the V1 API key from the environment" + ); + return envVarValue; + } + return ""; + } + + protected hostnameFromEnv(): string { + const envVarValue = process.env[API_V1_HOST_ENVVAR_NAME]; + if (envVarValue) { + logger.debug(`Set the V1 API hostname from the environment to: ${envVarValue}`); + return envVarValue; + } + return DEFAULT_MINDEE_API_HOST; + } +} diff --git a/src/http/endpoint.ts b/src/v1/http/endpoint.ts similarity index 55% rename from src/http/endpoint.ts rename to src/v1/http/endpoint.ts index bba83afb9..d74cf2ca9 100644 --- a/src/http/endpoint.ts +++ b/src/v1/http/endpoint.ts @@ -1,19 +1,20 @@ -import { RequestOptions } from "https"; import { URLSearchParams } from "url"; -import FormData from "form-data"; -import { InputSource, LocalInputSource } from "../input"; -import { handleError } from "./error"; -import { ApiSettings } from "./apiSettings"; -import { BaseEndpoint, EndpointResponse } from "./baseEndpoint"; -import { StringDict } from "../parsing/common"; -import { ClientRequest } from "http"; -import { isValidAsyncResponse, isValidSyncResponse } from "./responseValidation"; -import { PredictParams } from "./httpParams"; +import { InputSource, LocalInputSource } from "@/input/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { cutDocPages, sendRequestAndReadResponse, BaseHttpResponse } from "@/http/apiCore.js"; +import { ApiSettingsV1 } from "./apiSettingsV1.js"; +import { handleError } from "./errors.js"; +import { PredictParams } from "./httpParams.js"; +import { isValidAsyncResponse, isValidSyncResponse } from 
"./responseValidation.js"; /** * Endpoint for a product (OTS or Custom). */ -export class Endpoint extends BaseEndpoint { +export class Endpoint { + /** Settings relating to the API. */ + settings: ApiSettingsV1; + /** Root of the URL for API calls. */ + urlRoot: string; /** URL of a product. */ urlName: string; /** Account owning the product. */ @@ -25,22 +26,15 @@ export class Endpoint extends BaseEndpoint { urlName: string, owner: string, version: string, - settings: ApiSettings + settings: ApiSettingsV1 ) { - super(settings, `/v1/products/${owner}/${urlName}/v${version}`); + this.settings = settings; + this.urlRoot = `/v1/products/${owner}/${urlName}/v${version}`; this.owner = owner; this.urlName = urlName; this.version = version; } - /** - * Changes the url to a workflow ID. - * @param workflowId - */ - useWorkflowId(workflowId: string) { - this.urlRoot = `/v1/workflows/${workflowId}`; - } - /** * Sends a document to the API and parses out the result. * Throws an error if the server's response contains one. @@ -48,10 +42,10 @@ export class Endpoint extends BaseEndpoint { * @category Synchronous * @returns a `Promise` containing parsing results. */ - async predict(params: PredictParams): Promise { + async predict(params: PredictParams): Promise { await params.inputDoc.init(); if (params.pageOptions !== undefined) { - await BaseEndpoint.cutDocPages(params.inputDoc, params.pageOptions); + await cutDocPages(params.inputDoc, params.pageOptions); } const response = await this.#predictReqPost( params.inputDoc, @@ -62,7 +56,6 @@ export class Endpoint extends BaseEndpoint { if (!isValidSyncResponse(response)) { handleError(this.urlName, response, this.extractStatusMessage(response)); } - return response; } @@ -73,10 +66,10 @@ export class Endpoint extends BaseEndpoint { * @category Asynchronous * @returns a `Promise` containing queue data. 
*/ - async predictAsync(params: PredictParams): Promise { + async predictAsync(params: PredictParams): Promise { await params.inputDoc.init(); if (params.pageOptions !== undefined) { - await BaseEndpoint.cutDocPages(params.inputDoc, params.pageOptions); + await cutDocPages(params.inputDoc, params.pageOptions); } const response = await this.#predictAsyncReqPost( params.inputDoc, @@ -92,7 +85,7 @@ export class Endpoint extends BaseEndpoint { return response; } - private extractStatusMessage(response: EndpointResponse): string | undefined { + private extractStatusMessage(response: BaseHttpResponse): string | undefined { if (response.messageObj?.statusMessage !== undefined && response.messageObj?.statusMessage !== null) { return response.messageObj?.statusMessage; } @@ -114,8 +107,8 @@ export class Endpoint extends BaseEndpoint { * @category Asynchronous * @returns a `Promise` containing the parsed result. */ - async getQueuedDocument(queueId: string): Promise { - const queueResponse: EndpointResponse = await this.#documentQueueReqGet(queueId); + async getQueuedDocument(queueId: string): Promise { + const queueResponse: BaseHttpResponse = await this.#documentQueueReqGet(queueId); const queueStatusCode = queueResponse.messageObj.statusCode; if (!isValidAsyncResponse(queueResponse)) { handleError(this.urlName, queueResponse, this.extractStatusMessage(queueResponse)); @@ -136,14 +129,13 @@ export class Endpoint extends BaseEndpoint { * Send a feedback * @param {string} documentId */ - async getDocument(documentId: string): Promise { + async getDocument(documentId: string): Promise { const response = await this.#documentGetReq( documentId, ); if (!isValidAsyncResponse(response)) { handleError("document", response, this.extractStatusMessage(response)); } - return response; } @@ -155,15 +147,14 @@ export class Endpoint extends BaseEndpoint { async sendFeedback( documentId: string, feedback: StringDict - ): Promise { - const response: EndpointResponse = await 
this.#documentFeedbackPutReq( + ): Promise { + const response: BaseHttpResponse = await this.#documentFeedbackPutReq( documentId, feedback, ); if (!isValidSyncResponse(response)) { handleError("feedback", response, this.extractStatusMessage(response)); } - return response; } @@ -177,7 +168,7 @@ export class Endpoint extends BaseEndpoint { * @param rag * @param workflowId */ - protected sendFileForPrediction( + protected async sendFileForPrediction( input: InputSource, predictUrl: string, includeWords: boolean = false, @@ -185,53 +176,47 @@ export class Endpoint extends BaseEndpoint { cropper: boolean = false, rag: boolean = false, workflowId: string | undefined = undefined - ): Promise { - return new Promise((resolve, reject) => { - const searchParams = new URLSearchParams(); - if (cropper) { - searchParams.append("cropper", "true"); - } - if (rag) { - searchParams.append("rag", "true"); - } - if (fullText) { - searchParams.append("full_text_ocr", "true"); - } + ): Promise { + const searchParams = new URLSearchParams(); + if (cropper) { + searchParams.set("cropper", "true"); + } + if (rag) { + searchParams.set("rag", "true"); + } + if (fullText) { + searchParams.set("full_text_ocr", "true"); + } - const form = new FormData(); - if (input instanceof LocalInputSource && input.fileObject instanceof Buffer) { - form.append("document", input.fileObject, { - filename: input.filename, - }); - } else { - form.append("document", input.fileObject); - } + const form = new FormData(); + if (input instanceof LocalInputSource && input.fileObject instanceof Buffer) { + form.set("document", new Blob([input.fileObject]), input.filename); + } else { + form.set("document", input.fileObject); + } + if (includeWords) { + form.set("include_mvision", "true"); + } - if (includeWords) { - form.append("include_mvision", "true"); - } - const headers = { ...this.settings.baseHeaders, ...form.getHeaders() }; - let path: string; - if (workflowId === undefined) { - path = 
`${this.urlRoot}/${predictUrl}`; - } else { - path = `/v1/workflows/${workflowId}/predict_async`; - } - if (searchParams.toString().length > 0) { - path += `?${searchParams}`; - } - const options: RequestOptions = { - method: "POST", - headers: headers, - hostname: this.settings.hostname, - path: path, - timeout: this.settings.timeout, - }; - const req = BaseEndpoint.readResponse(options, resolve, reject); - form.pipe(req); - // potential ECONNRESET if we don't end the request. - req.end(); - }); + let path: string; + if (workflowId === undefined) { + path = `${this.urlRoot}/${predictUrl}`; + } else { + path = `/v1/workflows/${workflowId}/predict_async`; + } + if (searchParams.toString().length > 0) { + path += `?${searchParams}`; + } + + const options = { + method: "POST", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: path, + timeout: this.settings.timeout, + body: form, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); } /** @@ -246,7 +231,7 @@ export class Endpoint extends BaseEndpoint { includeWords: boolean = false, fullText: boolean = false, cropper: boolean = false - ): Promise { + ): Promise { return this.sendFileForPrediction(input, "predict", includeWords, fullText, cropper); } @@ -266,7 +251,7 @@ export class Endpoint extends BaseEndpoint { cropper: boolean = false, rag: boolean = false, workflowId: string | undefined = undefined - ): Promise { + ): Promise { return this.sendFileForPrediction( input, "predict_async", @@ -282,36 +267,30 @@ export class Endpoint extends BaseEndpoint { * Make a request to GET the status of a document in the queue. 
* @param queueId */ - #documentQueueReqGet(queueId: string): Promise { - return new Promise((resolve, reject) => { - const options = { - method: "GET", - headers: this.settings.baseHeaders, - hostname: this.settings.hostname, - path: `${this.urlRoot}/documents/queue/${queueId}`, - }; - const req = BaseEndpoint.readResponse(options, resolve, reject); - // potential ECONNRESET if we don't end the request. - req.end(); - }); + async #documentQueueReqGet(queueId: string): Promise { + const options = { + method: "GET", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: `${this.urlRoot}/documents/queue/${queueId}`, + timeout: this.settings.timeout, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); } /** * Make a request to GET a document. * @param documentId */ - #documentGetReq(documentId: string): Promise { - return new Promise((resolve, reject) => { - const options = { - method: "GET", - headers: this.settings.baseHeaders, - hostname: this.settings.hostname, - path: `${this.urlRoot}/documents/${documentId}`, - }; - const req = BaseEndpoint.readResponse(options, resolve, reject); - // potential ECONNRESET if we don't end the request. 
- req.end(); - }); + async #documentGetReq(documentId: string): Promise { + const options = { + method: "GET", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: `${this.urlRoot}/documents/${documentId}`, + timeout: this.settings.timeout, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); } /** @@ -319,19 +298,15 @@ export class Endpoint extends BaseEndpoint { * @param documentId * @param feedback */ - #documentFeedbackPutReq(documentId: string, feedback: StringDict): Promise { - return new Promise((resolve, reject) => { - const options = { - method: "PUT", - headers: this.settings.baseHeaders, - hostname: this.settings.hostname, - path: `/v1/documents/${documentId}/feedback`, - }; - const req: ClientRequest = BaseEndpoint.readResponse(options, resolve, reject); - req.write(JSON.stringify(feedback)); - - // potential ECONNRESET if we don't end the request. - req.end(); - }); + async #documentFeedbackPutReq(documentId: string, feedback: StringDict): Promise { + const options = { + method: "PUT", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: `/v1/documents/${documentId}/feedback`, + body: JSON.stringify(feedback), + timeout: this.settings.timeout, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); } } diff --git a/src/http/error.ts b/src/v1/http/errors.ts similarity index 86% rename from src/http/error.ts rename to src/v1/http/errors.ts index 12dd9a2da..46ac67730 100644 --- a/src/http/error.ts +++ b/src/v1/http/errors.ts @@ -1,11 +1,11 @@ -import { MindeeError } from "../errors"; -import { errorHandler } from "../errors/handler"; -import { StringDict } from "../parsing/common"; -import { EndpointResponse } from "./baseEndpoint"; +import { MindeeError } from "@/errors/index.js"; +import { errorHandler } from "@/errors/handler.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { BaseHttpResponse } from 
"../../http/apiCore.js"; export function handleError( urlName: string, - response: EndpointResponse, + response: BaseHttpResponse, serverError?: string ): void { let code; @@ -99,7 +99,7 @@ export function handleError( errorToThrow = new MindeeHttp504Error(errorObj, urlName, code); break; default: - errorToThrow = new MindeeHttpError(errorObj, urlName, code); + errorToThrow = new MindeeHttpErrorV1(errorObj, urlName, code); break; } errorHandler.throw(errorToThrow); @@ -109,7 +109,7 @@ export function handleError( * `Error` wrapper for server (HTTP) errors. * Is used when an error is lacking a handled error code. */ -export class MindeeHttpError extends MindeeError { +export class MindeeHttpErrorV1 extends MindeeError { /** Description of the error. */ message: string = ""; /** Additional details on the error. */ @@ -130,7 +130,7 @@ export class MindeeHttpError extends MindeeError { * Generic client errors. * Can include errors like InvalidQuery. */ -export class MindeeHttp400Error extends MindeeHttpError { +export class MindeeHttp400Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp400Error"; @@ -140,7 +140,7 @@ export class MindeeHttp400Error extends MindeeHttpError { /** * Can include errors like NoTokenSet or InvalidToken. */ -export class MindeeHttp401Error extends MindeeHttpError { +export class MindeeHttp401Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp401Error"; @@ -151,14 +151,14 @@ export class MindeeHttp401Error extends MindeeHttpError { * Regular AccessForbidden error. * Can also include errors like PlanLimitReached, AsyncRequestDisallowed or SyncRequestDisallowed. 
*/ -export class MindeeHttp403Error extends MindeeHttpError { +export class MindeeHttp403Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp403Error"; } } -export class MindeeHttp404Error extends MindeeHttpError { +export class MindeeHttp404Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp404Error"; @@ -169,7 +169,7 @@ export class MindeeHttp404Error extends MindeeHttpError { * Rare error. * Can occasionally happen when unusually large documents are passed. */ -export class MindeeHttp413Error extends MindeeHttpError { +export class MindeeHttp413Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp413Error"; @@ -180,7 +180,7 @@ export class MindeeHttp413Error extends MindeeHttpError { * Usually corresponds to TooManyRequests errors. * Arises whenever too many calls to the API are made in quick succession. */ -export class MindeeHttp429Error extends MindeeHttpError { +export class MindeeHttp429Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp429Error"; @@ -190,7 +190,7 @@ export class MindeeHttp429Error extends MindeeHttpError { /** * Generic server errors. */ -export class MindeeHttp500Error extends MindeeHttpError { +export class MindeeHttp500Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp500Error"; @@ -201,7 +201,7 @@ export class MindeeHttp500Error extends MindeeHttpError { * Miscellaneous server errors. * Can include errors like RequestTimeout or GatewayTimeout. 
*/ -export class MindeeHttp504Error extends MindeeHttpError { +export class MindeeHttp504Error extends MindeeHttpErrorV1 { constructor(httpError: StringDict, urlName: string, code?: number) { super(httpError, urlName, code); this.name = "MindeeHttp504Error"; diff --git a/src/http/httpParams.ts b/src/v1/http/httpParams.ts similarity index 74% rename from src/http/httpParams.ts rename to src/v1/http/httpParams.ts index 9db0bd117..4cbab3377 100644 --- a/src/http/httpParams.ts +++ b/src/v1/http/httpParams.ts @@ -1,5 +1,5 @@ -import { InputSource, PageOptions } from "../input"; -import { ExecutionPriority } from "../parsing/common"; +import { InputSource, PageOptions } from "@/input/index.js"; +import { ExecutionPriority } from "@/v1/parsing/common/index.js"; interface HTTPParams { inputDoc: InputSource; diff --git a/src/v1/http/index.ts b/src/v1/http/index.ts new file mode 100644 index 000000000..77ff1c369 --- /dev/null +++ b/src/v1/http/index.ts @@ -0,0 +1,19 @@ +export { Endpoint } from "./endpoint.js"; +export { + STANDARD_API_OWNER, + ApiSettingsV1, +} from "./apiSettingsV1.js"; +export { + MindeeHttpErrorV1, + MindeeHttp400Error, + MindeeHttp401Error, + MindeeHttp403Error, + MindeeHttp404Error, + MindeeHttp413Error, + MindeeHttp429Error, + MindeeHttp500Error, + MindeeHttp504Error, + handleError, +} from "./errors.js"; +export { WorkflowEndpoint } from "./workflowEndpoint.js"; +export type { PredictParams, WorkflowParams } from "./httpParams.js"; diff --git a/src/http/responseValidation.ts b/src/v1/http/responseValidation.ts similarity index 90% rename from src/http/responseValidation.ts rename to src/v1/http/responseValidation.ts index e3bf926fb..8aa63ca9d 100644 --- a/src/http/responseValidation.ts +++ b/src/v1/http/responseValidation.ts @@ -1,4 +1,4 @@ -import { EndpointResponse } from "./baseEndpoint"; +import { BaseHttpResponse } from "../../http/apiCore.js"; /** * Checks if the synchronous response is valid. Returns True if the response is valid. 
@@ -6,7 +6,7 @@ import { EndpointResponse } from "./baseEndpoint"; * @param response an endpoint response object. * @returns bool */ -export function isValidSyncResponse(response: EndpointResponse): boolean { +export function isValidSyncResponse(response: BaseHttpResponse): boolean { if (!response.messageObj || !response.messageObj.statusCode) { return false; } @@ -33,7 +33,7 @@ export function isValidSyncResponse(response: EndpointResponse): boolean { * @param response an endpoint response object. * @returns bool */ -export function isValidAsyncResponse(response: EndpointResponse): boolean { +export function isValidAsyncResponse(response: BaseHttpResponse): boolean { if (!isValidSyncResponse(response)) { return false; } @@ -61,7 +61,7 @@ export function isValidAsyncResponse(response: EndpointResponse): boolean { * @param response an endpoint response object. * @returns EndpointResponse Returns the job error if the error is due to parsing, returns the http error otherwise. */ -export function cleanRequestData(response: EndpointResponse): EndpointResponse { +export function cleanRequestData(response: BaseHttpResponse): BaseHttpResponse { if (response.messageObj && response.messageObj.statusCode && ( diff --git a/src/v1/http/workflowEndpoint.ts b/src/v1/http/workflowEndpoint.ts new file mode 100644 index 000000000..7a4b5959f --- /dev/null +++ b/src/v1/http/workflowEndpoint.ts @@ -0,0 +1,118 @@ +import { URLSearchParams } from "url"; +import { InputSource, LocalInputSource } from "@/input/index.js"; +import { ExecutionPriority } from "@/v1/parsing/common/index.js"; +import { cutDocPages, sendRequestAndReadResponse, BaseHttpResponse } from "../../http/apiCore.js"; +import { ApiSettingsV1 } from "./apiSettingsV1.js"; +import { handleError } from "./errors.js"; +import { WorkflowParams } from "./httpParams.js"; +import { isValidSyncResponse } from "./responseValidation.js"; + +/** + * Endpoint for a workflow. 
+ */ +export class WorkflowEndpoint { + /** Settings relating to the API. */ + settings: ApiSettingsV1; + /** Root of the URL for API calls. */ + urlRoot: string; + + constructor( + settings: ApiSettingsV1, + workflowId: string + ) { + this.settings = settings; + this.urlRoot = `/v1/workflows/${workflowId}/executions`; + } + + /** + * Sends a document to a workflow execution. + * Throws an error if the server's response contains one. + * @param {WorkflowParams} params parameters relating to prediction options. + * @category Synchronous + * @returns a `Promise` containing parsing results. + */ + async executeWorkflow(params: WorkflowParams): Promise { + await params.inputDoc.init(); + if (params.pageOptions !== undefined) { + await cutDocPages(params.inputDoc, params.pageOptions); + } + const response = await this.#workflowReqPost(params); + if (!isValidSyncResponse(response)) { + handleError(this.urlRoot, response, response.messageObj?.statusMessage); + } + return response; + } + + /** + * Make a request to POST a document for workflow. + * + * @param {WorkflowParams} params parameters relating to prediction options. + */ + #workflowReqPost(params: WorkflowParams): Promise { + return this.sendFileForPrediction( + params.inputDoc, + params.alias, + params.priority, + params.fullText, + params.publicUrl, + params.rag + ); + } + + /** + * Send a file to a prediction API. 
+ * @param input + * @param alias + * @param priority + * @param fullText + * @param publicUrl + * @param rag + */ + protected async sendFileForPrediction( + input: InputSource, + alias: string | null = null, + priority: ExecutionPriority | null = null, + fullText: boolean = false, + publicUrl: string | null = null, + rag: boolean | null = null, + ): Promise { + const searchParams = new URLSearchParams(); + if (fullText) { + searchParams.set("full_text_ocr", "true"); + } + if (rag) { + searchParams.set("rag", "true"); + } + + const form = new FormData(); + if (input instanceof LocalInputSource && input.fileObject instanceof Buffer) { + form.set("document", new Blob([input.fileObject]), input.filename); + } else { + form.set("document", input.fileObject); + } + if (alias) { + form.set("alias", alias); + } + if (publicUrl) { + form.set("public_url", publicUrl); + } + if (priority) { + form.set("priority", priority.toString()); + } + + let path = this.urlRoot; + if (searchParams.toString().length > 0) { + path += `?${searchParams}`; + } + + const options = { + method: "POST", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: path, + timeout: this.settings.timeout, + body: form, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); + } +} diff --git a/src/v1/index.ts b/src/v1/index.ts new file mode 100644 index 000000000..a02639605 --- /dev/null +++ b/src/v1/index.ts @@ -0,0 +1,19 @@ +export * as extraction from "./extraction/index.js"; +export * as http from "./http/index.js"; +export * as parsing from "./parsing/index.js"; +export * as product from "./product/index.js"; +export { LocalResponse } from "./parsing/localResponse.js"; +export { Client } from "./client.js"; +export type { + OptionalAsyncOptions, + PredictOptions, + WorkflowOptions +} from "./client.js"; +export { + AsyncPredictResponse, + PredictResponse, + Inference, + Prediction, + Document, + Page, +} from "./parsing/common/index.js"; diff 
--git a/src/parsing/common/apiRequest.ts b/src/v1/parsing/common/apiRequest.ts similarity index 92% rename from src/parsing/common/apiRequest.ts rename to src/v1/parsing/common/apiRequest.ts index 5ce160a0d..3d28d3db6 100644 --- a/src/parsing/common/apiRequest.ts +++ b/src/v1/parsing/common/apiRequest.ts @@ -1,7 +1,7 @@ -import { StringDict } from "./stringDict"; +import { StringDict } from "@/parsing/stringDict.js"; /** * Holds the information relating to an API HTTP request. - * + * * @category API Response */ export class ApiRequest { diff --git a/src/parsing/common/apiResponse.ts b/src/v1/parsing/common/apiResponse.ts similarity index 86% rename from src/parsing/common/apiResponse.ts rename to src/v1/parsing/common/apiResponse.ts index 439000384..b900eccd1 100644 --- a/src/parsing/common/apiResponse.ts +++ b/src/v1/parsing/common/apiResponse.ts @@ -1,9 +1,9 @@ -import { ApiRequest } from "./apiRequest"; -import { StringDict } from "./stringDict"; +import { ApiRequest } from "./apiRequest.js"; +import { StringDict } from "@/parsing/stringDict.js"; /** Base wrapper for API requests. - * + * * @category API Response */ export abstract class ApiResponse { diff --git a/src/parsing/common/asyncPredictResponse.ts b/src/v1/parsing/common/asyncPredictResponse.ts similarity index 92% rename from src/parsing/common/asyncPredictResponse.ts rename to src/v1/parsing/common/asyncPredictResponse.ts index aee7b1447..ea4778923 100644 --- a/src/parsing/common/asyncPredictResponse.ts +++ b/src/v1/parsing/common/asyncPredictResponse.ts @@ -1,7 +1,7 @@ -import { ApiResponse } from "./apiResponse"; -import { StringDict } from "./stringDict"; -import { Inference } from "./inference"; -import { Document } from "./document"; +import { ApiResponse } from "./apiResponse.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Inference } from "./inference.js"; +import { Document } from "./document.js"; /** Wrapper for asynchronous request queues. 
Holds information regarding a job (queue). * diff --git a/src/parsing/common/document.ts b/src/v1/parsing/common/document.ts similarity index 92% rename from src/parsing/common/document.ts rename to src/v1/parsing/common/document.ts index d7a28bb4b..f9d8c4a86 100644 --- a/src/parsing/common/document.ts +++ b/src/v1/parsing/common/document.ts @@ -1,9 +1,8 @@ -import { CropperExtra, FullTextOcrExtra } from "./extras"; -import { ExtraField, Extras } from "./extras/extras"; -import { Inference } from "./inference"; -import { Ocr } from "./ocr"; -import { StringDict } from "./stringDict"; -import { RAGExtra } from "./extras/ragExtra"; +import { CropperExtra, FullTextOcrExtra, ExtraField, Extras } from "./extras/index.js"; +import { Inference } from "./inference.js"; +import { Ocr } from "./ocr.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { RAGExtra } from "./extras/ragExtra.js"; /** * Document prediction wrapper class. Holds the results of a parsed document. diff --git a/src/parsing/common/execution.ts b/src/v1/parsing/common/execution.ts similarity index 84% rename from src/parsing/common/execution.ts rename to src/v1/parsing/common/execution.ts index 24ff111ad..4560c122a 100644 --- a/src/parsing/common/execution.ts +++ b/src/v1/parsing/common/execution.ts @@ -1,9 +1,9 @@ -import { Inference } from "./inference"; -import { GeneratedV1Document } from "../../product/generated/generatedV1Document"; -import { ExecutionFile } from "./executionFile"; -import { StringDict } from "./stringDict"; -import { ExecutionPriority } from "./executionPriority"; -import { parseDate } from "./dateParser"; +import { Inference } from "./inference.js"; +import { GeneratedV1Document } from "@/v1/product/generated/generatedV1Document.js"; +import { ExecutionFile } from "./executionFile.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { ExecutionPriority } from "./executionPriority.js"; +import { parseDate } from "../../../parsing/dateParser.js"; /** * 
Representation of an execution for a workflow. diff --git a/src/parsing/common/executionFile.ts b/src/v1/parsing/common/executionFile.ts similarity index 86% rename from src/parsing/common/executionFile.ts rename to src/v1/parsing/common/executionFile.ts index 25dbdb6b3..b13530b3e 100644 --- a/src/parsing/common/executionFile.ts +++ b/src/v1/parsing/common/executionFile.ts @@ -1,4 +1,4 @@ -import { StringDict } from "./stringDict"; +import { StringDict } from "@/parsing/stringDict.js"; /** * Representation of an execution's file info. diff --git a/src/parsing/common/executionPriority.ts b/src/v1/parsing/common/executionPriority.ts similarity index 100% rename from src/parsing/common/executionPriority.ts rename to src/v1/parsing/common/executionPriority.ts diff --git a/src/parsing/common/extras/cropperExtra.ts b/src/v1/parsing/common/extras/cropperExtra.ts similarity index 74% rename from src/parsing/common/extras/cropperExtra.ts rename to src/v1/parsing/common/extras/cropperExtra.ts index 20934146a..1884972aa 100644 --- a/src/parsing/common/extras/cropperExtra.ts +++ b/src/v1/parsing/common/extras/cropperExtra.ts @@ -1,7 +1,7 @@ -import { PositionField } from "../../standard"; -import { StringDict } from "../stringDict"; -import { cleanOutString } from "../summaryHelper"; -import { ExtraField } from "./extras"; +import { PositionField } from "@/v1/parsing/standard/position.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { cleanOutString } from "../summaryHelper.js"; +import { ExtraField } from "./extras.js"; export class CropperExtra extends ExtraField { cropping: PositionField[] = []; diff --git a/src/parsing/common/extras/extras.ts b/src/v1/parsing/common/extras/extras.ts similarity index 100% rename from src/parsing/common/extras/extras.ts rename to src/v1/parsing/common/extras/extras.ts diff --git a/src/parsing/common/extras/fullTextOcrExtra.ts b/src/v1/parsing/common/extras/fullTextOcrExtra.ts similarity index 83% rename from 
src/parsing/common/extras/fullTextOcrExtra.ts rename to src/v1/parsing/common/extras/fullTextOcrExtra.ts index 217776ae7..eaea594ab 100644 --- a/src/parsing/common/extras/fullTextOcrExtra.ts +++ b/src/v1/parsing/common/extras/fullTextOcrExtra.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../stringDict"; -import { ExtraField } from "./extras"; +import { StringDict } from "@/parsing/stringDict.js"; +import { ExtraField } from "./extras.js"; export class FullTextOcrExtra extends ExtraField { content?: string; diff --git a/src/v1/parsing/common/extras/index.ts b/src/v1/parsing/common/extras/index.ts new file mode 100644 index 000000000..114fc7981 --- /dev/null +++ b/src/v1/parsing/common/extras/index.ts @@ -0,0 +1,4 @@ +export { CropperExtra } from "./cropperExtra.js"; +export { Extras, ExtraField } from "./extras.js"; +export { FullTextOcrExtra } from "./fullTextOcrExtra.js"; +export { RAGExtra } from "./ragExtra.js"; diff --git a/src/parsing/common/extras/ragExtra.ts b/src/v1/parsing/common/extras/ragExtra.ts similarity index 78% rename from src/parsing/common/extras/ragExtra.ts rename to src/v1/parsing/common/extras/ragExtra.ts index 38f520edf..563a1ff4b 100644 --- a/src/parsing/common/extras/ragExtra.ts +++ b/src/v1/parsing/common/extras/ragExtra.ts @@ -1,5 +1,5 @@ -import { ExtraField } from "./extras"; -import { StringDict } from "../stringDict"; +import { ExtraField } from "./extras.js"; +import { StringDict } from "@/parsing/stringDict.js"; export class RAGExtra extends ExtraField { /** diff --git a/src/parsing/common/feedback/feedbackResponse.ts b/src/v1/parsing/common/feedback/feedbackResponse.ts similarity index 80% rename from src/parsing/common/feedback/feedbackResponse.ts rename to src/v1/parsing/common/feedback/feedbackResponse.ts index db8cf4ea9..070114e3c 100644 --- a/src/parsing/common/feedback/feedbackResponse.ts +++ b/src/v1/parsing/common/feedback/feedbackResponse.ts @@ -1,5 +1,5 @@ -import { ApiResponse } from "../apiResponse"; -import { 
StringDict } from "../stringDict"; +import { ApiResponse } from "../apiResponse.js"; +import { StringDict } from "@/parsing/stringDict.js"; /** * Wrapper for feedback response. diff --git a/src/v1/parsing/common/index.ts b/src/v1/parsing/common/index.ts new file mode 100644 index 000000000..0b6084dcd --- /dev/null +++ b/src/v1/parsing/common/index.ts @@ -0,0 +1,18 @@ +export { Document } from "./document.js"; +export { Execution } from "./execution.js"; +export { ExecutionFile } from "./executionFile.js"; +export { ExecutionPriority } from "./executionPriority.js"; +export { Inference } from "./inference.js"; +export { FeedbackResponse } from "./feedback/feedbackResponse.js"; +export { OrientationField } from "./orientation.js"; +export type { StringDict } from "../../../parsing/stringDict.js"; +export { AsyncPredictResponse } from "./asyncPredictResponse.js"; +export { PredictResponse } from "./predictResponse.js"; +export { Prediction } from "./prediction.js"; +export { Page } from "./page.js"; +export { + cleanOutString, lineSeparator, floatToString, cleanSpecialChars +} from "./summaryHelper.js"; +export * as extras from "./extras/index.js"; +export { parseDate } from "../../../parsing/dateParser.js"; +export { WorkflowResponse } from "./workflowResponse.js"; diff --git a/src/parsing/common/inference.ts b/src/v1/parsing/common/inference.ts similarity index 87% rename from src/parsing/common/inference.ts rename to src/v1/parsing/common/inference.ts index 6cfa7826b..be25f5144 100644 --- a/src/parsing/common/inference.ts +++ b/src/v1/parsing/common/inference.ts @@ -1,10 +1,11 @@ -import { StringDict } from "../common"; -import { ExtraField, Extras } from "./extras/extras"; -import { Page } from "./page"; -import type { Prediction } from "./prediction"; -import { Product } from "./product"; -import { CropperExtra, FullTextOcrExtra } from "./extras"; -import { RAGExtra } from "./extras/ragExtra"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Page 
} from "./page.js"; +import type { Prediction } from "./prediction.js"; +import { Product } from "./product.js"; +import { + CropperExtra, FullTextOcrExtra, ExtraField, Extras, RAGExtra +} from "./extras/index.js"; +import { MindeeConfigurationError } from "@/errors/index.js"; /** * @@ -108,11 +109,6 @@ export class InferenceFactory { public static getEndpoint( inferenceClass: new (httpResponse: StringDict) => T ): [string, string] { - if (inferenceClass.name === "CustomV1") { - throw new Error( - "Cannot process custom endpoint as OTS API endpoints. Please provide an endpoint name & version manually." - ); - } const emptyProduct = new inferenceClass({ prediction: {}, pages: [], @@ -123,7 +119,7 @@ export class InferenceFactory { emptyProduct.endpointName.length === 0 || emptyProduct.endpointVersion.length === 0 ) { - throw new Error( + throw new MindeeConfigurationError( `Error during endpoint verification, no endpoint found for product ${inferenceClass.name}.` ); } diff --git a/src/parsing/common/mvisionV1.ts b/src/v1/parsing/common/mvisionV1.ts similarity index 72% rename from src/parsing/common/mvisionV1.ts rename to src/v1/parsing/common/mvisionV1.ts index f95aef126..ca392aa86 100644 --- a/src/parsing/common/mvisionV1.ts +++ b/src/v1/parsing/common/mvisionV1.ts @@ -1,6 +1,6 @@ -import { Word } from "../standard"; -import { StringDict } from "./stringDict"; -import { OcrPage } from "./ocrPage"; +import { Word } from "@/v1/parsing/standard/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { OcrPage } from "./ocrPage.js"; export class MvisionV1 { /** List of words found on the page. 
*/ diff --git a/src/parsing/common/ocr.ts b/src/v1/parsing/common/ocr.ts similarity index 74% rename from src/parsing/common/ocr.ts rename to src/v1/parsing/common/ocr.ts index 015967c94..477ea3940 100644 --- a/src/parsing/common/ocr.ts +++ b/src/v1/parsing/common/ocr.ts @@ -1,5 +1,5 @@ -import { StringDict } from "./stringDict"; -import { MvisionV1 } from "./mvisionV1"; +import { StringDict } from "@/parsing/stringDict.js"; +import { MvisionV1 } from "./mvisionV1.js"; export class Ocr { /** Default Mindee OCR */ diff --git a/src/parsing/common/ocrPage.ts b/src/v1/parsing/common/ocrPage.ts similarity index 94% rename from src/parsing/common/ocrPage.ts rename to src/v1/parsing/common/ocrPage.ts index b3ceacf3d..617e74a68 100644 --- a/src/parsing/common/ocrPage.ts +++ b/src/v1/parsing/common/ocrPage.ts @@ -3,9 +3,9 @@ import { compareOnY, getCentroid, isPointInPolygonY, -} from "../../geometry"; -import { Word } from "../standard"; -import { StringDict } from "./stringDict"; +} from "@/geometry/index.js"; +import { Word } from "@/v1/parsing/standard/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; export class OcrPage { /** Flat list of all words read */ diff --git a/src/parsing/common/orientation.ts b/src/v1/parsing/common/orientation.ts similarity index 91% rename from src/parsing/common/orientation.ts rename to src/v1/parsing/common/orientation.ts index f74bb6a79..967555c85 100644 --- a/src/parsing/common/orientation.ts +++ b/src/v1/parsing/common/orientation.ts @@ -1,4 +1,4 @@ -import { BaseField, BaseFieldConstructor } from "../standard"; +import { BaseField, BaseFieldConstructor } from "@/v1/parsing/standard/index.js"; interface OrientationFieldConstructor extends BaseFieldConstructor { pageId: number; diff --git a/src/parsing/common/page.ts b/src/v1/parsing/common/page.ts similarity index 93% rename from src/parsing/common/page.ts rename to src/v1/parsing/common/page.ts index 5a592d81d..206c44954 100644 --- a/src/parsing/common/page.ts +++ 
b/src/v1/parsing/common/page.ts @@ -1,8 +1,7 @@ -import { CropperExtra, FullTextOcrExtra } from "./extras"; -import { ExtraField, Extras } from "./extras/extras"; -import { OrientationField } from "./orientation"; -import { Prediction } from "./prediction"; -import { StringDict } from "./stringDict"; +import { CropperExtra, FullTextOcrExtra, ExtraField, Extras } from "./extras/index.js"; +import { OrientationField } from "./orientation.js"; +import { Prediction } from "./prediction.js"; +import { StringDict } from "@/parsing/stringDict.js"; /** diff --git a/src/parsing/common/predictResponse.ts b/src/v1/parsing/common/predictResponse.ts similarity index 75% rename from src/parsing/common/predictResponse.ts rename to src/v1/parsing/common/predictResponse.ts index 4aac4cc40..b8ad6a41b 100644 --- a/src/parsing/common/predictResponse.ts +++ b/src/v1/parsing/common/predictResponse.ts @@ -1,5 +1,7 @@ -import { ApiResponse } from "./apiResponse"; -import { Document, Inference, StringDict } from "."; +import { ApiResponse } from "./apiResponse.js"; +import { Document } from "./document.js"; +import { Inference } from "./inference.js"; +import { StringDict } from "../../../parsing/stringDict.js"; /** Wrapper for synchronous prediction response. 
* diff --git a/src/parsing/common/prediction.ts b/src/v1/parsing/common/prediction.ts similarity index 100% rename from src/parsing/common/prediction.ts rename to src/v1/parsing/common/prediction.ts diff --git a/src/parsing/common/product.ts b/src/v1/parsing/common/product.ts similarity index 100% rename from src/parsing/common/product.ts rename to src/v1/parsing/common/product.ts diff --git a/src/parsing/common/summaryHelper.ts b/src/v1/parsing/common/summaryHelper.ts similarity index 100% rename from src/parsing/common/summaryHelper.ts rename to src/v1/parsing/common/summaryHelper.ts diff --git a/src/parsing/common/workflowResponse.ts b/src/v1/parsing/common/workflowResponse.ts similarity index 74% rename from src/parsing/common/workflowResponse.ts rename to src/v1/parsing/common/workflowResponse.ts index b2b907fa7..56c4b6b66 100644 --- a/src/parsing/common/workflowResponse.ts +++ b/src/v1/parsing/common/workflowResponse.ts @@ -1,7 +1,7 @@ -import { StringDict } from "./stringDict"; -import { ApiResponse } from "./apiResponse"; -import { Execution } from "./execution"; -import { Inference } from "./inference"; +import { StringDict } from "@/parsing/stringDict.js"; +import { ApiResponse } from "./apiResponse.js"; +import { Execution } from "./execution.js"; +import { Inference } from "./inference.js"; /** Wrapper for workflow requests. 
diff --git a/src/parsing/generated/generatedList.ts b/src/v1/parsing/generated/generatedList.ts similarity index 92% rename from src/parsing/generated/generatedList.ts rename to src/v1/parsing/generated/generatedList.ts index f225ae331..86c966148 100644 --- a/src/parsing/generated/generatedList.ts +++ b/src/v1/parsing/generated/generatedList.ts @@ -1,6 +1,6 @@ -import { StringDict } from "../common"; -import { StringField } from "../standard"; -import { GeneratedObjectField, isGeneratedObject } from "./generatedObject"; +import { StringDict } from "@/parsing/stringDict.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; +import { GeneratedObjectField, isGeneratedObject } from "./generatedObject.js"; export interface GeneratedListFieldConstructor { prediction: StringDict[]; diff --git a/src/parsing/generated/generatedObject.ts b/src/v1/parsing/generated/generatedObject.ts similarity index 96% rename from src/parsing/generated/generatedObject.ts rename to src/v1/parsing/generated/generatedObject.ts index 0e989664c..6fb592eb2 100644 --- a/src/parsing/generated/generatedObject.ts +++ b/src/v1/parsing/generated/generatedObject.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../common"; -import { BaseFieldConstructor, PositionField } from "../standard"; +import { StringDict } from "@/parsing/stringDict.js"; +import { BaseFieldConstructor, PositionField } from "@/v1/parsing/standard/index.js"; /** A JSON-like object, with miscellaneous values. 
*/ diff --git a/src/v1/parsing/generated/index.ts b/src/v1/parsing/generated/index.ts new file mode 100644 index 000000000..cc6ab17b1 --- /dev/null +++ b/src/v1/parsing/generated/index.ts @@ -0,0 +1,2 @@ +export { GeneratedObjectField, isGeneratedObject } from "./generatedObject.js"; +export { GeneratedListField } from "./generatedList.js"; diff --git a/src/v1/parsing/index.ts b/src/v1/parsing/index.ts new file mode 100644 index 000000000..b2fa95a5a --- /dev/null +++ b/src/v1/parsing/index.ts @@ -0,0 +1,4 @@ +export * as common from "./common/index.js"; +export * as generated from "./generated/index.js"; +export * as standard from "./standard/index.js"; +export { LocalResponse } from "./localResponse.js"; diff --git a/src/v1/parsing/localResponse.ts b/src/v1/parsing/localResponse.ts new file mode 100644 index 000000000..9ba58346d --- /dev/null +++ b/src/v1/parsing/localResponse.ts @@ -0,0 +1,32 @@ +import { LocalResponseBase } from "@/parsing/localResponseBase.js"; +import { AsyncPredictResponse, Inference, PredictResponse } from "@/v1/index.js"; +import { StringDict } from "@/parsing/index.js"; +import { MindeeError } from "@/errors/index.js"; + +/** + * Local response loaded from a file. + * Note: Has to be initialized through init() before use. + */ +export class LocalResponse extends LocalResponseBase { + async loadPrediction( + productClass: new (httpResponse: StringDict) => T + ) { + /** + * Load a prediction. + * + * @param productClass Product class to use for calling the API and parsing the response. + * @param localResponse Local response to load. 
+ * @category Asynchronous + * @returns A valid prediction + */ + try { + const asDict = await this.asDict(); + if (Object.prototype.hasOwnProperty.call(asDict, "job")) { + return new AsyncPredictResponse(productClass, asDict); + } + return new PredictResponse(productClass, asDict); + } catch { + throw new MindeeError("No prediction found in local response."); + } + } +} diff --git a/src/parsing/standard/addressField.ts b/src/v1/parsing/standard/addressField.ts similarity index 96% rename from src/parsing/standard/addressField.ts rename to src/v1/parsing/standard/addressField.ts index f8b4901d8..bacbdbc0e 100644 --- a/src/parsing/standard/addressField.ts +++ b/src/v1/parsing/standard/addressField.ts @@ -1,4 +1,4 @@ -import { StringField, FieldConstructor } from "./text"; +import { StringField, FieldConstructor } from "./text.js"; /** * A field containing a detailed address value broken down into components diff --git a/src/parsing/standard/amount.ts b/src/v1/parsing/standard/amount.ts similarity index 83% rename from src/parsing/standard/amount.ts rename to src/v1/parsing/standard/amount.ts index 1976555cb..af4892c04 100644 --- a/src/parsing/standard/amount.ts +++ b/src/v1/parsing/standard/amount.ts @@ -1,6 +1,6 @@ -import { Field } from "./field"; -import { BaseFieldConstructor } from "./base"; -import { floatToString } from "../common"; +import { Field } from "./field.js"; +import { BaseFieldConstructor } from "./base.js"; +import { floatToString } from "@/v1/parsing/common/index.js"; /** * A field containing an amount value. 
diff --git a/src/parsing/standard/base.ts b/src/v1/parsing/standard/base.ts similarity index 97% rename from src/parsing/standard/base.ts rename to src/v1/parsing/standard/base.ts index b0339d483..f0d6a73fd 100644 --- a/src/parsing/standard/base.ts +++ b/src/v1/parsing/standard/base.ts @@ -1,4 +1,4 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; /** * @property {object} prediction - Prediction object from HTTP response. diff --git a/src/parsing/standard/boolean.ts b/src/v1/parsing/standard/boolean.ts similarity index 87% rename from src/parsing/standard/boolean.ts rename to src/v1/parsing/standard/boolean.ts index 83a767aeb..f96e395bb 100644 --- a/src/parsing/standard/boolean.ts +++ b/src/v1/parsing/standard/boolean.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../common"; -import { Field } from "./field"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Field } from "./field.js"; export interface FieldConstructor { prediction: StringDict; diff --git a/src/parsing/standard/classification.ts b/src/v1/parsing/standard/classification.ts similarity index 89% rename from src/parsing/standard/classification.ts rename to src/v1/parsing/standard/classification.ts index 865f22bd7..d33cdfbbd 100644 --- a/src/parsing/standard/classification.ts +++ b/src/v1/parsing/standard/classification.ts @@ -1,4 +1,4 @@ -import { BaseField, BaseFieldConstructor } from "./base"; +import { BaseField, BaseFieldConstructor } from "./base.js"; /** * Represents a classifier value. 
diff --git a/src/parsing/standard/companyRegistration.ts b/src/v1/parsing/standard/companyRegistration.ts similarity index 87% rename from src/parsing/standard/companyRegistration.ts rename to src/v1/parsing/standard/companyRegistration.ts index b315f9ac7..d11fae3d8 100644 --- a/src/parsing/standard/companyRegistration.ts +++ b/src/v1/parsing/standard/companyRegistration.ts @@ -1,6 +1,6 @@ -import { Field } from "./field"; -import { BaseFieldConstructor } from "./base"; -import { cleanOutString } from "../common"; +import { Field } from "./field.js"; +import { BaseFieldConstructor } from "./base.js"; +import { cleanOutString } from "@/v1/parsing/common/index.js"; /** * A company registration item. diff --git a/src/parsing/standard/date.ts b/src/v1/parsing/standard/date.ts similarity index 93% rename from src/parsing/standard/date.ts rename to src/v1/parsing/standard/date.ts index ff295910d..f7b18e317 100644 --- a/src/parsing/standard/date.ts +++ b/src/v1/parsing/standard/date.ts @@ -1,5 +1,5 @@ -import { Field } from "./field"; -import { BaseFieldConstructor } from "./base"; +import { Field } from "./field.js"; +import { BaseFieldConstructor } from "./base.js"; /** * A field containing a date value. diff --git a/src/parsing/standard/field.ts b/src/v1/parsing/standard/field.ts similarity index 93% rename from src/parsing/standard/field.ts rename to src/v1/parsing/standard/field.ts index 2fe81230c..7bb34c6f5 100644 --- a/src/parsing/standard/field.ts +++ b/src/v1/parsing/standard/field.ts @@ -1,5 +1,5 @@ -import { BaseField, BaseFieldConstructor } from "./base"; -import { Polygon, BoundingBox, getBoundingBox } from "../../geometry"; +import { BaseField, BaseFieldConstructor } from "./base.js"; +import { Polygon, BoundingBox, getBoundingBox } from "@/geometry/index.js"; /** * A basic field with position and page information. 
diff --git a/src/v1/parsing/standard/index.ts b/src/v1/parsing/standard/index.ts new file mode 100644 index 000000000..19a4b41a9 --- /dev/null +++ b/src/v1/parsing/standard/index.ts @@ -0,0 +1,15 @@ +export { AddressField } from "./addressField.js"; +export { AmountField } from "./amount.js"; +export { BaseField } from "./base.js"; +export type { BaseFieldConstructor } from "./base.js"; +export { BooleanField } from "./boolean.js"; +export { ClassificationField } from "./classification.js"; +export { CompanyRegistrationField } from "./companyRegistration.js"; +export { DateField } from "./date.js"; +export { LocaleField } from "./locale.js"; +export { Field } from "./field.js"; +export { Taxes, TaxField } from "./tax.js"; +export { StringField } from "./text.js"; +export { PaymentDetailsField } from "./paymentDetails.js"; +export { PositionField } from "./position.js"; +export type { Word } from "./word.js"; diff --git a/src/parsing/standard/locale.ts b/src/v1/parsing/standard/locale.ts similarity index 96% rename from src/parsing/standard/locale.ts rename to src/v1/parsing/standard/locale.ts index 02a49a156..f5a41ba5a 100644 --- a/src/parsing/standard/locale.ts +++ b/src/v1/parsing/standard/locale.ts @@ -1,4 +1,4 @@ -import { BaseField, BaseFieldConstructor } from "./base"; +import { BaseField, BaseFieldConstructor } from "./base.js"; /** * The locale detected on the document. 
diff --git a/src/parsing/standard/paymentDetails.ts b/src/v1/parsing/standard/paymentDetails.ts similarity index 97% rename from src/parsing/standard/paymentDetails.ts rename to src/v1/parsing/standard/paymentDetails.ts index d412d02fa..b56c6fda4 100644 --- a/src/parsing/standard/paymentDetails.ts +++ b/src/v1/parsing/standard/paymentDetails.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../common"; -import { Field } from "./field"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Field } from "./field.js"; /** * @property {StringDict} constructor.prediction - Prediction object from HTTP response diff --git a/src/parsing/standard/position.ts b/src/v1/parsing/standard/position.ts similarity index 94% rename from src/parsing/standard/position.ts rename to src/v1/parsing/standard/position.ts index a9835f7e4..428c6e712 100644 --- a/src/parsing/standard/position.ts +++ b/src/v1/parsing/standard/position.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../common"; -import { Polygon } from "../../geometry"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; export interface PositionFieldConstructor { prediction: StringDict; diff --git a/src/parsing/standard/tax.ts b/src/v1/parsing/standard/tax.ts similarity index 95% rename from src/parsing/standard/tax.ts rename to src/v1/parsing/standard/tax.ts index 316c877f2..4e63d3cae 100644 --- a/src/parsing/standard/tax.ts +++ b/src/v1/parsing/standard/tax.ts @@ -1,6 +1,6 @@ -import { floatToString, StringDict } from "../common"; -import { Field } from "./field"; -import { BaseFieldConstructor } from "./base"; +import { floatToString, StringDict } from "@/v1/parsing/common/index.js"; +import { Field } from "./field.js"; +import { BaseFieldConstructor } from "./base.js"; /** * @property {string} constructor.rateKey - Key to use to get the tax rate in the prediction dict. 
diff --git a/src/parsing/standard/text.ts b/src/v1/parsing/standard/text.ts similarity index 87% rename from src/parsing/standard/text.ts rename to src/v1/parsing/standard/text.ts index 6fe97c58b..908815113 100644 --- a/src/parsing/standard/text.ts +++ b/src/v1/parsing/standard/text.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../common"; -import { Field } from "./field"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Field } from "./field.js"; export interface FieldConstructor { prediction: StringDict; diff --git a/src/parsing/standard/word.ts b/src/v1/parsing/standard/word.ts similarity index 81% rename from src/parsing/standard/word.ts rename to src/v1/parsing/standard/word.ts index 9b66ba31e..cb093b0ae 100644 --- a/src/parsing/standard/word.ts +++ b/src/v1/parsing/standard/word.ts @@ -1,4 +1,4 @@ -import * as geometry from "../../geometry"; +import * as geometry from "@/geometry/index.js"; export type Word = { /** diff --git a/src/product/barcodeReader/barcodeReaderV1.ts b/src/v1/product/barcodeReader/barcodeReaderV1.ts similarity index 92% rename from src/product/barcodeReader/barcodeReaderV1.ts rename to src/v1/product/barcodeReader/barcodeReaderV1.ts index a804105e4..b5e9d0223 100644 --- a/src/product/barcodeReader/barcodeReaderV1.ts +++ b/src/v1/product/barcodeReader/barcodeReaderV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { BarcodeReaderV1Document } from "./barcodeReaderV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { BarcodeReaderV1Document } from "./barcodeReaderV1Document.js"; /** * Barcode Reader API version 1 inference prediction. 
diff --git a/src/product/barcodeReader/barcodeReaderV1Document.ts b/src/v1/product/barcodeReader/barcodeReaderV1Document.ts similarity index 92% rename from src/product/barcodeReader/barcodeReaderV1Document.ts rename to src/v1/product/barcodeReader/barcodeReaderV1Document.ts index 0b3819562..d37a647ed 100644 --- a/src/product/barcodeReader/barcodeReaderV1Document.ts +++ b/src/v1/product/barcodeReader/barcodeReaderV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString, -} from "../../parsing/common"; -import { StringField } from "../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; /** * Barcode Reader API version 1.0 document data. diff --git a/src/product/barcodeReader/internal.ts b/src/v1/product/barcodeReader/index.ts similarity index 50% rename from src/product/barcodeReader/internal.ts rename to src/v1/product/barcodeReader/index.ts index e37102d33..5917e1487 100644 --- a/src/product/barcodeReader/internal.ts +++ b/src/v1/product/barcodeReader/index.ts @@ -1,2 +1,2 @@ -export { BarcodeReaderV1 } from "./barcodeReaderV1"; -export { BarcodeReaderV1Document } from "./barcodeReaderV1Document"; +export { BarcodeReaderV1 } from "./barcodeReaderV1.js"; +export { BarcodeReaderV1Document } from "./barcodeReaderV1Document.js"; diff --git a/src/cliProducts.ts b/src/v1/product/cliProducts.ts similarity index 57% rename from src/cliProducts.ts rename to src/v1/product/cliProducts.ts index 0dcb9400a..fb7d82fc7 100644 --- a/src/cliProducts.ts +++ b/src/v1/product/cliProducts.ts @@ -4,10 +4,9 @@ // The Map's key is the command name as it will appear on the console. 
// -import * as product from "./product"; -import { Inference, StringDict } from "./parsing/common"; +import * as product from "@/v1/product/index.js"; +import { Inference, StringDict } from "@/v1/parsing/common/index.js"; -export const COMMAND_CUSTOM = "custom"; export const COMMAND_GENERATED = "generated"; export interface ProductConfig { @@ -19,16 +18,6 @@ export interface ProductConfig { } export const CLI_COMMAND_CONFIG = new Map([ - [ - COMMAND_CUSTOM, - { - displayName: "Custom Document", - docClass: product.CustomV1, - allWords: false, - async: false, - sync: true, - }, - ], [ COMMAND_GENERATED, { @@ -49,26 +38,6 @@ export const CLI_COMMAND_CONFIG = new Map([ sync: true, }, ], - [ - "bill-of-lading", - { - displayName: "Bill of Lading", - docClass: product.BillOfLadingV1, - allWords: false, - async: true, - sync: false, - }, - ], - [ - "business-card", - { - displayName: "Business Card", - docClass: product.BusinessCardV1, - allWords: false, - async: true, - sync: false, - }, - ], [ "cropper", { @@ -79,16 +48,6 @@ export const CLI_COMMAND_CONFIG = new Map([ sync: true, }, ], - [ - "delivery-note", - { - displayName: "Delivery note", - docClass: product.DeliveryNoteV1, - allWords: false, - async: true, - sync: false, - }, - ], [ "driver-license", { @@ -119,36 +78,6 @@ export const CLI_COMMAND_CONFIG = new Map([ sync: true, }, ], - [ - "fr-carte-grise", - { - displayName: "FR Carte Grise", - docClass: product.fr.CarteGriseV1, - allWords: false, - async: false, - sync: true, - }, - ], - [ - "fr-energy-bill", - { - displayName: "FR Energy Bill", - docClass: product.fr.EnergyBillV1, - allWords: false, - async: true, - sync: false, - }, - ], - [ - "fr-health-card", - { - displayName: "FR Health Card", - docClass: product.fr.HealthCardV1, - allWords: false, - async: true, - sync: false, - }, - ], [ "fr-carte-nationale-d-identite", { @@ -159,26 +88,6 @@ export const CLI_COMMAND_CONFIG = new Map([ sync: true, }, ], - [ - "fr-payslip", - { - displayName: "FR 
Payslip", - docClass: product.fr.PayslipV3, - allWords: false, - async: true, - sync: false, - }, - ], - [ - "ind-passport-india", - { - displayName: "IND Passport - India", - docClass: product.ind.IndianPassportV1, - allWords: false, - async: true, - sync: false, - }, - ], [ "international-id", { @@ -219,16 +128,6 @@ export const CLI_COMMAND_CONFIG = new Map([ sync: true, }, ], - [ - "nutrition-facts-label", - { - displayName: "Nutrition Facts Label", - docClass: product.NutritionFactsLabelV1, - allWords: false, - async: true, - sync: false, - }, - ], [ "passport", { @@ -269,24 +168,4 @@ export const CLI_COMMAND_CONFIG = new Map([ sync: true, }, ], - [ - "us-healthcare-card", - { - displayName: "US Healthcare Card", - docClass: product.us.HealthcareCardV1, - allWords: false, - async: true, - sync: false, - }, - ], - [ - "us-us-mail", - { - displayName: "US US Mail", - docClass: product.us.UsMailV3, - allWords: false, - async: true, - sync: false, - }, - ], ]); diff --git a/src/product/cropper/cropperV1.ts b/src/v1/product/cropper/cropperV1.ts similarity index 81% rename from src/product/cropper/cropperV1.ts rename to src/v1/product/cropper/cropperV1.ts index 2968138cf..2383a6cea 100644 --- a/src/product/cropper/cropperV1.ts +++ b/src/v1/product/cropper/cropperV1.ts @@ -1,6 +1,6 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { CropperV1Document } from "./cropperV1Document"; -import { CropperV1Page } from "./cropperV1Page"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { CropperV1Document } from "./cropperV1Document.js"; +import { CropperV1Page } from "./cropperV1Page.js"; /** * Cropper API version 1 inference prediction. 
diff --git a/src/product/cropper/cropperV1Document.ts b/src/v1/product/cropper/cropperV1Document.ts similarity index 77% rename from src/product/cropper/cropperV1Document.ts rename to src/v1/product/cropper/cropperV1Document.ts index bc7a85c63..82342acd3 100644 --- a/src/product/cropper/cropperV1Document.ts +++ b/src/v1/product/cropper/cropperV1Document.ts @@ -1,4 +1,4 @@ -import { Prediction } from "../../parsing/common"; +import { Prediction } from "@/v1/parsing/common/index.js"; /** diff --git a/src/product/cropper/cropperV1Page.ts b/src/v1/product/cropper/cropperV1Page.ts similarity index 77% rename from src/product/cropper/cropperV1Page.ts rename to src/v1/product/cropper/cropperV1Page.ts index de1b0a663..39b43e6e5 100644 --- a/src/product/cropper/cropperV1Page.ts +++ b/src/v1/product/cropper/cropperV1Page.ts @@ -1,7 +1,7 @@ -import { StringDict, cleanOutString } from "../../parsing/common"; -import { PositionField } from "../../parsing/standard"; +import { StringDict, cleanOutString } from "@/v1/parsing/common/index.js"; +import { PositionField } from "@/v1/parsing/standard/index.js"; -import { CropperV1Document } from "./cropperV1Document"; +import { CropperV1Document } from "./cropperV1Document.js"; /** * Cropper API version 1.1 page data. 
diff --git a/src/v1/product/cropper/index.ts b/src/v1/product/cropper/index.ts new file mode 100644 index 000000000..c99760e58 --- /dev/null +++ b/src/v1/product/cropper/index.ts @@ -0,0 +1,3 @@ +export { CropperV1 } from "./cropperV1.js"; +export { CropperV1Document } from "./cropperV1Document.js"; +export { CropperV1Page } from "./cropperV1Page.js"; diff --git a/src/product/driverLicense/driverLicenseV1.ts b/src/v1/product/driverLicense/driverLicenseV1.ts similarity index 92% rename from src/product/driverLicense/driverLicenseV1.ts rename to src/v1/product/driverLicense/driverLicenseV1.ts index 1ccd03ce6..e1c5d52eb 100644 --- a/src/product/driverLicense/driverLicenseV1.ts +++ b/src/v1/product/driverLicense/driverLicenseV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { DriverLicenseV1Document } from "./driverLicenseV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { DriverLicenseV1Document } from "./driverLicenseV1Document.js"; /** * Driver License API version 1 inference prediction. diff --git a/src/product/driverLicense/driverLicenseV1Document.ts b/src/v1/product/driverLicense/driverLicenseV1Document.ts similarity index 96% rename from src/product/driverLicense/driverLicenseV1Document.ts rename to src/v1/product/driverLicense/driverLicenseV1Document.ts index 559dcd5a5..8a75b4f37 100644 --- a/src/product/driverLicense/driverLicenseV1Document.ts +++ b/src/v1/product/driverLicense/driverLicenseV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString, -} from "../../parsing/common"; -import { DateField, StringField } from "../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { DateField, StringField } from "@/v1/parsing/standard/index.js"; /** * Driver License API version 1.0 document data. 
diff --git a/src/product/driverLicense/internal.ts b/src/v1/product/driverLicense/index.ts similarity index 50% rename from src/product/driverLicense/internal.ts rename to src/v1/product/driverLicense/index.ts index b3a91e930..eaa3cc474 100644 --- a/src/product/driverLicense/internal.ts +++ b/src/v1/product/driverLicense/index.ts @@ -1,2 +1,2 @@ -export { DriverLicenseV1 } from "./driverLicenseV1"; -export { DriverLicenseV1Document } from "./driverLicenseV1Document"; +export { DriverLicenseV1 } from "./driverLicenseV1.js"; +export { DriverLicenseV1Document } from "./driverLicenseV1Document.js"; diff --git a/src/product/financialDocument/financialDocumentV1.ts b/src/v1/product/financialDocument/financialDocumentV1.ts similarity index 91% rename from src/product/financialDocument/financialDocumentV1.ts rename to src/v1/product/financialDocument/financialDocumentV1.ts index 6b27a3ee0..b8216510a 100644 --- a/src/product/financialDocument/financialDocumentV1.ts +++ b/src/v1/product/financialDocument/financialDocumentV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { FinancialDocumentV1Document } from "./financialDocumentV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { FinancialDocumentV1Document } from "./financialDocumentV1Document.js"; /** * Financial Document API version 1 inference prediction. 
diff --git a/src/product/financialDocument/financialDocumentV1Document.ts b/src/v1/product/financialDocument/financialDocumentV1Document.ts similarity index 99% rename from src/product/financialDocument/financialDocumentV1Document.ts rename to src/v1/product/financialDocument/financialDocumentV1Document.ts index d9e0a78b9..91e0e88f8 100644 --- a/src/product/financialDocument/financialDocumentV1Document.ts +++ b/src/v1/product/financialDocument/financialDocumentV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString,lineSeparator, -} from "../../parsing/common"; -import { FinancialDocumentV1LineItem } from "./financialDocumentV1LineItem"; +} from "@/v1/parsing/common/index.js"; +import { FinancialDocumentV1LineItem } from "./financialDocumentV1LineItem.js"; import { AddressField, AmountField, @@ -14,7 +14,7 @@ import { PaymentDetailsField, StringField, Taxes, -} from "../../parsing/standard"; +} from "@/v1/parsing/standard/index.js"; /** * Financial Document API version 1.14 document data. diff --git a/src/product/financialDocument/financialDocumentV1LineItem.ts b/src/v1/product/financialDocument/financialDocumentV1LineItem.ts similarity index 96% rename from src/product/financialDocument/financialDocumentV1LineItem.ts rename to src/v1/product/financialDocument/financialDocumentV1LineItem.ts index dc5caa642..1ec5a2fc1 100644 --- a/src/product/financialDocument/financialDocumentV1LineItem.ts +++ b/src/v1/product/financialDocument/financialDocumentV1LineItem.ts @@ -1,6 +1,6 @@ -import { cleanSpecialChars, floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars, floatToString } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * List of line item present on the document. 
diff --git a/src/product/financialDocument/internal.ts b/src/v1/product/financialDocument/index.ts similarity index 57% rename from src/product/financialDocument/internal.ts rename to src/v1/product/financialDocument/index.ts index 395078661..1fdd2f1d1 100644 --- a/src/product/financialDocument/internal.ts +++ b/src/v1/product/financialDocument/index.ts @@ -1,3 +1,3 @@ -export { FinancialDocumentV1 } from "./financialDocumentV1"; -export { FinancialDocumentV1Document } from "./financialDocumentV1Document"; -export { FinancialDocumentV1LineItem } from "./financialDocumentV1LineItem"; +export { FinancialDocumentV1 } from "./financialDocumentV1.js"; +export { FinancialDocumentV1Document } from "./financialDocumentV1Document.js"; +export { FinancialDocumentV1LineItem } from "./financialDocumentV1LineItem.js"; diff --git a/src/product/fr/bankAccountDetails/bankAccountDetailsV1.ts b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV1.ts similarity index 91% rename from src/product/fr/bankAccountDetails/bankAccountDetailsV1.ts rename to src/v1/product/fr/bankAccountDetails/bankAccountDetailsV1.ts index 40ba7aef7..d38530b96 100644 --- a/src/product/fr/bankAccountDetails/bankAccountDetailsV1.ts +++ b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { BankAccountDetailsV1Document } from "./bankAccountDetailsV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { BankAccountDetailsV1Document } from "./bankAccountDetailsV1Document.js"; /** * Bank Account Details API version 1 inference prediction. 
diff --git a/src/product/fr/bankAccountDetails/bankAccountDetailsV1Document.ts b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV1Document.ts similarity index 91% rename from src/product/fr/bankAccountDetails/bankAccountDetailsV1Document.ts rename to src/v1/product/fr/bankAccountDetails/bankAccountDetailsV1Document.ts index 3f1e373c8..310af901f 100644 --- a/src/product/fr/bankAccountDetails/bankAccountDetailsV1Document.ts +++ b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString, -} from "../../../parsing/common"; -import { StringField } from "../../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; /** * Bank Account Details API version 1.0 document data. diff --git a/src/product/fr/bankAccountDetails/bankAccountDetailsV2.ts b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2.ts similarity index 91% rename from src/product/fr/bankAccountDetails/bankAccountDetailsV2.ts rename to src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2.ts index 71118cef0..2eb6a721e 100644 --- a/src/product/fr/bankAccountDetails/bankAccountDetailsV2.ts +++ b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { BankAccountDetailsV2Document } from "./bankAccountDetailsV2Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { BankAccountDetailsV2Document } from "./bankAccountDetailsV2Document.js"; /** * Bank Account Details API version 2 inference prediction. 
diff --git a/src/product/fr/bankAccountDetails/bankAccountDetailsV2Bban.ts b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2Bban.ts similarity index 95% rename from src/product/fr/bankAccountDetails/bankAccountDetailsV2Bban.ts rename to src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2Bban.ts index 6f464d633..a2096a174 100644 --- a/src/product/fr/bankAccountDetails/bankAccountDetailsV2Bban.ts +++ b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2Bban.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../../../parsing/common"; -import { Polygon } from "../../../geometry"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * Full extraction of BBAN, including: branch code, bank code, account and key. diff --git a/src/product/fr/bankAccountDetails/bankAccountDetailsV2Document.ts b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2Document.ts similarity index 92% rename from src/product/fr/bankAccountDetails/bankAccountDetailsV2Document.ts rename to src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2Document.ts index 422e139e0..c96053be2 100644 --- a/src/product/fr/bankAccountDetails/bankAccountDetailsV2Document.ts +++ b/src/v1/product/fr/bankAccountDetails/bankAccountDetailsV2Document.ts @@ -2,9 +2,9 @@ import { Prediction, StringDict, cleanOutString, -} from "../../../parsing/common"; -import { BankAccountDetailsV2Bban } from "./bankAccountDetailsV2Bban"; -import { StringField } from "../../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { BankAccountDetailsV2Bban } from "./bankAccountDetailsV2Bban.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; /** * Bank Account Details API version 2.0 document data. 
diff --git a/src/product/fr/bankAccountDetails/internal.ts b/src/v1/product/fr/bankAccountDetails/index.ts similarity index 51% rename from src/product/fr/bankAccountDetails/internal.ts rename to src/v1/product/fr/bankAccountDetails/index.ts index b39cb7cf2..80219ea3a 100644 --- a/src/product/fr/bankAccountDetails/internal.ts +++ b/src/v1/product/fr/bankAccountDetails/index.ts @@ -1,5 +1,5 @@ -export { BankAccountDetailsV1 } from "./bankAccountDetailsV1"; -export { BankAccountDetailsV1Document } from "./bankAccountDetailsV1Document"; -export { BankAccountDetailsV2 } from "./bankAccountDetailsV2"; -export { BankAccountDetailsV2Bban } from "./bankAccountDetailsV2Bban"; -export { BankAccountDetailsV2Document } from "./bankAccountDetailsV2Document"; +export { BankAccountDetailsV1 } from "./bankAccountDetailsV1.js"; +export { BankAccountDetailsV1Document } from "./bankAccountDetailsV1Document.js"; +export { BankAccountDetailsV2 } from "./bankAccountDetailsV2.js"; +export { BankAccountDetailsV2Bban } from "./bankAccountDetailsV2Bban.js"; +export { BankAccountDetailsV2Document } from "./bankAccountDetailsV2Document.js"; diff --git a/src/product/fr/carteGrise/carteGriseV1.ts b/src/v1/product/fr/carteGrise/carteGriseV1.ts similarity index 86% rename from src/product/fr/carteGrise/carteGriseV1.ts rename to src/v1/product/fr/carteGrise/carteGriseV1.ts index fd8d510f1..b3b240519 100644 --- a/src/product/fr/carteGrise/carteGriseV1.ts +++ b/src/v1/product/fr/carteGrise/carteGriseV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { CarteGriseV1Document } from "./carteGriseV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { CarteGriseV1Document } from "./carteGriseV1Document.js"; /** * Carte Grise API version 1 inference prediction. 
diff --git a/src/product/fr/carteGrise/carteGriseV1Document.ts b/src/v1/product/fr/carteGrise/carteGriseV1Document.ts similarity index 98% rename from src/product/fr/carteGrise/carteGriseV1Document.ts rename to src/v1/product/fr/carteGrise/carteGriseV1Document.ts index 5d3c19116..421079ff8 100644 --- a/src/product/fr/carteGrise/carteGriseV1Document.ts +++ b/src/v1/product/fr/carteGrise/carteGriseV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString, -} from "../../../parsing/common"; -import { DateField, StringField } from "../../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { DateField, StringField } from "@/v1/parsing/standard/index.js"; /** * Carte Grise API version 1.1 document data. diff --git a/src/v1/product/fr/carteGrise/index.ts b/src/v1/product/fr/carteGrise/index.ts new file mode 100644 index 000000000..2aad17b09 --- /dev/null +++ b/src/v1/product/fr/carteGrise/index.ts @@ -0,0 +1,2 @@ +export { CarteGriseV1 } from "./carteGriseV1.js"; +export { CarteGriseV1Document } from "./carteGriseV1Document.js"; diff --git a/src/product/fr/idCard/idCardV2.ts b/src/v1/product/fr/idCard/idCardV2.ts similarity index 82% rename from src/product/fr/idCard/idCardV2.ts rename to src/v1/product/fr/idCard/idCardV2.ts index 6bcac786a..3ebfdfe88 100644 --- a/src/product/fr/idCard/idCardV2.ts +++ b/src/v1/product/fr/idCard/idCardV2.ts @@ -1,6 +1,6 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { IdCardV2Document } from "./idCardV2Document"; -import { IdCardV2Page } from "./idCardV2Page"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { IdCardV2Document } from "./idCardV2Document.js"; +import { IdCardV2Page } from "./idCardV2Page.js"; /** * Carte Nationale d'Identité API version 2 inference prediction. 
diff --git a/src/product/fr/idCard/idCardV2Document.ts b/src/v1/product/fr/idCard/idCardV2Document.ts similarity index 97% rename from src/product/fr/idCard/idCardV2Document.ts rename to src/v1/product/fr/idCard/idCardV2Document.ts index a09314531..df8e3c6d9 100644 --- a/src/product/fr/idCard/idCardV2Document.ts +++ b/src/v1/product/fr/idCard/idCardV2Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString, -} from "../../../parsing/common"; -import { DateField, StringField } from "../../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { DateField, StringField } from "@/v1/parsing/standard/index.js"; /** * Carte Nationale d'Identité API version 2.0 document data. diff --git a/src/product/fr/idCard/idCardV2Page.ts b/src/v1/product/fr/idCard/idCardV2Page.ts similarity index 79% rename from src/product/fr/idCard/idCardV2Page.ts rename to src/v1/product/fr/idCard/idCardV2Page.ts index 70974dd5d..5ad0bcb5a 100644 --- a/src/product/fr/idCard/idCardV2Page.ts +++ b/src/v1/product/fr/idCard/idCardV2Page.ts @@ -1,7 +1,6 @@ -import { StringDict, cleanOutString } from "../../../parsing/common"; -import { ClassificationField } from "../../../parsing/standard"; - -import { IdCardV2Document } from "./idCardV2Document"; +import { StringDict, cleanOutString } from "@/v1/parsing/common/index.js"; +import { ClassificationField } from "@/v1/parsing/standard/index.js"; +import { IdCardV2Document } from "./idCardV2Document.js"; /** * Carte Nationale d'Identité API version 2.0 page data. 
diff --git a/src/v1/product/fr/idCard/index.ts b/src/v1/product/fr/idCard/index.ts new file mode 100644 index 000000000..5e20b16da --- /dev/null +++ b/src/v1/product/fr/idCard/index.ts @@ -0,0 +1,3 @@ +export { IdCardV2 } from "./idCardV2.js"; +export { IdCardV2Document } from "./idCardV2Document.js"; +export { IdCardV2Page } from "./idCardV2Page.js"; diff --git a/src/v1/product/fr/index.ts b/src/v1/product/fr/index.ts new file mode 100644 index 000000000..d8fa1eebc --- /dev/null +++ b/src/v1/product/fr/index.ts @@ -0,0 +1,7 @@ +export { BankAccountDetailsV1 } from "./bankAccountDetails/index.js"; +export { BankAccountDetailsV2 } from "./bankAccountDetails/index.js"; +export { IdCardV2 } from "./idCard/index.js"; +export { CarteGriseV1 } from "./carteGrise/index.js"; + +export * as bankAccountDetails from "./bankAccountDetails/index.js"; +export * as idCard from "./idCard/index.js"; diff --git a/src/product/generated/generatedV1.ts b/src/v1/product/generated/generatedV1.ts similarity index 81% rename from src/product/generated/generatedV1.ts rename to src/v1/product/generated/generatedV1.ts index daa99c95a..4e3cb5220 100644 --- a/src/product/generated/generatedV1.ts +++ b/src/v1/product/generated/generatedV1.ts @@ -1,6 +1,6 @@ -import { Inference, Page, StringDict } from "../../../src/parsing/common"; -import { GeneratedV1Document } from "./generatedV1Document"; -import { GeneratedV1Page } from "./generatedV1Page"; +import { Inference, Page, StringDict } from "@/v1/parsing/common/index.js"; +import { GeneratedV1Document } from "./generatedV1Document.js"; +import { GeneratedV1Page } from "./generatedV1Page.js"; /** * Generated API V1 inference results. 
diff --git a/src/product/generated/generatedV1Document.ts b/src/v1/product/generated/generatedV1Document.ts similarity index 81% rename from src/product/generated/generatedV1Document.ts rename to src/v1/product/generated/generatedV1Document.ts index 11477eb43..028d7d170 100644 --- a/src/product/generated/generatedV1Document.ts +++ b/src/v1/product/generated/generatedV1Document.ts @@ -1,7 +1,7 @@ -import { StringDict } from "../../../src/parsing/common"; -import { GeneratedListField, GeneratedObjectField, isGeneratedObject } from "../../../src/parsing/generated"; -import { StringField } from "../../../src/parsing/standard"; -import { GeneratedV1Prediction } from "./generatedV1Prediction"; +import { StringDict } from "@/parsing/stringDict.js"; +import { GeneratedListField, GeneratedObjectField, isGeneratedObject } from "@/v1/parsing/generated/index.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; +import { GeneratedV1Prediction } from "./generatedV1Prediction.js"; /** * Generated V1 document prediction results. 
diff --git a/src/product/generated/generatedV1Page.ts b/src/v1/product/generated/generatedV1Page.ts similarity index 82% rename from src/product/generated/generatedV1Page.ts rename to src/v1/product/generated/generatedV1Page.ts index e99aff5f5..8116307cb 100644 --- a/src/product/generated/generatedV1Page.ts +++ b/src/v1/product/generated/generatedV1Page.ts @@ -1,7 +1,7 @@ -import { StringDict } from "../../../src/parsing/common"; -import { GeneratedListField, GeneratedObjectField, isGeneratedObject } from "../../../src/parsing/generated"; -import { StringField } from "../../../src/parsing/standard"; -import { GeneratedV1Prediction } from "./generatedV1Prediction"; +import { StringDict } from "@/parsing/stringDict.js"; +import { GeneratedListField, GeneratedObjectField, isGeneratedObject } from "@/v1/parsing/generated/index.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; +import { GeneratedV1Prediction } from "./generatedV1Prediction.js"; /** * Generated V1 page prediction results. 
diff --git a/src/product/generated/generatedV1Prediction.ts b/src/v1/product/generated/generatedV1Prediction.ts similarity index 91% rename from src/product/generated/generatedV1Prediction.ts rename to src/v1/product/generated/generatedV1Prediction.ts index 701e87beb..925bd503a 100644 --- a/src/product/generated/generatedV1Prediction.ts +++ b/src/v1/product/generated/generatedV1Prediction.ts @@ -1,6 +1,6 @@ -import { Prediction, cleanOutString } from "../../../src/parsing/common"; -import { GeneratedListField, GeneratedObjectField } from "../../../src/parsing/generated"; -import { StringField } from "../../../src/parsing/standard"; +import { Prediction, cleanOutString } from "@/v1/parsing/common/index.js"; +import { GeneratedListField, GeneratedObjectField } from "@/v1/parsing/generated/index.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; export class GeneratedV1Prediction implements Prediction { diff --git a/src/v1/product/generated/index.ts b/src/v1/product/generated/index.ts new file mode 100644 index 000000000..b90e4ba8f --- /dev/null +++ b/src/v1/product/generated/index.ts @@ -0,0 +1,4 @@ +export { GeneratedV1 } from "./generatedV1.js"; +export { GeneratedV1Document } from "./generatedV1Document.js"; +export { GeneratedV1Page } from "./generatedV1Page.js"; +export { GeneratedV1Prediction } from "./generatedV1Prediction.js"; diff --git a/src/v1/product/index.ts b/src/v1/product/index.ts new file mode 100644 index 000000000..32313f020 --- /dev/null +++ b/src/v1/product/index.ts @@ -0,0 +1,28 @@ +export * as fr from "./fr/index.js"; +export * as us from "./us/index.js"; +export { BarcodeReaderV1 } from "./barcodeReader/index.js"; +export { CropperV1 } from "./cropper/index.js"; +export { DriverLicenseV1 } from "./driverLicense/index.js"; +export { FinancialDocumentV1 } from "./financialDocument/index.js"; +export { GeneratedV1 } from "./generated/index.js"; +export { InternationalIdV2 } from "./internationalId/index.js"; +export { 
InvoiceSplitterV1 } from "./invoiceSplitter/index.js"; +export { InvoiceV4 } from "./invoice/index.js"; +export { MultiReceiptsDetectorV1 } from "./multiReceiptsDetector/index.js"; +export { PassportV1 } from "./passport/index.js"; +export { ReceiptV5 } from "./receipt/index.js"; +export { ResumeV1 } from "./resume/index.js"; + +// not sure if we want to export these -- advanced users can import them directly +export * as barcodeReader from "./barcodeReader/index.js"; +export * as cropper from "./cropper/index.js"; +export * as driverLicense from "./driverLicense/index.js"; +export * as financialDocument from "./financialDocument/index.js"; +export * as generated from "./generated/index.js"; +export * as internationalId from "./internationalId/index.js"; +export * as invoice from "./invoice/index.js"; +export * as invoiceSplitter from "./invoiceSplitter/index.js"; +export * as multiReceiptsDetector from "./multiReceiptsDetector/index.js"; +export * as passport from "./passport/index.js"; +export * as receipt from "./receipt/index.js"; +export * as resume from "./resume/index.js"; diff --git a/src/v1/product/internationalId/index.ts b/src/v1/product/internationalId/index.ts new file mode 100644 index 000000000..50fa64347 --- /dev/null +++ b/src/v1/product/internationalId/index.ts @@ -0,0 +1,2 @@ +export { InternationalIdV2 } from "./internationalIdV2.js"; +export { InternationalIdV2Document } from "./internationalIdV2Document.js"; diff --git a/src/product/internationalId/internationalIdV2.ts b/src/v1/product/internationalId/internationalIdV2.ts similarity index 91% rename from src/product/internationalId/internationalIdV2.ts rename to src/v1/product/internationalId/internationalIdV2.ts index 2225971e4..05dc15124 100644 --- a/src/product/internationalId/internationalIdV2.ts +++ b/src/v1/product/internationalId/internationalIdV2.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { InternationalIdV2Document } from 
"./internationalIdV2Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { InternationalIdV2Document } from "./internationalIdV2Document.js"; /** * International ID API version 2 inference prediction. diff --git a/src/product/internationalId/internationalIdV2Document.ts b/src/v1/product/internationalId/internationalIdV2Document.ts similarity index 98% rename from src/product/internationalId/internationalIdV2Document.ts rename to src/v1/product/internationalId/internationalIdV2Document.ts index 4cb541e6b..531d41cd4 100644 --- a/src/product/internationalId/internationalIdV2Document.ts +++ b/src/v1/product/internationalId/internationalIdV2Document.ts @@ -2,12 +2,12 @@ import { Prediction, StringDict, cleanOutString, -} from "../../parsing/common"; +} from "@/v1/parsing/common/index.js"; import { ClassificationField, DateField, StringField, -} from "../../parsing/standard"; +} from "@/v1/parsing/standard/index.js"; /** * International ID API version 2.2 document data. 
diff --git a/src/v1/product/invoice/index.ts b/src/v1/product/invoice/index.ts new file mode 100644 index 000000000..32f41de9b --- /dev/null +++ b/src/v1/product/invoice/index.ts @@ -0,0 +1,3 @@ +export { InvoiceV4 } from "./invoiceV4.js"; +export { InvoiceV4Document } from "./invoiceV4Document.js"; +export { InvoiceV4LineItem } from "./invoiceV4LineItem.js"; diff --git a/src/product/invoice/invoiceV4.ts b/src/v1/product/invoice/invoiceV4.ts similarity index 86% rename from src/product/invoice/invoiceV4.ts rename to src/v1/product/invoice/invoiceV4.ts index 2c396fbf6..2d3cb4df5 100644 --- a/src/product/invoice/invoiceV4.ts +++ b/src/v1/product/invoice/invoiceV4.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { InvoiceV4Document } from "./invoiceV4Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { InvoiceV4Document } from "./invoiceV4Document.js"; /** * Invoice API version 4 inference prediction. diff --git a/src/product/invoice/invoiceV4Document.ts b/src/v1/product/invoice/invoiceV4Document.ts similarity index 98% rename from src/product/invoice/invoiceV4Document.ts rename to src/v1/product/invoice/invoiceV4Document.ts index 745a703d1..4232f33fd 100644 --- a/src/product/invoice/invoiceV4Document.ts +++ b/src/v1/product/invoice/invoiceV4Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString,lineSeparator, -} from "../../parsing/common"; -import { InvoiceV4LineItem } from "./invoiceV4LineItem"; +} from "@/v1/parsing/common/index.js"; +import { InvoiceV4LineItem } from "./invoiceV4LineItem.js"; import { AddressField, AmountField, @@ -14,7 +14,7 @@ import { PaymentDetailsField, StringField, Taxes, -} from "../../parsing/standard"; +} from "@/v1/parsing/standard/index.js"; /** * Invoice API version 4.11 document data. 
diff --git a/src/product/invoice/invoiceV4LineItem.ts b/src/v1/product/invoice/invoiceV4LineItem.ts similarity index 96% rename from src/product/invoice/invoiceV4LineItem.ts rename to src/v1/product/invoice/invoiceV4LineItem.ts index ebfa4a9b5..0dd06f331 100644 --- a/src/product/invoice/invoiceV4LineItem.ts +++ b/src/v1/product/invoice/invoiceV4LineItem.ts @@ -1,6 +1,6 @@ -import { cleanSpecialChars, floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars, floatToString } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * List of all the line items present on the invoice. diff --git a/src/product/invoiceSplitter/internal.ts b/src/v1/product/invoiceSplitter/index.ts similarity index 56% rename from src/product/invoiceSplitter/internal.ts rename to src/v1/product/invoiceSplitter/index.ts index c3bdb5f63..f4782577d 100644 --- a/src/product/invoiceSplitter/internal.ts +++ b/src/v1/product/invoiceSplitter/index.ts @@ -1,3 +1,3 @@ -export { InvoiceSplitterV1 } from "./invoiceSplitterV1"; -export { InvoiceSplitterV1Document } from "./invoiceSplitterV1Document"; -export { InvoiceSplitterV1InvoicePageGroup } from "./invoiceSplitterV1InvoicePageGroup"; +export { InvoiceSplitterV1 } from "./invoiceSplitterV1.js"; +export { InvoiceSplitterV1Document } from "./invoiceSplitterV1Document.js"; +export { InvoiceSplitterV1InvoicePageGroup } from "./invoiceSplitterV1InvoicePageGroup.js"; diff --git a/src/product/invoiceSplitter/invoiceSplitterV1.ts b/src/v1/product/invoiceSplitter/invoiceSplitterV1.ts similarity index 91% rename from src/product/invoiceSplitter/invoiceSplitterV1.ts rename to src/v1/product/invoiceSplitter/invoiceSplitterV1.ts index 8d242e2b1..97080e19a 100644 --- a/src/product/invoiceSplitter/invoiceSplitterV1.ts +++ 
b/src/v1/product/invoiceSplitter/invoiceSplitterV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { InvoiceSplitterV1Document } from "./invoiceSplitterV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { InvoiceSplitterV1Document } from "./invoiceSplitterV1Document.js"; /** * Invoice Splitter API version 1 inference prediction. diff --git a/src/product/invoiceSplitter/invoiceSplitterV1Document.ts b/src/v1/product/invoiceSplitter/invoiceSplitterV1Document.ts similarity index 96% rename from src/product/invoiceSplitter/invoiceSplitterV1Document.ts rename to src/v1/product/invoiceSplitter/invoiceSplitterV1Document.ts index fdc50e948..a06ecec26 100644 --- a/src/product/invoiceSplitter/invoiceSplitterV1Document.ts +++ b/src/v1/product/invoiceSplitter/invoiceSplitterV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString,lineSeparator, -} from "../../parsing/common"; -import { InvoiceSplitterV1InvoicePageGroup } from "./invoiceSplitterV1InvoicePageGroup"; +} from "@/v1/parsing/common/index.js"; +import { InvoiceSplitterV1InvoicePageGroup } from "./invoiceSplitterV1InvoicePageGroup.js"; /** diff --git a/src/product/invoiceSplitter/invoiceSplitterV1InvoicePageGroup.ts b/src/v1/product/invoiceSplitter/invoiceSplitterV1InvoicePageGroup.ts similarity index 93% rename from src/product/invoiceSplitter/invoiceSplitterV1InvoicePageGroup.ts rename to src/v1/product/invoiceSplitter/invoiceSplitterV1InvoicePageGroup.ts index ad99026b8..d3f725524 100644 --- a/src/product/invoiceSplitter/invoiceSplitterV1InvoicePageGroup.ts +++ b/src/v1/product/invoiceSplitter/invoiceSplitterV1InvoicePageGroup.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * List of page groups. 
Each group represents a single invoice within a multi-invoice document. diff --git a/src/product/multiReceiptsDetector/internal.ts b/src/v1/product/multiReceiptsDetector/index.ts similarity index 80% rename from src/product/multiReceiptsDetector/internal.ts rename to src/v1/product/multiReceiptsDetector/index.ts index 1eea01bb0..5a44ecb6c 100644 --- a/src/product/multiReceiptsDetector/internal.ts +++ b/src/v1/product/multiReceiptsDetector/index.ts @@ -1,2 +1,2 @@ -export { MultiReceiptsDetectorV1 } from "./multiReceiptsDetectorV1"; -export { MultiReceiptsDetectorV1Document } from "./multiReceiptsDetectorV1Document"; +export { MultiReceiptsDetectorV1 } from "./multiReceiptsDetectorV1.js"; +export { MultiReceiptsDetectorV1Document } from "./multiReceiptsDetectorV1Document.js"; diff --git a/src/product/multiReceiptsDetector/multiReceiptsDetectorV1.ts b/src/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1.ts similarity index 91% rename from src/product/multiReceiptsDetector/multiReceiptsDetectorV1.ts rename to src/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1.ts index eb935c8fd..0433d9a0c 100644 --- a/src/product/multiReceiptsDetector/multiReceiptsDetectorV1.ts +++ b/src/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { MultiReceiptsDetectorV1Document } from "./multiReceiptsDetectorV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { MultiReceiptsDetectorV1Document } from "./multiReceiptsDetectorV1Document.js"; /** * Multi Receipts Detector API version 1 inference prediction. 
diff --git a/src/product/multiReceiptsDetector/multiReceiptsDetectorV1Document.ts b/src/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1Document.ts similarity index 89% rename from src/product/multiReceiptsDetector/multiReceiptsDetectorV1Document.ts rename to src/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1Document.ts index d54709b7d..cdd10464d 100644 --- a/src/product/multiReceiptsDetector/multiReceiptsDetectorV1Document.ts +++ b/src/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString, -} from "../../parsing/common"; -import { PositionField } from "../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { PositionField } from "@/v1/parsing/standard/index.js"; /** * Multi Receipts Detector API version 1.1 document data. diff --git a/src/v1/product/passport/index.ts b/src/v1/product/passport/index.ts new file mode 100644 index 000000000..b07d16256 --- /dev/null +++ b/src/v1/product/passport/index.ts @@ -0,0 +1,2 @@ +export { PassportV1 } from "./passportV1.js"; +export { PassportV1Document } from "./passportV1Document.js"; diff --git a/src/product/passport/passportV1.ts b/src/v1/product/passport/passportV1.ts similarity index 86% rename from src/product/passport/passportV1.ts rename to src/v1/product/passport/passportV1.ts index 67ac75a2c..f3c916474 100644 --- a/src/product/passport/passportV1.ts +++ b/src/v1/product/passport/passportV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { PassportV1Document } from "./passportV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { PassportV1Document } from "./passportV1Document.js"; /** * Passport API version 1 inference prediction. 
diff --git a/src/product/passport/passportV1Document.ts b/src/v1/product/passport/passportV1Document.ts similarity index 96% rename from src/product/passport/passportV1Document.ts rename to src/v1/product/passport/passportV1Document.ts index 2abc757ba..d809c1641 100644 --- a/src/product/passport/passportV1Document.ts +++ b/src/v1/product/passport/passportV1Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString, -} from "../../parsing/common"; -import { DateField, StringField } from "../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { DateField, StringField } from "@/v1/parsing/standard/index.js"; /** * Passport API version 1.1 document data. diff --git a/src/v1/product/receipt/index.ts b/src/v1/product/receipt/index.ts new file mode 100644 index 000000000..006d625cf --- /dev/null +++ b/src/v1/product/receipt/index.ts @@ -0,0 +1,3 @@ +export { ReceiptV5 } from "./receiptV5.js"; +export { ReceiptV5Document } from "./receiptV5Document.js"; +export { ReceiptV5LineItem } from "./receiptV5LineItem.js"; diff --git a/src/product/receipt/receiptV5.ts b/src/v1/product/receipt/receiptV5.ts similarity index 86% rename from src/product/receipt/receiptV5.ts rename to src/v1/product/receipt/receiptV5.ts index ae77f5198..292ac397d 100644 --- a/src/product/receipt/receiptV5.ts +++ b/src/v1/product/receipt/receiptV5.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { ReceiptV5Document } from "./receiptV5Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { ReceiptV5Document } from "./receiptV5Document.js"; /** * Receipt API version 5 inference prediction. 
diff --git a/src/product/receipt/receiptV5Document.ts b/src/v1/product/receipt/receiptV5Document.ts similarity index 97% rename from src/product/receipt/receiptV5Document.ts rename to src/v1/product/receipt/receiptV5Document.ts index 40fca42eb..8bdbb4112 100644 --- a/src/product/receipt/receiptV5Document.ts +++ b/src/v1/product/receipt/receiptV5Document.ts @@ -2,8 +2,8 @@ import { Prediction, StringDict, cleanOutString,lineSeparator, -} from "../../parsing/common"; -import { ReceiptV5LineItem } from "./receiptV5LineItem"; +} from "@/v1/parsing/common/index.js"; +import { ReceiptV5LineItem } from "./receiptV5LineItem.js"; import { AmountField, ClassificationField, @@ -12,7 +12,7 @@ import { LocaleField, StringField, Taxes, -} from "../../parsing/standard"; +} from "@/v1/parsing/standard/index.js"; /** * Receipt API version 5.4 document data. diff --git a/src/product/receipt/receiptV5LineItem.ts b/src/v1/product/receipt/receiptV5LineItem.ts similarity index 94% rename from src/product/receipt/receiptV5LineItem.ts rename to src/v1/product/receipt/receiptV5LineItem.ts index 7cb6e5c68..6e1ebffbc 100644 --- a/src/product/receipt/receiptV5LineItem.ts +++ b/src/v1/product/receipt/receiptV5LineItem.ts @@ -1,6 +1,6 @@ -import { cleanSpecialChars, floatToString } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars, floatToString } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * List of all line items on the receipt. 
diff --git a/src/v1/product/resume/index.ts b/src/v1/product/resume/index.ts new file mode 100644 index 000000000..635673199 --- /dev/null +++ b/src/v1/product/resume/index.ts @@ -0,0 +1,7 @@ +export { ResumeV1 } from "./resumeV1.js"; +export { ResumeV1Certificate } from "./resumeV1Certificate.js"; +export { ResumeV1Document } from "./resumeV1Document.js"; +export { ResumeV1Education } from "./resumeV1Education.js"; +export { ResumeV1Language } from "./resumeV1Language.js"; +export { ResumeV1ProfessionalExperience } from "./resumeV1ProfessionalExperience.js"; +export { ResumeV1SocialNetworksUrl } from "./resumeV1SocialNetworksUrl.js"; diff --git a/src/product/resume/resumeV1.ts b/src/v1/product/resume/resumeV1.ts similarity index 86% rename from src/product/resume/resumeV1.ts rename to src/v1/product/resume/resumeV1.ts index 04b190e17..231b3de1a 100644 --- a/src/product/resume/resumeV1.ts +++ b/src/v1/product/resume/resumeV1.ts @@ -1,5 +1,5 @@ -import { Inference, StringDict, Page } from "../../parsing/common"; -import { ResumeV1Document } from "./resumeV1Document"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { ResumeV1Document } from "./resumeV1Document.js"; /** * Resume API version 1 inference prediction. 
diff --git a/src/product/resume/resumeV1Certificate.ts b/src/v1/product/resume/resumeV1Certificate.ts similarity index 94% rename from src/product/resume/resumeV1Certificate.ts rename to src/v1/product/resume/resumeV1Certificate.ts index b501ecf41..3956fc174 100644 --- a/src/product/resume/resumeV1Certificate.ts +++ b/src/v1/product/resume/resumeV1Certificate.ts @@ -1,7 +1,6 @@ - -import { cleanSpecialChars } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * The list of certificates obtained by the candidate. diff --git a/src/product/resume/resumeV1Document.ts b/src/v1/product/resume/resumeV1Document.ts similarity index 96% rename from src/product/resume/resumeV1Document.ts rename to src/v1/product/resume/resumeV1Document.ts index 3f0766cb3..b129a3b9a 100644 --- a/src/product/resume/resumeV1Document.ts +++ b/src/v1/product/resume/resumeV1Document.ts @@ -2,13 +2,13 @@ import { Prediction, StringDict, cleanOutString,lineSeparator, -} from "../../parsing/common"; -import { ResumeV1SocialNetworksUrl } from "./resumeV1SocialNetworksUrl"; -import { ResumeV1Language } from "./resumeV1Language"; -import { ResumeV1Education } from "./resumeV1Education"; -import { ResumeV1ProfessionalExperience } from "./resumeV1ProfessionalExperience"; -import { ResumeV1Certificate } from "./resumeV1Certificate"; -import { ClassificationField, StringField } from "../../parsing/standard"; +} from "@/v1/parsing/common/index.js"; +import { ResumeV1SocialNetworksUrl } from "./resumeV1SocialNetworksUrl.js"; +import { ResumeV1Language } from "./resumeV1Language.js"; +import { ResumeV1Education } from "./resumeV1Education.js"; +import { ResumeV1ProfessionalExperience } from "./resumeV1ProfessionalExperience.js"; +import { ResumeV1Certificate } from 
"./resumeV1Certificate.js"; +import { ClassificationField, StringField } from "@/v1/parsing/standard/index.js"; /** * Resume API version 1.2 document data. diff --git a/src/product/resume/resumeV1Education.ts b/src/v1/product/resume/resumeV1Education.ts similarity index 96% rename from src/product/resume/resumeV1Education.ts rename to src/v1/product/resume/resumeV1Education.ts index 9c7a0bcac..7b1de459b 100644 --- a/src/product/resume/resumeV1Education.ts +++ b/src/v1/product/resume/resumeV1Education.ts @@ -1,7 +1,7 @@ -import { cleanSpecialChars } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * The list of the candidate's educational background. diff --git a/src/product/resume/resumeV1Language.ts b/src/v1/product/resume/resumeV1Language.ts similarity index 91% rename from src/product/resume/resumeV1Language.ts rename to src/v1/product/resume/resumeV1Language.ts index e008c9e6c..b9eb36529 100644 --- a/src/product/resume/resumeV1Language.ts +++ b/src/v1/product/resume/resumeV1Language.ts @@ -1,7 +1,7 @@ -import { cleanSpecialChars } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * The list of languages that the candidate is proficient in. 
diff --git a/src/product/resume/resumeV1ProfessionalExperience.ts b/src/v1/product/resume/resumeV1ProfessionalExperience.ts similarity index 96% rename from src/product/resume/resumeV1ProfessionalExperience.ts rename to src/v1/product/resume/resumeV1ProfessionalExperience.ts index 2d62f8195..f1474e37e 100644 --- a/src/product/resume/resumeV1ProfessionalExperience.ts +++ b/src/v1/product/resume/resumeV1ProfessionalExperience.ts @@ -1,7 +1,7 @@ -import { cleanSpecialChars } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * The list of the candidate's professional experiences. diff --git a/src/product/resume/resumeV1SocialNetworksUrl.ts b/src/v1/product/resume/resumeV1SocialNetworksUrl.ts similarity index 91% rename from src/product/resume/resumeV1SocialNetworksUrl.ts rename to src/v1/product/resume/resumeV1SocialNetworksUrl.ts index bca348ff9..85b73608c 100644 --- a/src/product/resume/resumeV1SocialNetworksUrl.ts +++ b/src/v1/product/resume/resumeV1SocialNetworksUrl.ts @@ -1,7 +1,7 @@ -import { cleanSpecialChars } from "../../parsing/common"; -import { StringDict } from "../../parsing/common"; -import { Polygon } from "../../geometry"; +import { cleanSpecialChars } from "@/v1/parsing/common/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Polygon } from "@/geometry/index.js"; /** * The list of social network profiles of the candidate. 
diff --git a/src/product/us/bankCheck/bankCheckV1.ts b/src/v1/product/us/bankCheck/bankCheckV1.ts similarity index 81% rename from src/product/us/bankCheck/bankCheckV1.ts rename to src/v1/product/us/bankCheck/bankCheckV1.ts index b45290359..4d8b2c9d8 100644 --- a/src/product/us/bankCheck/bankCheckV1.ts +++ b/src/v1/product/us/bankCheck/bankCheckV1.ts @@ -1,6 +1,6 @@ -import { Inference, StringDict, Page } from "../../../parsing/common"; -import { BankCheckV1Document } from "./bankCheckV1Document"; -import { BankCheckV1Page } from "./bankCheckV1Page"; +import { Inference, StringDict, Page } from "@/v1/parsing/common/index.js"; +import { BankCheckV1Document } from "./bankCheckV1Document.js"; +import { BankCheckV1Page } from "./bankCheckV1Page.js"; /** * Bank Check API version 1 inference prediction. diff --git a/src/product/us/bankCheck/bankCheckV1Document.ts b/src/v1/product/us/bankCheck/bankCheckV1Document.ts similarity index 96% rename from src/product/us/bankCheck/bankCheckV1Document.ts rename to src/v1/product/us/bankCheck/bankCheckV1Document.ts index 60c7291c0..e4c8990fb 100644 --- a/src/product/us/bankCheck/bankCheckV1Document.ts +++ b/src/v1/product/us/bankCheck/bankCheckV1Document.ts @@ -2,12 +2,12 @@ import { Prediction, StringDict, cleanOutString, -} from "../../../parsing/common"; +} from "@/v1/parsing/common/index.js"; import { AmountField, DateField, StringField, -} from "../../../parsing/standard"; +} from "@/v1/parsing/standard/index.js"; /** * Bank Check API version 1.1 document data. 
diff --git a/src/product/us/bankCheck/bankCheckV1Page.ts b/src/v1/product/us/bankCheck/bankCheckV1Page.ts similarity index 83% rename from src/product/us/bankCheck/bankCheckV1Page.ts rename to src/v1/product/us/bankCheck/bankCheckV1Page.ts index 521636f4b..52ef31ef7 100644 --- a/src/product/us/bankCheck/bankCheckV1Page.ts +++ b/src/v1/product/us/bankCheck/bankCheckV1Page.ts @@ -1,7 +1,6 @@ -import { StringDict, cleanOutString } from "../../../parsing/common"; -import { PositionField } from "../../../parsing/standard"; - -import { BankCheckV1Document } from "./bankCheckV1Document"; +import { StringDict, cleanOutString } from "@/v1/parsing/common/index.js"; +import { PositionField } from "@/v1/parsing/standard/index.js"; +import { BankCheckV1Document } from "./bankCheckV1Document.js"; /** * Bank Check API version 1.1 page data. diff --git a/src/v1/product/us/bankCheck/index.ts b/src/v1/product/us/bankCheck/index.ts new file mode 100644 index 000000000..094ec0075 --- /dev/null +++ b/src/v1/product/us/bankCheck/index.ts @@ -0,0 +1,3 @@ +export { BankCheckV1 } from "./bankCheckV1.js"; +export { BankCheckV1Document } from "./bankCheckV1Document.js"; +export { BankCheckV1Page } from "./bankCheckV1Page.js"; diff --git a/src/v1/product/us/index.ts b/src/v1/product/us/index.ts new file mode 100644 index 000000000..8eff96dc3 --- /dev/null +++ b/src/v1/product/us/index.ts @@ -0,0 +1,2 @@ +export { BankCheckV1 } from "./bankCheck/index.js"; +export * as bankCheck from "./bankCheck/index.js"; diff --git a/src/v2/cli.ts b/src/v2/cli.ts new file mode 100644 index 000000000..fc7fa8fd1 --- /dev/null +++ b/src/v2/cli.ts @@ -0,0 +1,103 @@ +import { Command, OptionValues } from "commander"; +import { Client } from "./client.js"; +import { PathInput } from "../input/index.js"; +import * as console from "console"; +import { BaseInference } from "@/v2/parsing/inference/index.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; +import { + Classification, + Crop, + Extraction, 
+ Ocr, + Split, +} from "@/v2/product/index.js"; + +const program = new Command(); + +// +// EXECUTE THE COMMANDS +// + +function initClient(options: OptionValues): Client { + return new Client({ + apiKey: options.apiKey, + debug: options.debug, + }); +} + +async function enqueueAndGetInference( + product: typeof BaseProduct, + inputPath: string, + options: OptionValues +): Promise { + const mindeeClient = initClient(options); + const inputSource = new PathInput({ inputPath: inputPath }); + const response = await mindeeClient.enqueueAndGetResult( + product, + inputSource, + { + modelId: options.model, + pollingOptions: { + initialDelaySec: 2, + delaySec: 1.5, + maxRetries: 80, + } + } + ); + if (!response.inference) { + throw Error("Inference could not be retrieved"); + } + printResponse(response.inference); +} + +function printResponse( + document: BaseInference, +): void { + if (document) { + console.log(`\n${document}`); + } +} + +// +// BUILD THE COMMANDS +// + +function addMainOptions(prog: Command) { + prog.requiredOption( + "-m, --model ", + "Model ID (required)" + ); + prog.argument("", "full path to the file"); +} + +export function cli() { + program.name("mindee") + .description("Command line interface for Mindee V2 products.") + .option("-d, --debug", "high verbosity mode") + .option("-k, --api-key ", "your Mindee API key"); + + const inferenceTypes = [ + { name: "extraction", description: "Extract data from a document.", product: Extraction }, + { name: "crop", description: "Crop a document.", product: Crop }, + { name: "split", description: "Split a document into pages.", product: Split }, + { name: "ocr", description: "Read text from a document.", product: Ocr }, + { name: "classification", description: "Classify a document.", product: Classification }, + ]; + + for (const inference of inferenceTypes) { + const inferenceCmd: Command = program.command(inference.name) + .description(inference.description); + + addMainOptions(inferenceCmd); + + 
inferenceCmd.action(function ( + inputPath: string, + options: OptionValues, + ) { + const allOptions = { ...program.opts(), ...options }; + return enqueueAndGetInference(inference.product, inputPath, allOptions); + }); + } + + program.parse(process.argv); +} diff --git a/src/v2/client.ts b/src/v2/client.ts new file mode 100644 index 000000000..b7c55afbf --- /dev/null +++ b/src/v2/client.ts @@ -0,0 +1,201 @@ +import { setTimeout } from "node:timers/promises"; +import { Dispatcher } from "undici"; +import { InputSource } from "@/input/index.js"; +import { MindeeError } from "@/errors/index.js"; +import { errorHandler } from "@/errors/handler.js"; +import { LOG_LEVELS, logger } from "@/logger.js"; +import { ErrorResponse, JobResponse } from "./parsing/index.js"; +import { MindeeApiV2 } from "./http/mindeeApiV2.js"; +import { MindeeHttpErrorV2 } from "./http/errors.js"; +import { ValidatedPollingOptions } from "./client/index.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; + +/** + * Options for the V2 Mindee Client. + * + * @category ClientV2 + * @example + * const client = new MindeeClientV2({ + * apiKey: "YOUR_API_KEY", + * throwOnError: true, + * debug: false + * }); + */ +export interface ClientOptions { + /** Your API key for all endpoints. */ + apiKey?: string; + /** Log debug messages. */ + debug?: boolean; + /** Custom Dispatcher instance for the HTTP requests. */ + dispatcher?: Dispatcher; +} + +/** + * Mindee Client V2 class that centralizes most basic operations. + * + * @category ClientV2 + */ +export class Client { + /** Mindee V2 API handler. */ + protected mindeeApi: MindeeApiV2; + + /** + * @param {ClientOptions} options options for the initialization of a client. + */ + constructor( + { apiKey, debug, dispatcher }: ClientOptions = { + apiKey: undefined, + debug: false, + dispatcher: undefined, + } + ) { + this.mindeeApi = new MindeeApiV2(dispatcher, apiKey); + errorHandler.throwOnError = true; + logger.level = + debug ?? 
process.env.MINDEE_DEBUG + ? LOG_LEVELS["debug"] + : LOG_LEVELS["warn"]; + logger.debug("Client V2 Initialized"); + } + + async enqueue

( + product: P, + inputSource: InputSource, + params: InstanceType | ConstructorParameters[0], + ): Promise { + if (inputSource === undefined) { + throw new MindeeError("An input document is required."); + } + const paramsInstance = params instanceof product.parametersClass + ? params + : new product.parametersClass(params); + await inputSource.init(); + const jobResponse = await this.mindeeApi.enqueueProduct( + product, inputSource, paramsInstance + ); + if (jobResponse.job.id === undefined || jobResponse.job.id.length === 0) { + logger.error(`Failed enqueueing:\n${jobResponse.getRawHttp()}`); + throw new MindeeError("Enqueueing of the document failed."); + } + logger.debug( + `Successfully enqueued document with job ID: ${jobResponse.job.id}.` + ); + return jobResponse; + } + + /** + * Retrieves the result of a previously enqueued request. + * + * @param product the product to retrieve. + * @param inferenceId id of the queue to poll. + * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. + * @category Asynchronous + * @returns a `Promise` containing the inference. + */ + async getResult

( + product: P, + inferenceId: string + ): Promise> { + logger.debug( + `Attempting to get inference with ID: ${inferenceId} using response type: ${product.name}` + ); + return await this.mindeeApi.getProductResult(product, inferenceId); + } + + /** + * Get the processing status of a previously enqueued request. + * Can be used for polling. + * + * @param jobId id of the queue to poll. + * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. + * @category Asynchronous + * @returns a `Promise` containing a `Job`, which also contains a `Document` if the + * parsing is complete. + */ + async getJob(jobId: string): Promise { + return await this.mindeeApi.getJob(jobId); + } + + /** + * Enqueue a request and poll the server until the result is sent or + * until the maximum number of tries is reached. + * + * @param product the product to retrieve. + * @param inputSource file or URL to parse. + * @param params parameters relating to prediction options. + * + * @typeParam T an extension of an `Inference`. Can be omitted as it will be inferred from the `productClass`. + * @category Synchronous + * @returns a `Promise` containing parsing results. + */ + async enqueueAndGetResult

( + product: P, + inputSource: InputSource, + params: InstanceType | ConstructorParameters[0], + ): Promise> { + const paramsInstance = new product.parametersClass(params); + + const pollingOptions = paramsInstance.getValidatedPollingOptions(); + + const jobResponse: JobResponse = await this.enqueue( + product, inputSource, paramsInstance + ); + return await this.pollForResult( + product, pollingOptions, jobResponse.job.id + ); + } + + /** + * Send a document to an endpoint and poll the server until the result is sent or + * until the maximum number of tries is reached. + * @protected + */ + protected async pollForResult

( + product: typeof BaseProduct, + pollingOptions: ValidatedPollingOptions, + queueId: string, + ): Promise> { + logger.debug( + `Waiting ${pollingOptions.initialDelaySec} seconds before polling.` + ); + await setTimeout( + pollingOptions.initialDelaySec * 1000, + undefined, + pollingOptions.initialTimerOptions + ); + logger.debug( + `Start polling for inference using job ID: ${queueId}.` + ); + let retryCounter: number = 1; + let pollResults: JobResponse; + while (retryCounter < pollingOptions.maxRetries + 1) { + logger.debug( + `Attempt ${retryCounter} of ${pollingOptions.maxRetries}` + ); + pollResults = await this.getJob(queueId); + const error: ErrorResponse | undefined = pollResults.job.error; + if (error) { + throw new MindeeHttpErrorV2(error); + } + logger.debug(`Job status: ${pollResults.job.status}.`); + if (pollResults.job.status === "Failed") { + break; + } + if (pollResults.job.status === "Processed") { + return this.getResult(product, pollResults.job.id); + } + await setTimeout( + pollingOptions.delaySec * 1000, + undefined, + pollingOptions.recurringTimerOptions + ); + retryCounter++; + } + + throw new MindeeError( + "Asynchronous parsing request timed out after " + + pollingOptions.delaySec * retryCounter + + " seconds" + ); + } +} diff --git a/src/v2/client/baseParameters.ts b/src/v2/client/baseParameters.ts new file mode 100644 index 000000000..e2f3263c3 --- /dev/null +++ b/src/v2/client/baseParameters.ts @@ -0,0 +1,124 @@ +import { ValidatedPollingOptions } from "@/v2/client/pollingOptions.js"; +import { PollingOptions } from "@/v2/index.js"; +import { MindeeConfigurationError } from "@/errors/index.js"; + +/** + * Constructor parameters for BaseParameters and its subclasses. + */ +export interface BaseParametersConstructor { + modelId: string; + alias?: string; + webhookIds?: string[]; + pollingOptions?: PollingOptions; + closeFile?: boolean; +} + +/** + * Parameters accepted by the asynchronous **inference** v2 endpoint. 
+ * + * All fields are optional except `modelId`. + * + * @category ClientV2 + * @example + * const params = { + * modelId: "YOUR_MODEL_ID", + * rag: true, + * alias: "YOUR_ALIAS", + * webhookIds: ["YOUR_WEBHOOK_ID_1", "YOUR_WEBHOOK_ID_2"], + * pollingOptions: { + * initialDelaySec: 2, + * delaySec: 1.5, + * } + * }; + */ +export abstract class BaseParameters { + /** + * Model ID to use for the inference. **Required.** + */ + modelId: string; + /** + * Use an alias to link the file to your own DB. + * If empty, no alias will be used. + */ + alias?: string; + /** + * Webhook IDs to call after all processing is finished. + * If empty, no webhooks will be used. + */ + webhookIds?: string[]; + /** + * Client-side polling configuration (see {@link PollingOptions}). + */ + pollingOptions?: PollingOptions; + /** + * By default, the file is closed once the upload is finished. + * Set to `false` to keep it open. + */ + closeFile?: boolean; + + protected constructor(params: BaseParametersConstructor) { + if (params.modelId === undefined || params.modelId === null || params.modelId === "") { + throw new MindeeConfigurationError("Model ID must be provided"); + } + this.modelId = params.modelId; + this.alias = params.alias; + this.webhookIds = params.webhookIds; + this.closeFile = params.closeFile; + this.pollingOptions = params.pollingOptions; + } + + /** + * Checks the values for asynchronous parsing. Returns their corrected value if they are undefined. + * @returns A valid `AsyncOptions`. 
+ */ + getValidatedPollingOptions(): ValidatedPollingOptions { + const minDelaySec = 1; + const minInitialDelay = 1; + const minRetries = 2; + let newAsyncParams: PollingOptions; + if (this.pollingOptions === undefined) { + newAsyncParams = { + delaySec: 1.5, + initialDelaySec: 2, + maxRetries: 80 + }; + } else { + newAsyncParams = { ...this.pollingOptions }; + if ( + !newAsyncParams.delaySec || + !newAsyncParams.initialDelaySec || + !newAsyncParams.maxRetries + ) { + throw Error("Invalid polling options."); + } + if (newAsyncParams.delaySec < minDelaySec) { + throw Error(`Cannot set auto-parsing delay to less than ${minDelaySec} second(s).`); + } + if (newAsyncParams.initialDelaySec < minInitialDelay) { + throw Error(`Cannot set initial parsing delay to less than ${minInitialDelay} second(s).`); + } + if (newAsyncParams.maxRetries < minRetries) { + throw Error(`Cannot set retry to less than ${minRetries}.`); + } + } + return newAsyncParams as ValidatedPollingOptions; + } + + /** + * Returns the form data to send to the API. + * @returns A `FormData` object. + */ + getFormData(): FormData { + const form = new FormData(); + + form.set("model_id", this.modelId); + + if (this.alias !== undefined && this.alias !== null) { + form.set("alias", this.alias); + } + if (this.webhookIds && this.webhookIds.length > 0) { + form.set("webhook_ids", this.webhookIds.join(",")); + } + return form; + } +} diff --git a/src/v2/client/index.ts b/src/v2/client/index.ts new file mode 100644 index 000000000..a625efc0d --- /dev/null +++ b/src/v2/client/index.ts @@ -0,0 +1,2 @@ +export type { PollingOptions, ValidatedPollingOptions } from "./pollingOptions.js"; +export { BaseParameters } from "./baseParameters.js"; diff --git a/src/v2/client/pollingOptions.ts b/src/v2/client/pollingOptions.ts new file mode 100644 index 000000000..917632849 --- /dev/null +++ b/src/v2/client/pollingOptions.ts @@ -0,0 +1,50 @@ +/** + * Parameters for the internal polling loop in `enqueueAndGetInference()`. 
+ * + * Default behavior: + * - `initialDelaySec` = 2s + * - `delaySec` = 1.5s + * - `maxRetries` = 80 + * + * Validation rules: + * - `initialDelaySec` >= 1 + * - `delaySec` >= 1 + * - `maxRetries` >= 2 + * + * The `initialTimerOptions` and `recurringTimerOptions` objects let you pass an + * `AbortSignal` or make the timer `unref`-ed to the `setTimeout()`. + * + * @category ClientV2 + * @example + * const params = { + * initialDelaySec: 4, + * delaySec: 2, + * maxRetries: 50 + * }; + * + * const inference = await client.enqueueAndGetInference(inputDoc, params); + */ +export interface PollingOptions { + /** Number of seconds to wait *before the first poll*. */ + initialDelaySec?: number; + /** Interval in seconds between two consecutive polls. */ + delaySec?: number; + /** Maximum number of polling attempts (including the first one). */ + maxRetries?: number; + /** Options passed to the initial `setTimeout()`. */ + initialTimerOptions?: { + ref?: boolean, + signal?: AbortSignal + }; + /** Options passed to every recurring `setTimeout()`. 
*/ + recurringTimerOptions?: { + ref?: boolean, + signal?: AbortSignal + } +} + +export interface ValidatedPollingOptions extends PollingOptions { + initialDelaySec: number; + delaySec: number; + maxRetries: number; +} diff --git a/src/http/apiSettingsV2.ts b/src/v2/http/apiSettingsV2.ts similarity index 51% rename from src/http/apiSettingsV2.ts rename to src/v2/http/apiSettingsV2.ts index 49cf7d4c3..1fb295d79 100644 --- a/src/http/apiSettingsV2.ts +++ b/src/v2/http/apiSettingsV2.ts @@ -1,43 +1,37 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { logger } from "../logger"; -import { BaseSettings, MindeeApiConstructorProps } from "./baseSettings"; -import { MindeeApiV2Error } from "../errors/mindeeError"; +import { logger } from "@/logger.js"; +import { BaseSettings, MindeeApiConstructorProps } from "@/http/baseSettings.js"; +import { MindeeConfigurationError } from "@/errors/index.js"; -export const API_V2_KEY_ENVVAR_NAME: string = "MINDEE_V2_API_KEY"; -export const API_V2_HOST_ENVVAR_NAME: string = "MINDEE_V2_API_HOST"; +const API_V2_KEY_ENVVAR_NAME: string = "MINDEE_V2_API_KEY"; +const API_V2_HOST_ENVVAR_NAME: string = "MINDEE_V2_API_HOST"; const DEFAULT_MINDEE_API_HOST: string = "api-v2.mindee.net"; export class ApiSettingsV2 extends BaseSettings { - apiKey: string; baseHeaders: Record; constructor({ - apiKey = "", + apiKey, + dispatcher, }: MindeeApiConstructorProps) { - super(); - if (!apiKey || apiKey.length === 0) { - this.apiKey = this.apiKeyFromEnv(); - } else { - this.apiKey = apiKey; - } + super(apiKey, dispatcher); if (!this.apiKey || this.apiKey.length === 0) { - throw new MindeeApiV2Error( - "Your API V2 key could not be set, check your Client Configuration\n." + throw new MindeeConfigurationError( + "Your V2 API key could not be set, check your Client Configuration\n." 
+ `You can set this using the ${API_V2_KEY_ENVVAR_NAME} environment variable.` ); } + /* eslint-disable @typescript-eslint/naming-convention */ this.baseHeaders = { "User-Agent": this.getUserAgent(), - Authorization: `${apiKey}`, + Authorization: `${this.apiKey}`, }; } - protected apiKeyFromEnv(): string { const envVarValue = process.env[API_V2_KEY_ENVVAR_NAME]; if (envVarValue) { logger.debug( - `Set API key from environment: ${API_V2_KEY_ENVVAR_NAME}` + "Set the V2 API key from the environment" ); return envVarValue; } @@ -47,11 +41,10 @@ export class ApiSettingsV2 extends BaseSettings { protected hostnameFromEnv(): string { const envVarValue = process.env[API_V2_HOST_ENVVAR_NAME]; if (envVarValue) { - logger.debug(`Set the API hostname to ${envVarValue}`); + logger.debug(`Set the V2 API hostname from the environment to: ${envVarValue}`); return envVarValue; } return DEFAULT_MINDEE_API_HOST; } - } diff --git a/src/v2/http/errors.ts b/src/v2/http/errors.ts new file mode 100644 index 000000000..9c8f05c6c --- /dev/null +++ b/src/v2/http/errors.ts @@ -0,0 +1,20 @@ +import { ErrorDetails, ErrorItem, ErrorResponse } from "@/v2/parsing/index.js"; +import { MindeeError } from "@/errors/index.js"; + +export class MindeeHttpErrorV2 extends MindeeError implements ErrorDetails { + public status: number; + public detail: string; + public title: string; + public code: string; + public errors: ErrorItem[]; + + constructor(error: ErrorResponse) { + super(`HTTP ${error.status} - ${error.title} :: ${error.code} - ${error.detail}`); + this.status = error.status; + this.detail = error.detail; + this.title = error.title; + this.code = error.code; + this.errors = error.errors; + this.name = "MindeeHttpErrorV2"; + } +} diff --git a/src/v2/http/index.ts b/src/v2/http/index.ts new file mode 100644 index 000000000..73bd7b803 --- /dev/null +++ b/src/v2/http/index.ts @@ -0,0 +1,2 @@ +export { MindeeHttpErrorV2 } from "./errors.js"; + diff --git a/src/v2/http/mindeeApiV2.ts 
b/src/v2/http/mindeeApiV2.ts new file mode 100644 index 000000000..f251c7f04 --- /dev/null +++ b/src/v2/http/mindeeApiV2.ts @@ -0,0 +1,164 @@ +import { ApiSettingsV2 } from "./apiSettingsV2.js"; +import { Dispatcher } from "undici"; +import { BaseParameters } from "@/v2/client/index.js"; +import { + BaseResponse, + ErrorResponse, + ResponseConstructor, + JobResponse, +} from "@/v2/parsing/index.js"; +import { sendRequestAndReadResponse, BaseHttpResponse } from "@/http/apiCore.js"; +import { InputSource, LocalInputSource, UrlInput } from "@/input/index.js"; +import { MindeeDeserializationError } from "@/errors/index.js"; +import { MindeeHttpErrorV2 } from "./errors.js"; +import { logger } from "@/logger.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; + + +export class MindeeApiV2 { + settings: ApiSettingsV2; + + constructor(dispatcher?: Dispatcher, apiKey?: string) { + this.settings = new ApiSettingsV2({ dispatcher: dispatcher, apiKey: apiKey }); + } + + /** + * Sends a file to the extraction inference queue. + * @param product product to enqueue. + * @param inputSource Local file loaded as an input. + * @param params {ExtractionParameters} parameters relating to the enqueueing options. + * @category V2 + * @throws Error if the server's response contains one. + * @returns a `Promise` containing a job response. + */ + async enqueueProduct( + product: typeof BaseProduct, + inputSource: InputSource, + params: BaseParameters + ): Promise { + await inputSource.init(); + const result: BaseHttpResponse = await this.#reqPostProductEnqueue( + product, inputSource, params + ); + if (result.data.error !== undefined) { + throw new MindeeHttpErrorV2(result.data.error); + } + return this.#processResponse(result, JobResponse); + } + + /** + * Requests the results of a queued document from the API. + * Throws an error if the server's response contains one. + * @param jobId The document's ID in the queue. 
+ * @category Asynchronous + * @returns a `Promise` containing information on the queue. + */ + async getJob(jobId: string): Promise { + const response = await this.#reqGetJob(jobId); + return this.#processResponse(response, JobResponse); + } + + /** + * Requests the job of a queued document from the API. + * Throws an error if the server's response contains one. + * @param product + * @param inferenceId The document's ID in the queue. + * @category Asynchronous + * @returns a `Promise` containing either the parsed result, or information on the queue. + */ + async getProductResult

( + product: P, + inferenceId: string, + ): Promise> { + const queueResponse: BaseHttpResponse = await this.#reqGetProductResult( + inferenceId, product.slug + ); + return this.#processResponse(queueResponse, product.responseClass) as InstanceType; + } + + #processResponse( + result: BaseHttpResponse, + responseClass: ResponseConstructor, + ): T { + if (result.messageObj?.statusCode && (result.messageObj?.statusCode > 399 || result.messageObj?.statusCode < 200)) { + if (result.data?.status !== null) { + throw new MindeeHttpErrorV2(new ErrorResponse(result.data)); + } + throw new MindeeHttpErrorV2( + new ErrorResponse( + { + status: result.messageObj?.statusCode ?? -1, + title: "Unknown Error", + detail: result.data?.detail ?? "The server returned an Unknown error.", + code: `${result.messageObj?.statusCode ?? -1}-000`, + } + ) + ); + } + try { + return new responseClass(result.data); + } catch (e) { + logger.error(`Raised '${e}' Couldn't deserialize response object:\n${JSON.stringify(result.data)}`); + throw new MindeeDeserializationError("Couldn't deserialize response object."); + } + } + + /** + * Sends a document to the inference queue. + * + * @param product Product to enqueue. + * @param inputSource Local or remote file as an input. + * @param params {ExtractionParameters} parameters relating to the enqueueing options. 
+ */ + async #reqPostProductEnqueue( + product: typeof BaseProduct, + inputSource: InputSource, + params: BaseParameters + ): Promise { + const form = params.getFormData(); + if (inputSource instanceof LocalInputSource) { + form.set("file", new Blob([inputSource.fileObject]), inputSource.filename); + } else { + form.set("url", (inputSource as UrlInput).url); + } + const path = `/v2/products/${product.slug}/enqueue`; + const options = { + method: "POST", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: path, + body: form, + timeout: this.settings.timeout, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); + } + + async #reqGetJob(jobId: string): Promise { + const options = { + method: "GET", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: `/v2/jobs/${jobId}`, + timeout: this.settings.timeout, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); + } + + /** + * Make a request to GET the status of a document in the queue. + * @param inferenceId ID of the inference. + * @param slug "jobs" or "inferences"... + * @category Asynchronous + * @returns a `Promise` containing either the parsed result, or information on the queue. 
+ */ + async #reqGetProductResult(inferenceId: string, slug: string): Promise { + const options = { + method: "GET", + headers: this.settings.baseHeaders, + hostname: this.settings.hostname, + path: `/v2/products/${slug}/results/${inferenceId}`, + timeout: this.settings.timeout, + }; + return await sendRequestAndReadResponse(this.settings.dispatcher, options); + } +} diff --git a/src/v2/index.ts b/src/v2/index.ts new file mode 100644 index 000000000..384491f19 --- /dev/null +++ b/src/v2/index.ts @@ -0,0 +1,12 @@ +export * as http from "./http/index.js"; +export * as parsing from "./parsing/index.js"; +export * as product from "./product/index.js"; +export { LocalResponse } from "./parsing/localResponse.js"; +export { Client } from "./client.js"; +export { + InferenceFile, + InferenceModel, + JobResponse, + ErrorResponse, +} from "./parsing/index.js"; +export type { PollingOptions } from "./client/index.js"; diff --git a/src/parsing/v2/commonResponse.ts b/src/v2/parsing/baseResponse.ts similarity index 59% rename from src/parsing/v2/commonResponse.ts rename to src/v2/parsing/baseResponse.ts index 824f3e6b1..994e3a2f4 100644 --- a/src/parsing/v2/commonResponse.ts +++ b/src/v2/parsing/baseResponse.ts @@ -1,7 +1,7 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; +import { logger } from "@/logger.js"; - -export abstract class CommonResponse { +export abstract class BaseResponse { /** * Raw text representation of the API's response. 
*/ @@ -12,6 +12,7 @@ export abstract class CommonResponse { */ protected constructor(serverResponse: StringDict) { this.rawHttp = serverResponse; + logger.debug("Constructing response instance from plain object."); } /** @@ -22,3 +23,5 @@ export abstract class CommonResponse { return this.rawHttp; } } + +export type ResponseConstructor = new (serverResponse: StringDict) => T; diff --git a/src/v2/parsing/error/errorDetails.ts b/src/v2/parsing/error/errorDetails.ts new file mode 100644 index 000000000..6989e1ef5 --- /dev/null +++ b/src/v2/parsing/error/errorDetails.ts @@ -0,0 +1,24 @@ +import { ErrorItem } from "./errorItem.js"; + +export interface ErrorDetails { + /** + * The HTTP status code returned by the server. + */ + status: number; + /** + * A human-readable explanation specific to the occurrence of the problem. + */ + detail: string; + /** + * A short, human-readable summary of the problem. + */ + title: string; + /** + * A machine-readable code specific to the occurrence of the problem. + */ + code: string; + /** + * A list of explicit error details. + */ + errors: ErrorItem[]; +} diff --git a/src/parsing/v2/errorItem.ts b/src/v2/parsing/error/errorItem.ts similarity index 90% rename from src/parsing/v2/errorItem.ts rename to src/v2/parsing/error/errorItem.ts index b66f49568..4ba4678b1 100644 --- a/src/parsing/v2/errorItem.ts +++ b/src/v2/parsing/error/errorItem.ts @@ -1,4 +1,4 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; /** * Explicit details on a problem. 
diff --git a/src/parsing/v2/errorResponse.ts b/src/v2/parsing/error/errorResponse.ts similarity index 52% rename from src/parsing/v2/errorResponse.ts rename to src/v2/parsing/error/errorResponse.ts index d23afec3d..a2df695b6 100644 --- a/src/parsing/v2/errorResponse.ts +++ b/src/v2/parsing/error/errorResponse.ts @@ -1,33 +1,12 @@ -import { StringDict } from "../common"; -import { ErrorItem } from "./errorItem"; - -export interface ErrorDetails { - /** - * The HTTP status code returned by the server. - */ - status: number; - /** - * A human-readable explanation specific to the occurrence of the problem. - */ - detail: string; - /** - * A short, human-readable summary of the problem. - */ - title: string; - /** - * A machine-readable code specific to the occurrence of the problem. - */ - code: string; - /** - * A list of explicit error details. - */ - errors: ErrorItem[]; -} +import { StringDict } from "@/parsing/stringDict.js"; +import { ErrorItem } from "./errorItem.js"; +import { ErrorDetails } from "./errorDetails.js"; +import { BaseResponse } from "@/v2/parsing/baseResponse.js"; /** * Error response detailing a problem. The format adheres to RFC 9457. */ -export class ErrorResponse implements ErrorDetails { +export class ErrorResponse extends BaseResponse implements ErrorDetails { status: number; detail: string; title: string; @@ -38,6 +17,7 @@ export class ErrorResponse implements ErrorDetails { * @param serverResponse JSON response from the server. 
*/ constructor(serverResponse: StringDict) { + super(serverResponse); this.status = serverResponse["status"]; this.detail = serverResponse["detail"]; this.title = serverResponse["title"]; diff --git a/src/v2/parsing/error/index.ts b/src/v2/parsing/error/index.ts new file mode 100644 index 000000000..93166e73d --- /dev/null +++ b/src/v2/parsing/error/index.ts @@ -0,0 +1,3 @@ +export { ErrorResponse } from "./errorResponse.js"; +export { ErrorItem } from "./errorItem.js"; +export type { ErrorDetails } from "./errorDetails.js"; diff --git a/src/v2/parsing/index.ts b/src/v2/parsing/index.ts new file mode 100644 index 000000000..b449079d2 --- /dev/null +++ b/src/v2/parsing/index.ts @@ -0,0 +1,19 @@ +export { + ErrorResponse, + ErrorItem, +} from "./error/index.js"; +export type { ErrorDetails } from "./error/index.js"; +export { + Job, + JobResponse, + JobWebhook +} from "./job/index.js"; +export { + BaseInference, + InferenceFile, + InferenceModel, +} from "./inference/index.js"; +export { LocalResponse } from "./localResponse.js"; +export { BaseResponse } from "./baseResponse.js"; +export type { ResponseConstructor } from "./baseResponse.js"; +export * as field from "./inference/field/index.js"; diff --git a/src/v2/parsing/inference/baseInference.ts b/src/v2/parsing/inference/baseInference.ts new file mode 100644 index 000000000..e0d2c5381 --- /dev/null +++ b/src/v2/parsing/inference/baseInference.ts @@ -0,0 +1,33 @@ +import { InferenceModel } from "./inferenceModel.js"; +import { InferenceFile } from "@/v2/index.js"; +import { StringDict } from "@/parsing/index.js"; + +export abstract class BaseInference { + /** + * Model info for the inference. + */ + public model: InferenceModel; + /** + * File info for the inference. + */ + public file: InferenceFile; + /** + * ID of the inference. 
+ */ + public id: string; + + protected constructor(serverResponse: StringDict) { + this.id = serverResponse["id"]; + this.model = new InferenceModel(serverResponse["model"]); + this.file = new InferenceFile(serverResponse["file"]); + } + + toString(): string { + return ( + "Inference\n" + + "#########\n" + + this.model.toString() + "\n" + + this.file.toString() + "\n" + ); + } +} diff --git a/src/parsing/v2/field/baseField.ts b/src/v2/parsing/inference/field/baseField.ts similarity index 80% rename from src/parsing/v2/field/baseField.ts rename to src/v2/parsing/inference/field/baseField.ts index 7d1f1ecab..d1fc37478 100644 --- a/src/parsing/v2/field/baseField.ts +++ b/src/v2/parsing/inference/field/baseField.ts @@ -1,6 +1,6 @@ -import { FieldConfidence } from "./fieldConfidence"; -import { StringDict } from "../../common"; -import { FieldLocation } from "./fieldLocation"; +import { FieldConfidence } from "./fieldConfidence.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { FieldLocation } from "./fieldLocation.js"; export abstract class BaseField { protected _indentLevel: number; diff --git a/src/parsing/v2/field/fieldConfidence.ts b/src/v2/parsing/inference/field/fieldConfidence.ts similarity index 100% rename from src/parsing/v2/field/fieldConfidence.ts rename to src/v2/parsing/inference/field/fieldConfidence.ts diff --git a/src/parsing/v2/field/fieldFactory.ts b/src/v2/parsing/inference/field/fieldFactory.ts similarity index 64% rename from src/parsing/v2/field/fieldFactory.ts rename to src/v2/parsing/inference/field/fieldFactory.ts index afc2e46fa..b5f539d8a 100644 --- a/src/parsing/v2/field/fieldFactory.ts +++ b/src/v2/parsing/inference/field/fieldFactory.ts @@ -1,15 +1,15 @@ /** * Factory helper. 
*/ -import { StringDict } from "../../common"; -import { MindeeApiV2Error } from "../../../errors/mindeeError"; -import { ListField } from "./listField"; -import { ObjectField } from "./objectField"; -import { SimpleField } from "./simpleField"; +import { StringDict } from "@/parsing/stringDict.js"; +import { MindeeDeserializationError } from "@/errors/index.js"; +import { ListField } from "./listField.js"; +import { ObjectField } from "./objectField.js"; +import { SimpleField } from "./simpleField.js"; export function createField(serverResponse: StringDict, indentLevel = 0) { if (typeof serverResponse !== "object" || serverResponse === null) { - throw new MindeeApiV2Error( + throw new MindeeDeserializationError( `Unrecognized field format ${JSON.stringify(serverResponse)}.` ); } @@ -26,7 +26,7 @@ export function createField(serverResponse: StringDict, indentLevel = 0) { return new SimpleField(serverResponse, indentLevel); } - throw new MindeeApiV2Error( + throw new MindeeDeserializationError( `Unrecognized field format in ${JSON.stringify(serverResponse)}.` ); } diff --git a/src/parsing/v2/field/fieldLocation.ts b/src/v2/parsing/inference/field/fieldLocation.ts similarity index 68% rename from src/parsing/v2/field/fieldLocation.ts rename to src/v2/parsing/inference/field/fieldLocation.ts index 9a291d963..dbb314af2 100644 --- a/src/parsing/v2/field/fieldLocation.ts +++ b/src/v2/parsing/inference/field/fieldLocation.ts @@ -1,5 +1,5 @@ -import { Polygon } from "../../../geometry"; -import { StringDict } from "../../common"; +import { Polygon } from "@/geometry/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; /** * Location of a field. @@ -12,7 +12,7 @@ export class FieldLocation { readonly page: number | undefined; constructor(serverResponse: StringDict) { - this.polygon = serverResponse["polygon"] as Polygon; + this.polygon = "polygon" in serverResponse ? new Polygon(...serverResponse["polygon"]) : null; this.page = "page" in serverResponse ? 
serverResponse["page"] : undefined; } diff --git a/src/v2/parsing/inference/field/index.ts b/src/v2/parsing/inference/field/index.ts new file mode 100644 index 000000000..da7d5b53e --- /dev/null +++ b/src/v2/parsing/inference/field/index.ts @@ -0,0 +1,8 @@ +export { InferenceFields } from "./inferenceFields.js"; +export { FieldConfidence } from "./fieldConfidence.js"; +export { FieldLocation } from "./fieldLocation.js"; +export { ListField } from "./listField.js"; +export { ObjectField } from "./objectField.js"; +export { SimpleField } from "./simpleField.js"; +export { RawText } from "./rawText.js"; +export { RagMetadata } from "./ragMetadata.js"; diff --git a/src/parsing/v2/field/inferenceFields.ts b/src/v2/parsing/inference/field/inferenceFields.ts similarity index 89% rename from src/parsing/v2/field/inferenceFields.ts rename to src/v2/parsing/inference/field/inferenceFields.ts index d2f92c115..af429761e 100644 --- a/src/parsing/v2/field/inferenceFields.ts +++ b/src/v2/parsing/inference/field/inferenceFields.ts @@ -1,8 +1,8 @@ -import { StringDict } from "../../common"; -import type { ListField } from "./listField"; -import type { ObjectField } from "./objectField"; -import type { SimpleField } from "./simpleField"; -import { createField } from "./fieldFactory"; +import { StringDict } from "@/parsing/stringDict.js"; +import type { ListField } from "./listField.js"; +import type { ObjectField } from "./objectField.js"; +import type { SimpleField } from "./simpleField.js"; +import { createField } from "./fieldFactory.js"; export class InferenceFields extends Map { diff --git a/src/parsing/v2/field/listField.ts b/src/v2/parsing/inference/field/listField.ts similarity index 79% rename from src/parsing/v2/field/listField.ts rename to src/v2/parsing/inference/field/listField.ts index 160a395a7..0133d3aa2 100644 --- a/src/parsing/v2/field/listField.ts +++ b/src/v2/parsing/inference/field/listField.ts @@ -1,9 +1,9 @@ -import { MindeeApiV2Error } from 
"../../../errors/mindeeError"; -import { StringDict } from "../../common"; -import { BaseField } from "./baseField"; -import { ObjectField } from "./objectField"; -import { SimpleField } from "./simpleField"; -import { createField } from "./fieldFactory"; +import { MindeeDeserializationError } from "@/errors/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { BaseField } from "./baseField.js"; +import { ObjectField } from "./objectField.js"; +import { SimpleField } from "./simpleField.js"; +import { createField } from "./fieldFactory.js"; export class ListField extends BaseField { /** @@ -21,7 +21,7 @@ export class ListField extends BaseField { if (item instanceof SimpleField) { result.push(item); } else { - throw new MindeeApiV2Error( + throw new MindeeDeserializationError( `All items must be SimpleField, found item of type ${item.constructor.name}.` ); } @@ -39,7 +39,7 @@ export class ListField extends BaseField { if (item instanceof ObjectField) { result.push(item); } else { - throw new MindeeApiV2Error( + throw new MindeeDeserializationError( `All items must be ObjectField, found item of type ${item.constructor.name}.` ); } @@ -51,7 +51,7 @@ export class ListField extends BaseField { super(serverResponse, indentLevel); if (!Array.isArray(serverResponse["items"])) { - throw new MindeeApiV2Error( + throw new MindeeDeserializationError( `Expected "items" to be an array in ${JSON.stringify(serverResponse)}.` ); } diff --git a/src/parsing/v2/field/objectField.ts b/src/v2/parsing/inference/field/objectField.ts similarity index 94% rename from src/parsing/v2/field/objectField.ts rename to src/v2/parsing/inference/field/objectField.ts index d94c69697..2bb9a0a66 100644 --- a/src/parsing/v2/field/objectField.ts +++ b/src/v2/parsing/inference/field/objectField.ts @@ -1,8 +1,8 @@ -import { InferenceFields } from "./inferenceFields"; -import { StringDict } from "../../common"; -import { BaseField } from "./baseField"; -import type { SimpleField } 
from "./simpleField"; -import type { ListField } from "./listField"; +import { InferenceFields } from "./inferenceFields.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { BaseField } from "./baseField.js"; +import type { SimpleField } from "./simpleField.js"; +import type { ListField } from "./listField.js"; export class ObjectField extends BaseField { readonly fields: InferenceFields; diff --git a/src/parsing/v2/ragMetadata.ts b/src/v2/parsing/inference/field/ragMetadata.ts similarity index 83% rename from src/parsing/v2/ragMetadata.ts rename to src/v2/parsing/inference/field/ragMetadata.ts index 0195b7d4d..9eb8a4173 100644 --- a/src/parsing/v2/ragMetadata.ts +++ b/src/v2/parsing/inference/field/ragMetadata.ts @@ -1,4 +1,4 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; export class RagMetadata { /** diff --git a/src/parsing/v2/rawText.ts b/src/v2/parsing/inference/field/rawText.ts similarity index 80% rename from src/parsing/v2/rawText.ts rename to src/v2/parsing/inference/field/rawText.ts index d1241394a..46ccd1244 100644 --- a/src/parsing/v2/rawText.ts +++ b/src/v2/parsing/inference/field/rawText.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../common"; -import { RawTextPage } from "./rawTextPage"; +import { StringDict } from "@/parsing/stringDict.js"; +import { RawTextPage } from "./rawTextPage.js"; export class RawText { /** diff --git a/src/parsing/v2/rawTextPage.ts b/src/v2/parsing/inference/field/rawTextPage.ts similarity index 83% rename from src/parsing/v2/rawTextPage.ts rename to src/v2/parsing/inference/field/rawTextPage.ts index df5c3e76a..822a8c701 100644 --- a/src/parsing/v2/rawTextPage.ts +++ b/src/v2/parsing/inference/field/rawTextPage.ts @@ -1,4 +1,4 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; export class RawTextPage { /** diff --git a/src/parsing/v2/field/simpleField.ts b/src/v2/parsing/inference/field/simpleField.ts 
similarity index 93% rename from src/parsing/v2/field/simpleField.ts rename to src/v2/parsing/inference/field/simpleField.ts index 78a1ae4e0..eff4d1038 100644 --- a/src/parsing/v2/field/simpleField.ts +++ b/src/v2/parsing/inference/field/simpleField.ts @@ -1,5 +1,5 @@ -import { StringDict } from "../../common"; -import { BaseField } from "./baseField"; +import { StringDict } from "@/parsing/stringDict.js"; +import { BaseField } from "./baseField.js"; export class SimpleField extends BaseField { readonly value: string | number | boolean | null; diff --git a/src/v2/parsing/inference/index.ts b/src/v2/parsing/inference/index.ts new file mode 100644 index 000000000..4a2044c5a --- /dev/null +++ b/src/v2/parsing/inference/index.ts @@ -0,0 +1,4 @@ +export { BaseInference } from "./baseInference.js"; +export { InferenceFile } from "./inferenceFile.js"; +export { InferenceModel } from "./inferenceModel.js"; +export * as field from "./field/index.js"; diff --git a/src/parsing/v2/inferenceFile.ts b/src/v2/parsing/inference/inferenceFile.ts similarity index 93% rename from src/parsing/v2/inferenceFile.ts rename to src/v2/parsing/inference/inferenceFile.ts index 8e9998f4e..a8b212710 100644 --- a/src/parsing/v2/inferenceFile.ts +++ b/src/v2/parsing/inference/inferenceFile.ts @@ -1,4 +1,4 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; export class InferenceFile { /** diff --git a/src/parsing/v2/inferenceModel.ts b/src/v2/parsing/inference/inferenceModel.ts similarity index 83% rename from src/parsing/v2/inferenceModel.ts rename to src/v2/parsing/inference/inferenceModel.ts index 6577f1dca..4d45be80f 100644 --- a/src/parsing/v2/inferenceModel.ts +++ b/src/v2/parsing/inference/inferenceModel.ts @@ -1,4 +1,4 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; export class InferenceModel { /** diff --git a/src/v2/parsing/job/index.ts b/src/v2/parsing/job/index.ts new file mode 100644 
index 000000000..e67e54a22 --- /dev/null +++ b/src/v2/parsing/job/index.ts @@ -0,0 +1,3 @@ +export { Job } from "./job.js"; +export { JobResponse } from "./jobResponse.js"; +export { JobWebhook } from "./jobWebhook.js"; diff --git a/src/parsing/v2/job.ts b/src/v2/parsing/job/job.ts similarity index 90% rename from src/parsing/v2/job.ts rename to src/v2/parsing/job/job.ts index 7b77349b5..f1262428d 100644 --- a/src/parsing/v2/job.ts +++ b/src/v2/parsing/job/job.ts @@ -1,7 +1,6 @@ -import { StringDict } from "../common"; -import { ErrorResponse } from "./errorResponse"; -import { JobWebhook } from "./jobWebhook"; -import { parseDate } from "../common"; +import { StringDict, parseDate } from "@/parsing/index.js"; +import { ErrorResponse } from "@/v2/index.js"; +import { JobWebhook } from "./jobWebhook.js"; /** * Job information for a V2 polling attempt. diff --git a/src/v2/parsing/job/jobResponse.ts b/src/v2/parsing/job/jobResponse.ts new file mode 100644 index 000000000..2d42a89af --- /dev/null +++ b/src/v2/parsing/job/jobResponse.ts @@ -0,0 +1,15 @@ +import { BaseResponse } from "@/v2/parsing/baseResponse.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { Job } from "./job.js"; + +export class JobResponse extends BaseResponse { + /** + * Job for the polling. + */ + public job: Job; + + constructor(serverResponse: StringDict) { + super(serverResponse); + this.job = new Job(serverResponse["job"]); + } +} diff --git a/src/parsing/v2/jobWebhook.ts b/src/v2/parsing/job/jobWebhook.ts similarity index 84% rename from src/parsing/v2/jobWebhook.ts rename to src/v2/parsing/job/jobWebhook.ts index 399b333b9..48491e95a 100644 --- a/src/parsing/v2/jobWebhook.ts +++ b/src/v2/parsing/job/jobWebhook.ts @@ -1,5 +1,5 @@ -import { ErrorResponse } from "./errorResponse"; -import { StringDict, parseDate } from "../common"; +import { ErrorResponse } from "@/v2/parsing/index.js"; +import { StringDict, parseDate } from "@/parsing/index.js"; /** * JobWebhook information. 
diff --git a/src/v2/parsing/localResponse.ts b/src/v2/parsing/localResponse.ts new file mode 100644 index 000000000..165c75e8b --- /dev/null +++ b/src/v2/parsing/localResponse.ts @@ -0,0 +1,32 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { MindeeError } from "@/errors/index.js"; +import { LocalResponseBase } from "@/parsing/localResponseBase.js"; +import { BaseResponse } from "./baseResponse.js"; + +/** + * Local response loaded from a file. + * Note: Has to be initialized through init() before use. + */ +export class LocalResponse extends LocalResponseBase { + + /** + * Deserialize the loaded local response into the requested BaseResponse-derived class. + * + * Typically used when dealing with V2 webhook callbacks. + * + * @typeParam ResponseT - A class that extends `BaseResponse`. + * @param responseClass - The constructor of the class into which the payload should be deserialized. + * @returns An instance of `responseClass` populated with the file content. + * @throws MindeeError If the provided class cannot be instantiated.
 + */ + public async deserializeResponse<ResponseT extends BaseResponse>( + responseClass: new (serverResponse: StringDict) => ResponseT + ): Promise<ResponseT> { + try { + return new responseClass(await this.asDict()); + } catch (error) { + console.error(error); + throw new MindeeError(`Invalid response provided: ${error}`); + } + } +} diff --git a/src/v2/product/baseProduct.ts b/src/v2/product/baseProduct.ts new file mode 100644 index 000000000..981779cd8 --- /dev/null +++ b/src/v2/product/baseProduct.ts @@ -0,0 +1,14 @@ +import { BaseParameters } from "@/v2/client/index.js"; +import { ResponseConstructor } from "@/v2/parsing/index.js"; + +export abstract class BaseProduct { + static get parametersClass(): new (...args: any[]) => BaseParameters { + throw new Error("Must define static parameters property"); + } + static get responseClass(): ResponseConstructor { + throw new Error("Must define static response property"); + } + static get slug(): string { + throw new Error("Must define static slug property"); + } +} diff --git a/src/v2/product/classification/classification.ts b/src/v2/product/classification/classification.ts new file mode 100644 index 000000000..dcef46268 --- /dev/null +++ b/src/v2/product/classification/classification.ts @@ -0,0 +1,15 @@ +import { ClassificationResponse } from "./classificationResponse.js"; +import { ClassificationParameters } from "./params/index.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; + +export class Classification extends BaseProduct { + static get parametersClass() { + return ClassificationParameters; + } + static get responseClass() { + return ClassificationResponse; + } + static get slug() { + return "classification"; + } +} diff --git a/src/v2/product/classification/classificationClassifier.ts b/src/v2/product/classification/classificationClassifier.ts new file mode 100644 index 000000000..e2d65ce1e --- /dev/null +++ b/src/v2/product/classification/classificationClassifier.ts @@ -0,0 +1,16 @@ +import { StringDict } from "@/parsing/index.js"; + 
+/** + * Document level classification. + */ +export class ClassificationClassifier { + documentType: string; + + constructor(serverResponse: StringDict) { + this.documentType = serverResponse["document_type"]; + } + + toString(): string { + return `Document Type: ${this.documentType}`; + } +} diff --git a/src/v2/product/classification/classificationInference.ts b/src/v2/product/classification/classificationInference.ts new file mode 100644 index 000000000..872fa2ee7 --- /dev/null +++ b/src/v2/product/classification/classificationInference.ts @@ -0,0 +1,22 @@ +import { StringDict } from "@/parsing/index.js"; +import { BaseInference } from "@/v2/parsing/inference/baseInference.js"; +import { ClassificationResult } from "./classificationResult.js"; + +export class ClassificationInference extends BaseInference { + /** + * Result of a classification inference. + */ + result: ClassificationResult; + + constructor(serverResponse: StringDict) { + super(serverResponse); + this.result = new ClassificationResult(serverResponse["result"]); + } + + toString(): string { + return ( + super.toString() + + this.result.toString() + "\n" + ); + } +} diff --git a/src/v2/product/classification/classificationResponse.ts b/src/v2/product/classification/classificationResponse.ts new file mode 100644 index 000000000..f1195ea2a --- /dev/null +++ b/src/v2/product/classification/classificationResponse.ts @@ -0,0 +1,18 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { ClassificationInference } from "./classificationInference.js"; +import { BaseResponse } from "@/v2/parsing/index.js"; + +export class ClassificationResponse extends BaseResponse { + /** + * The inference result for a classification utility request. + */ + inference: ClassificationInference; + + /** + * @param serverResponse JSON response from the server. 
+ */ + constructor(serverResponse: StringDict) { + super(serverResponse); + this.inference = new ClassificationInference(serverResponse["inference"]); + } +} diff --git a/src/v2/product/classification/classificationResult.ts b/src/v2/product/classification/classificationResult.ts new file mode 100644 index 000000000..99f16b08d --- /dev/null +++ b/src/v2/product/classification/classificationResult.ts @@ -0,0 +1,17 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { ClassificationClassifier } from "./classificationClassifier.js"; + +export class ClassificationResult { + /** + * Fields contained in the inference. + */ + public classification: ClassificationClassifier; + + constructor(serverResponse: StringDict) { + this.classification = new ClassificationClassifier(serverResponse["classification"]); + } + + toString(): string { + return `Classification\n==============\n${this.classification}`; + } +} diff --git a/src/v2/product/classification/index.ts b/src/v2/product/classification/index.ts new file mode 100644 index 000000000..fad96ecc8 --- /dev/null +++ b/src/v2/product/classification/index.ts @@ -0,0 +1,6 @@ +export { Classification } from "./classification.js"; +export { ClassificationParameters } from "./params/index.js"; +export { ClassificationResponse } from "./classificationResponse.js"; +export { ClassificationInference } from "./classificationInference.js"; +export { ClassificationResult } from "./classificationResult.js"; +export { ClassificationClassifier } from "./classificationClassifier.js"; diff --git a/src/v2/product/classification/params/classificationParameters.ts b/src/v2/product/classification/params/classificationParameters.ts new file mode 100644 index 000000000..41021cc30 --- /dev/null +++ b/src/v2/product/classification/params/classificationParameters.ts @@ -0,0 +1,26 @@ +import { BaseParameters, BaseParametersConstructor } from "@/v2/client/baseParameters.js"; +import { logger } from "@/logger.js"; + +/** + * Parameters 
accepted by the asynchronous **inference** v2 endpoint. + * + * All fields are optional except `modelId`. + * + * @category ClientV2 + * @example + * const params = { + * modelId: "YOUR_MODEL_ID", + * alias: "YOUR_ALIAS", + * webhookIds: ["YOUR_WEBHOOK_ID_1", "YOUR_WEBHOOK_ID_2"], + * pollingOptions: { + * initialDelaySec: 2, + * delaySec: 1.5, + * } + * }; + */ +export class ClassificationParameters extends BaseParameters { + constructor(params: BaseParametersConstructor & {}) { + super({ ...params }); + logger.debug("Classification parameters initialized."); + } +} diff --git a/src/v2/product/classification/params/index.ts b/src/v2/product/classification/params/index.ts new file mode 100644 index 000000000..60d0d0e56 --- /dev/null +++ b/src/v2/product/classification/params/index.ts @@ -0,0 +1 @@ +export { ClassificationParameters } from "./classificationParameters.js"; diff --git a/src/v2/product/crop/crop.ts b/src/v2/product/crop/crop.ts new file mode 100644 index 000000000..aaf1ade05 --- /dev/null +++ b/src/v2/product/crop/crop.ts @@ -0,0 +1,15 @@ +import { CropResponse } from "./cropResponse.js"; +import { CropParameters } from "./params/index.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; + +export class Crop extends BaseProduct { + static get parametersClass() { + return CropParameters; + } + static get responseClass() { + return CropResponse; + } + static get slug() { + return "crop"; + } +} diff --git a/src/v2/product/crop/cropInference.ts b/src/v2/product/crop/cropInference.ts new file mode 100644 index 000000000..ecfbc7131 --- /dev/null +++ b/src/v2/product/crop/cropInference.ts @@ -0,0 +1,22 @@ +import { StringDict } from "@/parsing/index.js"; +import { BaseInference } from "@/v2/parsing/inference/baseInference.js"; +import { CropResult } from "@/v2/product/crop/cropResult.js"; + +export class CropInference extends BaseInference { + /** + * Result of a crop utility inference. 
+ */ + result: CropResult; + + constructor(serverResponse: StringDict) { + super(serverResponse); + this.result = new CropResult(serverResponse["result"]); + } + + toString(): string { + return ( + super.toString() + + this.result.toString() + "\n" + ); + } +} diff --git a/src/v2/product/crop/cropItem.ts b/src/v2/product/crop/cropItem.ts new file mode 100644 index 000000000..a36c37167 --- /dev/null +++ b/src/v2/product/crop/cropItem.ts @@ -0,0 +1,16 @@ +import { FieldLocation } from "@/v2/parsing/inference/field/index.js"; +import { StringDict } from "@/parsing/index.js"; + +export class CropItem { + objectType: string; + location: FieldLocation; + + constructor(serverResponse: StringDict) { + this.objectType = serverResponse["object_type"]; + this.location = new FieldLocation(serverResponse["location"]); + } + + toString(): string { + return `${this.objectType}: ${this.location}`; + } +} diff --git a/src/v2/product/crop/cropResponse.ts b/src/v2/product/crop/cropResponse.ts new file mode 100644 index 000000000..c50b2518a --- /dev/null +++ b/src/v2/product/crop/cropResponse.ts @@ -0,0 +1,18 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { CropInference } from "./cropInference.js"; +import { BaseResponse } from "@/v2/parsing/index.js"; + +export class CropResponse extends BaseResponse { + /** + * Response for a crop utility inference. + */ + inference: CropInference; + + /** + * @param serverResponse JSON response from the server. 
+ */ + constructor(serverResponse: StringDict) { + super(serverResponse); + this.inference = new CropInference(serverResponse["inference"]); + } +} diff --git a/src/v2/product/crop/cropResult.ts b/src/v2/product/crop/cropResult.ts new file mode 100644 index 000000000..e15dc5278 --- /dev/null +++ b/src/v2/product/crop/cropResult.ts @@ -0,0 +1,18 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { CropItem } from "@/v2/product/crop/cropItem.js"; + +export class CropResult { + /** + * Fields contained in the inference. + */ + public crops: CropItem[] = []; + + constructor(serverResponse: StringDict) { + this.crops = serverResponse["crops"].map((cropItem: StringDict) => new CropItem(cropItem)); + } + + toString(): string { + const crops = this.crops.map(item => item.toString()).join("\n * "); + return `Crop\n====\n * ${crops}`; + } +} diff --git a/src/v2/product/crop/index.ts b/src/v2/product/crop/index.ts new file mode 100644 index 000000000..13fcc6493 --- /dev/null +++ b/src/v2/product/crop/index.ts @@ -0,0 +1,6 @@ +export { Crop } from "./crop.js"; +export { CropParameters } from "./params/index.js"; +export { CropInference } from "./cropInference.js"; +export { CropItem } from "./cropItem.js"; +export { CropResponse } from "./cropResponse.js"; +export { CropResult } from "./cropResult.js"; diff --git a/src/v2/product/crop/params/cropParameters.ts b/src/v2/product/crop/params/cropParameters.ts new file mode 100644 index 000000000..ba53eb1fc --- /dev/null +++ b/src/v2/product/crop/params/cropParameters.ts @@ -0,0 +1,26 @@ +import { BaseParameters, BaseParametersConstructor } from "@/v2/client/baseParameters.js"; +import { logger } from "@/logger.js"; + +/** + * Parameters accepted by the asynchronous **inference** v2 endpoint. + * + * All fields are optional except `modelId`. 
+ * + * @category ClientV2 + * @example + * const params = { + * modelId: "YOUR_MODEL_ID", + * alias: "YOUR_ALIAS", + * webhookIds: ["YOUR_WEBHOOK_ID_1", "YOUR_WEBHOOK_ID_2"], + * pollingOptions: { + * initialDelaySec: 2, + * delaySec: 1.5, + * } + * }; + */ +export class CropParameters extends BaseParameters { + constructor(params: BaseParametersConstructor & {}) { + super({ ...params }); + logger.debug("Crop parameters initialized."); + } +} diff --git a/src/v2/product/crop/params/index.ts b/src/v2/product/crop/params/index.ts new file mode 100644 index 000000000..71b2e6e46 --- /dev/null +++ b/src/v2/product/crop/params/index.ts @@ -0,0 +1 @@ +export { CropParameters } from "./cropParameters.js"; diff --git a/src/parsing/v2/dataSchemaActiveOption.ts b/src/v2/product/extraction/dataSchemaActiveOption.ts similarity index 87% rename from src/parsing/v2/dataSchemaActiveOption.ts rename to src/v2/product/extraction/dataSchemaActiveOption.ts index ded792710..841241ac4 100644 --- a/src/parsing/v2/dataSchemaActiveOption.ts +++ b/src/v2/product/extraction/dataSchemaActiveOption.ts @@ -1,4 +1,4 @@ -import { StringDict } from "../common"; +import { StringDict } from "@/parsing/stringDict.js"; /** * Data schema options activated during the inference. 
diff --git a/src/v2/product/extraction/extraction.ts b/src/v2/product/extraction/extraction.ts new file mode 100644 index 000000000..96febbc2d --- /dev/null +++ b/src/v2/product/extraction/extraction.ts @@ -0,0 +1,15 @@ +import { ExtractionResponse } from "./extractionResponse.js"; +import { ExtractionParameters } from "./params/index.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; + +export class Extraction extends BaseProduct { + static get parametersClass() { + return ExtractionParameters; + } + static get responseClass() { + return ExtractionResponse; + } + static get slug() { + return "extraction"; + } +} diff --git a/src/parsing/v2/inferenceActiveOptions.ts b/src/v2/product/extraction/extractionActiveOptions.ts similarity index 93% rename from src/parsing/v2/inferenceActiveOptions.ts rename to src/v2/product/extraction/extractionActiveOptions.ts index 5223dd021..7846205fd 100644 --- a/src/parsing/v2/inferenceActiveOptions.ts +++ b/src/v2/product/extraction/extractionActiveOptions.ts @@ -1,7 +1,7 @@ -import { StringDict } from "../common"; -import { DataSchemaActiveOption } from "./dataSchemaActiveOption"; +import { StringDict } from "@/parsing/stringDict.js"; +import { DataSchemaActiveOption } from "./dataSchemaActiveOption.js"; -export class InferenceActiveOptions { +export class ExtractionActiveOptions { /** * Whether the RAG feature was activated. 
*/ diff --git a/src/v2/product/extraction/extractionInference.ts b/src/v2/product/extraction/extractionInference.ts new file mode 100644 index 000000000..aca2702c9 --- /dev/null +++ b/src/v2/product/extraction/extractionInference.ts @@ -0,0 +1,29 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { ExtractionResult } from "./extractionResult.js"; +import { ExtractionActiveOptions } from "./extractionActiveOptions.js"; +import { BaseInference } from "@/v2/parsing/inference/baseInference.js"; + +export class ExtractionInference extends BaseInference { + /** + * Result of the inference. + */ + public result: ExtractionResult; + /** + * Active options for the inference. + */ + public activeOptions: ExtractionActiveOptions; + + constructor(serverResponse: StringDict) { + super(serverResponse); + this.result = new ExtractionResult(serverResponse["result"]); + this.activeOptions = new ExtractionActiveOptions(serverResponse["active_options"]); + } + + toString(): string { + return ( + super.toString() + + this.activeOptions.toString() + "\n" + + this.result.toString() + "\n" + ); + } +} diff --git a/src/v2/product/extraction/extractionResponse.ts b/src/v2/product/extraction/extractionResponse.ts new file mode 100644 index 000000000..39e637b10 --- /dev/null +++ b/src/v2/product/extraction/extractionResponse.ts @@ -0,0 +1,18 @@ +import { ExtractionInference } from "./extractionInference.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { BaseResponse } from "@/v2/parsing/index.js"; + +export class ExtractionResponse extends BaseResponse { + /** + * The inference result for an extraction request. + */ + inference: ExtractionInference; + + /** + * @param serverResponse JSON response from the server. 
+ */ + constructor(serverResponse: StringDict) { + super(serverResponse); + this.inference = new ExtractionInference(serverResponse["inference"]); + } +} diff --git a/src/parsing/v2/inferenceResult.ts b/src/v2/product/extraction/extractionResult.ts similarity index 67% rename from src/parsing/v2/inferenceResult.ts rename to src/v2/product/extraction/extractionResult.ts index 1468234f1..8e3ce1b0b 100644 --- a/src/parsing/v2/inferenceResult.ts +++ b/src/v2/product/extraction/extractionResult.ts @@ -1,9 +1,9 @@ -import { InferenceFields } from "./field"; -import { StringDict } from "../common"; -import { RawText } from "./rawText"; -import { RagMetadata } from "./ragMetadata"; +import { InferenceFields } from "@/v2/parsing/inference/field/index.js"; +import { StringDict } from "@/parsing/stringDict.js"; +import { RawText } from "@/v2/parsing/inference/field/rawText.js"; +import { RagMetadata } from "@/v2/parsing/inference/field/ragMetadata.js"; -export class InferenceResult { +export class ExtractionResult { /** * Fields contained in the inference. 
*/ diff --git a/src/v2/product/extraction/index.ts b/src/v2/product/extraction/index.ts new file mode 100644 index 000000000..fd6960081 --- /dev/null +++ b/src/v2/product/extraction/index.ts @@ -0,0 +1,8 @@ +export { Extraction } from "./extraction.js"; +export { ExtractionParameters } from "./params/index.js"; +export * as params from "./params/index.js"; +export { ExtractionInference } from "./extractionInference.js"; +export { ExtractionActiveOptions } from "./extractionActiveOptions.js"; +export { ExtractionResponse } from "./extractionResponse.js"; +export { ExtractionResult } from "./extractionResult.js"; +export { DataSchemaActiveOption } from "./dataSchemaActiveOption.js"; diff --git a/src/v2/product/extraction/params/dataSchema.ts b/src/v2/product/extraction/params/dataSchema.ts new file mode 100644 index 000000000..29a23254e --- /dev/null +++ b/src/v2/product/extraction/params/dataSchema.ts @@ -0,0 +1,29 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { DataSchemaReplace } from "./dataSchemaReplace.js"; + +/** + * Modify the Data Schema. + */ +export class DataSchema { + /** + * If set, completely replaces the data schema of the model. 
+ */ + replace?: DataSchemaReplace; + + constructor(dataSchema: StringDict | string) { + if (typeof dataSchema === "string") { + this.replace = new DataSchemaReplace(JSON.parse(dataSchema)["replace"]); + } else if (dataSchema instanceof DataSchema) { + this.replace = dataSchema.replace; + } else { + this.replace = new DataSchemaReplace(dataSchema["replace"] as StringDict); + } + } + + toJSON() { + return { replace: this.replace?.toJSON() }; + } + toString() { + return JSON.stringify(this.toJSON()); + } +} diff --git a/src/input/dataSchema.ts b/src/v2/product/extraction/params/dataSchemaField.ts similarity index 59% rename from src/input/dataSchema.ts rename to src/v2/product/extraction/params/dataSchemaField.ts index 5cfd35154..3b53fcd01 100644 --- a/src/input/dataSchema.ts +++ b/src/v2/product/extraction/params/dataSchemaField.ts @@ -1,48 +1,39 @@ -import { StringDict } from "../parsing/common"; -import { MindeeError } from "../errors"; +import { StringDict } from "@/parsing/index.js"; export class DataSchemaField { /** * Display name for the field, also impacts inference results. */ public title: string; - /** * Name of the field in the data schema. */ public name: string; - /** * Whether this field can contain multiple values. */ public isArray: boolean; - /** * Data type of the field. */ public type: string; - /** * Allowed values when type is `classification`. Leave empty for other types. */ public classificationValues?: Array; - /** * Whether to remove duplicate values in the array. * Only applicable if `is_array` is True. */ public uniqueValues?: boolean; - /** * Detailed description of what this field represents. */ public description?: string; - /** * Optional extraction guidelines. */ public guidelines?: string; - /** * Subfields when type is `nested_object`. Leave empty for other types. 
*/ @@ -85,58 +76,3 @@ export class DataSchemaField { return JSON.stringify(this.toJSON()); } } - -/** - * The structure to completely replace the data schema of the model. - */ -export class DataSchemaReplace { - /** - * List of fields in the Data Schema. - */ - fields: Array; - - constructor(dataSchemaReplace: StringDict) { - if (!dataSchemaReplace || !dataSchemaReplace.fields ) { - throw new MindeeError("Invalid Data Schema provided."); - } - if (dataSchemaReplace["fields"].length === 0) { - throw new TypeError("Data Schema replacement fields cannot be empty."); - } - this.fields = dataSchemaReplace["fields"].map((field: StringDict) => (new DataSchemaField(field))); - } - - toJSON() { - return { fields: this.fields.map(e => e.toJSON()) }; - } - - toString() { - return JSON.stringify(this.toJSON()); - } -} - -/** - * Modify the Data Schema. - */ -export class DataSchema { - /** - * If set, completely replaces the data schema of the model. - */ - replace?: DataSchemaReplace; - - constructor(dataSchema: StringDict | string) { - if (typeof dataSchema === "string") { - this.replace = new DataSchemaReplace(JSON.parse(dataSchema)["replace"]); - } else if (dataSchema instanceof DataSchema) { - this.replace = dataSchema.replace; - } else { - this.replace = new DataSchemaReplace(dataSchema["replace"] as StringDict); - } - } - - toJSON() { - return { replace: this.replace?.toJSON() }; - } - toString() { - return JSON.stringify(this.toJSON()); - } -} diff --git a/src/v2/product/extraction/params/dataSchemaReplace.ts b/src/v2/product/extraction/params/dataSchemaReplace.ts new file mode 100644 index 000000000..b56b8564f --- /dev/null +++ b/src/v2/product/extraction/params/dataSchemaReplace.ts @@ -0,0 +1,31 @@ +import { StringDict } from "@/parsing/index.js"; +import { DataSchemaField } from "./dataSchemaField.js"; +import { MindeeError } from "@/errors/index.js"; + +/** + * The structure to completely replace the data schema of the model. 
+ */ +export class DataSchemaReplace { + /** + * List of fields in the Data Schema. + */ + fields: Array; + + constructor(dataSchemaReplace: StringDict) { + if (!dataSchemaReplace || !dataSchemaReplace.fields ) { + throw new MindeeError("Invalid Data Schema provided."); + } + if (dataSchemaReplace["fields"].length === 0) { + throw new TypeError("Data Schema replacement fields cannot be empty."); + } + this.fields = dataSchemaReplace["fields"].map((field: StringDict) => (new DataSchemaField(field))); + } + + toJSON() { + return { fields: this.fields.map(e => e.toJSON()) }; + } + + toString() { + return JSON.stringify(this.toJSON()); + } +} diff --git a/src/v2/product/extraction/params/extractionParameters.ts b/src/v2/product/extraction/params/extractionParameters.ts new file mode 100644 index 000000000..fe3b8f1c6 --- /dev/null +++ b/src/v2/product/extraction/params/extractionParameters.ts @@ -0,0 +1,106 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { DataSchema } from "./dataSchema.js"; +import { BaseParameters, BaseParametersConstructor } from "@/v2/client/baseParameters.js"; +import { logger } from "@/logger.js"; + +/** + * Parameters accepted by the asynchronous **inference** v2 endpoint. + * + * All fields are optional except `modelId`. + * + * @category ClientV2 + * @example + * const params = { + * modelId: "YOUR_MODEL_ID", + * rag: true, + * alias: "YOUR_ALIAS", + * webhookIds: ["YOUR_WEBHOOK_ID_1", "YOUR_WEBHOOK_ID_2"], + * pollingOptions: { + * initialDelaySec: 2, + * delaySec: 1.5, + * } + * }; + */ +export class ExtractionParameters extends BaseParameters { + /** + * Use Retrieval-Augmented Generation during inference. + */ + rag?: boolean; + /** + * Extract the entire text from the document as strings, and fill the `rawText` attribute. + */ + rawText?: boolean; + /** + * Calculate bounding box polygons for values, and fill the `locations` attribute of fields. 
+ */ + polygon?: boolean; + /** + * Calculate confidence scores for values, and fill the `confidence` attribute of fields. + * Useful for automation. + */ + confidence?: boolean; + /** + * Additional text context used by the model during inference. + * *Not recommended*, for specific use only. + */ + textContext?: string; + /** + * Dynamic changes to the data schema of the model for this inference. + * Not recommended, for specific use only. + */ + dataSchema?: DataSchema | StringDict | string; + + constructor(params: BaseParametersConstructor & { + rag?: boolean; + rawText?: boolean; + polygon?: boolean; + confidence?: boolean; + textContext?: string; + dataSchema?: DataSchema | StringDict | string; + }) { + super({ ...params }); + this.rag = params.rag; + this.rawText = params.rawText; + this.polygon = params.polygon; + this.confidence = params.confidence; + this.textContext = params.textContext; + + if (params.dataSchema !== undefined && params.dataSchema !== null) { + if (!(params.dataSchema instanceof DataSchema)){ + this.dataSchema = new DataSchema(params.dataSchema); + } else { + this.dataSchema = params.dataSchema; + } + } + logger.debug("Extraction parameters initialized."); + } + + getFormData(): FormData { + const form = new FormData(); + + form.set("model_id", this.modelId); + + if (this.rag !== undefined && this.rag !== null) { + form.set("rag", this.rag.toString()); + } + if (this.polygon !== undefined && this.polygon !== null) { + form.set("polygon", this.polygon.toString().toLowerCase()); + } + if (this.confidence !== undefined && this.confidence !== null) { + form.set("confidence", this.confidence.toString().toLowerCase()); + } + if (this.rawText !== undefined && this.rawText !== null) { + form.set("raw_text", this.rawText.toString().toLowerCase()); + } + if (this.textContext !== undefined && this.textContext !== null) { + form.set("text_context", this.textContext); + } + if (this.dataSchema !== undefined && this.dataSchema !== null) { + 
form.set("data_schema", this.dataSchema.toString()); + } + if (this.webhookIds && this.webhookIds.length > 0) { + form.set("webhook_ids", this.webhookIds.join(",")); + } + return form; + } +} diff --git a/src/v2/product/extraction/params/index.ts b/src/v2/product/extraction/params/index.ts new file mode 100644 index 000000000..f79e739ff --- /dev/null +++ b/src/v2/product/extraction/params/index.ts @@ -0,0 +1,4 @@ +export { ExtractionParameters } from "./extractionParameters.js"; +export { DataSchema } from "./dataSchema.js"; +export { DataSchemaReplace } from "./dataSchemaReplace.js"; +export { DataSchemaField } from "./dataSchemaField.js"; diff --git a/src/v2/product/index.ts b/src/v2/product/index.ts new file mode 100644 index 000000000..fe65c3713 --- /dev/null +++ b/src/v2/product/index.ts @@ -0,0 +1,14 @@ +export { Classification, ClassificationResponse } from "./classification/index.js"; +export * as classification from "./classification/index.js"; + +export { Crop, CropResponse } from "./crop/index.js"; +export * as crop from "./crop/index.js"; + +export { Extraction, ExtractionResponse } from "./extraction/index.js"; +export * as extraction from "./extraction/index.js"; + +export { Ocr, OcrResponse } from "./ocr/index.js"; +export * as ocr from "./ocr/index.js"; + +export { Split, SplitResponse } from "./split/index.js"; +export * as split from "./split/index.js"; diff --git a/src/v2/product/ocr/index.ts b/src/v2/product/ocr/index.ts new file mode 100644 index 000000000..781483dbd --- /dev/null +++ b/src/v2/product/ocr/index.ts @@ -0,0 +1,7 @@ +export { Ocr } from "./ocr.js"; +export { OcrParameters } from "./params/index.js"; +export { OcrResponse } from "./ocrResponse.js"; +export { OcrInference } from "./ocrInference.js"; +export { OcrResult } from "./ocrResult.js"; +export { OcrPage } from "./ocrPage.js"; +export { OcrWord } from "./ocrWord.js"; diff --git a/src/v2/product/ocr/ocr.ts b/src/v2/product/ocr/ocr.ts new file mode 100644 index 
000000000..adce9d1c9 --- /dev/null +++ b/src/v2/product/ocr/ocr.ts @@ -0,0 +1,15 @@ +import { OcrResponse } from "./ocrResponse.js"; +import { OcrParameters } from "./params/index.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; + +export class Ocr extends BaseProduct { + static get parametersClass() { + return OcrParameters; + } + static get responseClass() { + return OcrResponse; + } + static get slug() { + return "ocr"; + } +} diff --git a/src/v2/product/ocr/ocrInference.ts b/src/v2/product/ocr/ocrInference.ts new file mode 100644 index 000000000..284461f8b --- /dev/null +++ b/src/v2/product/ocr/ocrInference.ts @@ -0,0 +1,25 @@ +import { StringDict } from "@/parsing/index.js"; +import { BaseInference } from "@/v2/parsing/inference/baseInference.js"; +import { OcrResult } from "@/v2/product/ocr/ocrResult.js"; + +export class OcrInference extends BaseInference { + /** + * Result of an OCR inference. + */ + result: OcrResult; + + constructor(serverResponse: StringDict) { + super(serverResponse); + this.result = new OcrResult(serverResponse["result"]); + } + + toString(): string { + return ( + "Inference\n" + + "#########\n" + + this.model.toString() + "\n" + + this.file.toString() + "\n" + + this.result.toString() + "\n" + ); + } +} diff --git a/src/v2/product/ocr/ocrPage.ts b/src/v2/product/ocr/ocrPage.ts new file mode 100644 index 000000000..b531025b1 --- /dev/null +++ b/src/v2/product/ocr/ocrPage.ts @@ -0,0 +1,29 @@ +import { OcrWord } from "./ocrWord.js"; +import { StringDict } from "@/parsing/index.js"; + +export class OcrPage { + /** + * List of words extracted from the document page. + */ + words: OcrWord[]; + + /** + * Full text content extracted from the document page. 
+ */ + content: string; + + constructor(serverResponse: StringDict) { + this.words = (serverResponse["words"] as any[]).map(word => new OcrWord(word)); + this.content = serverResponse["content"] as string; + } + + toString(): string { + let ocrWords = "\n"; + if (this.words.length > 0) { + ocrWords += this.words.map(word => word.toString()).join("\n\n"); + } + let outStr = `OCR Words\n---------${ocrWords}`; + outStr += `\n\n:Content: ${this.content}`; + return outStr; + } +} diff --git a/src/v2/product/ocr/ocrResponse.ts b/src/v2/product/ocr/ocrResponse.ts new file mode 100644 index 000000000..2e6ddb3dd --- /dev/null +++ b/src/v2/product/ocr/ocrResponse.ts @@ -0,0 +1,18 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { OcrInference } from "./ocrInference.js"; +import { BaseResponse } from "@/v2/parsing/index.js"; + +export class OcrResponse extends BaseResponse { + /** + * Response for an OCR utility inference. + */ + inference: OcrInference; + + /** + * @param serverResponse JSON response from the server. + */ + constructor(serverResponse: StringDict) { + super(serverResponse); + this.inference = new OcrInference(serverResponse["inference"]); + } +} diff --git a/src/v2/product/ocr/ocrResult.ts b/src/v2/product/ocr/ocrResult.ts new file mode 100644 index 000000000..4473a5f80 --- /dev/null +++ b/src/v2/product/ocr/ocrResult.ts @@ -0,0 +1,24 @@ +import { StringDict } from "@/parsing/index.js"; +import { OcrPage } from "./ocrPage.js"; + +/** + * OCR result info. + */ +export class OcrResult { + /** + * List of OCR results for each page in the document. 
+ */ + pages: OcrPage[]; + + constructor(serverResponse: StringDict) { + this.pages = serverResponse.pages.map((ocr: any) => new OcrPage(ocr)); + } + + toString(): string { + let pages = "\n"; + if (this.pages.length > 0) { + pages += this.pages.map(ocr => ocr.toString()).join("\n\n"); + } + return `Pages\n======${pages}`; + } +} diff --git a/src/v2/product/ocr/ocrWord.ts b/src/v2/product/ocr/ocrWord.ts new file mode 100644 index 000000000..faa264fa7 --- /dev/null +++ b/src/v2/product/ocr/ocrWord.ts @@ -0,0 +1,23 @@ +import { Polygon } from "@/geometry/index.js"; +import { StringDict } from "@/parsing/index.js"; + +export class OcrWord { + /** + * Text content of the word. + */ + content: string; + + /** + * Position information as a list of points in clockwise order. + */ + polygon: Polygon; + + constructor(serverResponse: StringDict) { + this.content = serverResponse["content"]; + this.polygon = new Polygon(...serverResponse["polygon"]); + } + + toString(): string { + return this.content; + } +} diff --git a/src/v2/product/ocr/params/index.ts b/src/v2/product/ocr/params/index.ts new file mode 100644 index 000000000..705ef889e --- /dev/null +++ b/src/v2/product/ocr/params/index.ts @@ -0,0 +1 @@ +export { OcrParameters } from "./ocrParameters.js"; diff --git a/src/v2/product/ocr/params/ocrParameters.ts b/src/v2/product/ocr/params/ocrParameters.ts new file mode 100644 index 000000000..94bbfbc0d --- /dev/null +++ b/src/v2/product/ocr/params/ocrParameters.ts @@ -0,0 +1,26 @@ +import { BaseParameters, BaseParametersConstructor } from "@/v2/client/baseParameters.js"; +import { logger } from "@/logger.js"; + +/** + * Parameters accepted by the asynchronous **inference** v2 endpoint. + * + * All fields are optional except `modelId`. 
+ * + * @category ClientV2 + * @example + * const params = { + * modelId: "YOUR_MODEL_ID", + * alias: "YOUR_ALIAS", + * webhookIds: ["YOUR_WEBHOOK_ID_1", "YOUR_WEBHOOK_ID_2"], + * pollingOptions: { + * initialDelaySec: 2, + * delaySec: 1.5, + * } + * }; + */ +export class OcrParameters extends BaseParameters { + constructor(params: BaseParametersConstructor & {}) { + super({ ...params }); + logger.debug("OCR parameters initialized."); + } +} diff --git a/src/v2/product/split/index.ts b/src/v2/product/split/index.ts new file mode 100644 index 000000000..d92687bc8 --- /dev/null +++ b/src/v2/product/split/index.ts @@ -0,0 +1,6 @@ +export { Split } from "./split.js"; +export { SplitParameters } from "./params/index.js"; +export { SplitResponse } from "./splitResponse.js"; +export { SplitInference } from "./splitInference.js"; +export { SplitRange } from "./splitRange.js"; +export { SplitResult } from "./splitResult.js"; diff --git a/src/v2/product/split/params/index.ts b/src/v2/product/split/params/index.ts new file mode 100644 index 000000000..964b656ad --- /dev/null +++ b/src/v2/product/split/params/index.ts @@ -0,0 +1 @@ +export { SplitParameters } from "./splitParameters.js"; diff --git a/src/v2/product/split/params/splitParameters.ts b/src/v2/product/split/params/splitParameters.ts new file mode 100644 index 000000000..71cc146e2 --- /dev/null +++ b/src/v2/product/split/params/splitParameters.ts @@ -0,0 +1,29 @@ +import { + BaseParameters, + BaseParametersConstructor, +} from "@/v2/client/baseParameters.js"; +import { logger } from "@/logger.js"; + +/** + * Parameters accepted by the asynchronous **inference** v2 endpoint. + * + * All fields are optional except `modelId`. 
+ * + * @category ClientV2 + * @example + * const params = { + * modelId: "YOUR_MODEL_ID", + * alias: "YOUR_ALIAS", + * webhookIds: ["YOUR_WEBHOOK_ID_1", "YOUR_WEBHOOK_ID_2"], + * pollingOptions: { + * initialDelaySec: 2, + * delaySec: 1.5, + * } + * }; + */ +export class SplitParameters extends BaseParameters { + constructor(params: BaseParametersConstructor & {}) { + super({ ...params }); + logger.debug("Split parameters initialized."); + } +} diff --git a/src/v2/product/split/split.ts b/src/v2/product/split/split.ts new file mode 100644 index 000000000..8933c863e --- /dev/null +++ b/src/v2/product/split/split.ts @@ -0,0 +1,15 @@ +import { SplitResponse } from "./splitResponse.js"; +import { SplitParameters } from "./params/index.js"; +import { BaseProduct } from "@/v2/product/baseProduct.js"; + +export class Split extends BaseProduct { + static get parametersClass() { + return SplitParameters; + } + static get responseClass() { + return SplitResponse; + } + static get slug() { + return "split"; + } +} diff --git a/src/v2/product/split/splitInference.ts b/src/v2/product/split/splitInference.ts new file mode 100644 index 000000000..eefcbb29f --- /dev/null +++ b/src/v2/product/split/splitInference.ts @@ -0,0 +1,22 @@ +import { StringDict } from "@/parsing/index.js"; +import { BaseInference } from "@/v2/parsing/inference/baseInference.js"; +import { SplitResult } from "./splitResult.js"; + +export class SplitInference extends BaseInference { + /** + * Result of a split inference. 
+ */ + result: SplitResult; + + constructor(serverResponse: StringDict) { + super(serverResponse); + this.result = new SplitResult(serverResponse["result"]); + } + + toString(): string { + return ( + super.toString() + + this.result.toString() + "\n" + ); + } +} diff --git a/src/v2/product/split/splitRange.ts b/src/v2/product/split/splitRange.ts new file mode 100644 index 000000000..0974b4352 --- /dev/null +++ b/src/v2/product/split/splitRange.ts @@ -0,0 +1,27 @@ +import { StringDict } from "@/parsing/index.js"; + +/** + * Split inference result. + */ +export class SplitRange { + /** + * 0-based page indexes, where the first integer indicates the start page and the + * second integer indicates the end page. + */ + pageRange: number[]; + + /** + * The document type, as identified on given classification values. + */ + documentType: string; + + constructor(serverResponse: StringDict) { + this.pageRange = serverResponse["page_range"]; + this.documentType = serverResponse["document_type"]; + } + + toString(): string { + const pageRange = this.pageRange.join(","); + return `* :Page Range: ${pageRange}\n :Document Type: ${this.documentType}`; + } +} diff --git a/src/v2/product/split/splitResponse.ts b/src/v2/product/split/splitResponse.ts new file mode 100644 index 000000000..891cb30de --- /dev/null +++ b/src/v2/product/split/splitResponse.ts @@ -0,0 +1,18 @@ +import { StringDict } from "@/parsing/stringDict.js"; +import { SplitInference } from "./splitInference.js"; +import { BaseResponse } from "@/v2/parsing/index.js"; + +export class SplitResponse extends BaseResponse { + /** + * Response for an OCR utility inference. + */ + inference: SplitInference; + + /** + * @param serverResponse JSON response from the server. 
+ */ + constructor(serverResponse: StringDict) { + super(serverResponse); + this.inference = new SplitInference(serverResponse["inference"]); + } +} diff --git a/src/v2/product/split/splitResult.ts b/src/v2/product/split/splitResult.ts new file mode 100644 index 000000000..af3b4ec76 --- /dev/null +++ b/src/v2/product/split/splitResult.ts @@ -0,0 +1,24 @@ +import { SplitRange } from "./splitRange.js"; +import { StringDict } from "@/parsing/index.js"; + +/** + * Split result info. + */ +export class SplitResult { + /** + * List of split ranges. + */ + splits: SplitRange[]; + + constructor(rawResponse: StringDict) { + this.splits = rawResponse.splits.map((split: StringDict) => new SplitRange(split)); + } + + toString(): string { + let splits = "\n"; + if (this.splits.length > 0) { + splits += this.splits.map(split => split.toString()).join("\n\n"); + } + return `Splits\n======${splits}`; + } +} diff --git a/tests/data b/tests/data index 359d5e88e..37f2e3de4 160000 --- a/tests/data +++ b/tests/data @@ -1 +1 @@ -Subproject commit 359d5e88e33f95f44ac9e5d7324ff6825dfec667 +Subproject commit 37f2e3de48918e3b1a0e4604a9292aaeae05c637 diff --git a/tests/dependency/missingDependencies.integration.ts b/tests/dependency/missingDependencies.integration.ts new file mode 100644 index 000000000..9494d461c --- /dev/null +++ b/tests/dependency/missingDependencies.integration.ts @@ -0,0 +1,40 @@ +import * as mindee from "@/index.js"; +import { InvoiceSplitterV1 } from "@/v1/product/index.js"; +import { expect } from "chai"; +import path from "path"; +import { V1_PRODUCT_PATH } from "../index.js"; + +describe("Light Environment Sanity Check #omitOptionalDeps", function () { + let client: mindee.v1.Client; + + beforeEach(() => { + client = new mindee.v1.Client(); + }); + + it("should NOT be able to split invoices", async function () { + try { + const sample = new mindee.PathInput({ + inputPath: path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") + }); + + const response = 
await client.enqueueAndParse( + mindee.v1.product.InvoiceSplitterV1, sample + ); + const invoiceSplitterInference = response.document?.inference; + expect(invoiceSplitterInference).to.be.an.instanceof(InvoiceSplitterV1); + await mindee.v1.extraction.extractInvoices( + sample, + invoiceSplitterInference as InvoiceSplitterV1 + ); + } catch (error: any) { + const isModuleNotFound = error.code === "ERR_MODULE_NOT_FOUND"; + const isBinaryMissing = error.message && error.message.includes("Could not load the \"@cantoo/pdf-lib\" module"); + const isOptionalDependencyMissing = error.message && + error.message.includes("requires the optional dependency '@cantoo/pdf-lib'"); + + if (!isModuleNotFound && !isBinaryMissing && !isOptionalDependencyMissing) { + throw error; + } + } + }).timeout(60000); +}); diff --git a/tests/dependency/missingDependencies.spec.ts b/tests/dependency/missingDependencies.spec.ts new file mode 100644 index 000000000..cfc135a20 --- /dev/null +++ b/tests/dependency/missingDependencies.spec.ts @@ -0,0 +1,39 @@ +import { expect } from "chai"; + +describe("Light Environment Sanity Check #omitOptionalDeps", function () { + + it("should NOT have sharp installed", async function () { + try { + const moduleName = "sharp"; + await import(moduleName); + expect.fail("sharp should not be installed in this environment, but it was found!"); + } catch (error: any) { + const isModuleNotFound = error.code === "ERR_MODULE_NOT_FOUND"; + const isSharpBinaryMissing = error.message && error.message.includes("Could not load the \"sharp\" module"); + + if (!isModuleNotFound && !isSharpBinaryMissing) { + throw error; + } + } + }); + + it("should NOT have pdf.js-extract installed", async function () { + try { + const moduleName = "pdf.js-extract"; + await import(moduleName); + expect.fail("pdf.js-extract should not be installed, but it was found!"); + } catch (error: any) { + expect(error.code).to.equal("ERR_MODULE_NOT_FOUND"); + } + }); + + it("should NOT have @cantoo/pdf-lib 
installed", async function () { + try { + const moduleName = "@cantoo/pdf-lib"; + await import(moduleName); + expect.fail("@cantoo/pdf-lib should not be installed, but it was found!"); + } catch (error: any) { + expect(error.code).to.equal("ERR_MODULE_NOT_FOUND"); + } + }); +}); diff --git a/tests/geometry.spec.ts b/tests/geometry.spec.ts index 1a25eff80..2d85e5fea 100644 --- a/tests/geometry.spec.ts +++ b/tests/geometry.spec.ts @@ -1,4 +1,4 @@ -import * as geometry from "../src/geometry"; +import * as geometry from "@/geometry/index.js"; import { expect } from "chai"; describe("Geometry functions", () => { diff --git a/tests/imageOperations/multiReceiptsExtractor.spec.ts b/tests/imageOperations/multiReceiptsExtractor.spec.ts deleted file mode 100644 index ea8c65b65..000000000 --- a/tests/imageOperations/multiReceiptsExtractor.spec.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { expect } from "chai"; -import { promises as fs } from "fs"; -import path from "path"; -import { MultiReceiptsDetectorV1 } from "../../src/product"; -import { extractReceipts } from "../../src/imageOperations"; -import { PathInput } from "../../src"; -import { V1_PRODUCT_PATH } from "../index"; - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/response_v1/complete.json"), - fileSample: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/default_sample.jpg"), - completeMultiPage: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/response_v1/multipage_sample.json"), - multiPageSample: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/multipage_sample.pdf"), -}; - -describe("A single-page multi-receipts document", () => { - it("should be split properly.", async () => { - const jsonDataNA = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonDataNA.toString()); - const doc = new MultiReceiptsDetectorV1(response.document.inference); - const inputSample = new PathInput({ inputPath: dataPath.fileSample }); - await 
inputSample.init(); - const extractedReceipts = await extractReceipts(inputSample, doc); - expect(extractedReceipts.length).to.be.equals(6); - for (let i = 0; i < extractedReceipts.length; i++) { - expect(extractedReceipts[i].buffer).to.be.not.null; - expect(extractedReceipts[i].pageId).to.be.equals(0); - expect(extractedReceipts[i].receiptId).to.be.equals(i); - } - }); -}); -describe("A multi-page multi-receipts document", () => { - it("should be split properly.", async () => { - const jsonDataNA = await fs.readFile(path.resolve(dataPath.completeMultiPage)); - const response = JSON.parse(jsonDataNA.toString()); - const doc = new MultiReceiptsDetectorV1(response.document.inference); - const inputSample = new PathInput({ inputPath: dataPath.multiPageSample }); - await inputSample.init(); - const extractedReceipts = await extractReceipts(inputSample, doc); - expect(extractedReceipts.length).to.be.equals(5); - - expect(extractedReceipts[0].buffer).to.be.not.null; - expect(extractedReceipts[0].pageId).to.be.equals(0); - expect(extractedReceipts[0].receiptId).to.be.equals(0); - - expect(extractedReceipts[1].buffer).to.be.not.null; - expect(extractedReceipts[1].pageId).to.be.equals(0); - expect(extractedReceipts[1].receiptId).to.be.equals(1); - - expect(extractedReceipts[2].buffer).to.be.not.null; - expect(extractedReceipts[2].pageId).to.be.equals(0); - expect(extractedReceipts[2].receiptId).to.be.equals(2); - - expect(extractedReceipts[3].buffer).to.be.not.null; - expect(extractedReceipts[3].pageId).to.be.equals(1); - expect(extractedReceipts[3].receiptId).to.be.equals(0); - - expect(extractedReceipts[4].buffer).to.be.not.null; - expect(extractedReceipts[4].pageId).to.be.equals(1); - expect(extractedReceipts[4].receiptId).to.be.equals(1); - }); -}); diff --git a/tests/index.ts b/tests/index.ts index 20f6f125c..2d161c7d7 100644 --- a/tests/index.ts +++ b/tests/index.ts @@ -1,6 +1,9 @@ -import path from "node:path"; +import { fileURLToPath } from "url"; +import { dirname 
} from "path"; +import path from "path"; -export const RESOURCE_PATH = path.join(__dirname, "data"); +const currentDirName = dirname(fileURLToPath(import.meta.url)); +export const RESOURCE_PATH = path.join(currentDirName, "data"); export const V1_RESOURCE_PATH = path.join(RESOURCE_PATH, "v1"); export const V1_PRODUCT_PATH = path.join(V1_RESOURCE_PATH, "products"); diff --git a/tests/input/compression.spec.ts b/tests/input/compression.spec.ts new file mode 100644 index 000000000..a08f134c0 --- /dev/null +++ b/tests/input/compression.spec.ts @@ -0,0 +1,223 @@ +import { + PathInput, +} from "@/input/index.js"; +import * as fs from "fs"; +import * as path from "path"; +import { expect } from "chai"; +import { compressImage } from "@/image/index.js"; +import { compressPdf } from "@/pdf/index.js"; +import { extractTextFromPdf } from "@/pdf/pdfUtils.js"; +import { logger } from "@/logger.js"; +import { RESOURCE_PATH, V1_PRODUCT_PATH } from "../index.js"; + +describe("Input Sources - compression and resize #includeOptionalDeps", () => { + const outputPath = path.join(RESOURCE_PATH, "output"); + + before(async () => { + await fs.promises.mkdir(outputPath, { recursive: true }); + }); + + it("Image Quality Compress From Input Source", async () => { + const receiptInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); + await receiptInput.compress(40); + await fs.promises.writeFile(path.join(outputPath, "compress_indirect.jpg"), receiptInput.fileObject); + + const initialFileStats = await fs.promises.stat(path.join(RESOURCE_PATH, "file_types/receipt.jpg")); + const renderedFileStats = await fs.promises.stat(path.join(outputPath, "compress_indirect.jpg")); + expect(renderedFileStats.size).to.be.lessThan(initialFileStats.size); + }); + + it("Image Quality Compresses From Compressor", async () => { + const receiptInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); + await receiptInput.init(); + const 
compresses = [ + await compressImage(receiptInput.fileObject, 100), + await compressImage(receiptInput.fileObject), + await compressImage(receiptInput.fileObject, 50), + await compressImage(receiptInput.fileObject, 10), + await compressImage(receiptInput.fileObject, 1) + ]; + + const fileNames = ["compress100.jpg", "compress75.jpg", "compress50.jpg", "compress10.jpg", "compress1.jpg"]; + for (let i = 0; i < compresses.length; i++) { + await fs.promises.writeFile(path.join(outputPath, fileNames[i]), compresses[i]); + } + + const initialFileStats = await fs.promises.stat(path.join(RESOURCE_PATH, "file_types/receipt.jpg")); + const renderedFileStats = await Promise.all( + fileNames.map(fileName => fs.promises.stat(path.join(outputPath, fileName))) + ); + + expect(initialFileStats.size).to.be.lessThan(renderedFileStats[0].size); + expect(initialFileStats.size).to.be.lessThan(renderedFileStats[1].size); + expect(renderedFileStats[1].size).to.be.greaterThan(renderedFileStats[2].size); + expect(renderedFileStats[2].size).to.be.greaterThan(renderedFileStats[3].size); + expect(renderedFileStats[3].size).to.be.greaterThan(renderedFileStats[4].size); + }); + + it("Image Resize From InputSource", async () => { + const imageResizeInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); + await imageResizeInput.init(); + + await imageResizeInput.compress(75, 250, 1000); + await fs.promises.writeFile(path.join(outputPath, "resize_indirect.jpg"), imageResizeInput.fileObject); + + const initialFileStats = await fs.promises.stat(path.join(RESOURCE_PATH, "file_types/receipt.jpg")); + const renderedFileStats = await fs.promises.stat(path.join(outputPath, "resize_indirect.jpg")); + expect(renderedFileStats.size).to.be.lessThan(initialFileStats.size); + const sharp = await import("sharp"); + const metadata = await sharp.default(imageResizeInput.fileObject).metadata(); + expect(metadata.width).to.equal(250); + expect(metadata.height).to.equal(333); + }); 
+ + it("Image Resize From Compressor", async () => { + const imageResizeInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); + await imageResizeInput.init(); + + const resizes = [ + await compressImage(imageResizeInput.fileObject, 75, 500), + await compressImage(imageResizeInput.fileObject, 75, 250, 500), + await compressImage(imageResizeInput.fileObject, 75, 500, 250), + await compressImage(imageResizeInput.fileObject, 75, null, 250) + ]; + + const fileNames = ["resize500xnull.jpg", "resize250x500.jpg", "resize500x250.jpg", "resizenullx250.jpg"]; + for (let i = 0; i < resizes.length; i++) { + await fs.promises.writeFile(path.join(outputPath, fileNames[i]), resizes[i]); + } + + const initialFileStats = await fs.promises.stat(path.join(RESOURCE_PATH, "file_types/receipt.jpg")); + const renderedFileStats = await Promise.all( + fileNames.map(fileName => fs.promises.stat(path.join(outputPath, fileName))) + ); + + expect(initialFileStats.size).to.be.greaterThan(renderedFileStats[0].size); + expect(renderedFileStats[0].size).to.be.greaterThan(renderedFileStats[1].size); + expect(renderedFileStats[1].size).to.be.greaterThan(renderedFileStats[2].size); + expect(renderedFileStats[2].size).to.be.equals(renderedFileStats[3].size); + }); + + + it("PDF Input Has Text", async () => { + const hasSourceTextPath = path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf"); + const hasNoSourceTextPath = path.join(RESOURCE_PATH, "file_types/pdf/blank_1.pdf"); + const hasNoSourceTextSinceItsImagePath = path.join(RESOURCE_PATH, "file_types/receipt.jpg"); + + const hasSourceTextInput = new PathInput({ inputPath: hasSourceTextPath }); + const hasNoSourceTextInput = new PathInput({ inputPath: hasNoSourceTextPath }); + const hasNoSourceTextSinceItsImageInput = new PathInput({ inputPath: hasNoSourceTextSinceItsImagePath }); + + expect(await hasSourceTextInput.hasSourceText()).to.be.true; + expect(await hasNoSourceTextInput.hasSourceText()).to.be.false; + 
expect(await hasNoSourceTextSinceItsImageInput.hasSourceText()).to.be.false; + }); + + it("PDF Compress From InputSource", async () => { + const pdfResizeInput = new PathInput( + { inputPath: path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") } + ); + await pdfResizeInput.init(); + + const compressedPdf = await compressPdf( + pdfResizeInput.fileObject, 75, true + ); + await fs.promises.writeFile(path.join(outputPath, "resize_indirect.pdf"), compressedPdf); + + const initialFileStats = await fs.promises.stat( + path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") + ); + const renderedFileStats = await fs.promises.stat( + path.join(outputPath, "resize_indirect.pdf") + ); + expect(renderedFileStats.size).to.be.lessThan(initialFileStats.size); + }).timeout(10000); + + it("PDF Compress From Compressor", async () => { + const pdfResizeInput = new PathInput( + { inputPath: path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") } + ); + await pdfResizeInput.init(); + + const resizes = [ + await compressPdf(pdfResizeInput.fileObject, 85), + await compressPdf(pdfResizeInput.fileObject, 75), + await compressPdf(pdfResizeInput.fileObject, 50), + await compressPdf(pdfResizeInput.fileObject, 10) + ]; + + const fileNames = ["compress85.pdf", "compress75.pdf", "compress50.pdf", "compress10.pdf"]; + for (let i = 0; i < resizes.length; i++) { + await fs.promises.writeFile(path.join(outputPath, fileNames[i]), resizes[i]); + } + + const initialFileStats = await fs.promises.stat( + path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") + ); + const renderedFileStats = await Promise.all( + fileNames.map(fileName => fs.promises.stat(path.join(outputPath, fileName))) + ); + + expect(initialFileStats.size).to.be.greaterThan(renderedFileStats[0].size); + expect(renderedFileStats[0].size).to.be.greaterThan(renderedFileStats[1].size); + expect(renderedFileStats[1].size).to.be.greaterThan(renderedFileStats[2].size); + 
expect(renderedFileStats[2].size).to.be.greaterThan(renderedFileStats[3].size); + }).timeout(20000); + + it("PDF Compress With Text Keeps Text", async () => { + const initialWithText = new PathInput( + { inputPath: path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf") } + ); + await initialWithText.init(); + + const compressedWithText = await compressPdf( + initialWithText.fileObject, 100, true, false + ); + const originalText = (await extractTextFromPdf(initialWithText.fileObject)).getConcatenatedText(); + const compressedText = (await extractTextFromPdf(compressedWithText)).getConcatenatedText(); + + expect(compressedText).to.equal(originalText); + }).timeout(60000); + + it("PDF Compress With Text Does Not Compress", async () => { + const initialWithText = new PathInput( + { inputPath: path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf") } + ); + await initialWithText.init(); + + const compressedWithText = await compressPdf(initialWithText.fileObject, 50); + + expect(compressedWithText).to.deep.equal(initialWithText.fileObject); + }).timeout(10000); + + after(async function () { + const createdFiles: string[] = [ + "compress10.pdf", + "compress50.pdf", + "compress75.pdf", + "compress85.pdf", + "resize_indirect.pdf", + "compress1.jpg", + "compress10.jpg", + "compress50.jpg", + "compress75.jpg", + "compress100.jpg", + "compress_indirect.jpg", + "resize250x500.jpg", + "resize500x250.jpg", + "resize500xnull.jpg", + "resize_indirect.jpg", + "resizenullx250.jpg", + ]; + + for (const filePath of createdFiles) { + try { + await fs.promises.unlink(path.join(RESOURCE_PATH, "output", filePath)); + } catch (error) { + if ((error as NodeJS.ErrnoException).code !== "ENOENT") { + logger.warn(`Could not delete file '${filePath}': ${(error as Error).message}`); + } + } + } + }); +}); diff --git a/tests/input/pageOperations.spec.ts b/tests/input/pageOperations.spec.ts index d8839a071..790843660 100644 --- a/tests/input/pageOperations.spec.ts +++ 
b/tests/input/pageOperations.spec.ts @@ -2,13 +2,13 @@ import { PathInput, PageOptionsOperation, INPUT_TYPE_PATH, -} from "../../src/input"; +} from "@/input/index.js"; import * as fs from "fs"; import * as path from "path"; import { expect } from "chai"; -import { RESOURCE_PATH } from "../index"; +import { RESOURCE_PATH } from "../index.js"; -describe("High level multi-page operations", () => { +describe("Input Sources - high level multi-page operations #includeOptionalDeps", () => { it("should cut a PDF", async () => { const input = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf"), diff --git a/tests/input/sources.spec.ts b/tests/input/sources.spec.ts index 39458629a..672e8d3af 100644 --- a/tests/input/sources.spec.ts +++ b/tests/input/sources.spec.ts @@ -1,3 +1,4 @@ +import { Readable } from "stream"; import { Base64Input, BufferInput, @@ -9,25 +10,16 @@ import { INPUT_TYPE_STREAM, PathInput, StreamInput, -} from "../../src/input"; +} from "@/input/index.js"; import * as fs from "fs"; import * as path from "path"; import { expect } from "chai"; -import sharp from "sharp"; import { Buffer } from "node:buffer"; -import { compressImage } from "../../src/imageOperations"; -import { compressPdf } from "../../src/pdf"; -import { extractTextFromPdf } from "../../src/pdf/pdfUtils"; -import { logger } from "../../src/logger"; -import { RESOURCE_PATH, V1_PRODUCT_PATH } from "../index"; -import { Readable } from "stream"; +import { MindeeInputSourceError } from "@/errors/index.js"; +import { RESOURCE_PATH, V1_PRODUCT_PATH } from "../index.js"; -describe("Test different types of input", () => { - const outputPath = path.join(RESOURCE_PATH, "output"); +describe("Input Sources: - load different types of input", () => { - before(async () => { - await fs.promises.mkdir(outputPath, { recursive: true }); - }); it("should accept base64 inputs", async () => { const b64Input = await fs.promises.readFile( path.join(RESOURCE_PATH, 
"file_types/receipt.txt") @@ -155,7 +147,8 @@ describe("Test different types of input", () => { await streamInput.init(); expect.fail("Should have thrown an error"); } catch (e: any) { - expect(e.toString()).to.eq("Error: Error converting stream - Error: aborted"); + expect(e).to.be.instanceOf(MindeeInputSourceError); + expect(e.toString()).to.eq("MindeeInputSourceError: Error converting stream - Error: aborted"); } }); @@ -174,7 +167,8 @@ describe("Test different types of input", () => { await streamInput.init(); expect.fail("Should have thrown an error"); } catch (e: any) { - expect(e.toString()).to.equal("MindeeError: Stream is already closed"); + expect(e).to.be.instanceOf(MindeeInputSourceError); + expect(e.toString()).to.equal("MindeeInputSourceError: Stream is already closed"); } }); @@ -200,7 +194,8 @@ describe("Test different types of input", () => { try { await streamInput.init(); } catch (e: any) { - expect(e.toString()).to.eq("Error: Error converting stream - Error: aborted"); + expect(e).to.be.instanceOf(MindeeInputSourceError); + expect(e.toString()).to.eq("MindeeInputSourceError: Error converting stream - Error: aborted"); } }); @@ -239,211 +234,10 @@ describe("Test different types of input", () => { expect(inputSource.inputType).to.equals(INPUT_TYPE_BUFFER); expect(inputSource.filename).to.equals(filename); expect(inputSource.isPdf()).to.be.true; - expect(await inputSource.getPageCount()).to.equals(10); + it("#includeOptionalDeps", async () => { + expect(await inputSource.getPageCount()).to.equals(10); + }); expect(inputSource.fileObject).to.be.instanceOf(Buffer); }); - - it("Image Quality Compress From Input Source", async () => { - const receiptInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); - await receiptInput.compress(40); - await fs.promises.writeFile(path.join(outputPath, "compress_indirect.jpg"), receiptInput.fileObject); - - const initialFileStats = await fs.promises.stat(path.join(RESOURCE_PATH, 
"file_types/receipt.jpg")); - const renderedFileStats = await fs.promises.stat(path.join(outputPath, "compress_indirect.jpg")); - expect(renderedFileStats.size).to.be.lessThan(initialFileStats.size); - }); - - it("Image Quality Compresses From Compressor", async () => { - const receiptInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); - await receiptInput.init(); - const compresses = [ - await compressImage(receiptInput.fileObject, 100), - await compressImage(receiptInput.fileObject), - await compressImage(receiptInput.fileObject, 50), - await compressImage(receiptInput.fileObject, 10), - await compressImage(receiptInput.fileObject, 1) - ]; - - const fileNames = ["compress100.jpg", "compress75.jpg", "compress50.jpg", "compress10.jpg", "compress1.jpg"]; - for (let i = 0; i < compresses.length; i++) { - await fs.promises.writeFile(path.join(outputPath, fileNames[i]), compresses[i]); - } - - const initialFileStats = await fs.promises.stat(path.join(RESOURCE_PATH, "file_types/receipt.jpg")); - const renderedFileStats = await Promise.all( - fileNames.map(fileName => fs.promises.stat(path.join(outputPath, fileName))) - ); - - expect(initialFileStats.size).to.be.lessThan(renderedFileStats[0].size); - expect(initialFileStats.size).to.be.lessThan(renderedFileStats[1].size); - expect(renderedFileStats[1].size).to.be.greaterThan(renderedFileStats[2].size); - expect(renderedFileStats[2].size).to.be.greaterThan(renderedFileStats[3].size); - expect(renderedFileStats[3].size).to.be.greaterThan(renderedFileStats[4].size); - }); - - it("Image Resize From InputSource", async () => { - const imageResizeInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); - await imageResizeInput.init(); - - await imageResizeInput.compress(75, 250, 1000); - await fs.promises.writeFile(path.join(outputPath, "resize_indirect.jpg"), imageResizeInput.fileObject); - - const initialFileStats = await 
fs.promises.stat(path.join(RESOURCE_PATH, "file_types/receipt.jpg")); - const renderedFileStats = await fs.promises.stat(path.join(outputPath, "resize_indirect.jpg")); - expect(renderedFileStats.size).to.be.lessThan(initialFileStats.size); - const metadata = await sharp(imageResizeInput.fileObject).metadata(); - expect(metadata.width).to.equal(250); - expect(metadata.height).to.equal(333); - }); - - it("Image Resize From Compressor", async () => { - const imageResizeInput = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/receipt.jpg") }); - await imageResizeInput.init(); - - const resizes = [ - await compressImage(imageResizeInput.fileObject, 75, 500), - await compressImage(imageResizeInput.fileObject, 75, 250, 500), - await compressImage(imageResizeInput.fileObject, 75, 500, 250), - await compressImage(imageResizeInput.fileObject, 75, null, 250) - ]; - - const fileNames = ["resize500xnull.jpg", "resize250x500.jpg", "resize500x250.jpg", "resizenullx250.jpg"]; - for (let i = 0; i < resizes.length; i++) { - await fs.promises.writeFile(path.join(outputPath, fileNames[i]), resizes[i]); - } - - const initialFileStats = await fs.promises.stat(path.join(RESOURCE_PATH, "file_types/receipt.jpg")); - const renderedFileStats = await Promise.all( - fileNames.map(fileName => fs.promises.stat(path.join(outputPath, fileName))) - ); - - expect(initialFileStats.size).to.be.greaterThan(renderedFileStats[0].size); - expect(renderedFileStats[0].size).to.be.greaterThan(renderedFileStats[1].size); - expect(renderedFileStats[1].size).to.be.greaterThan(renderedFileStats[2].size); - expect(renderedFileStats[2].size).to.be.equals(renderedFileStats[3].size); - }); - - - it("PDF Input Has Text", async () => { - const hasSourceTextPath = path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf"); - const hasNoSourceTextPath = path.join(RESOURCE_PATH, "file_types/pdf/blank_1.pdf"); - const hasNoSourceTextSinceItsImagePath = path.join(RESOURCE_PATH, "file_types/receipt.jpg"); - - 
const hasSourceTextInput = new PathInput({ inputPath: hasSourceTextPath }); - const hasNoSourceTextInput = new PathInput({ inputPath: hasNoSourceTextPath }); - const hasNoSourceTextSinceItsImageInput = new PathInput({ inputPath: hasNoSourceTextSinceItsImagePath }); - - expect(await hasSourceTextInput.hasSourceText()).to.be.true; - expect(await hasNoSourceTextInput.hasSourceText()).to.be.false; - expect(await hasNoSourceTextSinceItsImageInput.hasSourceText()).to.be.false; - }); - - it("PDF Compress From InputSource", async () => { - const pdfResizeInput = new PathInput( - { inputPath: path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") } - ); - await pdfResizeInput.init(); - - const compressedPdf = await compressPdf( - pdfResizeInput.fileObject, 75, true - ); - await fs.promises.writeFile(path.join(outputPath, "resize_indirect.pdf"), compressedPdf); - - const initialFileStats = await fs.promises.stat( - path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") - ); - const renderedFileStats = await fs.promises.stat( - path.join(outputPath, "resize_indirect.pdf") - ); - expect(renderedFileStats.size).to.be.lessThan(initialFileStats.size); - }).timeout(10000); - - it("PDF Compress From Compressor", async () => { - const pdfResizeInput = new PathInput( - { inputPath: path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") } - ); - await pdfResizeInput.init(); - - const resizes = [ - await compressPdf(pdfResizeInput.fileObject, 85), - await compressPdf(pdfResizeInput.fileObject, 75), - await compressPdf(pdfResizeInput.fileObject, 50), - await compressPdf(pdfResizeInput.fileObject, 10) - ]; - - const fileNames = ["compress85.pdf", "compress75.pdf", "compress50.pdf", "compress10.pdf"]; - for (let i = 0; i < resizes.length; i++) { - await fs.promises.writeFile(path.join(outputPath, fileNames[i]), resizes[i]); - } - - const initialFileStats = await fs.promises.stat( - path.join(V1_PRODUCT_PATH, "invoice_splitter/default_sample.pdf") - ); - 
const renderedFileStats = await Promise.all( - fileNames.map(fileName => fs.promises.stat(path.join(outputPath, fileName))) - ); - - expect(initialFileStats.size).to.be.greaterThan(renderedFileStats[0].size); - expect(renderedFileStats[0].size).to.be.greaterThan(renderedFileStats[1].size); - expect(renderedFileStats[1].size).to.be.greaterThan(renderedFileStats[2].size); - expect(renderedFileStats[2].size).to.be.greaterThan(renderedFileStats[3].size); - }).timeout(20000); - - it("PDF Compress With Text Keeps Text", async () => { - const initialWithText = new PathInput( - { inputPath: path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf") } - ); - await initialWithText.init(); - - const compressedWithText = await compressPdf( - initialWithText.fileObject, 100, true, false - ); - const originalText = (await extractTextFromPdf(initialWithText.fileObject)).getConcatenatedText(); - const compressedText = (await extractTextFromPdf(compressedWithText)).getConcatenatedText(); - - expect(compressedText).to.equal(originalText); - }).timeout(60000); - - it("PDF Compress With Text Does Not Compress", async () => { - const initialWithText = new PathInput( - { inputPath: path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf") } - ); - await initialWithText.init(); - - const compressedWithText = await compressPdf(initialWithText.fileObject, 50); - - expect(compressedWithText).to.deep.equal(initialWithText.fileObject); - }).timeout(10000); - - after(async function () { - const createdFiles: string[] = [ - "compress10.pdf", - "compress50.pdf", - "compress75.pdf", - "compress85.pdf", - "resize_indirect.pdf", - "compress1.jpg", - "compress10.jpg", - "compress50.jpg", - "compress75.jpg", - "compress100.jpg", - "compress_indirect.jpg", - "resize250x500.jpg", - "resize500x250.jpg", - "resize500xnull.jpg", - "resize_indirect.jpg", - "resizenullx250.jpg", - ]; - - for (const filePath of createdFiles) { - try { - await fs.promises.unlink(path.join(RESOURCE_PATH, "output", filePath)); - } 
catch (error) { - if ((error as NodeJS.ErrnoException).code !== "ENOENT") { - logger.warn(`Could not delete file '${filePath}': ${(error as Error).message}`); - } - } - } - }); }); diff --git a/tests/input/urlInputSource.spec.ts b/tests/input/urlInputSource.spec.ts index d474ab8c1..ff777f76a 100644 --- a/tests/input/urlInputSource.spec.ts +++ b/tests/input/urlInputSource.spec.ts @@ -1,21 +1,26 @@ -import { BytesInput, UrlInput } from "../../src"; -import { LocalInputSource } from "../../src/input"; +import { BytesInput, UrlInput } from "@/index.js"; +import { LocalInputSource } from "@/input/index.js"; import { expect } from "chai"; -import nock from "nock"; +import { MockAgent, setGlobalDispatcher } from "undici"; -describe("Test URL input source", () => { +const mockAgent = new MockAgent(); +setGlobalDispatcher(mockAgent); +const mockPool = mockAgent.get("https://dummy-host"); + +describe("Input Sources - URL input source", () => { describe("initializing", () => { it("should accept a URL", async () => { const input = new UrlInput({ url: "https://upload.wikimedia.org/wikipedia/commons/thumb/0/0b/ReceiptSwiss.jpg/576px-ReceiptSwiss.jpg", + dispatcher: mockAgent, }); await input.init(); expect(input.fileObject).to.be.a("string"); }); it("should throw an error for non-HTTPS URL", async () => { - const url = "http://example.com/file.pdf"; - const urlSource = new UrlInput({ url }); + const url = "http://dummy-host/file.pdf"; + const urlSource = new UrlInput({ url, dispatcher: mockAgent }); try { await urlSource.init(); @@ -28,24 +33,16 @@ describe("Test URL input source", () => { describe("asLocalInputSource", () => { - beforeEach(() => { - nock.disableNetConnect(); - }); - - afterEach(() => { - nock.cleanAll(); - nock.enableNetConnect(); - }); it("should download file and return LocalInputSource", async () => { - const url = "https://example.com/file.pdf"; + const url = "https://dummy-host/file.pdf"; const fileContent = Buffer.from("dummy PDF content"); - 
nock("https://example.com") - .get("/file.pdf") + mockPool + .intercept({ path: "/file.pdf", method: "GET" }) .reply(200, fileContent); - const urlInput = new UrlInput({ url }); + const urlInput = new UrlInput({ url, dispatcher: mockAgent }); await urlInput.init(); const localInput = await urlInput.asLocalInputSource(); await localInput.init(); @@ -56,19 +53,25 @@ describe("Test URL input source", () => { }); it("should handle redirects", async () => { - const originalUrl = "https://example.com/original.pdf"; - const redirectUrl = "https://example.com/redirected.pdf"; + const originalUrl = "https://dummy-host/original.pdf"; + const redirectUrl = "https://dummy-host/redirected.pdf"; const fileContent = Buffer.from("redirected PDF content"); - nock("https://example.com") - .get("/original.pdf") - .reply(302, "", { location: redirectUrl }); // Not sure about that one. + mockPool + .intercept({ path: "/original.pdf", method: "GET" }) + .reply( + 302, + "", + { + headers: { location: redirectUrl } + } + ); - nock("https://example.com") - .get("/redirected.pdf") + mockPool + .intercept({ path: "/redirected.pdf", method: "GET" }) .reply(200, fileContent); - const urlInput = new UrlInput({ url: originalUrl }); + const urlInput = new UrlInput({ url: originalUrl, dispatcher: mockAgent }); const localInput = await urlInput.asLocalInputSource(); await localInput.init(); @@ -78,13 +81,13 @@ describe("Test URL input source", () => { }); it("should throw an error for HTTP error responses", async () => { - const url = "https://example.com/not-found.pdf"; + const url = "https://dummy-host/not-found.pdf"; - nock("https://example.com") - .get("/not-found.pdf") - .reply(404); + mockPool + .intercept({ path: "/not-found.pdf", method: "GET" }) + .reply(404, ""); - const urlInput = new UrlInput({ url }); + const urlInput = new UrlInput({ url, dispatcher: mockAgent }); try { await urlInput.asLocalInputSource(); @@ -96,14 +99,14 @@ describe("Test URL input source", () => { }); it("should 
use provided filename", async () => { - const url = "https://example.com/file.pdf"; + const url = "https://dummy-host/file.pdf"; const fileContent = Buffer.from("dummy PDF content"); - nock("https://example.com") - .get("/file.pdf") + mockPool + .intercept({ path: "/file.pdf", method: "GET" }) .reply(200, fileContent); - const urlInput = new UrlInput({ url }); + const urlInput = new UrlInput({ url, dispatcher: mockAgent }); const localInput = await urlInput.asLocalInputSource({ filename: "custom.pdf" }); await localInput.init(); @@ -111,12 +114,12 @@ describe("Test URL input source", () => { }); it("should throw an error for invalid filename", async () => { - nock("https://example.com") - .get("/file") + mockPool + .intercept({ path: "/file.pdf", method: "GET" }) .reply(200, "toto"); - const url = "https://example.com/file"; - const urlInput = new UrlInput({ url }); + const url = "https://dummy-host/file.pdf"; + const urlInput = new UrlInput({ url, dispatcher: mockAgent }); try { const localInput = await urlInput.asLocalInputSource({ filename: "invalid" }); diff --git a/tests/pdf/pdfOperation.spec.ts b/tests/pdf/pdfOperation.spec.ts index 16cb99669..4a00c7b6e 100644 --- a/tests/pdf/pdfOperation.spec.ts +++ b/tests/pdf/pdfOperation.spec.ts @@ -1,12 +1,12 @@ -import * as pdf from "../../src/pdf"; +import * as pdf from "@/pdf/index.js"; import * as path from "path"; import * as fs from "fs"; import { expect } from "chai"; -import { PageOptions, PageOptionsOperation } from "../../src"; -import { PathInput } from "../../src"; -import { RESOURCE_PATH } from "../index"; +import { PageOptions, PageOptionsOperation } from "@/index.js"; +import { PathInput } from "@/index.js"; +import { RESOURCE_PATH } from "../index.js"; -describe("Test pdf operation", () => { +describe("Test pdf operation #includeOptionalDeps", () => { it("should cut a PDF to get 2 pages", async () => { const inputSource = new PathInput({ inputPath: path.join(RESOURCE_PATH, "file_types/pdf/multipage.pdf"), 
diff --git a/tests/pdf/pdfTypes.spec.ts b/tests/pdf/pdfTypes.spec.ts index 64ddc6c90..4bd6238c2 100644 --- a/tests/pdf/pdfTypes.spec.ts +++ b/tests/pdf/pdfTypes.spec.ts @@ -1,17 +1,12 @@ -import * as mindee from "../../src"; import path from "path"; import { expect } from "chai"; -import * as pdf from "../../src/pdf"; -import { PageOptions } from "../../src/input"; -import {PageOptionsOperation, PathInput} from "../../src"; -import * as fs from "node:fs"; -import { RESOURCE_PATH } from "../index"; +import * as pdf from "@/pdf/index.js"; +import { PageOptions } from "@/input/index.js"; +import { PageOptionsOperation, PathInput } from "@/index.js"; +import { RESOURCE_PATH } from "../index.js"; + +describe("Test pdf lib #includeOptionalDeps", () => { -describe("Test pdf lib", () => { - let client: mindee.Client; - beforeEach(async () => { - client = new mindee.Client(); - }); it("should open a simple XFA form PDF.", async () => { const inputDoc = new PathInput( { inputPath: path.join(RESOURCE_PATH, "file_types/pdf/XfaForm.pdf") } diff --git a/tests/test_code_samples.sh b/tests/test_v1_code_samples.sh similarity index 62% rename from tests/test_code_samples.sh rename to tests/test_v1_code_samples.sh index a3529d7ae..944489706 100755 --- a/tests/test_code_samples.sh +++ b/tests/test_v1_code_samples.sh @@ -1,12 +1,9 @@ #!/bin/sh set -e -OUTPUT_FILE='../test_code_samples/_test.js' +OUTPUT_FILE='../test_code_samples/_test_v1.js' ACCOUNT=$1 ENDPOINT=$2 -API_KEY=$3 -API_KEY_V2=$4 -MODEL_ID=$5 rm -fr ../test_code_samples mkdir ../test_code_samples @@ -15,32 +12,18 @@ cd ../test_code_samples npm install ../mindee-api-nodejs/dist --ignore-scripts --no-bin-links cd - -for f in $(find docs/code_samples -maxdepth 1 -name "*.txt" -not -name "workflow_*.txt" | sort -h) +for f in $( + find docs/code_samples -maxdepth 1 -name "*.txt" -not -name "workflow_*.txt" -not -name "v2_*.txt" | sort -h +) do - if echo "${f}" | grep -q "default_v2.txt"; then - if [ -z "${API_KEY_V2}" ] || [ -z 
"${MODEL_ID}" ]; then - echo "Skipping ${f} (API_KEY_V2 or MODEL_ID not supplied)" - echo - continue - fi - fi - echo "###############################################" echo "${f}" echo "###############################################" echo - sed "s/my-api-key/$API_KEY/" "${f}" > $OUTPUT_FILE + sed "s/my-api-key/${MINDEE_API_KEY}/" "${f}" > $OUTPUT_FILE sed -i "s/\/path\/to\/the\/file.ext/..\/mindee-api-nodejs\/tests\/data\/file_types\/pdf\/blank_1.pdf/" $OUTPUT_FILE - if echo "${f}" | grep -q "default_v2.txt" - then - sed -i "s/MY_API_KEY/$API_KEY_V2/" $OUTPUT_FILE - sed -i "s/MY_MODEL_ID/$MODEL_ID/" $OUTPUT_FILE - else - sed -i "s/my-api-key/$API_KEY/" $OUTPUT_FILE - fi - if echo "$f" | grep -q "custom_v1.txt" then sed -i "s/my-account/$ACCOUNT/g" $OUTPUT_FILE diff --git a/tests/test_v2_cli.sh b/tests/test_v2_cli.sh new file mode 100755 index 000000000..6a07b1d8c --- /dev/null +++ b/tests/test_v2_cli.sh @@ -0,0 +1,41 @@ +#!/bin/sh +set -e + +TEST_FILE='./tests/data/file_types/pdf/blank_1.pdf' + +for f in $( + find docs/code_samples -maxdepth 1 -name "v2_*.txt" | sort -h +) +do + echo "###############################################" + echo "${f}" + echo "###############################################" + echo + + if echo "${f}" | grep -q "v2_classification.txt" + then + node ./dist/bin/mindeeV2.js -d classification -m "${MINDEE_V2_SE_TESTS_CLASSIFICATION_MODEL_ID}" "${TEST_FILE}" + fi + + if echo "${f}" | grep -q "v2_crop.txt" + then + node ./dist/bin/mindeeV2.js -d crop -m "${MINDEE_V2_SE_TESTS_CROP_MODEL_ID}" "${TEST_FILE}" + fi + + if echo "${f}" | grep -q "v2_extraction.txt" + then + node ./dist/bin/mindeeV2.js -d extraction -m "${MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID}" "${TEST_FILE}" + fi + + if echo "${f}" | grep -q "v2_ocr.txt" + then + node ./dist/bin/mindeeV2.js -d ocr -m "${MINDEE_V2_SE_TESTS_OCR_MODEL_ID}" "${TEST_FILE}" + fi + + if echo "${f}" | grep -q "v2_split.txt" + then + node ./dist/bin/mindeeV2.js -d split -m "${MINDEE_V2_SE_TESTS_SPLIT_MODEL_ID}" 
"${TEST_FILE}" + fi + + sleep 0.5 # avoid too many request errors +done diff --git a/tests/test_v2_code_samples.sh b/tests/test_v2_code_samples.sh new file mode 100755 index 000000000..4e74e2589 --- /dev/null +++ b/tests/test_v2_code_samples.sh @@ -0,0 +1,52 @@ +#!/bin/sh +set -e + +OUTPUT_FILE='../test_code_samples/_test_v2.mjs' + +rm -fr ../test_code_samples +mkdir ../test_code_samples + +cd ../test_code_samples +npm install ../mindee-api-nodejs/dist --ignore-scripts --no-bin-links +cd - + +for f in $( + find docs/code_samples -maxdepth 1 -name "v2_*.txt" | sort -h +) +do + echo "###############################################" + echo "${f}" + echo "###############################################" + echo + + sed "s/MY_API_KEY/${MINDEE_V2_API_KEY}/" "${f}" > $OUTPUT_FILE + sed -i "s/\/path\/to\/the\/file.ext/..\/mindee-api-nodejs\/tests\/data\/file_types\/pdf\/blank_1.pdf/" $OUTPUT_FILE + + if echo "${f}" | grep -q "v2_classification.txt" + then + sed -i "s/MY_MODEL_ID/${MINDEE_V2_SE_TESTS_CLASSIFICATION_MODEL_ID}/" $OUTPUT_FILE + fi + + if echo "${f}" | grep -q "v2_crop.txt" + then + sed -i "s/MY_MODEL_ID/${MINDEE_V2_SE_TESTS_CROP_MODEL_ID}/" $OUTPUT_FILE + fi + + if echo "${f}" | grep -q "v2_extraction.txt" + then + sed -i "s/MY_MODEL_ID/${MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID}/" $OUTPUT_FILE + fi + + if echo "${f}" | grep -q "v2_ocr.txt" + then + sed -i "s/MY_MODEL_ID/${MINDEE_V2_SE_TESTS_OCR_MODEL_ID}/" $OUTPUT_FILE + fi + + if echo "${f}" | grep -q "v2_split.txt" + then + sed -i "s/MY_MODEL_ID/${MINDEE_V2_SE_TESTS_SPLIT_MODEL_ID}/" $OUTPUT_FILE + fi + + sleep 0.5 # avoid too many request errors + node $OUTPUT_FILE +done diff --git a/tests/v1/api/asyncResponse.spec.ts b/tests/v1/api/asyncResponse.spec.ts index 9055791ed..4501d95a3 100644 --- a/tests/v1/api/asyncResponse.spec.ts +++ b/tests/v1/api/asyncResponse.spec.ts @@ -1,11 +1,11 @@ import { expect } from "chai"; -import { AsyncPredictResponse } from "../../../src"; +import { AsyncPredictResponse } from 
"@/v1/index.js"; import { promises as fs } from "fs"; import * as path from "path"; -import { StringDict } from "../../../src/parsing/common"; -import { InvoiceSplitterV1 } from "../../../src/product"; -import { cleanRequestData, isValidAsyncResponse } from "../../../src/http"; -import { RESOURCE_PATH } from "../../index"; +import { StringDict } from "@/v1/parsing/common/index.js"; +import { InvoiceSplitterV1 } from "@/v1/product/index.js"; +import { cleanRequestData, isValidAsyncResponse } from "@/http/index.js"; +import { RESOURCE_PATH } from "../../index.js"; describe("MindeeV1 - Asynchronous API predict response", () => { it("should parse a successful enqueue", async () => { diff --git a/tests/v1/api/endpoint.spec.ts b/tests/v1/api/endpoint.spec.ts index ab473cf05..460222cd2 100644 --- a/tests/v1/api/endpoint.spec.ts +++ b/tests/v1/api/endpoint.spec.ts @@ -1,91 +1,119 @@ -import nock from "nock"; +import * as fs from "node:fs"; import * as path from "path"; import { expect } from "chai"; -import * as mindee from "../../../src"; -import { RESOURCE_PATH, V1_RESOURCE_PATH } from "../../index"; +import { MockAgent, setGlobalDispatcher } from "undici"; +import { PathInput } from "@/index.js"; +import { Client, product } from "@/v1/index.js"; +import { RESOURCE_PATH, V1_RESOURCE_PATH } from "../../index.js"; +import assert from "node:assert/strict"; +import { + MindeeHttp400Error, MindeeHttp401Error, MindeeHttp429Error, MindeeHttp500Error +} from "@/v1/http/index.js"; + +const mockAgent = new MockAgent(); +setGlobalDispatcher(mockAgent); +const mockPool = mockAgent.get("https://v1-endpoint-host"); + +function setInterceptor(httpCode: number, httpResultFile: string) { + const filePath = path.resolve(path.join(V1_RESOURCE_PATH, httpResultFile)); + mockPool + .intercept({ path: /.*/, method: "POST" }) + .reply( + httpCode, + fs.readFileSync(filePath, "utf8"), + { + // eslint-disable-next-line @typescript-eslint/naming-convention + headers: { "content-type": 
"application/json" } + } + ); +} describe("MindeeV1 - HTTP calls", () => { - before(function() { - process.env.MINDEE_API_HOST = "local.mindee.net"; + const doc = new PathInput({ + inputPath: path.join(RESOURCE_PATH, "file_types/pdf/blank_1.pdf") }); - after(function() { - delete process.env.MINDEE_API_HOST; + beforeEach(async function() { + process.env.MINDEE_API_HOST = "v1-endpoint-host"; }); - async function sendRequest(httpCode: number, httpResultFile: string) { - const owner = "mindee"; - const urlName = "invoices"; - const version = "4"; - - nock("https://local.mindee.net") - .post(`/v1/products/${owner}/${urlName}/v${version}/predict`) - .replyWithFile(httpCode, path.resolve(httpResultFile)); - - const mindeeClient = new mindee.Client({ apiKey: "my-api-key", debug: true }); - const doc = new mindee.PathInput({ - inputPath: path.join(RESOURCE_PATH, "file_types/pdf/blank_1.pdf") - }); - return await mindeeClient.parse(mindee.product.InvoiceV4, doc); - } + afterEach(async function() { + delete process.env.MINDEE_API_HOST; + }); it("should fail on 400 response with object", async () => { - try { - await sendRequest(400, path.join(V1_RESOURCE_PATH, "errors/error_400_with_object_in_detail.json")); - } catch (error: any) { - expect(error.name).to.be.equals("MindeeHttp400Error"); - expect(error.code).to.be.equals(400); - expect(error.message).to.be.undefined; - expect(error.details).to.deep.equal({ document: ["error message"] }); - } + setInterceptor(400, "errors/error_400_with_object_in_detail.json"); + const client = new Client({ apiKey: "my-api-key", debug: true, dispatcher: mockAgent }); + await assert.rejects( + client.parse(product.InvoiceV4, doc), + (error: any) => { + expect(error).to.be.instanceOf(MindeeHttp400Error); + expect(error.code).to.be.equals(400); + expect(error.message).to.be.undefined; + expect(error.details).to.deep.equal({ document: ["error message"] }); + return true; + }); }); it("should fail on 401 response", async () => { - try { - await 
sendRequest(401, path.join(V1_RESOURCE_PATH, "errors/error_401_no_token.json")); - } catch (error: any) { - expect(error.name).to.be.equals("MindeeHttp401Error"); - expect(error.code).to.be.equals(401); - expect(error.message).to.be.equals("Authorization required"); - expect(error.details).to.be.equals("No token provided"); - } + setInterceptor(401, "errors/error_401_no_token.json"); + const client = new Client({ apiKey: "my-api-key", debug: true, dispatcher: mockAgent }); + await assert.rejects( + client.parse(product.InvoiceV4, doc), + (error: any) => { + expect(error).to.be.instanceOf(MindeeHttp401Error); + expect(error.code).to.be.equals(401); + expect(error.message).to.be.equals("Authorization required"); + expect(error.details).to.be.equals("No token provided"); + return true; + }); }); it("should fail on 429 response", async () => { - try { - await sendRequest(429, path.join(V1_RESOURCE_PATH, "errors/error_429_too_many_requests.json")); - } catch (error: any) { - expect(error.name).to.be.equals("MindeeHttp429Error"); - expect(error.code).to.be.equals(429); - expect(error.message).to.be.equals("Too many requests"); - expect(error.details).to.be.equals("Too Many Requests."); - } + setInterceptor(429, "errors/error_429_too_many_requests.json"); + const client = new Client({ apiKey: "my-api-key", debug: true, dispatcher: mockAgent }); + await assert.rejects( + client.parse(product.InvoiceV4, doc), + (error: any) => { + expect(error).to.be.instanceOf(MindeeHttp429Error); + expect(error.code).to.be.equals(429); + expect(error.message).to.be.equals("Too many requests"); + expect(error.details).to.be.equals("Too Many Requests."); + return true; + }); }); + it("should fail on 500 response", async () => { - try { - await sendRequest(500, path.join(V1_RESOURCE_PATH, "errors/error_500_inference_fail.json")); - } catch (error: any) { - expect(error.name).to.be.equals("MindeeHttp500Error"); - expect(error.code).to.be.equals(500); - expect(error.details).to.be.equals("Can 
not run prediction: "); - expect(error.message).to.be.equals("Inference failed"); - } + setInterceptor(500, "errors/error_500_inference_fail.json"); + const client = new Client({ apiKey: "my-api-key", debug: true, dispatcher: mockAgent }); + await assert.rejects( + client.parse(product.InvoiceV4, doc), + (error: any) => { + expect(error).to.be.instanceOf(MindeeHttp500Error); + expect(error.code).to.be.equals(500); + expect(error.message).to.be.equals("Inference failed"); + expect(error.details).to.be.equals("Can not run prediction: "); + return true; + }); }); it("should fail on HTML response", async () => { - try { - await sendRequest(500, path.join(V1_RESOURCE_PATH, "errors/error_50x.html")); - } catch (error: any) { - expect(error.name).to.be.equals("MindeeHttp500Error"); - expect(error.code).to.be.equals(500); - } + setInterceptor(500, "errors/error_50x.html"); + const client = new Client({ apiKey: "my-api-key", debug: true, dispatcher: mockAgent }); + await assert.rejects( + client.parse(product.InvoiceV4, doc), + (error: any) => { + expect(error).to.be.instanceOf(MindeeHttp500Error); + expect(error.code).to.be.equals(500); + return true; + }); }); }); describe ("Endpoint parameters" , () => { it ("should initialize default parameters properly", async () => { - const mindeeClient = new mindee.Client({ apiKey: "dummy-api-key" }); - const customEndpoint = mindeeClient.createEndpoint( + const client = new Client({ apiKey: "dummy-api-key", debug: true }); + const customEndpoint = client.createEndpoint( "dummy-endpoint", "dummy-account" ); @@ -96,17 +124,18 @@ describe ("Endpoint parameters" , () => { }); it ("should initialize environment parameters properly", async () => { - process.env.MINDEE_API_HOST = "dummy-host"; + process.env.MINDEE_API_HOST = "v1-endpoint-host"; process.env.MINDEE_API_KEY = "dummy-key"; process.env.MINDEE_REQUEST_TIMEOUT = "30"; - const mindeeClient = new mindee.Client(); - const customEndpoint = mindeeClient.createEndpoint( + + const 
client = new Client({ debug: true }); + const customEndpoint = client.createEndpoint( "dummy-endpoint", "dummy-account" ); expect(customEndpoint.version).to.equal("1"); expect(customEndpoint.settings.timeout).to.equal(30); - expect(customEndpoint.settings.hostname).to.equal("dummy-host"); + expect(customEndpoint.settings.hostname).to.equal("v1-endpoint-host"); expect(customEndpoint.settings.apiKey).to.equal("dummy-key"); delete process.env.MINDEE_API_HOST; diff --git a/tests/v1/api/feedbackResponse.spec.ts b/tests/v1/api/feedbackResponse.spec.ts index bfa57fbb7..eeb26cc58 100644 --- a/tests/v1/api/feedbackResponse.spec.ts +++ b/tests/v1/api/feedbackResponse.spec.ts @@ -1,8 +1,8 @@ import path from "path"; import { expect } from "chai"; import { promises as fs } from "fs"; -import { FeedbackResponse } from "../../../src/parsing/common"; -import { V1_PRODUCT_PATH } from "../../index"; +import { FeedbackResponse } from "@/v1/parsing/common/index.js"; +import { V1_PRODUCT_PATH } from "../../index.js"; describe("MindeeV1 - Feedback response", () => { it("should load an empty feedback response", async () => { diff --git a/tests/v1/api/multiReceiptsReconstruction.integration.ts b/tests/v1/api/multiReceiptsReconstruction.integration.ts deleted file mode 100644 index 871313c0b..000000000 --- a/tests/v1/api/multiReceiptsReconstruction.integration.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { expect } from "chai"; -import * as path from "path"; -import { Client, PathInput } from "../../../src"; -import { MultiReceiptsDetectorV1, ReceiptV5 } from "../../../src/product"; -import { extractReceipts } from "../../../src/imageOperations"; -import { V1_PRODUCT_PATH } from "../../index"; -import { LocalInputSource } from "../../../src/input"; -import { setTimeout } from "node:timers/promises"; - -const apiKey = process.env.MINDEE_API_KEY; -let client: Client; -let sourceDoc: LocalInputSource; - -describe("MindeeV1 - A Multi-Receipt Image", () => { - before(async () => { - sourceDoc = 
new PathInput({ - inputPath: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/default_sample.jpg"), - }); - await sourceDoc.init(); - client = new Client({ apiKey }); - }); - - // NOTE: rotation causes flakiness in receipt order, causing the test to fail. - // it("should send to the server and cut properly", async () => { - // const multiReceiptResult = await client.parse(MultiReceiptsDetectorV1, sourceDoc); - // expect(multiReceiptResult.document?.inference.prediction.receipts.length).to.be.equals(6); - // expect(multiReceiptResult.document?.inference.pages[0].orientation?.value).to.be.equals(90); - // const receipts = await extractReceipts(sourceDoc, multiReceiptResult.document!.inference); - // expect(receipts.length).to.be.equals(6); - // const extractedReceipts = await extractReceipts(sourceDoc, multiReceiptResult.document!.inference); - // expect(extractedReceipts.length).to.be.equals(6); - // const receiptsResults = []; - // for (const extractedReceipt of extractedReceipts) { - // const localInput = extractedReceipt.asSource(); - // receiptsResults.push(await client.parse(ReceiptV5, localInput)); - // await setTimeout(1000); - // } - // - // expect(receiptsResults[0].document.inference.prediction.lineItems.length).to.be.equals(0); - // - // expect(receiptsResults[1].document.inference.prediction.lineItems.length).to.be.equals(1); - // expect(receiptsResults[1].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(21.5); - // - // expect(receiptsResults[2].document.inference.prediction.lineItems.length).to.be.equals(2); - // expect(receiptsResults[2].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(11.5); - // expect(receiptsResults[2].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(2); - // - // expect(receiptsResults[3].document.inference.prediction.lineItems.length).to.be.equals(1); - // expect(receiptsResults[3].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(16.5); - // - // 
expect(receiptsResults[4].document.inference.prediction.lineItems.length).to.be.equals(2); - // expect(receiptsResults[4].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(10.5); - // expect(receiptsResults[4].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(4); - // - // expect(receiptsResults[5].document.inference.prediction.lineItems.length).to.be.equals(0); - // }).timeout(60000); -}); - - -describe("MindeeV1 - A Multi-Receipt Document", () => { - before(async () => { - sourceDoc = new PathInput({ - inputPath: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/multipage_sample.pdf"), - }); - await sourceDoc.init(); - client = new Client({ apiKey }); - }); - - it("should send to the server and cut properly", async () => { - const multiReceiptResult = await client.parse(MultiReceiptsDetectorV1, sourceDoc); - expect(multiReceiptResult.document?.inference.prediction.receipts.length).to.be.equals(5); - const extractedReceipts = await extractReceipts(sourceDoc, multiReceiptResult.document!.inference); - expect(extractedReceipts.length).to.be.equals(5); - expect(multiReceiptResult.document?.inference.pages[0].orientation?.value).to.be.equals(0); - expect(multiReceiptResult.document?.inference.pages[1].orientation?.value).to.be.equals(0); - const receiptsResults = []; - for (const extractedReceipt of extractedReceipts) { - const localInput = extractedReceipt.asSource(); - receiptsResults.push(await client.parse(ReceiptV5, localInput)); - await setTimeout(1000); - } - expect(receiptsResults[0].document.inference.prediction.lineItems.length).to.be.equals(5); - expect(receiptsResults[0].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(70); - expect(receiptsResults[0].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(12); - expect(receiptsResults[0].document.inference.prediction.lineItems[2].totalAmount).to.be.equals(14); - 
expect(receiptsResults[0].document.inference.prediction.lineItems[3].totalAmount).to.be.equals(11); - expect(receiptsResults[0].document.inference.prediction.lineItems[4].totalAmount).to.be.equals(5.6); - - expect(receiptsResults[1].document.inference.prediction.lineItems.length).to.be.equals(7); - expect(receiptsResults[1].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(6); - expect(receiptsResults[1].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(11); - expect(receiptsResults[1].document.inference.prediction.lineItems[2].totalAmount).to.be.equals(67.2); - expect(receiptsResults[1].document.inference.prediction.lineItems[3].totalAmount).to.be.equals(19.2); - expect(receiptsResults[1].document.inference.prediction.lineItems[4].totalAmount).to.be.equals(7); - expect(receiptsResults[1].document.inference.prediction.lineItems[5].totalAmount).to.be.equals(5.5); - expect(receiptsResults[1].document.inference.prediction.lineItems[6].totalAmount).to.be.equals(36); - - expect(receiptsResults[2].document.inference.prediction.lineItems.length).to.be.equals(1); - expect(receiptsResults[2].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(275); - - expect(receiptsResults[3].document.inference.prediction.lineItems.length).to.be.equals(2); - expect(receiptsResults[3].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(11.5); - expect(receiptsResults[3].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(2); - - expect(receiptsResults[4].document.inference.prediction.lineItems.length).to.be.equals(1); - expect(receiptsResults[4].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(16.5); - - - }).timeout(60000); -}); - - -describe("MindeeV1 - A Single-Receipt Image", () => { - before(async () => { - sourceDoc = new PathInput({ - inputPath: path.join(V1_PRODUCT_PATH, "expense_receipts/default_sample.jpg"), - }); - await sourceDoc.init(); - client = new Client({ apiKey 
}); - }); - - it("should send to the server and cut properly", async () => { - const multiReceiptResult = await client.parse(MultiReceiptsDetectorV1, sourceDoc); - expect(multiReceiptResult.document?.inference.prediction.receipts.length).to.be.equals(1); - const receipts = await extractReceipts(sourceDoc, multiReceiptResult.document!.inference); - expect(receipts.length).to.be.equals(1); - const receiptResult = await client.parse(ReceiptV5, receipts[0].asSource()); - expect(receiptResult.document.inference.prediction.lineItems.length).to.be.equals(1); - expect(receiptResult.document.inference.prediction.lineItems[0].totalAmount).to.be.equals(10.2); - expect(receiptResult.document.inference.prediction.taxes.length).to.be.equals(1); - expect(receiptResult.document.inference.prediction.taxes[0].value).to.be.equals(1.7); - }).timeout(60000); -}); diff --git a/tests/v1/api/response.spec.ts b/tests/v1/api/response.spec.ts index bc960f59c..226ff069e 100644 --- a/tests/v1/api/response.spec.ts +++ b/tests/v1/api/response.spec.ts @@ -1,9 +1,9 @@ import { promises as fs } from "fs"; import * as path from "path"; import { expect } from "chai"; -import { PredictResponse } from "../../../src"; -import { CustomV1, InvoiceV4, ReceiptV5 } from "../../../src/product"; -import { V1_PRODUCT_PATH } from "../../index"; +import { PredictResponse } from "@/v1/index.js"; +import { InvoiceV4, ReceiptV5 } from "@/v1/product/index.js"; +import { V1_PRODUCT_PATH } from "../../index.js"; const dataPath = { receiptV5: path.join(V1_PRODUCT_PATH, "expense_receipts/response_v5/complete.json"), @@ -38,16 +38,4 @@ describe("MindeeV1 - Synchronous API predict response", () => { }); }); - it("should build a Custom Doc response", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.customV1)); - const httpResponse = JSON.parse(jsonData.toString()); - const response = new PredictResponse(CustomV1, httpResponse); - expect(response.document.inference.prediction).to.not.be.undefined; - 
expect(response.document.inference.pages.length).to.be.equals(2); - expect(response.document.nPages).to.be.equals(2); - response.document.inference.pages.forEach((page, idx) => { - expect(page.id).to.be.equals(idx); - expect(page.toString()).to.not.be.undefined; - }); - }); }); diff --git a/tests/index.spec.ts b/tests/v1/clientInit.spec.ts similarity index 54% rename from tests/index.spec.ts rename to tests/v1/clientInit.spec.ts index 09eef040f..bdb59a748 100644 --- a/tests/index.spec.ts +++ b/tests/v1/clientInit.spec.ts @@ -1,10 +1,14 @@ -import { Client } from "../src"; +import { Client } from "@/v1/index.js"; import { expect } from "chai"; +import assert from "node:assert/strict"; describe("Test client initialization", () => { - it("should create an empty client", () => { - const client = new Client({}); - expect(client).to.exist; + it("should not create an empty client", () => { + assert.throws( + () => { + new Client({}); + } + ); }); it("should create a client with an API key", () => { @@ -13,7 +17,7 @@ describe("Test client initialization", () => { }); it("should create a client in debug mode", () => { - const client = new Client({ debug: true }); + const client = new Client({ apiKey: "invalid-api-key", debug: true }); expect(client).to.exist; }); }); diff --git a/tests/imageOperations/invoiceSplitterExtractor.spec.ts b/tests/v1/extraction/invoiceSplitterExtractor.spec.ts similarity index 81% rename from tests/imageOperations/invoiceSplitterExtractor.spec.ts rename to tests/v1/extraction/invoiceSplitterExtractor.spec.ts index 4d2eaddd2..d79c63014 100644 --- a/tests/imageOperations/invoiceSplitterExtractor.spec.ts +++ b/tests/v1/extraction/invoiceSplitterExtractor.spec.ts @@ -1,17 +1,17 @@ import { expect } from "chai"; import { promises as fs } from "fs"; import path from "path"; -import { InvoiceSplitterV1 } from "../../src/product"; -import { extractInvoices } from "../../src/imageOperations"; -import { PathInput } from "../../src"; -import { 
V1_PRODUCT_PATH } from "../index"; +import { InvoiceSplitterV1 } from "@/v1/product/index.js"; +import { extractInvoices } from "@/v1/extraction/index.js"; +import { PathInput } from "@/index.js"; +import { V1_PRODUCT_PATH } from "../../index.js"; const dataPath = { complete: path.join(V1_PRODUCT_PATH, "invoice_splitter/response_v1/complete.json"), fileSample: path.join(V1_PRODUCT_PATH, "invoice_splitter/invoice_5p.pdf"), }; -describe("A multi-page invoice document", () => { +describe("A multi-page invoice document #includeOptionalDeps", () => { it("should be split properly.", async () => { const jsonDataNA = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonDataNA.toString()); diff --git a/tests/v1/api/invoiceSplitterReconstruction.spec.ts b/tests/v1/extraction/invoiceSplitterReconstruction.spec.ts similarity index 85% rename from tests/v1/api/invoiceSplitterReconstruction.spec.ts rename to tests/v1/extraction/invoiceSplitterReconstruction.spec.ts index 7d9425daf..1f7d08a55 100644 --- a/tests/v1/api/invoiceSplitterReconstruction.spec.ts +++ b/tests/v1/extraction/invoiceSplitterReconstruction.spec.ts @@ -1,14 +1,13 @@ import { expect } from "chai"; import { promises as fs } from "fs"; import * as path from "path"; -import { Document } from "../../../src"; -import { InvoiceSplitterV1 } from "../../../src/product"; -import { extractInvoices } from "../../../src/imageOperations"; -import { PathInput } from "../../../src"; -import { V1_PRODUCT_PATH } from "../../index"; - -describe("MindeeV1 - A Multipage Invoice Document", () => { +import { Document } from "@/v1/index.js"; +import { InvoiceSplitterV1 } from "@/v1/product/index.js"; +import { extractInvoices } from "@/v1/extraction/index.js"; +import { PathInput } from "@/index.js"; +import { V1_PRODUCT_PATH } from "../../index.js"; +describe("MindeeV1 - A Multipage Invoice Document #includeOptionalDeps", () => { it("should be split into the proper invoices", async () => { const 
jsonData = await fs.readFile( path.join(V1_PRODUCT_PATH, "invoice_splitter/response_v1/complete.json") diff --git a/tests/v1/extraction/multiReceiptsExtractor.spec.ts b/tests/v1/extraction/multiReceiptsExtractor.spec.ts new file mode 100644 index 000000000..16f7b4271 --- /dev/null +++ b/tests/v1/extraction/multiReceiptsExtractor.spec.ts @@ -0,0 +1,63 @@ +import { expect } from "chai"; +import { promises as fs } from "fs"; +import path from "path"; +import { MultiReceiptsDetectorV1 } from "@/v1/product/index.js"; +import { extractReceipts } from "@/v1/extraction/index.js"; +import { PathInput } from "@/index.js"; +import { V1_PRODUCT_PATH } from "../../index.js"; + +const dataPath = { + complete: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/response_v1/complete.json"), + fileSample: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/default_sample.jpg"), + completeMultiPage: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/response_v1/multipage_sample.json"), + multiPageSample: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/multipage_sample.pdf"), +}; +describe("Multi-Receipt #includeOptionalDeps", () => { + describe("A single-page multi-receipts document", () => { + it("should be split properly.", async () => { + const jsonDataNA = await fs.readFile(path.resolve(dataPath.complete)); + const response = JSON.parse(jsonDataNA.toString()); + const doc = new MultiReceiptsDetectorV1(response.document.inference); + const inputSample = new PathInput({ inputPath: dataPath.fileSample }); + await inputSample.init(); + const extractedReceipts = await extractReceipts(inputSample, doc); + expect(extractedReceipts.length).to.be.equals(6); + for (let i = 0; i < extractedReceipts.length; i++) { + expect(extractedReceipts[i].buffer).to.be.not.null; + expect(extractedReceipts[i].pageId).to.be.equals(0); + expect(extractedReceipts[i].receiptId).to.be.equals(i); + } + }); + }); + describe("A multi-page multi-receipts document", () => { + it("should be split properly.", 
async () => { + const jsonDataNA = await fs.readFile(path.resolve(dataPath.completeMultiPage)); + const response = JSON.parse(jsonDataNA.toString()); + const doc = new MultiReceiptsDetectorV1(response.document.inference); + const inputSample = new PathInput({ inputPath: dataPath.multiPageSample }); + await inputSample.init(); + const extractedReceipts = await extractReceipts(inputSample, doc); + expect(extractedReceipts.length).to.be.equals(5); + + expect(extractedReceipts[0].buffer).to.be.not.null; + expect(extractedReceipts[0].pageId).to.be.equals(0); + expect(extractedReceipts[0].receiptId).to.be.equals(0); + + expect(extractedReceipts[1].buffer).to.be.not.null; + expect(extractedReceipts[1].pageId).to.be.equals(0); + expect(extractedReceipts[1].receiptId).to.be.equals(1); + + expect(extractedReceipts[2].buffer).to.be.not.null; + expect(extractedReceipts[2].pageId).to.be.equals(0); + expect(extractedReceipts[2].receiptId).to.be.equals(2); + + expect(extractedReceipts[3].buffer).to.be.not.null; + expect(extractedReceipts[3].pageId).to.be.equals(1); + expect(extractedReceipts[3].receiptId).to.be.equals(0); + + expect(extractedReceipts[4].buffer).to.be.not.null; + expect(extractedReceipts[4].pageId).to.be.equals(1); + expect(extractedReceipts[4].receiptId).to.be.equals(1); + }); + }); +}); diff --git a/tests/v1/extraction/multiReceiptsReconstruction.integration.ts b/tests/v1/extraction/multiReceiptsReconstruction.integration.ts new file mode 100644 index 000000000..699485ad2 --- /dev/null +++ b/tests/v1/extraction/multiReceiptsReconstruction.integration.ts @@ -0,0 +1,99 @@ +import { expect } from "chai"; +import * as path from "path"; +import { Client } from "@/v1/index.js"; +import { MultiReceiptsDetectorV1, ReceiptV5 } from "@/v1/product/index.js"; +import { extractReceipts } from "@/v1/extraction/index.js"; +import { V1_PRODUCT_PATH } from "../../index.js"; +import { LocalInputSource, PathInput } from "@/input/index.js"; +import { setTimeout } from 
"node:timers/promises"; + +const apiKey = process.env.MINDEE_API_KEY; +let client: Client; +let sourceDoc: LocalInputSource; +describe("MindeeV1 - #includeOptionalDeps", () => { + describe("MindeeV1 - A Multi-Receipt Image", () => { + before(async () => { + sourceDoc = new PathInput({ + inputPath: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/default_sample.jpg"), + }); + await sourceDoc.init(); + client = new Client({ apiKey }); + }); + }); + + + describe("MindeeV1 - A Multi-Receipt Document", () => { + before(async () => { + sourceDoc = new PathInput({ + inputPath: path.join(V1_PRODUCT_PATH, "multi_receipts_detector/multipage_sample.pdf"), + }); + await sourceDoc.init(); + client = new Client({ apiKey }); + }); + + it("should send to the server and cut properly", async () => { + const multiReceiptResult = await client.parse(MultiReceiptsDetectorV1, sourceDoc); + expect(multiReceiptResult.document?.inference.prediction.receipts.length).to.be.equals(5); + const extractedReceipts = await extractReceipts(sourceDoc, multiReceiptResult.document!.inference); + expect(extractedReceipts.length).to.be.equals(5); + expect(multiReceiptResult.document?.inference.pages[0].orientation?.value).to.be.equals(0); + expect(multiReceiptResult.document?.inference.pages[1].orientation?.value).to.be.equals(0); + const receiptsResults = []; + for (const extractedReceipt of extractedReceipts) { + const localInput = extractedReceipt.asSource(); + receiptsResults.push(await client.parse(ReceiptV5, localInput)); + await setTimeout(1000); + } + expect(receiptsResults[0].document.inference.prediction.lineItems.length).to.be.equals(5); + expect(receiptsResults[0].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(70); + expect(receiptsResults[0].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(12); + expect(receiptsResults[0].document.inference.prediction.lineItems[2].totalAmount).to.be.equals(14); + 
expect(receiptsResults[0].document.inference.prediction.lineItems[3].totalAmount).to.be.equals(11); + expect(receiptsResults[0].document.inference.prediction.lineItems[4].totalAmount).to.be.equals(5.6); + + expect(receiptsResults[1].document.inference.prediction.lineItems.length).to.be.equals(7); + expect(receiptsResults[1].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(6); + expect(receiptsResults[1].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(11); + expect(receiptsResults[1].document.inference.prediction.lineItems[2].totalAmount).to.be.equals(67.2); + expect(receiptsResults[1].document.inference.prediction.lineItems[3].totalAmount).to.be.equals(19.2); + expect(receiptsResults[1].document.inference.prediction.lineItems[4].totalAmount).to.be.equals(7); + expect(receiptsResults[1].document.inference.prediction.lineItems[5].totalAmount).to.be.equals(5.5); + expect(receiptsResults[1].document.inference.prediction.lineItems[6].totalAmount).to.be.equals(36); + + expect(receiptsResults[2].document.inference.prediction.lineItems.length).to.be.equals(1); + expect(receiptsResults[2].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(275); + + expect(receiptsResults[3].document.inference.prediction.lineItems.length).to.be.equals(2); + expect(receiptsResults[3].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(11.5); + expect(receiptsResults[3].document.inference.prediction.lineItems[1].totalAmount).to.be.equals(2); + + expect(receiptsResults[4].document.inference.prediction.lineItems.length).to.be.equals(1); + expect(receiptsResults[4].document.inference.prediction.lineItems[0].totalAmount).to.be.equals(16.5); + + + }).timeout(60000); + }); + + + describe("MindeeV1 - A Single-Receipt Image", () => { + before(async () => { + sourceDoc = new PathInput({ + inputPath: path.join(V1_PRODUCT_PATH, "expense_receipts/default_sample.jpg"), + }); + await sourceDoc.init(); + client = new Client({ apiKey 
}); + }); + + it("should send to the server and cut properly", async () => { + const multiReceiptResult = await client.parse(MultiReceiptsDetectorV1, sourceDoc); + expect(multiReceiptResult.document?.inference.prediction.receipts.length).to.be.equals(1); + const receipts = await extractReceipts(sourceDoc, multiReceiptResult.document!.inference); + expect(receipts.length).to.be.equals(1); + const receiptResult = await client.parse(ReceiptV5, receipts[0].asSource()); + expect(receiptResult.document.inference.prediction.lineItems.length).to.be.equals(1); + expect(receiptResult.document.inference.prediction.lineItems[0].totalAmount).to.be.equals(10.2); + expect(receiptResult.document.inference.prediction.taxes.length).to.be.equals(1); + expect(receiptResult.document.inference.prediction.taxes[0].value).to.be.equals(1.7); + }).timeout(60000); + }); +}); diff --git a/tests/v1/api/multiReceiptsReconstruction.spec.ts b/tests/v1/extraction/multiReceiptsReconstruction.spec.ts similarity index 89% rename from tests/v1/api/multiReceiptsReconstruction.spec.ts rename to tests/v1/extraction/multiReceiptsReconstruction.spec.ts index b7b98963e..c0ede55cd 100644 --- a/tests/v1/api/multiReceiptsReconstruction.spec.ts +++ b/tests/v1/extraction/multiReceiptsReconstruction.spec.ts @@ -1,10 +1,11 @@ import { expect } from "chai"; import { promises as fs } from "fs"; import * as path from "path"; -import { Document, PathInput } from "../../../src"; -import { MultiReceiptsDetectorV1 } from "../../../src/product"; -import { extractReceipts } from "../../../src/imageOperations"; -import { RESOURCE_PATH, V1_PRODUCT_PATH } from "../../index"; +import { PathInput } from "@/index.js"; +import { Document } from "@/v1/index.js"; +import { MultiReceiptsDetectorV1 } from "@/v1/product/index.js"; +import { extractReceipts } from "@/v1/extraction/index.js"; +import { RESOURCE_PATH, V1_PRODUCT_PATH } from "../../index.js"; const rotations = [ { angle: 0, suffix: "" }, @@ -14,7 +15,7 @@ const rotations 
= [ ]; rotations.forEach(({ angle, suffix }) => { - describe(`Multi-Receipt Document - ${angle}° rotation`, () => { + describe(`Multi-Receipt Document - ${angle}° rotation #includeOptionalDeps`, () => { let extractedReceipts: any[]; let sourceDoc: PathInput; diff --git a/tests/v1/extras/extras.integration.ts b/tests/v1/extras/extras.integration.ts index 2f4dcc22a..2d8faeb14 100644 --- a/tests/v1/extras/extras.integration.ts +++ b/tests/v1/extras/extras.integration.ts @@ -1,14 +1,14 @@ import { expect } from "chai"; -import * as mindee from "../../../src"; +import * as mindee from "@/index.js"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../index"; +import { V1_PRODUCT_PATH } from "../../index.js"; describe("MindeeV1 - Extras Integration Tests", async () => { - let client: mindee.Client; + let client: mindee.v1.Client; beforeEach(() => { - client = new mindee.Client(); + client = new mindee.v1.Client(); }); it("should send cropper extra", async () => { @@ -17,10 +17,10 @@ describe("MindeeV1 - Extras Integration Tests", async () => { }); await sample.init(); const response = await client.parse( - mindee.product.InvoiceV4, sample, { cropper: true } + mindee.v1.product.InvoiceV4, sample, { cropper: true } ); expect(response.document.inference.pages[0]?.extras?.cropper).to.exist; - }).timeout(60000); + }).timeout(70000); it("should send full text OCR extra", async () => { const sample = new mindee.PathInput({ @@ -28,11 +28,11 @@ describe("MindeeV1 - Extras Integration Tests", async () => { }); await sample.init(); const response = await client.enqueueAndParse( - mindee.product.InternationalIdV2, sample, { fullText: true } + mindee.v1.product.InternationalIdV2, sample, { fullText: true } ); expect(response.document?.extras?.fullTextOcr).to.exist; - }).timeout(60000); + }).timeout(70000); it("should send OCR words synchronously", async () => { const sample = new mindee.PathInput({ @@ -40,12 +40,12 @@ describe("MindeeV1 - Extras Integration Tests", async 
() => { }); await sample.init(); const response = await client.parse( - mindee.product.FinancialDocumentV1, sample, { allWords: true } + mindee.v1.product.FinancialDocumentV1, sample, { allWords: true } ); expect(response.document?.ocr).to.exist; expect(response.document?.ocr?.toString()).to.not.be.empty; - }).timeout(65000); + }).timeout(70000); it("should send OCR words asynchronously", async () => { const sample = new mindee.PathInput({ @@ -53,10 +53,10 @@ describe("MindeeV1 - Extras Integration Tests", async () => { }); await sample.init(); const response = await client.enqueueAndParse( - mindee.product.FinancialDocumentV1, sample, { allWords: true } + mindee.v1.product.FinancialDocumentV1, sample, { allWords: true } ); expect(response.document?.ocr).to.exist; expect(response.document?.ocr?.toString()).to.not.be.empty; - }).timeout(65000); + }).timeout(70000); }); diff --git a/tests/v1/extras/fullTextOcr.spec.ts b/tests/v1/extras/fullTextOcr.spec.ts index 115c47773..d91874f3c 100644 --- a/tests/v1/extras/fullTextOcr.spec.ts +++ b/tests/v1/extras/fullTextOcr.spec.ts @@ -1,9 +1,9 @@ import { promises as fs } from "fs"; import path from "path"; import { expect } from "chai"; -import { AsyncPredictResponse } from "../../../src"; -import { InternationalIdV2 } from "../../../src/product"; -import { RESOURCE_PATH } from "../../index"; +import { AsyncPredictResponse } from "@/v1/index.js"; +import { InternationalIdV2 } from "@/v1/product/index.js"; +import { RESOURCE_PATH } from "../../index.js"; const fullTextOcrDir = path.join(RESOURCE_PATH, "v1/extras/full_text_ocr"); diff --git a/tests/v1/extras/ocr.spec.ts b/tests/v1/extras/ocr.spec.ts index 95a3c6baf..5a1093486 100644 --- a/tests/v1/extras/ocr.spec.ts +++ b/tests/v1/extras/ocr.spec.ts @@ -1,9 +1,9 @@ import { promises as fs } from "fs"; import * as path from "path"; import { expect } from "chai"; -import { ReceiptV5 } from "../../../src/product"; -import { Document } from "../../../src"; -import { RESOURCE_PATH } 
from "../../index"; +import { ReceiptV5 } from "@/v1/product/index.js"; +import { Document } from "@/v1/index.js"; +import { RESOURCE_PATH } from "../../index.js"; const dataPath = { complete: path.join(RESOURCE_PATH, "v1/extras/ocr/complete.json"), diff --git a/tests/v1/input/localResponse.spec.ts b/tests/v1/input/localResponse.spec.ts index 742c28b9b..436920718 100644 --- a/tests/v1/input/localResponse.spec.ts +++ b/tests/v1/input/localResponse.spec.ts @@ -1,9 +1,9 @@ import * as fs from "node:fs/promises"; import { expect } from "chai"; -import { Client, PredictResponse, AsyncPredictResponse, LocalResponse } from "../../../src"; -import { InternationalIdV2, InvoiceV4, MultiReceiptsDetectorV1 } from "../../../src/product"; import path from "path"; -import { V1_RESOURCE_PATH, V1_PRODUCT_PATH } from "../../index"; +import { AsyncPredictResponse, LocalResponse, PredictResponse } from "@/v1/index.js"; +import { InternationalIdV2, InvoiceV4, MultiReceiptsDetectorV1 } from "@/v1/product/index.js"; +import { V1_RESOURCE_PATH, V1_PRODUCT_PATH } from "../../index.js"; const signature: string = "5ed1673e34421217a5dbfcad905ee62261a3dd66c442f3edd19302072bbf70d0"; const dummySecretKey: string = "ogNjY44MhvKPGTtVsI8zG82JqWQa68woYQH"; @@ -50,8 +50,7 @@ describe("MindeeV1 - Load Local Response", () => { it("should load into a sync prediction.", async () => { const fileObj = await fs.readFile(multiReceiptsDetectorPath, { encoding: "utf-8" }); const localResponse = new LocalResponse(fileObj); - const dummyClient = new Client({ apiKey: "dummy-key" }); - const prediction = await dummyClient.loadPrediction(MultiReceiptsDetectorV1, localResponse); + const prediction = await localResponse.loadPrediction(MultiReceiptsDetectorV1); expect(prediction).to.be.an.instanceof(PredictResponse); expect(JSON.stringify(prediction.getRawHttp())).to.eq(JSON.stringify(JSON.parse(fileObj))); @@ -60,8 +59,7 @@ describe("MindeeV1 - Load Local Response", () => { it("should load a failed prediction.", 
async () => { const fileObj = await fs.readFile(failedPath, { encoding: "utf-8" }); const localResponse = new LocalResponse(fileObj); - const dummyClient = new Client({ apiKey: "dummy-key" }); - const prediction = await dummyClient.loadPrediction(InvoiceV4, localResponse); + const prediction = await localResponse.loadPrediction(InvoiceV4); expect(prediction).to.be.an.instanceof(AsyncPredictResponse); expect((prediction as AsyncPredictResponse).job.status).to.be.eq("failed"); }); @@ -69,8 +67,7 @@ describe("MindeeV1 - Load Local Response", () => { it("should load into an async prediction.", async () => { const fileObj = await fs.readFile(internationalIdPath, { encoding: "utf-8" }); const localResponse = new LocalResponse(fileObj); - const dummyClient = new Client({ apiKey: "dummy-key" }); - const prediction = await dummyClient.loadPrediction(InternationalIdV2, localResponse); + const prediction = await localResponse.loadPrediction(InternationalIdV2); expect(prediction).to.be.an.instanceof(AsyncPredictResponse); expect(JSON.stringify(prediction.getRawHttp())).to.eq(JSON.stringify(JSON.parse(fileObj))); diff --git a/tests/v1/input/sources.integration.ts b/tests/v1/input/sources.integration.ts index 5bdf6c864..a4a485a2e 100644 --- a/tests/v1/input/sources.integration.ts +++ b/tests/v1/input/sources.integration.ts @@ -1,18 +1,18 @@ -import * as mindee from "../../../src"; -import { InvoiceV4 } from "../../../src/product"; +import * as mindee from "@/index.js"; +import { InvoiceV4 } from "@/v1/product/index.js"; import { expect } from "chai"; import { promises as fs } from "fs"; import { createReadStream } from "node:fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../index"; -import { PathInput, Base64Input, BufferInput, BytesInput, UrlInput } from "../../../src"; +import { V1_PRODUCT_PATH } from "../../index.js"; +import { PathInput, Base64Input, BufferInput, BytesInput, UrlInput } from "@/index.js"; describe("MindeeV1 - File Input Integration Tests", 
async () => { - let client: mindee.Client; + let client: mindee.v1.Client; let filePath: string; beforeEach(() => { - client = new mindee.Client(); + client = new mindee.v1.Client(); filePath = path.join(V1_PRODUCT_PATH, "invoices/default_sample.jpg"); }); diff --git a/tests/v1/input/urlInputSource.integration.ts b/tests/v1/input/urlInputSource.integration.ts index 431be3944..5e1d16357 100644 --- a/tests/v1/input/urlInputSource.integration.ts +++ b/tests/v1/input/urlInputSource.integration.ts @@ -1,10 +1,10 @@ import { expect } from "chai"; -import { UrlInput } from "../../../src"; -import { Client } from "../../../src"; -import { InvoiceV4 } from "../../../src/product"; +import { UrlInput } from "@/index.js"; +import { Client } from "@/v1/index.js"; +import { InvoiceV4 } from "@/v1/product/index.js"; describe("MindeeV1 - URL Input Integration Test", async () => { - it("should retrieve and parse a remote file", async () => { + it("should retrieve and parse a remote file with redirection", async () => { const apiKey = process.env.MINDEE_API_KEY; if (!apiKey) { throw new Error("MINDEE_API_KEY environment variable is not set"); diff --git a/tests/v1/parsing/standard/amount.spec.ts b/tests/v1/parsing/standard/amount.spec.ts index b21452cb5..584c23f5c 100644 --- a/tests/v1/parsing/standard/amount.spec.ts +++ b/tests/v1/parsing/standard/amount.spec.ts @@ -1,4 +1,4 @@ -import { AmountField } from "../../../../src/parsing/standard"; +import { AmountField } from "@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test AmountField field", () => { diff --git a/tests/v1/parsing/standard/classification.spec.ts b/tests/v1/parsing/standard/classification.spec.ts index f0073a4b2..cf7674cc4 100644 --- a/tests/v1/parsing/standard/classification.spec.ts +++ b/tests/v1/parsing/standard/classification.spec.ts @@ -1,4 +1,4 @@ -import { ClassificationField } from "../../../../src/parsing/standard"; +import { ClassificationField } from 
"@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test Classification field", () => { diff --git a/tests/v1/parsing/standard/date.spec.ts b/tests/v1/parsing/standard/date.spec.ts index ef120f24d..a928f97ce 100644 --- a/tests/v1/parsing/standard/date.spec.ts +++ b/tests/v1/parsing/standard/date.spec.ts @@ -1,4 +1,4 @@ -import { DateField } from "../../../../src/parsing/standard"; +import { DateField } from "@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test Date field", () => { diff --git a/tests/v1/parsing/standard/field.spec.ts b/tests/v1/parsing/standard/field.spec.ts index e68cd2fdb..2bcbfd079 100644 --- a/tests/v1/parsing/standard/field.spec.ts +++ b/tests/v1/parsing/standard/field.spec.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { Field } from "../../../../src/parsing/standard"; +import { Field } from "@/v1/parsing/standard/index.js"; describe("Test different inits of Field", () => { it("Should create a Field", () => { diff --git a/tests/v1/parsing/standard/locale.spec.ts b/tests/v1/parsing/standard/locale.spec.ts index 0aa8d786b..01a0984bd 100644 --- a/tests/v1/parsing/standard/locale.spec.ts +++ b/tests/v1/parsing/standard/locale.spec.ts @@ -1,4 +1,4 @@ -import { LocaleField } from "../../../../src/parsing/standard"; +import { LocaleField } from "@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test LocaleField field", () => { diff --git a/tests/v1/parsing/standard/orientation.spec.ts b/tests/v1/parsing/standard/orientation.spec.ts index 97d95a4a8..5ec946d5f 100644 --- a/tests/v1/parsing/standard/orientation.spec.ts +++ b/tests/v1/parsing/standard/orientation.spec.ts @@ -1,4 +1,4 @@ -import { OrientationField } from "../../../../src/parsing/common"; +import { OrientationField } from "@/v1/parsing/common/index.js"; import { expect } from "chai"; describe("Test Orientation field", () => { diff --git a/tests/v1/parsing/standard/paymentDetails.spec.ts 
b/tests/v1/parsing/standard/paymentDetails.spec.ts index 0f2e723b1..4506a35a9 100644 --- a/tests/v1/parsing/standard/paymentDetails.spec.ts +++ b/tests/v1/parsing/standard/paymentDetails.spec.ts @@ -1,4 +1,4 @@ -import { PaymentDetailsField } from "../../../../src/parsing/standard"; +import { PaymentDetailsField } from "@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test PaymentDetailsField field", () => { diff --git a/tests/v1/parsing/standard/position.spec.ts b/tests/v1/parsing/standard/position.spec.ts index eee669132..64ef3e8f5 100644 --- a/tests/v1/parsing/standard/position.spec.ts +++ b/tests/v1/parsing/standard/position.spec.ts @@ -1,4 +1,4 @@ -import { PositionField } from "../../../../src/parsing/standard"; +import { PositionField } from "@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test Position field", () => { diff --git a/tests/v1/parsing/standard/tax.spec.ts b/tests/v1/parsing/standard/tax.spec.ts index 62e9c703a..d5c124bb3 100644 --- a/tests/v1/parsing/standard/tax.spec.ts +++ b/tests/v1/parsing/standard/tax.spec.ts @@ -1,4 +1,4 @@ -import { TaxField } from "../../../../src/parsing/standard"; +import { TaxField } from "@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test Tax field", () => { diff --git a/tests/v1/parsing/standard/text.spec.ts b/tests/v1/parsing/standard/text.spec.ts index 23c62d8e3..53986db6b 100644 --- a/tests/v1/parsing/standard/text.spec.ts +++ b/tests/v1/parsing/standard/text.spec.ts @@ -1,4 +1,4 @@ -import { StringField } from "../../../../src/parsing/standard"; +import { StringField } from "@/v1/parsing/standard/index.js"; import { expect } from "chai"; describe("Test String field", () => { diff --git a/tests/v1/product/barcodeReader/barcodeReaderV1.spec.ts b/tests/v1/product/barcodeReader/barcodeReaderV1.spec.ts index 3aff7da70..623524c8f 100644 --- a/tests/v1/product/barcodeReader/barcodeReaderV1.spec.ts +++ 
b/tests/v1/product/barcodeReader/barcodeReaderV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - BarcodeReaderV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.BarcodeReaderV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.BarcodeReaderV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.codes1D.length).to.be.equals(0); expect(docPrediction.codes2D.length).to.be.equals(0); @@ -25,7 +25,7 @@ describe("MindeeV1 - BarcodeReaderV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.BarcodeReaderV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.BarcodeReaderV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/billOfLading/billOfLadingV1.spec.ts b/tests/v1/product/billOfLading/billOfLadingV1.spec.ts deleted file mode 100644 index 1934d7417..000000000 --- a/tests/v1/product/billOfLading/billOfLadingV1.spec.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; -import { expect } from "chai"; -import * as mindee from 
"../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "bill_of_lading/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "bill_of_lading/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "bill_of_lading/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "bill_of_lading/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - BillOfLadingV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.BillOfLadingV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.billOfLadingNumber.value).to.be.undefined; - expect(docPrediction.shipper.address).to.be.null; - expect(docPrediction.shipper.email).to.be.null; - expect(docPrediction.shipper.name).to.be.null; - expect(docPrediction.shipper.phone).to.be.null; - expect(docPrediction.consignee.address).to.be.null; - expect(docPrediction.consignee.email).to.be.null; - expect(docPrediction.consignee.name).to.be.null; - expect(docPrediction.consignee.phone).to.be.null; - expect(docPrediction.notifyParty.address).to.be.null; - expect(docPrediction.notifyParty.email).to.be.null; - expect(docPrediction.notifyParty.name).to.be.null; - expect(docPrediction.notifyParty.phone).to.be.null; - expect(docPrediction.carrier.name).to.be.null; - expect(docPrediction.carrier.professionalNumber).to.be.null; - expect(docPrediction.carrier.scac).to.be.null; - expect(docPrediction.carrierItems.length).to.be.equals(0); - expect(docPrediction.portOfLoading.value).to.be.undefined; - expect(docPrediction.portOfDischarge.value).to.be.undefined; - expect(docPrediction.placeOfDelivery.value).to.be.undefined; - expect(docPrediction.dateOfIssue.value).to.be.undefined; - 
expect(docPrediction.departureDate.value).to.be.undefined; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.BillOfLadingV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/businessCard/businessCardV1.spec.ts b/tests/v1/product/businessCard/businessCardV1.spec.ts deleted file mode 100644 index 04e4c0548..000000000 --- a/tests/v1/product/businessCard/businessCardV1.spec.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "business_card/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "business_card/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "business_card/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "business_card/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - BusinessCardV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.BusinessCardV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.firstname.value).to.be.undefined; - expect(docPrediction.lastname.value).to.be.undefined; - expect(docPrediction.jobTitle.value).to.be.undefined; - expect(docPrediction.company.value).to.be.undefined; - expect(docPrediction.email.value).to.be.undefined; - 
expect(docPrediction.phoneNumber.value).to.be.undefined; - expect(docPrediction.mobileNumber.value).to.be.undefined; - expect(docPrediction.faxNumber.value).to.be.undefined; - expect(docPrediction.address.value).to.be.undefined; - expect(docPrediction.website.value).to.be.undefined; - expect(docPrediction.socialMedia.length).to.be.equals(0); - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.BusinessCardV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/cropper/cropperV1.spec.ts b/tests/v1/product/cropper/cropperV1.spec.ts index d606b4583..1fc19cf48 100644 --- a/tests/v1/product/cropper/cropperV1.spec.ts +++ b/tests/v1/product/cropper/cropperV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - CropperV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.CropperV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.CropperV1, response.document); const pagePrediction = doc.inference.pages[0].prediction; expect(pagePrediction.cropping.length).to.be.equals(0); }); @@ -24,14 +24,14 @@ describe("MindeeV1 - CropperV1 Object initialization", async () => { it("should load a complete document 
prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.CropperV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.CropperV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); it("should load a complete page 0 prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.CropperV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.CropperV1, response.document); const page0 = doc.inference.pages[0]; const docString = await fs.readFile(path.join(dataPath.page0String)); expect(page0.toString()).to.be.equals(docString.toString()); diff --git a/tests/v1/product/custom/customDocument.spec.ts b/tests/v1/product/custom/customDocument.spec.ts deleted file mode 100644 index 8de7b43df..000000000 --- a/tests/v1/product/custom/customDocument.spec.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { promises as fs } from "fs"; -import * as path from "path"; -import { expect } from "chai"; -import * as mindee from "../../../../src"; -import { CustomV1Document } from "../../../../src/product/custom/customV1Document"; -import { Page } from "../../../../src"; -import { CropperExtra } from "../../../../src/parsing/common/extras"; -import { CustomV1 } from "../../../../src/product"; -import { V1_PRODUCT_PATH } from "../../../index"; - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "custom/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "custom/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "custom/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "custom/response_v1/summary_page0.rst"), - page1String: 
path.join(V1_PRODUCT_PATH, "custom/response_v1/summary_page1.rst"), -}; - -const dataPathV2 = { - complete: path.join(V1_PRODUCT_PATH, "custom/response_v2/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "custom/response_v2/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "custom/response_v2/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "custom/response_v2/summary_page0.rst"), - page1String: path.join(V1_PRODUCT_PATH, "custom/response_v2/summary_page1.rst"), -}; - -describe("Custom Document Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonDataNA = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonDataNA.toString()); - const doc = new CustomV1(response.document.inference); - expect(doc.product.name).to.be.equals("ianare/field_test"); - expect(doc.prediction.fields.size).to.be.equals(10); - expect(doc.prediction.classifications.size).to.be.equals(1); - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(CustomV1, response.document); - const docInference = doc.inference.prediction as CustomV1Document; - const stringAll = docInference.fields.get("string_all"); - expect(stringAll).to.have.property("values"); - expect(stringAll?.contentsString("-")).to.equals("Mindee-is-awesome"); - expect(stringAll?.contentsList()).to.have.members([ - "Mindee", - "is", - "awesome", - ]); - expect(docInference.classifications.get("doc_type")).to.have.property("value"); - expect(docInference.fields.size).to.be.equals(10); - expect(docInference.classifications.size).to.be.equals(1); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); - - it("should load a complete page 0 prediction", async () => { - const 
jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(CustomV1, response.document); - const page0: Page = doc.inference?.pages[0] as Page; - expect(page0.orientation?.value).to.be.equals(0); - expect((page0?.extras && page0?.extras["cropper"] as CropperExtra)?.cropping.length).to.be.equals(1); - const pageString = await fs.readFile(path.join(dataPath.page0String)); - expect(page0.toString()).to.be.equals(pageString.toString()); - }); - - it("should load a complete page 1 prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(CustomV1, response.document); - const page1: Page = doc.inference?.pages[1] as Page; - expect(page1.orientation?.value).to.be.equals(0); - const pageString = await fs.readFile(path.join(dataPath.page1String)); - expect(page1.toString()).to.be.equals(pageString.toString()); - }); -}); - - -describe("Newer Custom Document Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonDataNA = await fs.readFile(path.resolve(dataPathV2.complete)); - const response = JSON.parse(jsonDataNA.toString()); - const doc = new CustomV1(response.document.inference); - expect(doc.product.name).to.be.equals("ianare/field_test"); - expect(doc.prediction.fields.size).to.be.equals(10); - expect(doc.prediction.classifications.size).to.be.equals(1); - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPathV2.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(CustomV1, response.document); - const docInference = doc.inference.prediction as CustomV1Document; - const stringAll = docInference.fields.get("string_all"); - expect(stringAll).to.have.property("values"); - 
expect(stringAll?.contentsString("-")).to.equals("Mindee-is-awesome"); - expect(stringAll?.contentsList()).to.have.members([ - "Mindee", - "is", - "awesome", - ]); - expect(docInference.classifications.get("doc_type")).to.have.property("value"); - expect(docInference.fields.size).to.be.equals(10); - expect(docInference.classifications.size).to.be.equals(1); - const docString = await fs.readFile(path.join(dataPathV2.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); - - it("should load a complete page 0 prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPathV2.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(CustomV1, response.document); - const page0: Page = doc.inference?.pages[0] as Page; - expect(page0.orientation?.value).to.be.equals(0); - expect((page0?.extras && page0?.extras["cropper"] as CropperExtra)?.cropping.length).to.be.equals(1); - const pageString = await fs.readFile(path.join(dataPathV2.page0String)); - expect(page0.toString()).to.be.equals(pageString.toString()); - }); - - it("should load a complete page 1 prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPathV2.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(CustomV1, response.document); - const page1: Page = doc.inference?.pages[1] as Page; - expect(page1.orientation?.value).to.be.equals(0); - const pageString = await fs.readFile(path.join(dataPathV2.page1String)); - expect(page1.toString()).to.be.equals(pageString.toString()); - }); -}); diff --git a/tests/v1/product/custom/lineItems.spec.ts b/tests/v1/product/custom/lineItems.spec.ts deleted file mode 100644 index c9b8b209b..000000000 --- a/tests/v1/product/custom/lineItems.spec.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { promises as fs } from "fs"; -import { expect } from "chai"; -import { CustomV1 } from "../../../../src/product"; -import { CustomLine } 
from "../../../../src/parsing/custom"; -import { V1_PRODUCT_PATH } from "../../../index"; -import path from "node:path"; - -const dataPath = { - singleTable01: - path.join(V1_PRODUCT_PATH, "custom/response_v1/line_items/single_table_01.json"), -}; -const dataPathV2 = { - singleTable01: - path.join(V1_PRODUCT_PATH, "custom/response_v2/line_items/single_table_01.json"), -}; - -describe("Custom Document Line Items", async () => { - function testLineItems(lineItems: CustomLine[]) { - expect(lineItems).to.not.null; - expect(lineItems.length).to.be.eq(3); - const firstLine: CustomLine = lineItems[0]; - expect(firstLine.bbox.xMin).to.be.eq(0.059); - expect(firstLine.bbox.yMin).to.be.eq(0.351); - expect(firstLine.bbox.xMax).to.be.eq(0.3); - expect(firstLine.bbox.yMax).to.be.eq(0.36); - expect(firstLine.fields.size).to.be.eq(4); - expect(firstLine.fields.has("beneficiary_birth_date")).to.be.true; - expect(firstLine.fields.has("beneficiary_number")).to.be.true; - expect(firstLine.fields.has("beneficiary_name")).to.be.true; - expect(firstLine.fields.has("beneficiary_rank")).to.be.true; - expect( - lineItems[1].fields.get("beneficiary_number")?.confidence - ).to.be.eq(0.5); - expect( - lineItems[1].fields.get("beneficiary_birth_date")?.content - ).to.be.eq("2010-07-18"); - expect(lineItems[2].fields.size).to.be.eq(4); - expect(lineItems[2].fields.get("beneficiary_rank")?.content).to.be.eq( - "3" - ); - } - - it("with valid custom document V1 must build 3 lines", async () => { - const jsonData = await fs.readFile(dataPath.singleTable01); - const response = JSON.parse(jsonData.toString()); - const doc: CustomV1 = new CustomV1({ - prediction: response.document.inference.prediction, - pages: response.document.inference.pages - }); - const anchorNames: string[] = ["beneficiary_name"]; - const fieldNamesToLineItems: string[] = [ - "beneficiary_birth_date", - "beneficiary_number", - "beneficiary_name", - "beneficiary_rank", - ]; - - const docLineItems: CustomLine[] = 
doc.prediction.columnsToLineItems( - anchorNames, - fieldNamesToLineItems, - 0.011 - ); - testLineItems(docLineItems); - - const pageLineItems: CustomLine[] = doc.pages[0].prediction.columnsToLineItems( - anchorNames, - fieldNamesToLineItems, - 0.011 - ); - testLineItems(pageLineItems); - }); -}); - -describe("Newer Custom Document Line Items", async () => { - function testLineItems(lineItems: CustomLine[]) { - expect(lineItems).to.not.null; - expect(lineItems.length).to.be.eq(3); - const firstLine: CustomLine = lineItems[0]; - expect(firstLine.bbox.xMin).to.be.eq(0.059); - expect(firstLine.bbox.yMin).to.be.eq(0.351); - expect(firstLine.bbox.xMax).to.be.eq(0.3); - expect(firstLine.bbox.yMax).to.be.eq(0.36); - expect(firstLine.fields.size).to.be.eq(4); - expect(firstLine.fields.has("beneficiary_birth_date")).to.be.true; - expect(firstLine.fields.has("beneficiary_number")).to.be.true; - expect(firstLine.fields.has("beneficiary_name")).to.be.true; - expect(firstLine.fields.has("beneficiary_rank")).to.be.true; - expect( - lineItems[1].fields.get("beneficiary_number")?.confidence - ).to.be.eq(0.5); - expect( - lineItems[1].fields.get("beneficiary_birth_date")?.content - ).to.be.eq("2010-07-18"); - expect(lineItems[2].fields.size).to.be.eq(4); - expect(lineItems[2].fields.get("beneficiary_rank")?.content).to.be.eq( - "3" - ); - } - - it("with valid custom document V1 must build 3 lines", async () => { - const jsonData = await fs.readFile(dataPathV2.singleTable01); - const response = JSON.parse(jsonData.toString()); - const doc: CustomV1 = new CustomV1({ - prediction: response.document.inference.prediction, - pages: response.document.inference.pages - }); - const anchorNames: string[] = ["beneficiary_name"]; - const fieldNamesToLineItems: string[] = [ - "beneficiary_birth_date", - "beneficiary_number", - "beneficiary_name", - "beneficiary_rank", - ]; - - const docLineItems: CustomLine[] = doc.prediction.columnsToLineItems( - anchorNames, - fieldNamesToLineItems, - 0.011 - 
); - testLineItems(docLineItems); - - const pageLineItems: CustomLine[] = doc.pages[0].prediction.columnsToLineItems( - anchorNames, - fieldNamesToLineItems, - 0.011 - ); - testLineItems(pageLineItems); - }); -}); diff --git a/tests/v1/product/deliveryNote/deliveryNoteV1.spec.ts b/tests/v1/product/deliveryNote/deliveryNoteV1.spec.ts deleted file mode 100644 index 4291a0acd..000000000 --- a/tests/v1/product/deliveryNote/deliveryNoteV1.spec.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "delivery_notes/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "delivery_notes/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "delivery_notes/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "delivery_notes/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - DeliveryNoteV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.DeliveryNoteV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.deliveryDate.value).to.be.undefined; - expect(docPrediction.deliveryNumber.value).to.be.undefined; - expect(docPrediction.supplierName.value).to.be.undefined; - expect(docPrediction.supplierAddress.value).to.be.undefined; - expect(docPrediction.customerName.value).to.be.undefined; - expect(docPrediction.customerAddress.value).to.be.undefined; - expect(docPrediction.totalAmount.value).to.be.undefined; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await 
fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.DeliveryNoteV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/driverLicense/driverLicenseV1.spec.ts b/tests/v1/product/driverLicense/driverLicenseV1.spec.ts index 46205bc3f..e91827295 100644 --- a/tests/v1/product/driverLicense/driverLicenseV1.spec.ts +++ b/tests/v1/product/driverLicense/driverLicenseV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - DriverLicenseV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.DriverLicenseV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.DriverLicenseV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.countryCode.value).to.be.undefined; expect(docPrediction.state.value).to.be.undefined; @@ -36,7 +36,7 @@ describe("MindeeV1 - DriverLicenseV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.DriverLicenseV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.DriverLicenseV1, response.document); const docString = 
await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/financialDocument/financialDocumentV1.spec.ts b/tests/v1/product/financialDocument/financialDocumentV1.spec.ts index 0abeed70b..b819b4eb6 100644 --- a/tests/v1/product/financialDocument/financialDocumentV1.spec.ts +++ b/tests/v1/product/financialDocument/financialDocumentV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import * as path from "path"; import { expect } from "chai"; -import * as mindee from "../../../../src"; -import { V1_PRODUCT_PATH } from "../../../index"; +import * as mindee from "@/index.js"; +import { V1_PRODUCT_PATH } from "../../../index.js"; const dataPath = { receiptComplete: path.join( @@ -32,7 +32,7 @@ describe("Financial Document V1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.FinancialDocumentV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.FinancialDocumentV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.locale.value).to.be.undefined; expect(docPrediction.totalAmount.value).to.be.undefined; @@ -67,7 +67,7 @@ describe("Financial Document V1 Object initialization", async () => { it("should initialize from an invoice object", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.invoiceComplete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.FinancialDocumentV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.FinancialDocumentV1, response.document); const docString = await fs.readFile(path.join(dataPath.invoiceDocString)); expect(doc.toString()).to.be.equals(docString.toString()); }); @@ -75,7 +75,7 @@ 
describe("Financial Document V1 Object initialization", async () => { it("should initialize from a receipt object", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.receiptComplete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.FinancialDocumentV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.FinancialDocumentV1, response.document); const docString = await fs.readFile(path.join(dataPath.receiptDocString)); expect(doc.toString()).to.be.equals(docString.toString()); }); @@ -83,7 +83,7 @@ describe("Financial Document V1 Object initialization", async () => { it("should load a complete page 0 invoice prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.invoiceComplete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.FinancialDocumentV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.FinancialDocumentV1, response.document); const page0 = doc.inference.pages[0]; const docString = await fs.readFile(path.join(dataPath.page0InvoiceString)); expect(page0.orientation?.value).to.be.equals(0); @@ -93,7 +93,7 @@ describe("Financial Document V1 Object initialization", async () => { it("should load a complete page 0 receipt prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.receiptComplete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.FinancialDocumentV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.FinancialDocumentV1, response.document); const page0 = doc.inference.pages[0]; const docString = await fs.readFile(path.join(dataPath.page0ReceiptString)); expect(page0.orientation?.value).to.be.equals(0); diff --git a/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV1.spec.ts b/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV1.spec.ts 
index 3e5316159..23a1888d4 100644 --- a/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV1.spec.ts +++ b/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV1.spec.ts @@ -1,9 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; +import { V1_PRODUCT_PATH } from "../../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../../src"; - +import * as mindee from "@/index.js"; const dataPath = { complete: path.join(V1_PRODUCT_PATH, "bank_account_details/response_v1/complete.json"), @@ -16,7 +15,7 @@ describe("MindeeV1 - BankAccountDetailsV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.BankAccountDetailsV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.BankAccountDetailsV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.iban.value).to.be.undefined; expect(docPrediction.accountHolderName.value).to.be.undefined; @@ -26,7 +25,7 @@ describe("MindeeV1 - BankAccountDetailsV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.BankAccountDetailsV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.BankAccountDetailsV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV2.spec.ts b/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV2.spec.ts index 
1c68c1830..1f867bd77 100644 --- a/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV2.spec.ts +++ b/tests/v1/product/fr/bankAccountDetails/bankAccountDetailsV2.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; +import { V1_PRODUCT_PATH } from "../../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - BankAccountDetailsV2 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.BankAccountDetailsV2, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.BankAccountDetailsV2, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.accountHoldersNames.value).to.be.undefined; expect(docPrediction.bban.bbanBankCode).to.be.null; @@ -30,7 +30,7 @@ describe("MindeeV1 - BankAccountDetailsV2 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.BankAccountDetailsV2, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.BankAccountDetailsV2, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/fr/carteGrise/carteGriseV1.spec.ts b/tests/v1/product/fr/carteGrise/carteGriseV1.spec.ts index a89015993..4e57f7268 100644 --- a/tests/v1/product/fr/carteGrise/carteGriseV1.spec.ts +++ 
b/tests/v1/product/fr/carteGrise/carteGriseV1.spec.ts @@ -16,7 +16,7 @@ describe("MindeeV1 - CarteGriseV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.CarteGriseV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.CarteGriseV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.a.value).to.be.undefined; expect(docPrediction.b.value).to.be.undefined; @@ -64,7 +64,7 @@ describe("MindeeV1 - CarteGriseV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.CarteGriseV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.CarteGriseV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/fr/energyBill/energyBillV1.spec.ts b/tests/v1/product/fr/energyBill/energyBillV1.spec.ts deleted file mode 100644 index 1c1c03c39..000000000 --- a/tests/v1/product/fr/energyBill/energyBillV1.spec.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "energy_bill_fra/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "energy_bill_fra/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "energy_bill_fra/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, 
"energy_bill_fra/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - EnergyBillV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.EnergyBillV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.invoiceNumber.value).to.be.undefined; - expect(docPrediction.contractId.value).to.be.undefined; - expect(docPrediction.deliveryPoint.value).to.be.undefined; - expect(docPrediction.invoiceDate.value).to.be.undefined; - expect(docPrediction.dueDate.value).to.be.undefined; - expect(docPrediction.totalBeforeTaxes.value).to.be.undefined; - expect(docPrediction.totalTaxes.value).to.be.undefined; - expect(docPrediction.totalAmount.value).to.be.undefined; - expect(docPrediction.energySupplier.address).to.be.null; - expect(docPrediction.energySupplier.name).to.be.null; - expect(docPrediction.energyConsumer.address).to.be.null; - expect(docPrediction.energyConsumer.name).to.be.null; - expect(docPrediction.subscription.length).to.be.equals(0); - expect(docPrediction.energyUsage.length).to.be.equals(0); - expect(docPrediction.taxesAndContributions.length).to.be.equals(0); - expect(docPrediction.meterDetails.meterNumber).to.be.null; - expect(docPrediction.meterDetails.meterType).to.be.null; - expect(docPrediction.meterDetails.unit).to.be.null; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.EnergyBillV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git 
a/tests/v1/product/fr/healthCard/healthCardV1.spec.ts b/tests/v1/product/fr/healthCard/healthCardV1.spec.ts deleted file mode 100644 index ef03a6a0f..000000000 --- a/tests/v1/product/fr/healthCard/healthCardV1.spec.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "french_healthcard/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "french_healthcard/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "french_healthcard/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "french_healthcard/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - HealthCardV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.HealthCardV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.givenNames.length).to.be.equals(0); - expect(docPrediction.surname.value).to.be.undefined; - expect(docPrediction.socialSecurity.value).to.be.undefined; - expect(docPrediction.issuanceDate.value).to.be.undefined; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.HealthCardV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/fr/idCard/idCardV1.spec.ts b/tests/v1/product/fr/idCard/idCardV1.spec.ts deleted file 
mode 100644 index 4c3b14431..000000000 --- a/tests/v1/product/fr/idCard/idCardV1.spec.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "idcard_fr/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "idcard_fr/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "idcard_fr/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "idcard_fr/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - IdCardV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.IdCardV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.idNumber.value).to.be.undefined; - expect(docPrediction.givenNames.length).to.be.equals(0); - expect(docPrediction.surname.value).to.be.undefined; - expect(docPrediction.birthDate.value).to.be.undefined; - expect(docPrediction.birthPlace.value).to.be.undefined; - expect(docPrediction.expiryDate.value).to.be.undefined; - expect(docPrediction.authority.value).to.be.undefined; - expect(docPrediction.gender.value).to.be.undefined; - expect(docPrediction.mrz1.value).to.be.undefined; - expect(docPrediction.mrz2.value).to.be.undefined; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.IdCardV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - 
expect(doc.toString()).to.be.equals(docString.toString()); - }); - it("should load a complete page 0 prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.IdCardV1, response.document); - const page0 = doc.inference.pages[0]; - const docString = await fs.readFile(path.join(dataPath.page0String)); - expect(page0.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/fr/idCard/idCardV2.spec.ts b/tests/v1/product/fr/idCard/idCardV2.spec.ts index 37e1d2806..bf72915e2 100644 --- a/tests/v1/product/fr/idCard/idCardV2.spec.ts +++ b/tests/v1/product/fr/idCard/idCardV2.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; +import { V1_PRODUCT_PATH } from "../../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - IdCardV2 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.IdCardV2, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.IdCardV2, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.nationality.value).to.be.undefined; expect(docPrediction.cardAccessNumber.value).to.be.undefined; @@ -38,14 +38,14 @@ describe("MindeeV1 - IdCardV2 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new 
mindee.Document(mindee.product.fr.IdCardV2, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.IdCardV2, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); it("should load a complete page 0 prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.IdCardV2, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.fr.IdCardV2, response.document); const page0 = doc.inference.pages[0]; const docString = await fs.readFile(path.join(dataPath.page0String)); expect(page0.toString()).to.be.equals(docString.toString()); diff --git a/tests/v1/product/fr/payslip/payslipV2.spec.ts b/tests/v1/product/fr/payslip/payslipV2.spec.ts deleted file mode 100644 index ef7c05764..000000000 --- a/tests/v1/product/fr/payslip/payslipV2.spec.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v2/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v2/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v2/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v2/summary_page0.rst"), -}; - -describe("MindeeV1 - PayslipV2 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.PayslipV2, response.document); - const docPrediction = doc.inference.prediction; - 
expect(docPrediction.employee.address).to.be.null; - expect(docPrediction.employee.dateOfBirth).to.be.null; - expect(docPrediction.employee.firstName).to.be.null; - expect(docPrediction.employee.lastName).to.be.null; - expect(docPrediction.employee.phoneNumber).to.be.null; - expect(docPrediction.employee.registrationNumber).to.be.null; - expect(docPrediction.employee.socialSecurityNumber).to.be.null; - expect(docPrediction.employer.address).to.be.null; - expect(docPrediction.employer.companyId).to.be.null; - expect(docPrediction.employer.companySite).to.be.null; - expect(docPrediction.employer.nafCode).to.be.null; - expect(docPrediction.employer.name).to.be.null; - expect(docPrediction.employer.phoneNumber).to.be.null; - expect(docPrediction.employer.urssafNumber).to.be.null; - expect(docPrediction.bankAccountDetails.bankName).to.be.null; - expect(docPrediction.bankAccountDetails.iban).to.be.null; - expect(docPrediction.bankAccountDetails.swift).to.be.null; - expect(docPrediction.employment.category).to.be.null; - expect(docPrediction.employment.coefficient).to.be.null; - expect(docPrediction.employment.collectiveAgreement).to.be.null; - expect(docPrediction.employment.jobTitle).to.be.null; - expect(docPrediction.employment.positionLevel).to.be.null; - expect(docPrediction.employment.startDate).to.be.null; - expect(docPrediction.salaryDetails.length).to.be.equals(0); - expect(docPrediction.payDetail.grossSalary).to.be.null; - expect(docPrediction.payDetail.grossSalaryYtd).to.be.null; - expect(docPrediction.payDetail.incomeTaxRate).to.be.null; - expect(docPrediction.payDetail.incomeTaxWithheld).to.be.null; - expect(docPrediction.payDetail.netPaid).to.be.null; - expect(docPrediction.payDetail.netPaidBeforeTax).to.be.null; - expect(docPrediction.payDetail.netTaxable).to.be.null; - expect(docPrediction.payDetail.netTaxableYtd).to.be.null; - expect(docPrediction.payDetail.totalCostEmployer).to.be.null; - 
expect(docPrediction.payDetail.totalTaxesAndDeductions).to.be.null; - expect(docPrediction.pto.accruedThisPeriod).to.be.null; - expect(docPrediction.pto.balanceEndOfPeriod).to.be.null; - expect(docPrediction.pto.usedThisPeriod).to.be.null; - expect(docPrediction.payPeriod.endDate).to.be.null; - expect(docPrediction.payPeriod.month).to.be.null; - expect(docPrediction.payPeriod.paymentDate).to.be.null; - expect(docPrediction.payPeriod.startDate).to.be.null; - expect(docPrediction.payPeriod.year).to.be.null; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.PayslipV2, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/fr/payslip/payslipV3.spec.ts b/tests/v1/product/fr/payslip/payslipV3.spec.ts deleted file mode 100644 index 2ee0fb82a..000000000 --- a/tests/v1/product/fr/payslip/payslipV3.spec.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v3/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v3/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v3/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "payslip_fra/response_v3/summary_page0.rst"), -}; - -describe("MindeeV1 - PayslipV3 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = 
new mindee.Document(mindee.product.fr.PayslipV3, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.payPeriod.endDate).to.be.null; - expect(docPrediction.payPeriod.month).to.be.null; - expect(docPrediction.payPeriod.paymentDate).to.be.null; - expect(docPrediction.payPeriod.startDate).to.be.null; - expect(docPrediction.payPeriod.year).to.be.null; - expect(docPrediction.employee.address).to.be.null; - expect(docPrediction.employee.dateOfBirth).to.be.null; - expect(docPrediction.employee.firstName).to.be.null; - expect(docPrediction.employee.lastName).to.be.null; - expect(docPrediction.employee.phoneNumber).to.be.null; - expect(docPrediction.employee.registrationNumber).to.be.null; - expect(docPrediction.employee.socialSecurityNumber).to.be.null; - expect(docPrediction.employer.address).to.be.null; - expect(docPrediction.employer.companyId).to.be.null; - expect(docPrediction.employer.companySite).to.be.null; - expect(docPrediction.employer.nafCode).to.be.null; - expect(docPrediction.employer.name).to.be.null; - expect(docPrediction.employer.phoneNumber).to.be.null; - expect(docPrediction.employer.urssafNumber).to.be.null; - expect(docPrediction.bankAccountDetails.bankName).to.be.null; - expect(docPrediction.bankAccountDetails.iban).to.be.null; - expect(docPrediction.bankAccountDetails.swift).to.be.null; - expect(docPrediction.employment.category).to.be.null; - expect(docPrediction.employment.coefficient).to.be.null; - expect(docPrediction.employment.collectiveAgreement).to.be.null; - expect(docPrediction.employment.jobTitle).to.be.null; - expect(docPrediction.employment.positionLevel).to.be.null; - expect(docPrediction.employment.seniorityDate).to.be.null; - expect(docPrediction.employment.startDate).to.be.null; - expect(docPrediction.salaryDetails.length).to.be.equals(0); - expect(docPrediction.payDetail.grossSalary).to.be.null; - expect(docPrediction.payDetail.grossSalaryYtd).to.be.null; - 
expect(docPrediction.payDetail.incomeTaxRate).to.be.null; - expect(docPrediction.payDetail.incomeTaxWithheld).to.be.null; - expect(docPrediction.payDetail.netPaid).to.be.null; - expect(docPrediction.payDetail.netPaidBeforeTax).to.be.null; - expect(docPrediction.payDetail.netTaxable).to.be.null; - expect(docPrediction.payDetail.netTaxableYtd).to.be.null; - expect(docPrediction.payDetail.totalCostEmployer).to.be.null; - expect(docPrediction.payDetail.totalTaxesAndDeductions).to.be.null; - expect(docPrediction.paidTimeOff.length).to.be.equals(0); - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.fr.PayslipV3, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/generated/generatedList.spec.ts b/tests/v1/product/generated/generatedList.spec.ts index b8ce62f84..8f41be181 100644 --- a/tests/v1/product/generated/generatedList.spec.ts +++ b/tests/v1/product/generated/generatedList.spec.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { GeneratedListField } from "../../../../src/parsing/generated"; +import { GeneratedListField } from "@/v1/parsing/generated/index.js"; describe("Generated List Field Objects", async () => { it("should properly format floats.", async () => { diff --git a/tests/v1/product/generated/generatedObject.spec.ts b/tests/v1/product/generated/generatedObject.spec.ts index 7b9b06ec3..976b21786 100644 --- a/tests/v1/product/generated/generatedObject.spec.ts +++ b/tests/v1/product/generated/generatedObject.spec.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import { GeneratedObjectField } from "../../../../src/parsing/generated"; +import { GeneratedObjectField } from "@/v1/parsing/generated/index.js"; 
describe("Generated Object Field", async () => { it("should properly format booleans.", async () => { diff --git a/tests/v1/product/generated/generatedV1.spec.ts b/tests/v1/product/generated/generatedV1.spec.ts index 62a0a58bc..1950ee421 100644 --- a/tests/v1/product/generated/generatedV1.spec.ts +++ b/tests/v1/product/generated/generatedV1.spec.ts @@ -1,13 +1,13 @@ import { promises as fs } from "fs"; import * as path from "path"; import { expect } from "chai"; -import * as mindee from "../../../../src"; -import { Page } from "../../../../src"; -import { GeneratedV1 } from "../../../../src/product"; -import { GeneratedListField, GeneratedObjectField } from "../../../../src/parsing/generated"; -import { GeneratedV1Page } from "../../../../src/product/generated/generatedV1Page"; -import { StringField } from "../../../../src/parsing/standard"; -import { V1_PRODUCT_PATH } from "../../../index"; +import * as mindee from "@/index.js"; +import { Page } from "@/v1/index.js"; +import { GeneratedV1 } from "@/v1/product/index.js"; +import { GeneratedListField, GeneratedObjectField } from "@/v1/parsing/generated/index.js"; +import { GeneratedV1Page } from "@/v1/product/generated/generatedV1Page.js"; +import { StringField } from "@/v1/parsing/standard/index.js"; +import { V1_PRODUCT_PATH } from "../../../index.js"; const dataPathInternationalId = { complete: path.join(V1_PRODUCT_PATH, "generated/response_v1/complete_international_id_v1.json"), @@ -30,7 +30,7 @@ describe("Generated Document Object initialization on an OTS invoice", async () it("should load an empty document prediction", async () => { const jsonDataNA = await fs.readFile(dataPathInvoice.empty); const response = JSON.parse(jsonDataNA.toString()); - const doc = new mindee.Document(GeneratedV1, response.document); + const doc = new mindee.v1.Document(GeneratedV1, response.document); expect(doc.inference.prediction.fields.get("customer_address").value).to.be.undefined; 
expect(doc.inference.prediction.fields.get("customer_company_registrations").values.length).to.equals(0); expect(doc.inference.prediction.fields.get("customer_name").value).to.be.undefined; @@ -99,7 +99,7 @@ describe("Generated Document Object initialization on an OTS invoice", async () it("should load a complete document prediction", async () => { const jsonDataNA = await fs.readFile(path.resolve(dataPathInvoice.complete)); const response = JSON.parse(jsonDataNA.toString()); - const doc = new mindee.Document(GeneratedV1, response.document); + const doc = new mindee.v1.Document(GeneratedV1, response.document); expect(doc.inference.prediction.fields.get("customer_address").value).to.equals( "1954 Bloon Street West Toronto, ON, M6P 3K9 Canada" ); @@ -152,7 +152,7 @@ describe("Generated Document Object initialization on an International ID", asyn it("should load an empty document prediction", async () => { const jsonDataNA = await fs.readFile(path.resolve(dataPathInternationalId.empty)); const response = JSON.parse(jsonDataNA.toString()); - const doc = new mindee.Document(GeneratedV1, response.document); + const doc = new mindee.v1.Document(GeneratedV1, response.document); const docString = await fs.readFile(path.join(dataPathInternationalId.emptyDocString)); expect(doc.inference.prediction.fields.get("document_type")).to.be.an.instanceOf(StringField); expect(doc.inference.prediction.fields.get("document_type").value).to.be.undefined; @@ -191,7 +191,7 @@ describe("Generated Document Object initialization on an International ID", asyn it("should load a complete document prediction", async () => { const jsonDataNA = await fs.readFile(path.resolve(dataPathInternationalId.complete)); const response = JSON.parse(jsonDataNA.toString()); - const doc = new mindee.Document(GeneratedV1, response.document); + const doc = new mindee.v1.Document(GeneratedV1, response.document); expect(doc.inference.prediction.fields.get("document_type")).to.be.an.instanceOf(StringField); 
expect(doc.inference.prediction.fields.get("document_type").value).to.equals("NATIONAL_ID_CARD"); expect(doc.inference.prediction.fields.get("document_number")).to.be.an.instanceOf(StringField); diff --git a/tests/v1/product/ind/indianPassport/indianPassportV1.spec.ts b/tests/v1/product/ind/indianPassport/indianPassportV1.spec.ts deleted file mode 100644 index 0cd5be980..000000000 --- a/tests/v1/product/ind/indianPassport/indianPassportV1.spec.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "ind_passport/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "ind_passport/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "ind_passport/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "ind_passport/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - IndianPassportV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.ind.IndianPassportV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.country.value).to.be.undefined; - expect(docPrediction.idNumber.value).to.be.undefined; - expect(docPrediction.givenNames.value).to.be.undefined; - expect(docPrediction.surname.value).to.be.undefined; - expect(docPrediction.birthDate.value).to.be.undefined; - expect(docPrediction.birthPlace.value).to.be.undefined; - expect(docPrediction.issuancePlace.value).to.be.undefined; - expect(docPrediction.issuanceDate.value).to.be.undefined; - expect(docPrediction.expiryDate.value).to.be.undefined; - 
expect(docPrediction.mrz1.value).to.be.undefined; - expect(docPrediction.mrz2.value).to.be.undefined; - expect(docPrediction.legalGuardian.value).to.be.undefined; - expect(docPrediction.nameOfSpouse.value).to.be.undefined; - expect(docPrediction.nameOfMother.value).to.be.undefined; - expect(docPrediction.oldPassportDateOfIssue.value).to.be.undefined; - expect(docPrediction.oldPassportNumber.value).to.be.undefined; - expect(docPrediction.oldPassportPlaceOfIssue.value).to.be.undefined; - expect(docPrediction.address1.value).to.be.undefined; - expect(docPrediction.address2.value).to.be.undefined; - expect(docPrediction.address3.value).to.be.undefined; - expect(docPrediction.fileNumber.value).to.be.undefined; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.ind.IndianPassportV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/internationalId/internationalIdV2.spec.ts b/tests/v1/product/internationalId/internationalIdV2.spec.ts index 921f778b7..53d1550f5 100644 --- a/tests/v1/product/internationalId/internationalIdV2.spec.ts +++ b/tests/v1/product/internationalId/internationalIdV2.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - InternationalIdV2 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = 
JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.InternationalIdV2, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.InternationalIdV2, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.documentNumber.value).to.be.undefined; expect(docPrediction.surnames.length).to.be.equals(0); @@ -39,7 +39,7 @@ describe("MindeeV1 - InternationalIdV2 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.InternationalIdV2, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.InternationalIdV2, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/invoice/invoiceV4.spec.ts b/tests/v1/product/invoice/invoiceV4.spec.ts index 534c65f30..cd7bb5543 100644 --- a/tests/v1/product/invoice/invoiceV4.spec.ts +++ b/tests/v1/product/invoice/invoiceV4.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - InvoiceV4 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.InvoiceV4, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.InvoiceV4, response.document); const docPrediction = doc.inference.prediction; 
expect(docPrediction.locale.value).to.be.undefined; expect(docPrediction.invoiceNumber.value).to.be.undefined; @@ -48,7 +48,7 @@ describe("MindeeV1 - InvoiceV4 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.InvoiceV4, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.InvoiceV4, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/invoiceSplitter/invoiceSplitter.integration.ts b/tests/v1/product/invoiceSplitter/invoiceSplitter.integration.ts index 54f105a32..7a1023647 100644 --- a/tests/v1/product/invoiceSplitter/invoiceSplitter.integration.ts +++ b/tests/v1/product/invoiceSplitter/invoiceSplitter.integration.ts @@ -1,16 +1,16 @@ -import * as mindee from "../../../../src"; -import { InvoiceSplitterV1 } from "../../../../src/product"; +import * as mindee from "@/index.js"; +import { InvoiceSplitterV1 } from "@/v1/product/index.js"; import { expect } from "chai"; -import { levenshteinRatio } from "../../../testingUtilities"; +import { levenshteinRatio } from "../../../testingUtilities.js"; import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; -describe("MindeeV1 - InvoiceSplitterV1 Integration Tests", async () => { - let client: mindee.Client; +describe("MindeeV1 - InvoiceSplitterV1 Integration Tests #includeOptionalDeps", async () => { + let client: mindee.v1.Client; beforeEach(() => { - client = new mindee.Client(); + client = new mindee.v1.Client(); }); it("should extract invoices in strict mode.", async () => { @@ -19,11 +19,11 @@ describe("MindeeV1 - InvoiceSplitterV1 Integration 
Tests", async () => { }); const response = await client.enqueueAndParse( - mindee.product.InvoiceSplitterV1, sample + mindee.v1.product.InvoiceSplitterV1, sample ); const invoiceSplitterInference = response.document?.inference; expect(invoiceSplitterInference).to.be.an.instanceof(InvoiceSplitterV1); - const invoices = await mindee.imageOperations.extractInvoices( + const invoices = await mindee.v1.extraction.extractInvoices( sample, invoiceSplitterInference as InvoiceSplitterV1 ); @@ -32,7 +32,7 @@ describe("MindeeV1 - InvoiceSplitterV1 Integration Tests", async () => { expect(invoices[1].asSource().filename).to.eq("invoice_p_1-1.pdf"); const invoiceResult = await client.parse( - mindee.product.InvoiceV4, invoices[0].asSource() + mindee.v1.product.InvoiceV4, invoices[0].asSource() ); const testStringRstInvoice = await fs.readFile( path.join(V1_PRODUCT_PATH, "invoices/response_v4/summary_full_invoice_p1.rst") diff --git a/tests/v1/product/invoiceSplitter/invoiceSplitterV1.spec.ts b/tests/v1/product/invoiceSplitter/invoiceSplitterV1.spec.ts index 42edc446f..8075affee 100644 --- a/tests/v1/product/invoiceSplitter/invoiceSplitterV1.spec.ts +++ b/tests/v1/product/invoiceSplitter/invoiceSplitterV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - InvoiceSplitterV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.InvoiceSplitterV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.InvoiceSplitterV1, response.document); const 
docPrediction = doc.inference.prediction; expect(docPrediction.invoicePageGroups.length).to.be.equals(0); }); @@ -24,7 +24,7 @@ describe("MindeeV1 - InvoiceSplitterV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.InvoiceSplitterV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.InvoiceSplitterV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1.spec.ts b/tests/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1.spec.ts index 43f18e155..cae5ef4fc 100644 --- a/tests/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1.spec.ts +++ b/tests/v1/product/multiReceiptsDetector/multiReceiptsDetectorV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - MultiReceiptsDetectorV1 Object initialization", async () => it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.MultiReceiptsDetectorV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.MultiReceiptsDetectorV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.receipts.length).to.be.equals(0); }); @@ -24,7 +24,7 @@ describe("MindeeV1 - 
MultiReceiptsDetectorV1 Object initialization", async () => it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.MultiReceiptsDetectorV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.MultiReceiptsDetectorV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/nutritionFactsLabel/nutritionFactsLabelV1.spec.ts b/tests/v1/product/nutritionFactsLabel/nutritionFactsLabelV1.spec.ts deleted file mode 100644 index 3cb5a8327..000000000 --- a/tests/v1/product/nutritionFactsLabel/nutritionFactsLabelV1.spec.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "nutrition_facts/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "nutrition_facts/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "nutrition_facts/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "nutrition_facts/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - NutritionFactsLabelV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.NutritionFactsLabelV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.servingPerBox.value).to.be.undefined; - expect(docPrediction.servingSize.amount).to.be.null; - 
expect(docPrediction.servingSize.unit).to.be.null; - expect(docPrediction.calories.dailyValue).to.be.null; - expect(docPrediction.calories.per100G).to.be.null; - expect(docPrediction.calories.perServing).to.be.null; - expect(docPrediction.totalFat.dailyValue).to.be.null; - expect(docPrediction.totalFat.per100G).to.be.null; - expect(docPrediction.totalFat.perServing).to.be.null; - expect(docPrediction.saturatedFat.dailyValue).to.be.null; - expect(docPrediction.saturatedFat.per100G).to.be.null; - expect(docPrediction.saturatedFat.perServing).to.be.null; - expect(docPrediction.transFat.dailyValue).to.be.null; - expect(docPrediction.transFat.per100G).to.be.null; - expect(docPrediction.transFat.perServing).to.be.null; - expect(docPrediction.cholesterol.dailyValue).to.be.null; - expect(docPrediction.cholesterol.per100G).to.be.null; - expect(docPrediction.cholesterol.perServing).to.be.null; - expect(docPrediction.totalCarbohydrate.dailyValue).to.be.null; - expect(docPrediction.totalCarbohydrate.per100G).to.be.null; - expect(docPrediction.totalCarbohydrate.perServing).to.be.null; - expect(docPrediction.dietaryFiber.dailyValue).to.be.null; - expect(docPrediction.dietaryFiber.per100G).to.be.null; - expect(docPrediction.dietaryFiber.perServing).to.be.null; - expect(docPrediction.totalSugars.dailyValue).to.be.null; - expect(docPrediction.totalSugars.per100G).to.be.null; - expect(docPrediction.totalSugars.perServing).to.be.null; - expect(docPrediction.addedSugars.dailyValue).to.be.null; - expect(docPrediction.addedSugars.per100G).to.be.null; - expect(docPrediction.addedSugars.perServing).to.be.null; - expect(docPrediction.protein.dailyValue).to.be.null; - expect(docPrediction.protein.per100G).to.be.null; - expect(docPrediction.protein.perServing).to.be.null; - expect(docPrediction.sodium.dailyValue).to.be.null; - expect(docPrediction.sodium.per100G).to.be.null; - expect(docPrediction.sodium.perServing).to.be.null; - expect(docPrediction.sodium.unit).to.be.null; - 
expect(docPrediction.nutrients.length).to.be.equals(0); - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.NutritionFactsLabelV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/passport/passportV1.spec.ts b/tests/v1/product/passport/passportV1.spec.ts index b9356f997..20b54b5d7 100644 --- a/tests/v1/product/passport/passportV1.spec.ts +++ b/tests/v1/product/passport/passportV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - PassportV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.PassportV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.PassportV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.country.value).to.be.undefined; expect(docPrediction.idNumber.value).to.be.undefined; @@ -34,7 +34,7 @@ describe("MindeeV1 - PassportV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.PassportV1, response.document); + const doc = new 
mindee.v1.Document(mindee.v1.product.PassportV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/receipt/receiptV5.spec.ts b/tests/v1/product/receipt/receiptV5.spec.ts index 001965913..87c8e8c06 100644 --- a/tests/v1/product/receipt/receiptV5.spec.ts +++ b/tests/v1/product/receipt/receiptV5.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - ReceiptV5 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.ReceiptV5, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.ReceiptV5, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.locale.value).to.be.undefined; expect(docPrediction.date.value).to.be.undefined; @@ -37,7 +37,7 @@ describe("MindeeV1 - ReceiptV5 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.ReceiptV5, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.ReceiptV5, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/resume/resumeV1.spec.ts b/tests/v1/product/resume/resumeV1.spec.ts index 
eefe06960..7635a48ee 100644 --- a/tests/v1/product/resume/resumeV1.spec.ts +++ b/tests/v1/product/resume/resumeV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../index"; +import { V1_PRODUCT_PATH } from "../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - ResumeV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.ResumeV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.ResumeV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.documentLanguage.value).to.be.undefined; expect(docPrediction.givenNames.length).to.be.equals(0); @@ -39,7 +39,7 @@ describe("MindeeV1 - ResumeV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.ResumeV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.ResumeV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); diff --git a/tests/v1/product/us/bankCheck/bankCheckV1.spec.ts b/tests/v1/product/us/bankCheck/bankCheckV1.spec.ts index 7ae7d6d9f..eaeca7952 100644 --- a/tests/v1/product/us/bankCheck/bankCheckV1.spec.ts +++ b/tests/v1/product/us/bankCheck/bankCheckV1.spec.ts @@ -1,8 +1,8 @@ import { promises as fs } from "fs"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; +import { 
V1_PRODUCT_PATH } from "../../../../index.js"; import { expect } from "chai"; -import * as mindee from "../../../../../src"; +import * as mindee from "@/index.js"; const dataPath = { @@ -16,7 +16,7 @@ describe("MindeeV1 - BankCheckV1 Object initialization", async () => { it("should load an empty document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.empty)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.us.BankCheckV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.us.BankCheckV1, response.document); const docPrediction = doc.inference.prediction; expect(docPrediction.date.value).to.be.undefined; expect(docPrediction.amount.value).to.be.undefined; @@ -29,14 +29,14 @@ describe("MindeeV1 - BankCheckV1 Object initialization", async () => { it("should load a complete document prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.us.BankCheckV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.us.BankCheckV1, response.document); const docString = await fs.readFile(path.join(dataPath.docString)); expect(doc.toString()).to.be.equals(docString.toString()); }); it("should load a complete page 0 prediction", async () => { const jsonData = await fs.readFile(path.resolve(dataPath.complete)); const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.us.BankCheckV1, response.document); + const doc = new mindee.v1.Document(mindee.v1.product.us.BankCheckV1, response.document); const page0 = doc.inference.pages[0]; const docString = await fs.readFile(path.join(dataPath.page0String)); expect(page0.toString()).to.be.equals(docString.toString()); diff --git a/tests/v1/product/us/healthcareCard/healthcareCardV1.spec.ts 
b/tests/v1/product/us/healthcareCard/healthcareCardV1.spec.ts deleted file mode 100644 index 72c4c878c..000000000 --- a/tests/v1/product/us/healthcareCard/healthcareCardV1.spec.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "us_healthcare_cards/response_v1/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "us_healthcare_cards/response_v1/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "us_healthcare_cards/response_v1/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "us_healthcare_cards/response_v1/summary_page0.rst"), -}; - -describe("MindeeV1 - HealthcareCardV1 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.us.HealthcareCardV1, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.companyName.value).to.be.undefined; - expect(docPrediction.planName.value).to.be.undefined; - expect(docPrediction.memberName.value).to.be.undefined; - expect(docPrediction.memberId.value).to.be.undefined; - expect(docPrediction.issuer80840.value).to.be.undefined; - expect(docPrediction.dependents.length).to.be.equals(0); - expect(docPrediction.groupNumber.value).to.be.undefined; - expect(docPrediction.payerId.value).to.be.undefined; - expect(docPrediction.rxBin.value).to.be.undefined; - expect(docPrediction.rxId.value).to.be.undefined; - expect(docPrediction.rxGrp.value).to.be.undefined; - expect(docPrediction.rxPcn.value).to.be.undefined; - expect(docPrediction.copays.length).to.be.equals(0); - expect(docPrediction.enrollmentDate.value).to.be.undefined; - }); - - 
it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.us.HealthcareCardV1, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/product/us/usMail/usMailV3.spec.ts b/tests/v1/product/us/usMail/usMailV3.spec.ts deleted file mode 100644 index e1773db56..000000000 --- a/tests/v1/product/us/usMail/usMailV3.spec.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { promises as fs } from "fs"; -import path from "path"; -import { V1_PRODUCT_PATH } from "../../../../index"; -import { expect } from "chai"; -import * as mindee from "../../../../../src"; - - -const dataPath = { - complete: path.join(V1_PRODUCT_PATH, "us_mail/response_v3/complete.json"), - empty: path.join(V1_PRODUCT_PATH, "us_mail/response_v3/empty.json"), - docString: path.join(V1_PRODUCT_PATH, "us_mail/response_v3/summary_full.rst"), - page0String: path.join(V1_PRODUCT_PATH, "us_mail/response_v3/summary_page0.rst"), -}; - -describe("MindeeV1 - UsMailV3 Object initialization", async () => { - it("should load an empty document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.empty)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.us.UsMailV3, response.document); - const docPrediction = doc.inference.prediction; - expect(docPrediction.senderName.value).to.be.undefined; - expect(docPrediction.senderAddress.city).to.be.null; - expect(docPrediction.senderAddress.complete).to.be.null; - expect(docPrediction.senderAddress.postalCode).to.be.null; - expect(docPrediction.senderAddress.state).to.be.null; - expect(docPrediction.senderAddress.street).to.be.null; - expect(docPrediction.recipientNames.length).to.be.equals(0); - 
expect(docPrediction.recipientAddresses.length).to.be.equals(0); - expect(docPrediction.isReturnToSender.value).to.be.undefined; - }); - - it("should load a complete document prediction", async () => { - const jsonData = await fs.readFile(path.resolve(dataPath.complete)); - const response = JSON.parse(jsonData.toString()); - const doc = new mindee.Document(mindee.product.us.UsMailV3, response.document); - const docString = await fs.readFile(path.join(dataPath.docString)); - expect(doc.toString()).to.be.equals(docString.toString()); - }); -}); diff --git a/tests/v1/workflows/workflow.integration.ts b/tests/v1/workflows/workflow.integration.ts index 0912425e3..1167af9bd 100644 --- a/tests/v1/workflows/workflow.integration.ts +++ b/tests/v1/workflows/workflow.integration.ts @@ -1,20 +1,20 @@ -import * as mindee from "../../../src"; -import { ExecutionPriority } from "../../../src/parsing/common"; +import * as mindee from "@/index.js"; +import { ExecutionPriority } from "@/v1/parsing/common/index.js"; import { expect } from "chai"; -import { LocalInputSource } from "../../../src/input"; -import { OptionalAsyncOptions } from "../../../src/client"; -import { FinancialDocumentV1 } from "../../../src/product"; -import { RAGExtra } from "../../../src/parsing/common/extras/ragExtra"; +import { LocalInputSource } from "@/input/index.js"; +import { OptionalAsyncOptions } from "@/v1/index.js"; +import { FinancialDocumentV1 } from "@/v1/product/index.js"; +import { RAGExtra } from "@/v1/parsing/common/extras/ragExtra.js"; import path from "path"; -import { V1_PRODUCT_PATH } from "../../index"; +import { V1_PRODUCT_PATH } from "../../index.js"; describe("MindeeV1 - Workflow calls", () => { - let client: mindee.Client; + let client: mindee.v1.Client; let sample: LocalInputSource; let workflowId: string; beforeEach(async () => { - client = new mindee.Client(); + client = new mindee.v1.Client(); workflowId = process.env["WORKFLOW_ID"] ?? 
""; sample = new mindee.PathInput({ inputPath: path.join(V1_PRODUCT_PATH, "financial_document/default_sample.jpg") diff --git a/tests/v1/workflows/workflow.spec.ts b/tests/v1/workflows/workflow.spec.ts index de698df8f..edda1c516 100644 --- a/tests/v1/workflows/workflow.spec.ts +++ b/tests/v1/workflows/workflow.spec.ts @@ -1,25 +1,46 @@ import { expect } from "chai"; -import nock from "nock"; +import { MockAgent, setGlobalDispatcher } from "undici"; import { promises as fs } from "fs"; import path from "path"; -import { GeneratedV1 } from "../../../src/product"; -import { WorkflowResponse } from "../../../src/parsing/common/workflowResponse"; -import { V1_RESOURCE_PATH } from "../../index"; +import { RESOURCE_PATH, V1_RESOURCE_PATH } from "../../index.js"; +import { Client } from "@/v1/index.js"; +import { PathInput } from "@/index.js"; + +const mockAgent = new MockAgent(); +setGlobalDispatcher(mockAgent); +const mockPool = mockAgent.get("https://v1-workflow-host"); + +async function setInterceptor(httpCode: number, jsonFilePath: string) { + const mockResponse = JSON.parse(await fs.readFile(jsonFilePath, "utf-8")); + mockPool + .intercept({ path: /v1\/workflows\/.*/, method: "POST" }) + .reply(httpCode, mockResponse); +} + +async function executeWorkflow(doc: PathInput, workflowId: string) { + const client = new Client({ apiKey: "my-api-key", debug: true, dispatcher: mockAgent }); + return await client.executeWorkflow(doc, workflowId); +} describe("MindeeV1 - Workflow executions", () => { - it("should deserialize response correctly when sending a document to an execution", async () => { - const jsonFilePath = path.join(V1_RESOURCE_PATH, "workflows", "success.json"); - const mockResponse = JSON.parse(await fs.readFile(jsonFilePath, "utf-8")); + const doc = new PathInput({ + inputPath: path.join(RESOURCE_PATH, "file_types/pdf/blank_1.pdf") + }); - nock("https://api.mindee.net") - .post("/v1/workflows/execute") - .reply(202, mockResponse); + beforeEach(function() { + 
process.env.MINDEE_API_HOST = "v1-workflow-host"; + }); - const mockedExecution = new WorkflowResponse( - GeneratedV1, - mockResponse - ); + afterEach(function() { + delete process.env.MINDEE_API_HOST; + }); + it("should deserialize response correctly when sending a document to an execution", async () => { + const jsonFilePath = path.join(V1_RESOURCE_PATH, "workflows", "success.json"); + await setInterceptor(202, jsonFilePath); + const mockedExecution = await executeWorkflow( + doc, "07ebf237-ff27-4eee-b6a2-425df4a5cca6" + ); expect(mockedExecution).to.not.be.null; expect(mockedExecution.apiRequest).to.not.be.null; expect(mockedExecution.execution.batchName).to.be.null; @@ -40,17 +61,10 @@ describe("MindeeV1 - Workflow executions", () => { it("should deserialize response correctly when sending a document to an execution with priority and alias", async () => { const jsonFilePath = path.join(V1_RESOURCE_PATH, "workflows", "success_low_priority.json"); - const mockResponse = JSON.parse(await fs.readFile(jsonFilePath, "utf-8")); - - nock("https://api.mindee.net") - .post("/v1/workflows/execute") - .reply(200, mockResponse); - - const mockedExecution = new WorkflowResponse( - GeneratedV1, - mockResponse + await setInterceptor(200, jsonFilePath); + const mockedExecution = await executeWorkflow( + doc, "07ebf237-ff27-4eee-b6a2-425df4a5cca6" ); - expect(mockedExecution).to.not.be.null; expect(mockedExecution.apiRequest).to.not.be.null; expect(mockedExecution.execution.batchName).to.be.null; diff --git a/tests/v2/clientV2.integration.ts b/tests/v2/client.integration.ts similarity index 68% rename from tests/v2/clientV2.integration.ts rename to tests/v2/client.integration.ts index 4583d90d8..e98d7f3c6 100644 --- a/tests/v2/clientV2.integration.ts +++ b/tests/v2/client.integration.ts @@ -2,18 +2,21 @@ import { expect } from "chai"; import path from "node:path"; import { - ClientV2, - InferenceParameters, + Client, PathInput, UrlInput, Base64Input, - InferenceResponse, -} from 
"../../src"; -import { Inference } from "../../src/parsing/v2"; -import { SimpleField } from "../../src/parsing/v2/field"; -import { MindeeHttpErrorV2 } from "../../src/errors/mindeeError"; +} from "@/index.js"; +import { + ExtractionInference, + ExtractionParameters, + ExtractionResponse, +} from "@/v2/product/extraction/index.js"; +import { SimpleField } from "@/v2/parsing/inference/field/index.js"; +import { MindeeHttpErrorV2 } from "@/v2/http/index.js"; import * as fs from "node:fs"; -import { RESOURCE_PATH, V2_PRODUCT_PATH, V2_RESOURCE_PATH } from "../index"; +import { RESOURCE_PATH, V2_PRODUCT_PATH } from "../index.js"; +import { Extraction } from "@/v2/product/index.js"; function check422(err: unknown) { expect(err).to.be.instanceOf(MindeeHttpErrorV2); @@ -25,7 +28,7 @@ function check422(err: unknown) { expect(errObj.errors).to.be.instanceOf(Array); } -function checkEmptyActiveOptions(inference: Inference) { +function checkEmptyActiveOptions(inference: ExtractionInference) { expect(inference.activeOptions).to.not.be.null; expect(inference.activeOptions?.rag).to.be.false; expect(inference.activeOptions?.rawText).to.be.false; @@ -35,7 +38,7 @@ function checkEmptyActiveOptions(inference: Inference) { } describe("MindeeV2 – Client Integration Tests", () => { - let client: ClientV2; + let client: Client; let modelId: string; const emptyPdfPath = path.join( @@ -46,6 +49,7 @@ describe("MindeeV2 – Client Integration Tests", () => { ); const sampleImagePath = path.join( V2_PRODUCT_PATH, + "extraction", "financial_document", "default_sample.jpg", ); @@ -55,23 +59,23 @@ describe("MindeeV2 – Client Integration Tests", () => { "receipt.txt", ); const dataSchemaReplacePath = path.join( - V2_RESOURCE_PATH, "inference/data_schema_replace_param.json" + V2_PRODUCT_PATH, "extraction/data_schema_replace_param.json" ); let dataSchemaReplace: string; beforeEach(async () => { const apiKey = process.env["MINDEE_V2_API_KEY"] ?? 
""; - modelId = process.env["MINDEE_V2_FINDOC_MODEL_ID"] ?? ""; + modelId = process.env["MINDEE_V2_SE_TESTS_FINDOC_MODEL_ID"] ?? ""; - client = new ClientV2({ apiKey }); + client = new Client({ apiKey: apiKey, debug: true }); }); before(async () => { dataSchemaReplace = fs.readFileSync(dataSchemaReplacePath).toString(); }); - it("Empty, multi-page PDF – PathInput - enqueueAndGetInference must succeed", async () => { + it("Empty, multi-page PDF – PathInput - enqueueAndGetResult must succeed", async () => { const source = new PathInput({ inputPath: emptyPdfPath }); - const params: InferenceParameters = { + const params = { modelId, rag: false, rawText: false, @@ -80,12 +84,12 @@ describe("MindeeV2 – Client Integration Tests", () => { webhookIds: [], alias: "ts_integration_empty_multiple" }; - - const response = await client.enqueueAndGetInference(source, params); - + const response = await client.enqueueAndGetResult( + Extraction, source, params + ); expect(response).to.exist; - expect(response.inference).to.be.instanceOf(Inference); - const inference: Inference = response.inference; + expect(response.inference).to.be.instanceOf(ExtractionInference); + const inference: ExtractionInference = response.inference; expect(inference.file?.name).to.equal("multipage_cut-2.pdf"); expect(inference.file.pageCount).to.equal(2); @@ -96,9 +100,9 @@ describe("MindeeV2 – Client Integration Tests", () => { checkEmptyActiveOptions(inference); }).timeout(60000); - it("Filled, single-page image – PathInput - enqueueAndGetInference must succeed", async () => { + it("Filled, single-page image – PathInput - enqueueAndGetResult must succeed", async () => { const source = new PathInput({ inputPath: sampleImagePath }); - const params: InferenceParameters = { + const params = { modelId, rag: false, rawText: true, @@ -109,10 +113,11 @@ describe("MindeeV2 – Client Integration Tests", () => { alias: "ts_integration_binary_filled_single" }; - const response = await 
client.enqueueAndGetInference(source, params); - - expect(response.inference).to.be.instanceOf(Inference); - const inference: Inference = response.inference; + const response = await client.enqueueAndGetResult( + Extraction, source, params + ); + expect(response.inference).to.be.instanceOf(ExtractionInference); + const inference: ExtractionInference = response.inference; expect(inference.file?.name).to.equal("default_sample.jpg"); expect(inference.model?.id).to.equal(modelId); @@ -134,10 +139,10 @@ describe("MindeeV2 – Client Integration Tests", () => { expect(inference.result.rawText?.pages).to.have.lengthOf(1); }).timeout(120000); - it("Filled, single-page image – Base64Input - enqueueAndGetInference must succeed", async () => { + it("Filled, single-page image – Base64Input - enqueueAndGetResult must succeed", async () => { const data = fs.readFileSync(sampleBase64Path, "utf8"); const source = new Base64Input({ inputString: data, filename: "receipt.jpg" }); - const params: InferenceParameters = { + const params = { modelId, rag: false, rawText: false, @@ -147,10 +152,11 @@ describe("MindeeV2 – Client Integration Tests", () => { alias: "ts_integration_base64_filled_single" }; - const response = await client.enqueueAndGetInference(source, params); - - expect(response.inference).to.be.instanceOf(Inference); - const inference: Inference = response.inference; + const response = await client.enqueueAndGetResult( + Extraction, source, params + ); + expect(response.inference).to.be.instanceOf(ExtractionInference); + const inference: ExtractionInference = response.inference; expect(inference.file?.name).to.equal("receipt.jpg"); expect(inference.model?.id).to.equal(modelId); @@ -166,10 +172,10 @@ describe("MindeeV2 – Client Integration Tests", () => { it("Invalid model ID – enqueue must raise 422", async () => { const source = new PathInput({ inputPath: emptyPdfPath }); - const badParams: InferenceParameters = { modelId: "00000000-0000-0000-0000-000000000000" }; + const 
badParams = { modelId: "00000000-0000-0000-0000-000000000000" }; try { - await client.enqueueInference(source, badParams); + await client.enqueue(Extraction, source, badParams); expect.fail("Expected the call to throw, but it succeeded."); } catch (err) { check422(err); @@ -178,17 +184,20 @@ describe("MindeeV2 – Client Integration Tests", () => { it("Invalid job ID – getInference must raise 422", async () => { try { - await client.getInference("00000000-0000-0000-0000-000000000000"); + await client.getResult( + Extraction, + "00000000-0000-0000-0000-000000000000" + ); expect.fail("Expected the call to throw, but it succeeded."); } catch (err) { check422(err); } }).timeout(60000); - it("HTTPS URL – enqueue & get inference must succeed", async () => { + it("HTTPS URL – enqueueAndGetResult must succeed", async () => { const url = process.env.MINDEE_V2_SE_TESTS_BLANK_PDF_URL ?? "error-no-url-found"; const source = new UrlInput({ url }); - const params: InferenceParameters = { + const params = new ExtractionParameters({ modelId, rag: false, rawText: false, @@ -196,17 +205,17 @@ describe("MindeeV2 – Client Integration Tests", () => { confidence: false, webhookIds: [], alias: "ts_integration_url_source" - }; - - const response: InferenceResponse = await client.enqueueAndGetInference(source, params); - + }); + const response: ExtractionResponse = await client.enqueueAndGetResult( + Extraction, source, params + ); expect(response).to.exist; - expect(response.inference).to.be.instanceOf(Inference); + expect(response.inference).to.be.instanceOf(ExtractionInference); }).timeout(60000); it("Data Schema Override - Overrides the data schema successfully", async () => { const source = new PathInput({ inputPath: emptyPdfPath }); - const params: InferenceParameters = { + const params = new ExtractionParameters({ modelId, rag: false, rawText: false, @@ -215,11 +224,12 @@ describe("MindeeV2 – Client Integration Tests", () => { webhookIds: [], dataSchema: dataSchemaReplace, alias: 
"ts_integration_data_schema_replace" - }; - const response = await client.enqueueAndGetInference(source, params); - + }); + const response = await client.enqueueAndGetResult( + Extraction, source, params + ); expect(response).to.exist; - expect(response.inference).to.be.instanceOf(Inference); + expect(response.inference).to.be.instanceOf(ExtractionInference); expect(response.inference.result.fields.get("test_replace")).to.exist; expect((response.inference.result.fields.get("test_replace") as SimpleField).value).to.be.equals("a test value"); diff --git a/tests/v2/client.spec.ts b/tests/v2/client.spec.ts new file mode 100644 index 000000000..9db91a6a6 --- /dev/null +++ b/tests/v2/client.spec.ts @@ -0,0 +1,157 @@ +import { expect } from "chai"; +import { MockAgent, setGlobalDispatcher } from "undici"; +import path from "node:path"; +import { Client, PathInput } from "@/index.js"; +import { MindeeHttpErrorV2 } from "@/v2/http/index.js"; +import assert from "node:assert/strict"; +import { RESOURCE_PATH, V2_RESOURCE_PATH } from "../index.js"; +import fs from "node:fs/promises"; +import { Crop, Extraction } from "@/v2/product/index.js"; + +const mockAgent = new MockAgent(); +setGlobalDispatcher(mockAgent); +const mockPool = mockAgent.get("https://v2-client-host"); + +/** + * Injects a minimal set of environment variables so that the SDK behaves + * as if it had been configured by the user. 
+ */ +function dummyEnvvars(): void { + process.env.MINDEE_V2_API_KEY = "dummy"; + process.env.MINDEE_V2_API_HOST = "v2-client-host"; +} + +async function setInterceptor(statusCode: number, filePath: string): Promise { + const fileObj = await fs.readFile(filePath, { encoding: "utf-8" }); + mockPool + .intercept({ path: /.*/, method: "GET" }) + .reply(statusCode, fileObj); +} + +async function setAllInterceptors(): Promise { + mockPool + .intercept({ path: /.*/, method: "POST" }) + .reply( + 400, + { status: 400, detail: "forced failure from test", title: "Bad Request", code: "400-001" } + ); + await setInterceptor( + 200, + path.join(V2_RESOURCE_PATH, "job/ok_processing.json") + ); +} + +describe("MindeeV2 - ClientV2", () => { + const fileTypesDir = path.join(RESOURCE_PATH, "file_types"); + + before(() => { + dummyEnvvars(); + }); + + after(() => { + delete process.env.MINDEE_V2_API_KEY; + delete process.env.MINDEE_V2_API_HOST; + }); + + describe("Client configured via environment variables", () => { + let client: Client; + + beforeEach(async () => { + await setAllInterceptors(); + client = new Client({ apiKey: "dummy", debug: true, dispatcher: mockAgent }); + }); + + it("inherits base URL, token & headers from the env / options", () => { + const api = (client as any).mindeeApi; + expect(api.settings.apiKey).to.equal("dummy"); + expect(api.settings.hostname).to.equal("v2-client-host"); + expect(api.settings.baseHeaders.Authorization).to.equal("dummy"); + expect(api.settings.baseHeaders["User-Agent"]).to.match(/mindee/i); + }); + + it("enqueue(path) on extraction rejects with MindeeHttpErrorV2 on 400", async () => { + const filePath = path.join(fileTypesDir, "receipt.jpg"); + const inputDoc = new PathInput({ inputPath: filePath }); + + await assert.rejects( + client.enqueue(Extraction, inputDoc, { modelId: "dummy-model", textContext: "hello" }), + (error: any) => { + assert.strictEqual(error instanceof MindeeHttpErrorV2, true); + assert.strictEqual(error.status, 
400); + return true; + } + ); + }); + + it("enqueue(path) on crop rejects with MindeeHttpErrorV2 on 400", async () => { + const filePath = path.join(fileTypesDir, "receipt.jpg"); + const inputDoc = new PathInput({ inputPath: filePath }); + + await assert.rejects( + client.enqueue(Crop, inputDoc, { modelId: "dummy-model" }), + (error: any) => { + assert.strictEqual(error instanceof MindeeHttpErrorV2, true); + assert.strictEqual(error.status, 400); + return true; + } + ); + }); + + it("enqueueAndGetResult(path) on extraction rejects with MindeeHttpErrorV2 on 400", async () => { + const filePath = path.join(fileTypesDir, "receipt.jpg"); + const inputDoc = new PathInput({ inputPath: filePath }); + await assert.rejects( + client.enqueueAndGetResult( + Extraction, + inputDoc, + { modelId: "dummy-model", rag: false } + ), + (error: any) => { + assert.strictEqual(error instanceof MindeeHttpErrorV2, true); + assert.strictEqual(error.status, 400); + return true; + } + ); + }); + + it("bubble-up HTTP errors with details", async () => { + const input = new PathInput({ + inputPath: path.join( + V2_RESOURCE_PATH, + "products", + "extraction", + "financial_document", + "default_sample.jpg" + ), + }); + await assert.rejects( + client.enqueue(Extraction, input, { modelId: "dummy-model" }), + (error: any) => { + expect(error).to.be.instanceOf(MindeeHttpErrorV2); + expect(error.status).to.equal(400); + expect(error.detail).to.equal("forced failure from test"); + return true; + } + ); + }); + + it("getJob(jobId) returns a fully-formed JobResponse", async () => { + const resp = await client.getJob( + "12345678-1234-1234-1234-123456789ABC" + ); + const job = resp.job; + expect(job.id).to.equal("12345678-1234-1234-1234-123456789ABC"); + expect(job.modelId).to.equal("87654321-4321-4321-4321-CBA987654321"); + expect(job.filename).to.equal("default_sample.jpg"); + expect(job.alias).to.equal("dummy-alias.jpg"); + expect(job.createdAt?.toISOString()).to.equal("2025-07-03T14:27:58.974Z"); + 
expect(job.status).to.equal("Processing"); + expect(job.pollingUrl).to.equal( + "https://api-v2.mindee.net/v2/jobs/12345678-1234-1234-1234-123456789ABC" + ); + expect(job.resultUrl).to.be.null; + expect(job.webhooks).to.have.length(0); + expect(job.error).to.be.undefined; + }); + }); +}); diff --git a/tests/v2/clientV2.spec.ts b/tests/v2/clientV2.spec.ts deleted file mode 100644 index 552784bcc..000000000 --- a/tests/v2/clientV2.spec.ts +++ /dev/null @@ -1,151 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention,camelcase */ -import { expect } from "chai"; -import nock from "nock"; -import path from "node:path"; -import { ClientV2, LocalResponse, PathInput, InferenceResponse } from "../../src"; -import { MindeeHttpErrorV2 } from "../../src/errors/mindeeError"; -import assert from "node:assert/strict"; -import { RESOURCE_PATH, V2_RESOURCE_PATH } from "../index"; - -/** - * Injects a minimal set of environment variables so that the SDK behaves - * as if it had been configured by the user. 
- */ -function dummyEnvvars(): void { - process.env.MINDEE_V2_API_KEY = "dummy"; - process.env.MINDEE_V2_API_HOST = "dummy-url"; -} - -function setNockInterceptors(): void { - nock("https://dummy-url") - .persist() - .post(/.*/) - .reply(400, { status: 400, detail: "forced failure from test" }); - - nock("https://dummy-url") - .persist() - .get(/.*/) - .reply(200, { - job: { - id: "12345678-1234-1234-1234-123456789ABC", - model_id: "87654321-4321-4321-4321-CBA987654321", - filename: "default_sample.jpg", - alias: "dummy-alias.jpg", - created_at: "2025-07-03T14:27:58.974451", - status: "Processing", - polling_url: - "https://api-v2.mindee.net/v2/jobs/12345678-1234-1234-1234-123456789ABC", - result_url: null, - webhooks: [], - error: null, - }, - }); -} - -const fileTypesDir = path.join(RESOURCE_PATH, "file_types"); - -describe("MindeeV2 - ClientV2", () => { - before(() => { - setNockInterceptors(); - dummyEnvvars(); - }); - - after(() => { - nock.cleanAll(); - delete process.env.MINDEE_V2_API_KEY; - delete process.env.MINDEE_V2_API_HOST; - }); - - describe("Client configured via environment variables", () => { - let client: ClientV2; - - beforeEach(() => { - client = new ClientV2({ apiKey: "dummy" }); - }); - - it("inherits base URL, token & headers from the env / options", () => { - const api = (client as any).mindeeApi; - expect(api.settings.apiKey).to.equal("dummy"); - expect(api.settings.hostname).to.equal("dummy-url"); - expect(api.settings.baseHeaders.Authorization).to.equal("dummy"); - expect(api.settings.baseHeaders["User-Agent"]).to.match(/mindee/i); - }); - - it("enqueue(path) rejects with MindeeHttpErrorV2 on 4xx", async () => { - const filePath = path.join(fileTypesDir, "receipt.jpg"); - const inputDoc = new PathInput({ inputPath: filePath }); - - await assert.rejects( - client.enqueueInference(inputDoc, { modelId: "dummy-model", textContext: "hello" }), - MindeeHttpErrorV2 - ); - }); - - it("enqueueAndParse(path) rejects with MindeeHttpErrorV2 on 4xx", 
async () => { - const filePath = path.join(fileTypesDir, "receipt.jpg"); - const inputDoc = new PathInput({ inputPath: filePath }); - await assert.rejects( - client.enqueueAndGetInference( - inputDoc, - { modelId: "dummy-model", rag: false } - ), - MindeeHttpErrorV2 - ); - }); - - it("loading an inference works on stored JSON fixtures", async () => { - const jsonPath = path.join( - V2_RESOURCE_PATH, - "products", - "financial_document", - "complete.json" - ); - - const localResponse = new LocalResponse(jsonPath); - const response: InferenceResponse = await localResponse.deserializeResponse(InferenceResponse); - - expect(response.inference.model.id).to.equal( - "12345678-1234-1234-1234-123456789abc" - ); - }); - - it("bubble-up HTTP errors with details", async () => { - const input = new PathInput({ - inputPath: path.join( - V2_RESOURCE_PATH, - "products", - "financial_document", - "default_sample.jpg" - ), - }); - try { - await client.enqueueInference(input, { modelId: "dummy-model" }); - expect.fail("enqueue() should have thrown"); - } catch (err) { - expect(err).to.be.instanceOf(MindeeHttpErrorV2); - const httpErr = err as MindeeHttpErrorV2; - expect(httpErr.status).to.equal(400); - expect(httpErr.detail).to.equal("forced failure from test"); - } - }); - - it("parseQueued(jobId) returns a fully-formed JobResponse", async () => { - const resp = await client.getJob( - "12345678-1234-1234-1234-123456789ABC" - ); - const job = resp.job; - expect(job.id).to.equal("12345678-1234-1234-1234-123456789ABC"); - expect(job.modelId).to.equal("87654321-4321-4321-4321-CBA987654321"); - expect(job.filename).to.equal("default_sample.jpg"); - expect(job.alias).to.equal("dummy-alias.jpg"); - expect(job.createdAt?.toISOString()).to.equal("2025-07-03T14:27:58.974Z"); - expect(job.status).to.equal("Processing"); - expect(job.pollingUrl).to.equal( - "https://api-v2.mindee.net/v2/jobs/12345678-1234-1234-1234-123456789ABC" - ); - expect(job.resultUrl).to.be.null; - 
expect(job.webhooks).to.have.length(0); - expect(job.error).to.be.undefined; - }); - }); -}); diff --git a/tests/v2/input/inferenceParameter.spec.ts b/tests/v2/input/inferenceParameter.spec.ts deleted file mode 100644 index 51a48171f..000000000 --- a/tests/v2/input/inferenceParameter.spec.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { StringDict } from "../../../src/parsing/common"; -import path from "path"; -import { V2_RESOURCE_PATH } from "../../index"; -import { InferenceParameters } from "../../../src"; -import { expect } from "chai"; -import { DataSchema } from "../../../src/input"; -import { promises as fs } from "fs"; - -let expectedDataSchemaDict: StringDict; -let expectedDataSchemaString: string; -let expectedDataSchemaObject: DataSchema; - -describe("MindeeV2 - Inference Parameter", () => { - before(async () => { - const fileContents = await fs.readFile(path.join(V2_RESOURCE_PATH, "inference/data_schema_replace_param.json")); - expectedDataSchemaDict = JSON.parse(fileContents.toString()); - expectedDataSchemaString = JSON.stringify(expectedDataSchemaDict); - expectedDataSchemaObject = new DataSchema(expectedDataSchemaDict); - }); - - describe("dataSchema", () => { - it("shouldn't replace when unset", async () => { - const params: InferenceParameters = { - modelId: "test-model-id", - }; - - expect(params.dataSchema).to.be.undefined; - }); - - it("should equate no matter the type", async () => { - const paramsDict: InferenceParameters = { - modelId: "test-model-id", - dataSchema: expectedDataSchemaDict, - }; - const paramsString: InferenceParameters = { - modelId: "test-model-id", - dataSchema: expectedDataSchemaString, - }; - const paramsObject: InferenceParameters = { - modelId: "test-model-id", - dataSchema: expectedDataSchemaObject, - }; - - expect(JSON.stringify(paramsDict.dataSchema)).to.eq(expectedDataSchemaString); - expect(paramsObject.dataSchema?.toString()).to.eq(expectedDataSchemaString); - 
expect(paramsString.dataSchema?.toString()).to.eq(expectedDataSchemaString); - }); - }); -}); diff --git a/tests/v2/parsing/job.spec.ts b/tests/v2/parsing/job.spec.ts index 675c0e921..81d4f41ac 100644 --- a/tests/v2/parsing/job.spec.ts +++ b/tests/v2/parsing/job.spec.ts @@ -1,8 +1,11 @@ -import { JobResponse, LocalResponse } from "../../../src"; +import { + JobResponse, + LocalResponse, + ErrorResponse, +} from "@/v2/index.js"; import path from "node:path"; -import { V2_RESOURCE_PATH } from "../../index"; +import { V2_RESOURCE_PATH } from "../../index.js"; import { expect } from "chai"; -import { ErrorResponse } from "../../../src/parsing/v2"; const jobPath = path.join(V2_RESOURCE_PATH, "job"); diff --git a/tests/v2/input/localResponse.spec.ts b/tests/v2/parsing/localResponse.spec.ts similarity index 62% rename from tests/v2/input/localResponse.spec.ts rename to tests/v2/parsing/localResponse.spec.ts index 89eae6d14..fd6184686 100644 --- a/tests/v2/input/localResponse.spec.ts +++ b/tests/v2/parsing/localResponse.spec.ts @@ -1,14 +1,15 @@ import * as fs from "node:fs/promises"; import { expect } from "chai"; -import { InferenceResponse, LocalResponse } from "../../../src"; +import { LocalResponse } from "@/v2/index.js"; import path from "path"; -import { V2_RESOURCE_PATH } from "../../index"; +import { V2_PRODUCT_PATH } from "../../index.js"; import { Buffer } from "node:buffer"; +import { ExtractionResponse } from "@/v2/product/index.js"; -const signature: string = "1df388c992d87897fe61dfc56c444c58fc3c7369c31e2b5fd20d867695e93e85"; +const signature: string = "e51bdf80f1a08ed44ee161100fc30a25cb35b4ede671b0a575dc9064a3f5dbf1"; const dummySecretKey: string = "ogNjY44MhvKPGTtVsI8zG82JqWQa68woYQH"; -const filePath: string = path.join(V2_RESOURCE_PATH, "inference/standard_field_types.json"); +const filePath: string = path.join(V2_PRODUCT_PATH, "extraction/standard_field_types.json"); async function assertLocalResponse(localResponse: LocalResponse) { await 
localResponse.init(); @@ -16,8 +17,8 @@ async function assertLocalResponse(localResponse: LocalResponse) { expect(localResponse.isValidHmacSignature(dummySecretKey, "invalid signature")).to.be.false; expect(localResponse.getHmacSignature(dummySecretKey)).to.eq(signature); expect(localResponse.isValidHmacSignature(dummySecretKey, signature)).to.be.true; - const inferenceResponse = await localResponse.deserializeResponse(InferenceResponse); - expect(inferenceResponse).to.be.an.instanceof(InferenceResponse); + const inferenceResponse = await localResponse.deserializeResponse(ExtractionResponse); + expect(inferenceResponse).to.be.an.instanceof(ExtractionResponse); expect(inferenceResponse.inference).to.not.be.null; } @@ -40,9 +41,23 @@ describe("MindeeV2 - Load Local Response", () => { it("should deserialize a prediction.", async () => { const fileObj = await fs.readFile(filePath, { encoding: "utf-8" }); const localResponse = new LocalResponse(fileObj); - const response = await localResponse.deserializeResponse(InferenceResponse); - expect(response).to.be.an.instanceof(InferenceResponse); + const response = await localResponse.deserializeResponse(ExtractionResponse); + expect(response).to.be.an.instanceof(ExtractionResponse); expect(JSON.stringify(response.getRawHttp())).to.eq(JSON.stringify(JSON.parse(fileObj))); }); + + it("loading an inference works on catalog model", async () => { + const jsonPath = path.join( + V2_PRODUCT_PATH, + "extraction", + "financial_document", + "complete.json" + ); + const localResponse = new LocalResponse(jsonPath); + const response: ExtractionResponse = await localResponse.deserializeResponse(ExtractionResponse); + expect(response.inference.model.id).to.equal( + "12345678-1234-1234-1234-123456789abc" + ); + }); }); diff --git a/tests/v2/product/classification.spec.ts b/tests/v2/product/classification.spec.ts new file mode 100644 index 000000000..a8c5e64be --- /dev/null +++ b/tests/v2/product/classification.spec.ts @@ -0,0 +1,18 @@ 
+import { expect } from "chai"; +import path from "node:path"; +import { ClassificationResponse } from "@/v2/product/index.js"; + +import { V2_PRODUCT_PATH } from "../../index.js"; +import { loadV2Response } from "./utils.js"; + + +describe("MindeeV2 - Classification Response", async () => { + it("should load a single result", async () => { + const response = await loadV2Response( + ClassificationResponse, + path.join(V2_PRODUCT_PATH, "classification", "classification_single.json") + ); + const classification = response.inference.result.classification; + expect(classification.documentType).to.equal("invoice"); + }); +}); diff --git a/tests/v2/product/crop.spec.ts b/tests/v2/product/crop.spec.ts new file mode 100644 index 000000000..b0e7ca8ab --- /dev/null +++ b/tests/v2/product/crop.spec.ts @@ -0,0 +1,92 @@ +import { expect } from "chai"; +import path from "node:path"; +import { Polygon } from "@/geometry/index.js"; +import { crop } from "@/v2/product/index.js"; + +import { V2_PRODUCT_PATH } from "../../index.js"; +import { loadV2Response } from "./utils.js"; + +describe("MindeeV2 - Crop Response", async () => { + it("should load a single result", async () => { + const response = await loadV2Response( + crop.CropResponse, + path.join(V2_PRODUCT_PATH, "crop", "crop_single.json") + ); + // Validate inference metadata + expect(response.inference.id).to.equal("12345678-1234-1234-1234-123456789abc"); + expect(response.inference.model.id).to.equal("test-model-id"); + + // Validate file metadata + expect(response.inference.file.name).to.equal("sample.jpeg"); + expect(response.inference.file.pageCount).to.equal(1); + expect(response.inference.file.mimeType).to.equal("image/jpeg"); + + // Validate crops + const crops: crop.CropItem[] = response.inference.result.crops; + expect(crops).to.be.an("array").that.has.lengthOf(1); + + const firstCrop = crops[0]; + expect(firstCrop.objectType).to.equal("invoice"); + expect(firstCrop.location.page).to.equal(0); + + const polygon: 
Polygon = firstCrop.location.polygon!; + expect(polygon.length).to.equal(4); + expect(polygon.length).to.equal(4); + expect(polygon[0][0]).to.equal(0.15); + expect(polygon[0][1]).to.equal(0.254); + expect(polygon[1][0]).to.equal(0.85); + expect(polygon[1][1]).to.equal(0.254); + expect(polygon[2][0]).to.equal(0.85); + expect(polygon[2][1]).to.equal(0.947); + expect(polygon[3][0]).to.equal(0.15); + expect(polygon[3][1]).to.equal(0.947); + }); + + it("should load multiple results", async () => { + const response = await loadV2Response( + crop.CropResponse, + path.join(V2_PRODUCT_PATH, "crop", "crop_multiple.json") + ); + // Validate inference metadata + expect(response.inference.id).to.equal("12345678-1234-1234-1234-123456789abc"); + expect(response.inference.model.id).to.equal("test-model-id"); + + // Validate file metadata + expect(response.inference.file.name).to.equal("default_sample.jpg"); + expect(response.inference.file.pageCount).to.equal(1); + expect(response.inference.file.mimeType).to.equal("image/jpeg"); + + const crops: crop.CropItem[] = response.inference.result.crops; + expect(crops).to.be.an("array").that.has.lengthOf(2); + + // Validate first crop item + const firstCrop: crop.CropItem = crops[0]; + expect(firstCrop.objectType).to.equal("invoice"); + expect(firstCrop.location.page).to.equal(0); + const firstPolygon: Polygon = firstCrop.location.polygon!; + expect(firstPolygon.length).to.equal(4); + expect(firstPolygon[0][0]).to.equal(0.214); + expect(firstPolygon[0][1]).to.equal(0.079); + expect(firstPolygon[1][0]).to.equal(0.476); + expect(firstPolygon[1][1]).to.equal(0.079); + expect(firstPolygon[2][0]).to.equal(0.476); + expect(firstPolygon[2][1]).to.equal(0.979); + expect(firstPolygon[3][0]).to.equal(0.214); + expect(firstPolygon[3][1]).to.equal(0.979); + + // Validate second crop item + const secondCrop: crop.CropItem = crops[1]; + expect(secondCrop.objectType).to.equal("invoice"); + expect(secondCrop.location.page).to.equal(0); + const 
secondPolygon: Polygon = secondCrop.location.polygon!; + expect(secondPolygon.length).to.equal(4); + expect(secondPolygon[0][0]).to.equal(0.547); + expect(secondPolygon[0][1]).to.equal(0.15); + expect(secondPolygon[1][0]).to.equal(0.862); + expect(secondPolygon[1][1]).to.equal(0.15); + expect(secondPolygon[2][0]).to.equal(0.862); + expect(secondPolygon[2][1]).to.equal(0.97); + expect(secondPolygon[3][0]).to.equal(0.547); + expect(secondPolygon[3][1]).to.equal(0.97); + }); +}); diff --git a/tests/v2/parsing/inference.spec.ts b/tests/v2/product/extraction.spec.ts similarity index 83% rename from tests/v2/parsing/inference.spec.ts rename to tests/v2/product/extraction.spec.ts index 080610cf0..977c966bd 100644 --- a/tests/v2/parsing/inference.spec.ts +++ b/tests/v2/product/extraction.spec.ts @@ -1,28 +1,31 @@ -import { expect } from "chai"; import path from "node:path"; -import { LocalResponse, InferenceResponse, RawText, RagMetadata } from "../../../src"; -import { FieldConfidence, ListField, ObjectField, SimpleField } from "../../../src/parsing/v2/field"; import { promises as fs } from "node:fs"; -import { Polygon } from "../../../src/geometry"; -import { V2_RESOURCE_PATH } from "../../index"; - -const findocPath = path.join(V2_RESOURCE_PATH, "products", "financial_document"); -const inferencePath = path.join(V2_RESOURCE_PATH, "inference"); -const deepNestedFieldPath = path.join(inferencePath, "deep_nested_fields.json"); -const standardFieldPath = path.join(inferencePath, "standard_field_types.json"); -const standardFieldRstPath = path.join(inferencePath, "standard_field_types.rst"); +import { expect } from "chai"; +import { Polygon } from "@/geometry/index.js"; +import { + FieldConfidence, + ListField, + ObjectField, + SimpleField, +} from "@/v2/parsing/inference/field/index.js"; +import { field } from "@/v2/parsing/index.js"; +import { ExtractionResponse } from "@/v2/product/index.js"; + +import { V2_PRODUCT_PATH } from "../../index.js"; +import { loadV2Response } 
from "./utils.js"; + +const findocPath = path.join(V2_PRODUCT_PATH, "extraction", "financial_document"); +const extractionPath = path.join(V2_PRODUCT_PATH, "extraction"); +const deepNestedFieldPath = path.join(extractionPath, "deep_nested_fields.json"); +const standardFieldPath = path.join(extractionPath, "standard_field_types.json"); +const standardFieldRstPath = path.join(extractionPath, "standard_field_types.rst"); const locationFieldPath = path.join(findocPath, "complete_with_coordinates.json"); -async function loadV2Inference(resourcePath: string): Promise { - const localResponse = new LocalResponse(resourcePath); - await localResponse.init(); - return localResponse.deserializeResponse(InferenceResponse); -} - -describe("MindeeV2 - Inference Response", async () => { +describe("MindeeV2 - Extraction Response", async () => { describe("Financial Document", async () => { it("should load a blank inference with valid properties", async () => { - const response = await loadV2Inference( + const response = await loadV2Response( + ExtractionResponse, path.join(findocPath, "blank.json") ); const fields = response.inference.result.fields; @@ -54,7 +57,8 @@ describe("MindeeV2 - Inference Response", async () => { }); it("should load a complete inference with valid properties", async () => { - const response = await loadV2Inference( + const response = await loadV2Response( + ExtractionResponse, path.join(findocPath, "complete.json") ); const inference = response.inference; @@ -119,12 +123,20 @@ describe("MindeeV2 - Inference Response", async () => { describe("Deeply Nested", async () => { it("should load a deep nested object", async () => { - const response = await loadV2Inference(deepNestedFieldPath); + const response = await loadV2Response( + ExtractionResponse, deepNestedFieldPath + ); const fields = response.inference.result.fields; expect(fields.get("field_simple")).to.be.an.instanceof(SimpleField); expect(fields.get("field_object")).to.be.an.instanceof(ObjectField); 
const fieldObject = fields.getObjectField("field_object"); + expect(fieldObject.getSimpleField("sub_object_simple")).to.be.an.instanceof(SimpleField); + expect(fieldObject.getListField("sub_object_list")).to.be.an.instanceof(ListField); + expect(fieldObject.getObjectField("sub_object_object")).to.be.an.instanceof(ObjectField); + expect(fieldObject.simpleFields.size).to.eq(1); + expect(fieldObject.listFields.size).to.eq(1); + expect(fieldObject.objectFields.size).to.eq(1); const lvl1 = fieldObject.fields; expect(lvl1.get("sub_object_list")).to.be.an.instanceof(ListField); @@ -155,7 +167,9 @@ describe("MindeeV2 - Inference Response", async () => { describe("Standard Field Types", async () => { it("should recognize simple fields", async () => { - const response = await loadV2Inference(standardFieldPath); + const response = await loadV2Response( + ExtractionResponse, standardFieldPath + ); const fields = response.inference.result.fields; expect(fields.get("field_simple_string")).to.be.instanceOf(SimpleField); @@ -201,7 +215,9 @@ describe("MindeeV2 - Inference Response", async () => { }); it("should recognize simple list fields", async () => { - const response = await loadV2Inference(standardFieldPath); + const response = await loadV2Response( + ExtractionResponse, standardFieldPath + ); const fields = response.inference.result.fields; expect(fields.get("field_simple_list")).to.be.instanceOf(ListField); @@ -216,7 +232,9 @@ describe("MindeeV2 - Inference Response", async () => { }); it("should recognize object fields", async () => { - const response = await loadV2Inference(standardFieldPath); + const response = await loadV2Response( + ExtractionResponse, standardFieldPath + ); const fields = response.inference.result.fields; expect(fields.get("field_object")).to.be.instanceOf(ObjectField); @@ -236,7 +254,9 @@ describe("MindeeV2 - Inference Response", async () => { }); it("should recognize object list fields", async () => { - const response = await 
loadV2Inference(standardFieldPath); + const response = await loadV2Response( + ExtractionResponse, standardFieldPath + ); const fields = response.inference.result.fields; expect(fields.get("field_object_list")).to.be.instanceOf(ListField); @@ -262,13 +282,13 @@ describe("MindeeV2 - Inference Response", async () => { describe("Raw Text", async () => { it("raw text should be exposed", async () => { - const response = await loadV2Inference( - path.join(inferencePath, "raw_texts.json") + const response = await loadV2Response( + ExtractionResponse, path.join(extractionPath, "raw_texts.json") ); expect(response.inference.result.rag).to.be.undefined; const rawText = response.inference.result.rawText; - expect(rawText).to.be.instanceOf(RawText); + expect(rawText).to.be.instanceOf(field.RawText); const pages = rawText?.pages; if (pages === undefined) throw new Error("pages is undefined"); @@ -281,27 +301,29 @@ describe("MindeeV2 - Inference Response", async () => { describe("RAG Metadata", async () => { it("RAG metadata when matched", async () => { - const response = await loadV2Inference( - path.join(inferencePath, "rag_matched.json") + const response = await loadV2Response( + ExtractionResponse, path.join(extractionPath, "rag_matched.json") ); const rag = response.inference.result.rag; - expect(rag).to.be.instanceOf(RagMetadata); + expect(rag).to.be.instanceOf(field.RagMetadata); expect(rag?.retrievedDocumentId).to.eq("12345abc-1234-1234-1234-123456789abc"); }); it("RAG metadata when not matched", async () => { - const response = await loadV2Inference( - path.join(inferencePath, "rag_not_matched.json") + const response = await loadV2Response( + ExtractionResponse, path.join(extractionPath, "rag_not_matched.json") ); const rag = response.inference.result.rag; - expect(rag).to.be.instanceOf(RagMetadata); + expect(rag).to.be.instanceOf(field.RagMetadata); expect(rag?.retrievedDocumentId).to.be.undefined; }); }); describe("RST Display", async () => { it("to be properly 
exposed", async () => { - const response = await loadV2Inference(standardFieldPath); + const response = await loadV2Response( + ExtractionResponse, standardFieldPath + ); const rstString = await fs.readFile(standardFieldRstPath, "utf8"); expect(response.inference).to.not.be.null; @@ -311,8 +333,9 @@ describe("MindeeV2 - Inference Response", async () => { describe("Field Locations and Confidence", async () => { it("to be properly exposed", async () => { - const response = await loadV2Inference(locationFieldPath); - + const response = await loadV2Response( + ExtractionResponse, locationFieldPath + ); expect(response.inference).to.not.be.null; const dateField = response.inference.result.fields.get("date") as SimpleField; diff --git a/tests/v2/product/extractionParameter.spec.ts b/tests/v2/product/extractionParameter.spec.ts new file mode 100644 index 000000000..3318210ff --- /dev/null +++ b/tests/v2/product/extractionParameter.spec.ts @@ -0,0 +1,66 @@ +import { StringDict } from "@/parsing/index.js"; +import path from "path"; +import { V2_PRODUCT_PATH } from "../../index.js"; +import { expect } from "chai"; +import { promises as fs } from "fs"; +import { extraction } from "@/v2/product/index.js"; + +let expectedDataSchemaDict: StringDict; +let expectedDataSchemaString: string; +let expectedDataSchemaObject: extraction.params.DataSchema; + +describe("MindeeV2 - Extraction Parameter", () => { + const modelIdValue = "test-model-id"; + + describe("Polling Options", () => { + it("should provide sensible defaults", () => { + + const paramsInstance = new extraction.ExtractionParameters({ + modelId: modelIdValue, + }); + expect(paramsInstance.modelId).to.equal(modelIdValue); + expect(paramsInstance.getValidatedPollingOptions()).to.deep.equal({ + delaySec: 1.5, + initialDelaySec: 2, + maxRetries: 80 + }); + }); + }); + + describe("Data Schema", () => { + before(async () => { + const fileContents = await fs.readFile( + path.join(V2_PRODUCT_PATH, 
"extraction/data_schema_replace_param.json") + ); + expectedDataSchemaDict = JSON.parse(fileContents.toString()); + expectedDataSchemaString = JSON.stringify(expectedDataSchemaDict); + expectedDataSchemaObject = new extraction.params.DataSchema(expectedDataSchemaDict); + }); + + it("shouldn't replace when unset", () => { + const params = new extraction.ExtractionParameters({ + modelId: modelIdValue, + }); + expect(params.dataSchema).to.be.undefined; + }); + + it("should equate no matter the type", () => { + const paramsDict = new extraction.ExtractionParameters({ + modelId: modelIdValue, + dataSchema: expectedDataSchemaDict, + }); + const paramsString = new extraction.ExtractionParameters({ + modelId: modelIdValue, + dataSchema: expectedDataSchemaString, + }); + const paramsObject = new extraction.ExtractionParameters({ + modelId: modelIdValue, + dataSchema: expectedDataSchemaObject, + }); + + expect(JSON.stringify(paramsDict.dataSchema)).to.eq(expectedDataSchemaString); + expect(paramsObject.dataSchema?.toString()).to.eq(expectedDataSchemaString); + expect(paramsString.dataSchema?.toString()).to.eq(expectedDataSchemaString); + }); + }); +}); diff --git a/tests/v2/product/ocr.spec.ts b/tests/v2/product/ocr.spec.ts new file mode 100644 index 000000000..4135024cc --- /dev/null +++ b/tests/v2/product/ocr.spec.ts @@ -0,0 +1,60 @@ +import { expect } from "chai"; +import path from "node:path"; +import { ocr } from "@/v2/product/index.js"; +import { Polygon } from "@/geometry/index.js"; + +import { V2_PRODUCT_PATH } from "../../index.js"; +import { loadV2Response } from "./utils.js"; + + +describe("MindeeV2 - OCR Response", async () => { + it("should load a single result", async () => { + const response = await loadV2Response( + ocr.OcrResponse, + path.join(V2_PRODUCT_PATH, "ocr", "ocr_single.json") + ); + // Validate inference metadata + expect(response.inference.id).to.equal("12345678-1234-1234-1234-123456789abc"); + 
expect(response.inference.model.id).to.equal("test-model-id"); + + // Validate file metadata + expect(response.inference.file.name).to.equal("default_sample.jpg"); + expect(response.inference.file.pageCount).to.equal(1); + expect(response.inference.file.mimeType).to.equal("image/jpeg"); + + // Validate pages + const pages: ocr.OcrPage[] = response.inference.result.pages; + expect(pages).to.be.an("array").that.has.lengthOf(1); + + // Validate first page + const firstPage: ocr.OcrPage = pages[0]; + expect(firstPage.words).to.be.an("array"); + + // Check first word + const firstWord: ocr.OcrWord = firstPage.words[0]; + expect(firstWord.content).to.equal("Shipper:"); + const firstPolygon: Polygon = firstWord.polygon; + expect(firstPolygon.length).to.equal(4); + + // Check another word (5th word: "INC.") + const fifthWord: ocr.OcrWord = firstPage.words[4]; + expect(fifthWord.content).to.equal("INC."); + const fifthPolygon: Polygon = fifthWord.polygon; + expect(fifthPolygon.length).to.equal(4); + }); + + it("should load multiple results", async () => { + const response = await loadV2Response( + ocr.OcrResponse, + path.join(V2_PRODUCT_PATH, "ocr", "ocr_multiple.json") + ); + const pages: ocr.OcrPage[] = response.inference.result.pages; + expect(pages).to.be.an("array").that.has.lengthOf(3); + + // Validate that each page has words and content + pages.forEach((page: ocr.OcrPage): void => { + expect(page.words).to.be.an("array"); + expect(page.content).to.be.a("string"); + }); + }); +}); diff --git a/tests/v2/product/split.spec.ts b/tests/v2/product/split.spec.ts new file mode 100644 index 000000000..270428654 --- /dev/null +++ b/tests/v2/product/split.spec.ts @@ -0,0 +1,46 @@ +import { expect } from "chai"; +import path from "node:path"; +import { split } from "@/v2/product/index.js"; + +import { V2_PRODUCT_PATH } from "../../index.js"; +import { loadV2Response } from "./utils.js"; + + +describe("MindeeV2 - Split Response", async () => { + it("should load a single result", 
async () => { + const response = await loadV2Response( + split.SplitResponse, + path.join(V2_PRODUCT_PATH, "split", "split_single.json") + ); + const splits: split.SplitRange[] = response.inference.result.splits; + expect(splits).to.be.an("array").that.has.lengthOf(1); + + const firstSplit: split.SplitRange = splits[0]; + expect(firstSplit.documentType).to.equal("receipt"); + + expect(firstSplit.pageRange).to.be.an("array").that.has.lengthOf(2); + expect(firstSplit.pageRange[0]).to.equal(0); + expect(firstSplit.pageRange[1]).to.equal(0); + }); + + it("should load multiple results", async () => { + const response = await loadV2Response( + split.SplitResponse, + path.join(V2_PRODUCT_PATH, "split", "split_multiple.json") + ); + const splits: split.SplitRange[] = response.inference.result.splits; + expect(splits).to.be.an("array").that.has.lengthOf(3); + + const firstSplit: split.SplitRange = splits[0]; + expect(firstSplit.documentType).to.equal("invoice"); + expect(firstSplit.pageRange).to.be.an("array").that.has.lengthOf(2); + expect(firstSplit.pageRange[0]).to.equal(0); + expect(firstSplit.pageRange[1]).to.equal(0); + + const secondSplit: split.SplitRange = splits[1]; + expect(secondSplit.documentType).to.equal("invoice"); + expect(secondSplit.pageRange).to.be.an("array").that.has.lengthOf(2); + expect(secondSplit.pageRange[0]).to.equal(1); + expect(secondSplit.pageRange[1]).to.equal(3); + }); +}); diff --git a/tests/v2/product/splitParameter.spec.ts b/tests/v2/product/splitParameter.spec.ts new file mode 100644 index 000000000..83aee3dde --- /dev/null +++ b/tests/v2/product/splitParameter.spec.ts @@ -0,0 +1,36 @@ +import { expect } from "chai"; +import { split } from "@/v2/product/index.js"; + +describe("MindeeV2 - Split Parameter", () => { + const modelIdValue = "test-model-id"; + + describe("Polling Options", () => { + it("should provide sensible defaults", () => { + + const paramsInstance = new split.SplitParameters({ + modelId: modelIdValue, + }); + 
expect(paramsInstance.modelId).to.equal(modelIdValue); + expect(paramsInstance.getValidatedPollingOptions()).to.deep.equal({ + delaySec: 1.5, + initialDelaySec: 2, + maxRetries: 80 + }); + }); + }); + + describe("Invalid Options", () => { + it("should not set invalid options", () => { + + const paramsInstance = new split.SplitParameters({ + modelId: modelIdValue, + // @ts-expect-error - rag is not a valid option + rag: true, + }); + expect(paramsInstance.modelId).to.equal(modelIdValue); + // @ts-expect-error - rag is not a valid option + expect(paramsInstance.rag).to.be.undefined; + }); + }); + +}); diff --git a/tests/v2/product/utils.ts b/tests/v2/product/utils.ts new file mode 100644 index 000000000..a789ba1f7 --- /dev/null +++ b/tests/v2/product/utils.ts @@ -0,0 +1,10 @@ +import { BaseResponse, ResponseConstructor } from "@/v2/parsing/index.js"; +import { LocalResponse } from "@/v2/index.js"; + +export async function loadV2Response( + responseClass: ResponseConstructor, + resourcePath: string +): Promise { + const localResponse = new LocalResponse(resourcePath); + return localResponse.deserializeResponse(responseClass); +} diff --git a/tsconfig.json b/tsconfig.json index 2b4a5b3c2..caabb86d5 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,21 +1,27 @@ { "compilerOptions": { - "lib": ["ES2020"], - "module": "commonjs", - "target": "ES2020", - - "strict": true, + "baseUrl": "./", + "declaration": true, "esModuleInterop": true, - "skipLibCheck": true, "forceConsistentCasingInFileNames": true, - "moduleResolution": "node", - - "resolveJsonModule": true, - "declaration": true, + "isolatedModules": true, + "lib": [ + "ES2020" + ], + "module": "Node18", + "moduleResolution": "Node16", + "newLine": "lf", "outDir": "dist", - "baseUrl": "./", + "paths": { + "@/*": [ + "src/*" + ] + }, + "resolveJsonModule": true, + "skipLibCheck": true, "sourceMap": false, - "newLine": "lf", + "strict": true, + "target": "ES2020", }, "include": ["./src/**/*", "./bin/**/*", "LICENSE"], 
"exclude": ["./node_modules/", "./dist/", "./tests/"]