Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
7418a9f
Replace WebDAV Browser webview with native TreeView
clavery Feb 20, 2026
7a82677
Add FileSystemProvider, New File command, and workspace folder mount
clavery Feb 20, 2026
d670a05
Mount individual folders instead of entire WebDAV root
clavery Feb 20, 2026
1f663c7
wip content
clavery Feb 21, 2026
ed309b1
Add content library explorer to VS Code extension
clavery Feb 21, 2026
94f508d
Add VS Code extension CI tests and VSIX release publishing
clavery Feb 21, 2026
a71b468
wip
clavery Feb 22, 2026
1378386
Centralize config resolution in VS Code extension
clavery Feb 22, 2026
15ff8ca
Add changeset for SDK plugin module
clavery Feb 22, 2026
e2a5a68
Replace WebDAV Browser webview with native TreeView
clavery Feb 20, 2026
fb868fa
Add FileSystemProvider, New File command, and workspace folder mount
clavery Feb 20, 2026
2608755
Mount individual folders instead of entire WebDAV root
clavery Feb 20, 2026
224efbd
Merge remote-tracking branch 'origin/feature/vsc-library-explorer' in…
clavery Feb 22, 2026
e326678
Merge remote-tracking branch 'origin/feature/generic-plugins-hooks' i…
clavery Feb 22, 2026
996d73c
fix config resolution for content tree
clavery Feb 22, 2026
5a50f7c
Merge remote-tracking branch 'origin/feature/vsc-ci' into feature/vsc…
clavery Feb 22, 2026
46f3b42
fix review findings: config reset, writeFile flags, archive stripping…
clavery Feb 22, 2026
2f5822a
bug fixing import zip conventions and adding multi-select
clavery Feb 23, 2026
9725008
Merge remote-tracking branch 'origin/main' into feature/vsc-ext-nativ…
clavery Feb 24, 2026
7f8289a
changeset: downgrade sdk-plugin-module to patch
clavery Feb 24, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/sdk-plugin-module.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@salesforce/b2c-tooling-sdk': patch
---

Add `@salesforce/b2c-tooling-sdk/plugins` module for discovering and loading b2c-cli plugins outside of oclif. Enables the VS Code extension and other non-CLI consumers to use installed plugins (keychain managers, config sources, middleware) without depending on `@oclif/core`.
64 changes: 64 additions & 0 deletions .github/workflows/ci-vs-extension.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
name: VS Extension Tests

on:
  push:
    branches:
      - main
    paths:
      - 'packages/b2c-vs-extension/**'
  pull_request:
    branches:
      - main
    paths:
      - 'packages/b2c-vs-extension/**'

permissions:
  contents: read

env:
  SFCC_DISABLE_TELEMETRY: ${{ vars.SFCC_DISABLE_TELEMETRY }}

jobs:
  test:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [22.x]

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.17.1

      - name: Get pnpm store directory
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV

      - name: Setup pnpm cache
        uses: actions/cache@v4
        with:
          path: ${{ env.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Build packages
        run: pnpm -r run build

      - name: Run VS Extension tests
        working-directory: packages/b2c-vs-extension
        # VS Code integration tests need a display server on headless CI runners
        run: xvfb-run -a pnpm run test
7 changes: 3 additions & 4 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -80,12 +80,11 @@ jobs:
working-directory: packages/b2c-cli
run: pnpm run pretest && pnpm run test:ci && pnpm run lint

- name: Run VS Extension lint
- name: Run VS Extension checks
id: vs-extension-test
if: always() && steps.vs-extension-test.conclusion != 'cancelled'
if: always() && steps.cli-test.conclusion != 'cancelled'
working-directory: packages/b2c-vs-extension
# Testing not currently supported on CI
run: pnpm run lint
run: pnpm run typecheck:agent && pnpm run lint

- name: Test Report
uses: dorny/test-reporter@fe45e9537387dac839af0d33ba56eed8e24189e8 # v2.3.0
Expand Down
54 changes: 54 additions & 0 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,17 @@ jobs:
check_package "@salesforce/b2c-cli" "packages/b2c-cli" "cli"
check_package "@salesforce/b2c-dx-mcp" "packages/b2c-dx-mcp" "mcp"

# VS Code extension — compare against git tags (not npm)
LOCAL_VSX_VERSION=$(node -p "require('./packages/b2c-vs-extension/package.json').version")
LAST_VSX_TAG=$(git tag -l "b2c-vs-extension@*" --sort=-v:refname | head -1 | sed 's/b2c-vs-extension@//')
echo "b2c-vs-extension: local=${LOCAL_VSX_VERSION} tag=${LAST_VSX_TAG:-none}"
if [ "$LOCAL_VSX_VERSION" != "$LAST_VSX_TAG" ]; then
echo "publish_vsx=true" >> $GITHUB_OUTPUT
echo "version_vsx=${LOCAL_VSX_VERSION}" >> $GITHUB_OUTPUT
else
echo "publish_vsx=false" >> $GITHUB_OUTPUT
fi

# Check if docs version changed (private package — not published to npm, uses git tag)
DOCS_VERSION=$(node -p "require('./docs/package.json').version")
if git rev-parse "docs@${DOCS_VERSION}" >/dev/null 2>&1; then
Expand Down Expand Up @@ -204,6 +215,11 @@ jobs:
pnpm --filter @salesforce/b2c-dx-mcp publish --provenance --no-git-checks
--tag ${{ steps.release-type.outputs.type == 'nightly' && 'nightly' || steps.packages.outputs.tag_mcp }}

- name: Package VS Code extension
if: steps.release-type.outputs.type == 'stable' && steps.packages.outputs.publish_vsx == 'true'
working-directory: packages/b2c-vs-extension
run: pnpm run package

- name: Create git tags
if: steps.release-type.outputs.type == 'stable' && steps.changesets.outputs.skip != 'true' && steps.quick-check.outputs.skip != 'true'
run: |
Expand All @@ -230,6 +246,12 @@ jobs:
TAGS_CREATED="$TAGS_CREATED $TAG"
fi

if [[ "${{ steps.packages.outputs.publish_vsx }}" == "true" ]]; then
TAG="b2c-vs-extension@${{ steps.packages.outputs.version_vsx }}"
git tag "$TAG"
TAGS_CREATED="$TAGS_CREATED $TAG"
fi

if [ -n "$TAGS_CREATED" ]; then
git push origin $TAGS_CREATED
echo "Created tags:$TAGS_CREATED"
Expand Down Expand Up @@ -279,6 +301,13 @@ jobs:
echo ""
fi

if [[ "${{ steps.packages.outputs.publish_vsx }}" == "true" ]]; then
echo "## b2c-vs-extension@${{ steps.packages.outputs.version_vsx }}"
echo ""
extract_latest packages/b2c-vs-extension/CHANGELOG.md
echo ""
fi

if [[ "${{ steps.packages.outputs.publish_docs }}" == "true" && -f docs/CHANGELOG.md ]]; then
echo "## Documentation"
echo ""
Expand All @@ -297,6 +326,8 @@ jobs:
RELEASE_TAG="@salesforce/b2c-tooling-sdk@${{ steps.packages.outputs.version_sdk }}"
elif [[ "${{ steps.packages.outputs.publish_mcp }}" == "true" ]]; then
RELEASE_TAG="@salesforce/b2c-dx-mcp@${{ steps.packages.outputs.version_mcp }}"
elif [[ "${{ steps.packages.outputs.publish_vsx }}" == "true" ]]; then
RELEASE_TAG="b2c-vs-extension@${{ steps.packages.outputs.version_vsx }}"
elif [[ "${{ steps.packages.outputs.publish_docs }}" == "true" ]]; then
RELEASE_TAG="docs@${{ steps.packages.outputs.version_docs }}"
else
Expand Down Expand Up @@ -330,6 +361,8 @@ jobs:
RELEASE_TAG="@salesforce/b2c-tooling-sdk@${{ steps.packages.outputs.version_sdk }}"
elif [[ "${{ steps.packages.outputs.publish_mcp }}" == "true" ]]; then
RELEASE_TAG="@salesforce/b2c-dx-mcp@${{ steps.packages.outputs.version_mcp }}"
elif [[ "${{ steps.packages.outputs.publish_vsx }}" == "true" ]]; then
RELEASE_TAG="b2c-vs-extension@${{ steps.packages.outputs.version_vsx }}"
else
echo "No package release to upload to"
exit 0
Expand All @@ -339,6 +372,27 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Upload VS Code extension to release
if: steps.release-type.outputs.type == 'stable' && steps.packages.outputs.publish_vsx == 'true'
run: |
# Determine the release tag (same logic as Create GitHub Release)
if [[ "${{ steps.packages.outputs.publish_cli }}" == "true" ]]; then
RELEASE_TAG="@salesforce/b2c-cli@${{ steps.packages.outputs.version_cli }}"
elif [[ "${{ steps.packages.outputs.publish_sdk }}" == "true" ]]; then
RELEASE_TAG="@salesforce/b2c-tooling-sdk@${{ steps.packages.outputs.version_sdk }}"
elif [[ "${{ steps.packages.outputs.publish_mcp }}" == "true" ]]; then
RELEASE_TAG="@salesforce/b2c-dx-mcp@${{ steps.packages.outputs.version_mcp }}"
elif [[ "${{ steps.packages.outputs.publish_vsx }}" == "true" ]]; then
RELEASE_TAG="b2c-vs-extension@${{ steps.packages.outputs.version_vsx }}"
else
echo "No release to upload to"
exit 0
fi

gh release upload "$RELEASE_TAG" packages/b2c-vs-extension/*.vsix
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Trigger documentation deployment
if: >-
steps.release-type.outputs.type == 'stable' && steps.changesets.outputs.skip != 'true' && steps.quick-check.outputs.skip != 'true'
Expand Down
11 changes: 11 additions & 0 deletions packages/b2c-tooling-sdk/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -266,6 +266,17 @@
"default": "./dist/cjs/scaffold/index.js"
}
},
"./plugins": {
"development": "./src/plugins/index.ts",
"import": {
"types": "./dist/esm/plugins/index.d.ts",
"default": "./dist/esm/plugins/index.js"
},
"require": {
"types": "./dist/cjs/plugins/index.d.ts",
"default": "./dist/cjs/plugins/index.js"
}
},
"./test-utils": {
"development": "./src/test-utils/index.ts",
"import": {
Expand Down
13 changes: 0 additions & 13 deletions packages/b2c-tooling-sdk/src/cli/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -255,19 +255,6 @@ export function loadConfig(
sourcesAfter: pluginSources.after,
});

// Log source summary
for (const source of resolved.sources) {
logger.trace(
{
source: source.name,
location: source.location,
fields: source.fields,
fieldsIgnored: source.fieldsIgnored,
},
`[${source.name}] Contributed fields`,
);
}

// Log warnings (at warn level so users can see configuration issues)
for (const warning of resolved.warnings) {
logger.warn({warning}, `[Config] ${warning.message}`);
Expand Down
12 changes: 12 additions & 0 deletions packages/b2c-tooling-sdk/src/config/resolver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
*/
import type {AuthCredentials} from '../auth/types.js';
import type {B2CInstance} from '../instance/index.js';
import {getLogger} from '../logging/logger.js';
import {mergeConfigsWithProtection, getPopulatedFields, createInstanceFromConfig} from './mapping.js';
import {DwJsonSource, MobifySource, PackageJsonSource} from './sources/index.js';
import type {
Expand Down Expand Up @@ -221,6 +222,17 @@ export class ConfigResolver {
fieldsIgnored: fieldsIgnored.length > 0 ? fieldsIgnored : undefined,
});

const logger = getLogger();
logger.trace(
{
source: source.name,
location,
fields,
fieldsIgnored: fieldsIgnored.length > 0 ? fieldsIgnored : undefined,
},
`[${source.name}] Contributed fields`,
);

// Enrich options with accumulated config values for subsequent sources.
// Only set if not already provided via CLI options.
if (!enrichedOptions.accountManagerHost && baseConfig.accountManagerHost) {
Expand Down
8 changes: 6 additions & 2 deletions packages/b2c-tooling-sdk/src/operations/content/library.ts
Original file line number Diff line number Diff line change
Expand Up @@ -118,10 +118,14 @@ function processContent(
}
}

// Recurse into content-links
// Recurse into content-links (sorted by position)
const contentLinks = content['content-links'] as Array<Record<string, unknown>> | undefined;
if (contentLinks?.[0]?.['content-link']) {
const links = contentLinks[0]['content-link'] as Array<Record<string, unknown>>;
const links = (contentLinks[0]['content-link'] as Array<Record<string, unknown>>).slice().sort((a, b) => {
const posA = parseFloat((a['position'] as string[] | undefined)?.[0] ?? 'Infinity');
const posB = parseFloat((b['position'] as string[] | undefined)?.[0] ?? 'Infinity');
return posA - posB;
});
for (const link of links) {
const linkAttrs = link['$'] as Record<string, string>;
const linkId = linkAttrs['content-id'];
Expand Down
73 changes: 65 additions & 8 deletions packages/b2c-tooling-sdk/src/operations/jobs/site-archive.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,14 @@ export interface SiteArchiveImportResult {
* - A Buffer containing zip data
* - A filename already on the instance (in Impex/src/instance/)
*
* **Buffer handling:** When passing a Buffer, the `archiveName` option controls
* the contract:
* - **Without `archiveName`:** The buffer should contain archive entries without
* a root directory (e.g. `libraries/mylib/library.xml`). The SDK generates
* an archive name and wraps the contents under it.
* - **With `archiveName`:** The buffer must already be correctly structured with
* `archiveName/` as the top-level directory. It is uploaded as-is.
*
* @param instance - B2C instance to import to
* @param target - Source to import (directory path, zip file path, Buffer, or remote filename)
* @param options - Import options
Expand All @@ -64,9 +72,17 @@ export interface SiteArchiveImportResult {
* // Import from a zip file
* const result = await siteArchiveImport(instance, './export.zip');
*
* // Import from a buffer
* const zipBuffer = await fs.promises.readFile('./export.zip');
* const result = await siteArchiveImport(instance, zipBuffer, {
* // Import from a buffer (SDK wraps contents automatically)
* const zip = new JSZip();
* zip.file('libraries/mylib/library.xml', xmlContent);
* const buffer = await zip.generateAsync({type: 'nodebuffer'});
* const result = await siteArchiveImport(instance, buffer);
*
* // Import from a buffer with explicit archive name (caller owns structure)
* const zip = new JSZip();
* zip.file('my-import/libraries/mylib/library.xml', xmlContent);
* const buffer = await zip.generateAsync({type: 'nodebuffer'});
* const result = await siteArchiveImport(instance, buffer, {
* archiveName: 'my-import'
* });
*
Expand Down Expand Up @@ -94,12 +110,20 @@ export async function siteArchiveImport(
zipFilename = target.remoteFilename;
needsUpload = false;
} else if (Buffer.isBuffer(target)) {
// Buffer - use provided archive name
if (!archiveName) {
throw new Error('archiveName is required when importing from a Buffer');
if (archiveName) {
// Caller provides name — buffer must already contain the correct
// top-level directory structure (archiveName/...).
const baseName = archiveName.endsWith('.zip') ? archiveName.slice(0, -4) : archiveName;
zipFilename = `${baseName}.zip`;
archiveContent = target;
} else {
// No name — SDK generates one and wraps the buffer contents under it.
// The buffer should contain archive entries without a root directory
// (e.g. libraries/mylib/library.xml, sites/RefArch/site.xml).
const archiveDirName = `import-${Date.now()}`;
zipFilename = `${archiveDirName}.zip`;
archiveContent = await wrapArchiveContents(target, archiveDirName, logger);
}
zipFilename = archiveName.endsWith('.zip') ? archiveName : `${archiveName}.zip`;
archiveContent = target;
} else {
// File path - check if directory or zip file
const targetPath = target as string;
Expand Down Expand Up @@ -236,6 +260,39 @@ async function addDirectoryToZip(zipFolder: JSZip, dirPath: string): Promise<voi
}
}

/**
* Wraps the contents of a zip buffer under a new top-level directory.
*
* The input buffer should contain archive entries without a root directory
* (e.g. `libraries/mylib/library.xml`). The output will have all entries
* nested under `archiveDirName/` (e.g. `archiveDirName/libraries/mylib/library.xml`).
*/
async function wrapArchiveContents(
buffer: Buffer,
archiveDirName: string,
logger: ReturnType<typeof getLogger>,
): Promise<Buffer> {
const zip = await JSZip.loadAsync(buffer);

logger.debug({archiveDirName}, `Wrapping archive contents under ${archiveDirName}/`);

const newZip = new JSZip();
const rootFolder = newZip.folder(archiveDirName)!;

for (const [filePath, entry] of Object.entries(zip.files)) {
if (!entry.dir) {
const content = await entry.async('nodebuffer');
rootFolder.file(filePath, content);
}
}

return newZip.generateAsync({
type: 'nodebuffer',
compression: 'DEFLATE',
compressionOptions: {level: 9},
});
}

/**
* Configuration for sites in export.
*/
Expand Down
Loading
Loading