Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions bin/pos-cli-sync.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,6 @@ program
// Continue with watch mode
const { watcher, liveReloadServer } = await watchStart(env, params.directAssetsUpload, params.livereload);

setupGracefulShutdown({ watcher, liveReloadServer, context: 'Sync' });

if (params.open) {
try {
const open = (await import('open')).default;
Expand All @@ -64,6 +62,8 @@ program
}
}
}

setupGracefulShutdown({ watcher, liveReloadServer, context: 'Sync' });
});

program.parse(process.argv);
3 changes: 2 additions & 1 deletion lib/assets.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,9 @@ const deployAssets = async gateway => {
const manifest = await manifestGenerate();
logger.Debug(manifest);
files.writeJSON('tmp/assets_manifest.json', manifest);
await gateway.sendManifest(manifest);
const response = await gateway.sendManifest(manifest);
logger.Debug('Uploading assets');
return response;
} catch (e) {
logger.Debug(e);
logger.Debug(e.message);
Expand Down
18 changes: 15 additions & 3 deletions lib/deploy/directAssetsUploadStrategy.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,18 @@ const deployAndUploadAssets = async (authData) => {
logger.Warn('There are no assets to deploy, skipping.');
return;
}
await deployAssets(new Gateway(authData));
return deployAssets(new Gateway(authData));
};

// Log a one-line summary of the direct-assets-upload server response.
// Expects `response.report` to carry numeric `upserted`/`deleted` counts
// (defaulting to 0 when absent). Prints nothing when there is no report
// or when both counts are zero.
const printAssetsReport = (response) => {
  if (!response || !response.report) return;

  const { upserted = 0, deleted = 0 } = response.report;
  const summary = [];
  if (upserted > 0) summary.push(`${upserted} upserted`);
  if (deleted > 0) summary.push(`${deleted} deleted`);

  if (summary.length === 0) return;
  logger.Success(`Assets: ${summary.join(', ')}`, { hideTimestamp: true });
};

const strategy = async ({ env, authData, _params }) => {
Expand All @@ -33,14 +44,15 @@ const strategy = async ({ env, authData, _params }) => {
spinner.start();

const t0 = performance.now();
const assetsResult = await deployAndUploadAssets(authData);
printAssetsReport(assetsResult);

if (numberOfFiles > 0) {
await uploadArchive(env);
} else {
logger.Warn('There are no files in release file, skipping.');
}

await deployAndUploadAssets(authData);

spinner.succeed(`Deploy succeeded after ${duration(t0, performance.now())}`);
} catch (e) {
if (ServerError.isNetworkError(e)) {
Expand Down
33 changes: 33 additions & 0 deletions lib/push.js
Original file line number Diff line number Diff line change
@@ -1,12 +1,44 @@
import fs from 'fs';
import { performance } from 'perf_hooks';
import chalk from 'chalk';

import logger from './logger.js';
import report from './logger/report.js';
import Gateway from '../lib/proxy.js';
import duration from '../lib/duration.js';
let gateway;

// Normalize a report entry to a numeric count: arrays count their
// elements, numbers pass through unchanged, anything else becomes 0.
const toCount = (value) => {
  if (Array.isArray(value)) return value.length;
  return typeof value === 'number' ? value : 0;
};

// Render a human-readable summary of the server's deploy report.
// `deployReport` maps category names to objects whose `upserted`/`deleted`
// entries are either arrays of file paths or raw counts (see toCount).
// Categories with no changes are omitted entirely; when nothing changed
// at all, nothing is printed. Deleted paths are highlighted in red.
const printDeployReport = (deployReport) => {
  if (!deployReport) return;

  const lines = [];
  Object.entries(deployReport).forEach(([category, data]) => {
    const { upserted = [], deleted = [] } = data || {};
    const counts = { upserted: toCount(upserted), deleted: toCount(deleted) };

    if (counts.upserted === 0 && counts.deleted === 0) return;

    const summary = ['upserted', 'deleted']
      .filter((kind) => counts[kind] > 0)
      .map((kind) => `${counts[kind]} ${kind}`);
    lines.push(`  ${category}: ${summary.join(', ')}`);

    // Individual paths are only listed when the entry is an actual array
    // (a raw count carries no paths to show).
    if (Array.isArray(upserted)) {
      for (const p of upserted) lines.push(`    + ${p}`);
    }
    if (Array.isArray(deleted)) {
      for (const p of deleted) lines.push(chalk.red(`    - ${p}`));
    }
  });

  if (lines.length === 0) return;

  logger.Success(['\nDeploy report:', ...lines].join('\n'), { hideTimestamp: true });
};

const getDeploymentStatus = ({ id }) => {
return new Promise((resolve, reject) => {
let getStatus = () => {
Expand Down Expand Up @@ -56,6 +88,7 @@ const push = async env => {
if (response.warning) {
logger.Warn(response.warning);
}
printDeployReport(response.report);
const t1 = performance.now();
return duration(t0, t1);
});
Expand Down
21 changes: 5 additions & 16 deletions lib/watch.js
Original file line number Diff line number Diff line change
Expand Up @@ -69,9 +69,7 @@ const pushFile = async (gateway, syncedFilePath) => {
logger.Warn('[Sync] WARNING: Data schema was updated. It will take a while for the change to be applied.');
}

if (body) {
logger.Success(`[Sync] Synced: ${filePath}`);
}
logger.Success(`[Sync] Synced: ${filePath}`);
} catch (e) {
// Handle validation errors (422) with custom formatting
if (e.statusCode === 422 && e.response && e.response.body) {
Expand All @@ -87,8 +85,8 @@ const pushFile = async (gateway, syncedFilePath) => {
}
};

const deleteFile = (gateway, syncedFilePath) => {
let filePath = filePathUnixified(syncedFilePath);
const deleteFile = async (gateway, syncedFilePath) => {
const filePath = filePathUnixified(syncedFilePath);
const formData = {
path: filePath,
primary_key: filePath
Expand Down Expand Up @@ -188,15 +186,7 @@ const start = async (env, directAssetsUpload, liveReload) => {
push(gateway, task.path)
.then(reload)
.then(callback)
.catch(error => {
// If error was already logged, just continue processing queue
if (error.alreadyLogged) {
callback();
} else {
// For other errors, still continue queue processing
callback();
}
});
.catch(() => callback());
break;
case 'delete':
deleteFile(gateway, task.path).then(reload).then(callback);
Expand All @@ -222,12 +212,11 @@ const start = async (env, directAssetsUpload, liveReload) => {
'**/.DS_Store'
]
})
.on('ready', () => logger.Info(`[Sync] Synchronizing changes to: ${program.url}`))
.on('change', fp => shouldBeSynced(fp, ignoreList) && enqueuePush(fp))
.on('add', fp => shouldBeSynced(fp, ignoreList) && enqueuePush(fp))
.on('unlink', fp => shouldBeSynced(fp, ignoreList) && enqueueDelete(fp));

logger.Info(`[Sync] Synchronizing changes to: ${program.url}`);

return { watcher, liveReloadServer };
});
};
Expand Down
26 changes: 26 additions & 0 deletions test/global-setup.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import { execSync } from 'child_process';
import path from 'path';
import { fileURLToPath } from 'url';
import dotenv from 'dotenv';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

const cliPath = `node ${path.join(__dirname, '../bin/pos-cli.js')}`;

// Vitest global setup hook: wipe the target instance's data (including
// schema) before the integration suite runs, so tests start from a clean
// slate. The cleanup is skipped when real credentials are not configured
// or MPKIT_URL still points at the placeholder example.com domain.
export async function setup() {
  dotenv.config();

  const { MPKIT_URL, MPKIT_TOKEN, MPKIT_EMAIL } = process.env;
  const hasRealCredentials =
    MPKIT_URL && MPKIT_TOKEN && MPKIT_EMAIL && !MPKIT_URL.includes('example.com');

  if (!hasRealCredentials) {
    console.log('[Global Setup] No real credentials found, skipping instance cleanup');
    return;
  }

  console.log(`[Global Setup] Cleaning instance: ${MPKIT_URL}`);
  // stdio: 'inherit' streams the CLI's own progress output to the test log.
  execSync(`${cliPath} data clean --include-schema --auto-confirm`, {
    env: process.env,
    stdio: 'inherit'
  });
  console.log('[Global Setup] Instance cleaned');
}
48 changes: 10 additions & 38 deletions test/integration/sync.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,26 +2,21 @@ import 'dotenv/config';
import { describe, test, expect, afterAll, afterEach, vi } from 'vitest';
import exec from '#test/utils/exec';
import cliPath from '#test/utils/cliPath';
import waitForOutput from '#test/utils/waitForOutput';
import path from 'path';
import fs from 'fs';
import { requireRealCredentials } from '#test/utils/credentials';

vi.setConfig({ testTimeout: 30000 });

// Force this test file to run in sequence to avoid race conditions with fixture files
// @vitest-environment node

const stepTimeout = 3500;

const cwd = name => path.join(process.cwd(), 'test', 'fixtures', 'deploy', name);
const run = (fixtureName, options, callback) => {
return exec(
`${cliPath} sync ${options}`,
`${cliPath} sync ${options || ''}`,
{ cwd: cwd(fixtureName), env: process.env },
callback
);
};
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

const kill = p => {
p.stdout.destroy();
Expand Down Expand Up @@ -50,15 +45,14 @@ afterAll(() => {
}
});

// Skip all tests if credentials aren't available
describe('Happy path', () => {
test('sync assets', { retry: 2 }, async () => {
requireRealCredentials();

const steps = async (child) => {
await sleep(stepTimeout);
await waitForOutput(child, /Synchronizing changes to/);
exec('echo "x" >> app/assets/bar.js', { cwd: cwd('correct_with_assets') });
await sleep(stepTimeout);
await waitForOutput(child, /\[Sync\] Synced asset: app\/assets\/bar\.js/);
kill(child);
};

Expand All @@ -70,9 +64,9 @@ describe('Happy path', () => {

test('sync with direct assets upload', { retry: 2 }, async () => {
const steps = async (child) => {
await sleep(stepTimeout);
await waitForOutput(child, /Synchronizing changes to/);
exec('echo "x" >> app/assets/bar.js', { cwd: cwd('correct_with_assets') });
await sleep(stepTimeout);
await waitForOutput(child, /\[Sync\] Synced asset: app\/assets\/bar\.js/);
kill(child);
};
const { stdout } = await run('correct_with_assets', '-d', steps);
Expand All @@ -94,53 +88,45 @@ properties:
const testDir = path.join(cwd('correct_with_assets'), 'app', dir);
const testFile = path.join(cwd('correct_with_assets'), 'app', fileName);

// Wait for sync to initialize before creating file
await sleep(stepTimeout);
await waitForOutput(child, /Synchronizing changes to/);

// Use Node.js fs for cross-platform compatibility
if (!fs.existsSync(testDir)) {
fs.mkdirSync(testDir, { recursive: true });
}

fs.writeFileSync(testFile, validYML);
// Wait longer for sync to complete (stabilityThreshold 500ms + network time + queue processing)
await sleep(stepTimeout * 2);
await waitForOutput(child, new RegExp(`\\[Sync\\] Synced: ${fileName.replace(/\//g, '[/\\\\]')}`));

fs.unlinkSync(testFile);
await sleep(stepTimeout);
await waitForOutput(child, new RegExp(`\\[Sync\\] Deleted: ${fileName.replace(/\//g, '[/\\\\]')}`));

kill(child);
};
const { stdout } = await run('correct_with_assets', null, steps);

expect(stdout).toMatch(process.env.MPKIT_URL);
// Use regex to handle potential path separator differences
expect(stdout).toMatch(new RegExp(`\\[Sync\\] Synced: ${fileName.replace(/\//g, '[/\\\\]')}`));
expect(stdout).toMatch(new RegExp(`\\[Sync\\] Deleted: ${fileName.replace(/\//g, '[/\\\\]')}`));
});

test('sync single file with -f option', { retry: 2 }, async () => {
requireRealCredentials();

// Create a temporary file to sync
const testFilePath = 'app/views/pages/test-single-sync.liquid';
const fullTestPath = path.join(cwd('correct_with_assets'), testFilePath);
const testContent = '<!-- Test single file sync -->\n<h1>Test Page</h1>\n';

// Write test file
fs.writeFileSync(fullTestPath, testContent);

try {
// Run sync with -f option (without callback, so it runs to completion)
const { stdout, code } = await exec(
`${cliPath} sync -f ${testFilePath}`,
{ cwd: cwd('correct_with_assets'), env: process.env }
);

// Verify output - note that filePathUnixified removes the app/ prefix
expect(code).toBe(0);
expect(stdout).toMatch(/\[Sync\] Synced: views\/pages\/test-single-sync\.liquid/);
} finally {
// Clean up test file
if (fs.existsSync(fullTestPath)) {
fs.unlinkSync(fullTestPath);
}
Expand All @@ -150,26 +136,21 @@ properties:
test('sync single asset file with -f option', { retry: 2 }, async () => {
requireRealCredentials();

// Create a temporary asset file to sync
const testFilePath = 'app/assets/test-single-sync.js';
const fullTestPath = path.join(cwd('correct_with_assets'), testFilePath);
const testContent = '// Test single asset file sync\nconsole.log("test");\n';

// Write test file
fs.writeFileSync(fullTestPath, testContent);

try {
// Run sync with -f option (without callback, so it runs to completion)
const { stdout, code } = await exec(
`${cliPath} sync -f ${testFilePath}`,
{ cwd: cwd('correct_with_assets'), env: process.env }
);

// Verify output
expect(code).toBe(0);
expect(stdout).toMatch(/\[Sync\] Synced asset: app\/assets\/test-single-sync\.js/);
} finally {
// Clean up test file
if (fs.existsSync(fullTestPath)) {
fs.unlinkSync(fullTestPath);
}
Expand All @@ -179,27 +160,18 @@ properties:
test('422 validation error shows proper format with single error message', { retry: 2 }, async () => {
requireRealCredentials();

// Use fixture with invalid schema file that triggers 422 validation error
const testFilePath = 'app/schema/invalid-property-type.yml';

// Run sync with -f option - this should fail with validation error
const { stderr, code } = await exec(
`${cliPath} sync -f ${testFilePath}`,
{ cwd: cwd('invalid_schema'), env: process.env }
);

// Verify error output
expect(code).toBe(1);

// Should show [Sync] Failed to sync with timestamp and file path
expect(stderr).toMatch(/\[\d{2}:\d{2}:\d{2}\] \[Sync\] Failed to sync: schema\/invalid-property-type\.yml/);

// Should include the validation error message
expect(stderr).toMatch(/Validation failed/);

// Verify error is NOT duplicated - count occurrences of "Failed to sync"
const failedToSyncMatches = (stderr.match(/Failed to sync/g) || []).length;
expect(failedToSyncMatches).toBe(1);
});

});
Loading