
Commit 566471b

merge(workflow): resolve conflict in useWorkflowImportExport
Combines port resolution and secret validation features from both branches:

- Keeps dynamic port resolution for Analytics components (from feature branch)
- Retains secret reference validation (from main)
- Merges all required imports (api, useSecretStore, useComponentStore)

Signed-off-by: Aseem Shrey <LuD1161@users.noreply.github.com>
2 parents cccd641 + fe8b35d commit 566471b

31 files changed

Lines changed: 436 additions & 102 deletions

AGENTS.md

Lines changed: 1 addition & 1 deletion
@@ -41,7 +41,7 @@ Local development runs as **multiple app instances** (PM2) on top of **one share

 - Shared infra (Docker Compose project `shipsec-infra`): Postgres/Temporal/Redpanda/Redis/MinIO/Loki on fixed ports.
 - Per-instance apps: `shipsec-{frontend,backend,worker}-N`.
-- Isolation is via per-instance DB + Temporal namespace/task queue + Kafka topic suffixing (not per-instance infra containers).
+- Isolation is via per-instance DB + Temporal namespace/task queue + Kafka topic suffixing + instance-scoped Kafka consumer groups/client IDs (not per-instance infra containers).
 - The workspace can have an **active instance** (stored in `.shipsec-instance`, gitignored).

 **Agent rule:** before running any dev commands, ensure you’re targeting the intended instance.
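
The "instance-scoped Kafka consumer groups/client IDs" mentioned above are what the backend ingest services in this commit derive from `SHIPSEC_INSTANCE`. A minimal sketch of that naming scheme, mirroring the defaults visible in the service diffs below (illustrative only, not part of the diff):

// Illustrative sketch: each dev instance gets its own consumer group and client ID,
// so parallel instances sharing one Redpanda broker do not consume each other's messages.
const instanceId = process.env.SHIPSEC_INSTANCE; // e.g. '7'; unset for legacy single-instance dev
const groupId = instanceId
  ? `shipsec-agent-trace-ingestor-${instanceId}` // instance-scoped
  : 'shipsec-agent-trace-ingestor'; // legacy shared default
const clientId = instanceId
  ? `shipsec-backend-agent-trace-${instanceId}`
  : 'shipsec-backend-agent-trace';
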
Lines changed: 61 additions & 0 deletions
@@ -0,0 +1,61 @@
import { afterEach, beforeEach, describe, expect, test } from 'bun:test';

import { AgentTraceIngestService } from '../agent-trace-ingest.service';
import type { AgentTraceRepository } from '../agent-trace.repository';

const ORIGINAL_ENV = { ...process.env };

function restoreEnv(): void {
  process.env = { ...ORIGINAL_ENV };
}

describe('AgentTraceIngestService', () => {
  beforeEach(() => {
    restoreEnv();
    process.env.LOG_KAFKA_BROKERS = 'localhost:19092';
    delete process.env.SHIPSEC_INSTANCE;
    delete process.env.AGENT_TRACE_KAFKA_GROUP_ID;
    delete process.env.AGENT_TRACE_KAFKA_CLIENT_ID;
  });

  afterEach(() => {
    restoreEnv();
  });

  test('uses legacy defaults when SHIPSEC_INSTANCE is unset', () => {
    const repository = { append: async () => undefined } as unknown as AgentTraceRepository;
    const service = new AgentTraceIngestService(repository) as unknown as {
      kafkaGroupId: string;
      kafkaClientId: string;
    };

    expect(service.kafkaGroupId).toBe('shipsec-agent-trace-ingestor');
    expect(service.kafkaClientId).toBe('shipsec-backend-agent-trace');
  });

  test('uses instance-scoped defaults when SHIPSEC_INSTANCE is set', () => {
    process.env.SHIPSEC_INSTANCE = '7';
    const repository = { append: async () => undefined } as unknown as AgentTraceRepository;
    const service = new AgentTraceIngestService(repository) as unknown as {
      kafkaGroupId: string;
      kafkaClientId: string;
    };

    expect(service.kafkaGroupId).toBe('shipsec-agent-trace-ingestor-7');
    expect(service.kafkaClientId).toBe('shipsec-backend-agent-trace-7');
  });

  test('prefers explicit env vars over defaults', () => {
    process.env.SHIPSEC_INSTANCE = '3';
    process.env.AGENT_TRACE_KAFKA_GROUP_ID = 'custom-agent-trace-group';
    process.env.AGENT_TRACE_KAFKA_CLIENT_ID = 'custom-agent-trace-client';
    const repository = { append: async () => undefined } as unknown as AgentTraceRepository;
    const service = new AgentTraceIngestService(repository) as unknown as {
      kafkaGroupId: string;
      kafkaClientId: string;
    };

    expect(service.kafkaGroupId).toBe('custom-agent-trace-group');
    expect(service.kafkaClientId).toBe('custom-agent-trace-client');
  });
});

backend/src/agent-trace/agent-trace-ingest.service.ts

Lines changed: 9 additions & 2 deletions
@@ -26,9 +26,16 @@ export class AgentTraceIngestService implements OnModuleInit, OnModuleDestroy {
     // Use instance-aware topic name
     const topicResolver = getTopicResolver();
     this.kafkaTopic = topicResolver.getAgentTraceTopic();
+    const instanceId = process.env.SHIPSEC_INSTANCE;
+    const defaultGroupId = instanceId
+      ? `shipsec-agent-trace-ingestor-${instanceId}`
+      : 'shipsec-agent-trace-ingestor';
+    const defaultClientId = instanceId
+      ? `shipsec-backend-agent-trace-${instanceId}`
+      : 'shipsec-backend-agent-trace';

-    this.kafkaGroupId = process.env.AGENT_TRACE_KAFKA_GROUP_ID ?? 'shipsec-agent-trace-ingestor';
-    this.kafkaClientId = process.env.AGENT_TRACE_KAFKA_CLIENT_ID ?? 'shipsec-backend-agent-trace';
+    this.kafkaGroupId = process.env.AGENT_TRACE_KAFKA_GROUP_ID ?? defaultGroupId;
+    this.kafkaClientId = process.env.AGENT_TRACE_KAFKA_CLIENT_ID ?? defaultClientId;
   }

   async onModuleInit(): Promise<void> {
Lines changed: 70 additions & 0 deletions
@@ -0,0 +1,70 @@
import { afterEach, beforeEach, describe, expect, test } from 'bun:test';

import { NodeIOIngestService } from '../node-io-ingest.service';
import type { NodeIORepository } from '../node-io.repository';

const ORIGINAL_ENV = { ...process.env };

function restoreEnv(): void {
  process.env = { ...ORIGINAL_ENV };
}

describe('NodeIOIngestService', () => {
  beforeEach(() => {
    restoreEnv();
    process.env.LOG_KAFKA_BROKERS = 'localhost:19092';
    delete process.env.SHIPSEC_INSTANCE;
    delete process.env.NODE_IO_KAFKA_GROUP_ID;
    delete process.env.NODE_IO_KAFKA_CLIENT_ID;
  });

  afterEach(() => {
    restoreEnv();
  });

  test('uses legacy defaults when SHIPSEC_INSTANCE is unset', () => {
    const repository = {
      recordStart: async () => undefined,
      recordCompletion: async () => undefined,
    } as unknown as NodeIORepository;
    const service = new NodeIOIngestService(repository) as unknown as {
      kafkaGroupId: string;
      kafkaClientId: string;
    };

    expect(service.kafkaGroupId).toBe('shipsec-node-io-ingestor');
    expect(service.kafkaClientId).toBe('shipsec-backend-node-io');
  });

  test('uses instance-scoped defaults when SHIPSEC_INSTANCE is set', () => {
    process.env.SHIPSEC_INSTANCE = '4';
    const repository = {
      recordStart: async () => undefined,
      recordCompletion: async () => undefined,
    } as unknown as NodeIORepository;
    const service = new NodeIOIngestService(repository) as unknown as {
      kafkaGroupId: string;
      kafkaClientId: string;
    };

    expect(service.kafkaGroupId).toBe('shipsec-node-io-ingestor-4');
    expect(service.kafkaClientId).toBe('shipsec-backend-node-io-4');
  });

  test('prefers explicit env vars over defaults', () => {
    process.env.SHIPSEC_INSTANCE = '9';
    process.env.NODE_IO_KAFKA_GROUP_ID = 'custom-node-io-group';
    process.env.NODE_IO_KAFKA_CLIENT_ID = 'custom-node-io-client';
    const repository = {
      recordStart: async () => undefined,
      recordCompletion: async () => undefined,
    } as unknown as NodeIORepository;
    const service = new NodeIOIngestService(repository) as unknown as {
      kafkaGroupId: string;
      kafkaClientId: string;
    };

    expect(service.kafkaGroupId).toBe('custom-node-io-group');
    expect(service.kafkaClientId).toBe('custom-node-io-client');
  });
});

backend/src/node-io/node-io-ingest.service.ts

Lines changed: 9 additions & 2 deletions
@@ -46,9 +46,16 @@ export class NodeIOIngestService implements OnModuleInit, OnModuleDestroy {
     // Use instance-aware topic name
     const topicResolver = getTopicResolver();
     this.kafkaTopic = topicResolver.getNodeIOTopic();
+    const instanceId = process.env.SHIPSEC_INSTANCE;
+    const defaultGroupId = instanceId
+      ? `shipsec-node-io-ingestor-${instanceId}`
+      : 'shipsec-node-io-ingestor';
+    const defaultClientId = instanceId
+      ? `shipsec-backend-node-io-${instanceId}`
+      : 'shipsec-backend-node-io';

-    this.kafkaGroupId = process.env.NODE_IO_KAFKA_GROUP_ID ?? 'shipsec-node-io-ingestor';
-    this.kafkaClientId = process.env.NODE_IO_KAFKA_CLIENT_ID ?? 'shipsec-backend-node-io';
+    this.kafkaGroupId = process.env.NODE_IO_KAFKA_GROUP_ID ?? defaultGroupId;
+    this.kafkaClientId = process.env.NODE_IO_KAFKA_CLIENT_ID ?? defaultClientId;
   }

   async onModuleInit(): Promise<void> {
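
Both ingest services now resolve their Kafka identifiers with the same precedence, which the new specs above verify: explicit env var, then instance-scoped default, then legacy default. A condensed sketch of that shared pattern; `resolveKafkaId` is a hypothetical helper, not part of this commit:

// Hypothetical helper illustrating the precedence used by both services:
// explicit env var > instance-scoped default > legacy default.
function resolveKafkaId(explicit: string | undefined, base: string): string {
  const instanceId = process.env.SHIPSEC_INSTANCE;
  const instanceScopedDefault = instanceId ? `${base}-${instanceId}` : base;
  return explicit ?? instanceScopedDefault;
}

// e.g. the node-io group ID resolves as:
// resolveKafkaId(process.env.NODE_IO_KAFKA_GROUP_ID, 'shipsec-node-io-ingestor')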

bun.lock

Lines changed: 1 addition & 0 deletions
Some generated files are not rendered by default.

docs/components/security.mdx

Lines changed: 8 additions & 8 deletions
@@ -12,7 +12,7 @@ Security components wrap popular open-source tools for subdomain discovery, DNS
 ### Subfinder

 <Info>
-[GitHub](https://github.com/projectdiscovery/subfinder) · Docker: `projectdiscovery/subfinder`
+[GitHub](https://github.com/projectdiscovery/subfinder) · Docker: `ghcr.io/shipsecai/subfinder`
 </Info>

 Discovers subdomains using passive sources.
@@ -35,7 +35,7 @@ Discovers subdomains using passive sources.
 ### Amass

 <Info>
-[GitHub](https://github.com/owasp-amass/amass) · Docker: `owaspamass/amass`
+[GitHub](https://github.com/owasp-amass/amass) · Docker: `ghcr.io/shipsecai/amass`
 </Info>

 Active and passive subdomain enumeration.
@@ -73,7 +73,7 @@ High-performance DNS bruteforcing and resolution. This is a combined image that
 ### DNSX

 <Info>
-[GitHub](https://github.com/projectdiscovery/dnsx) · Docker: `projectdiscovery/dnsx`
+[GitHub](https://github.com/projectdiscovery/dnsx) · Docker: `ghcr.io/shipsecai/dnsx`
 </Info>

 Resolves DNS records with support for multiple record types and custom resolvers.
@@ -105,7 +105,7 @@ Resolves DNS records with support for multiple record types and custom resolvers
 ### httpx

 <Info>
-[GitHub](https://github.com/projectdiscovery/httpx) · Docker: `projectdiscovery/httpx`
+[GitHub](https://github.com/projectdiscovery/httpx) · Docker: `ghcr.io/shipsecai/httpx`
 </Info>

 Probes hosts for live HTTP services and captures response metadata.
@@ -137,7 +137,7 @@ Probes hosts for live HTTP services and captures response metadata.
 ### Naabu

 <Info>
-[GitHub](https://github.com/projectdiscovery/naabu) · Docker: `projectdiscovery/naabu`
+[GitHub](https://github.com/projectdiscovery/naabu) · Docker: `ghcr.io/shipsecai/naabu`
 </Info>

 Fast active port scanning using SYN/CONNECT probes.
@@ -196,7 +196,7 @@ Template-based vulnerability scanning. This is nuclei custom image with nuclei-t
 ### TruffleHog

 <Info>
-[GitHub](https://github.com/trufflesecurity/trufflehog) · Docker: `trufflesecurity/trufflehog`
+[GitHub](https://github.com/trufflesecurity/trufflehog) · Docker: `ghcr.io/shipsecai/trufflehog`
 </Info>

 Scans for leaked credentials across repositories, filesystems, and cloud storage.
@@ -227,7 +227,7 @@ Scans for leaked credentials across repositories, filesystems, and cloud storage
 ### Prowler Scan

 <Info>
-[GitHub](https://github.com/prowler-cloud/prowler) · Docker: `prowlercloud/prowler`
+[GitHub](https://github.com/prowler-cloud/prowler) · Docker: `ghcr.io/shipsecai/prowler`
 </Info>

 Cloud (AWS, Azure, GCP) security posture management. Best practices auditing.
@@ -262,7 +262,7 @@ Scans Supabase instances for misconfigurations.
 ### Notify

 <Info>
-[GitHub](https://github.com/projectdiscovery/notify) · Docker: `projectdiscovery/notify`
+[GitHub](https://github.com/projectdiscovery/notify) · Docker: `ghcr.io/shipsecai/notify`
 </Info>

 Sends alerts to Slack, Discord, Telegram, or email.

docs/development/component-development.mdx

Lines changed: 1 addition & 1 deletion
@@ -892,7 +892,7 @@ export default defineComponent({
   category: 'security',
   runner: {
     kind: 'docker',
-    image: 'projectdiscovery/dnsx:latest',
+    image: 'ghcr.io/shipsecai/dnsx:latest',
     entrypoint: 'sh',
     command: ['-c', 'dnsx "$@"', '--'],
     network: 'bridge',

frontend/src/features/workflow-builder/hooks/useWorkflowImportExport.ts

Lines changed: 59 additions & 0 deletions
@@ -13,6 +13,8 @@ import {
 import type { FrontendNodeData } from '@/schemas/node';
 import type { Node as ReactFlowNode, Edge as ReactFlowEdge } from 'reactflow';
 import { api } from '@/services/api';
+import { useSecretStore } from '@/store/secretStore';
+import { useComponentStore } from '@/store/componentStore';
 interface WorkflowMetadataShape {
   id: string | null;
   name: string;
@@ -131,6 +133,54 @@ export function useWorkflowImportExport({
       }),
     );

+    // Validate secret references
+    const removedSecrets: { param: string; node: string; secretId: string }[] = [];
+    try {
+      await useSecretStore.getState().fetchSecrets();
+      const secrets = useSecretStore.getState().secrets;
+      const secretIds = new Set(secrets.map((s) => s.id));
+
+      const componentStore = useComponentStore.getState();
+      if (Object.keys(componentStore.components).length === 0) {
+        await componentStore.fetchComponents();
+      }
+      const components = useComponentStore.getState().components;
+
+      resolvedNodes.forEach((node) => {
+        const data = node.data as FrontendNodeData;
+        const componentRef = data.componentId || data.componentSlug;
+        if (!componentRef) return;
+
+        const component =
+          componentStore.getComponent(componentRef) ||
+          Object.values(components).find((c) => c.slug === componentRef);
+
+        if (!component || !component.parameters) return;
+
+        // Find parameters that are secrets
+        const secretParams = component.parameters.filter((p) => p.type === 'secret');
+        const configParams = node.data.config.params || {};
+
+        secretParams.forEach((param) => {
+          const val = configParams[param.id];
+          // If value is a string (ID) and not in available secrets, remove it
+          if (typeof val === 'string' && val.trim().length > 0) {
+            if (!secretIds.has(val)) {
+              console.warn(
+                `[Import] Removing invalid secret reference for param "${param.id}" in node "${node.id}" (secret ID: ${val})`,
+              );
+              removedSecrets.push({ param: param.id, node: node.id, secretId: val });
+              // Set to undefined to clear it
+              configParams[param.id] = undefined;
+            }
+          }
+        });
+      });
+    } catch (error) {
+      console.error('Failed to validate secrets during import:', error);
+      // Continue with import even if validation fails
+    }
+
     resetWorkflow();
     setDesignNodes(resolvedNodes as ReactFlowNode<FrontendNodeData>[]);
     setDesignEdges(normalizedEdges);
@@ -151,6 +201,15 @@
       title: 'Workflow imported',
       description: `Loaded ${parsed.name}`,
     });
+
+    // Show warning if any invalid secret references were removed
+    if (removedSecrets.length > 0) {
+      toast({
+        variant: 'warning',
+        title: 'Invalid secret references removed',
+        description: `${removedSecrets.length} secret reference(s) could not be resolved and were cleared. Please select valid secrets from the Secrets Manager.`,
+      });
+    }
   },
   [
     canManageWorkflows,