Skip to content
26 changes: 23 additions & 3 deletions src/stores/workflowStore.ts
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,15 @@ interface WorkflowStore {
importedTriggers: Record<string, unknown>;
importedPipelines: Record<string, unknown>;

/**
* Passthrough metadata from the most recently imported WorkflowConfig.
* Stores non-visual top-level fields (name, version, _originalKeys, _extraTopLevelKeys, imports,
* requires, platform, infrastructure, sidecars) so that exportToConfig() can reconstruct
* a complete WorkflowConfig without losing unknown keys or reformatting structure.
* Pipelines are excluded here because they are tracked separately via importedPipelines.
*/
importedPassthrough: Omit<WorkflowConfig, 'modules' | 'workflows' | 'triggers' | 'pipelines'> | null;

// Multi-file resolution: maps module name → source file path
sourceMap: Map<string, string>;
setSourceMap: (sourceMap: Map<string, string>) => void;
Expand Down Expand Up @@ -218,6 +227,7 @@ const useWorkflowStore = create<WorkflowStore>()(
importedWorkflows: {},
importedTriggers: {},
importedPipelines: {},
importedPassthrough: null,

// Multi-file resolution
sourceMap: new Map<string, string>(),
Expand Down Expand Up @@ -453,9 +463,15 @@ const useWorkflowStore = create<WorkflowStore>()(
},

exportToConfig: () => {
const { nodes, edges, importedWorkflows, importedTriggers, importedPipelines } = get();
const { nodes, edges, importedWorkflows, importedTriggers, importedPipelines, importedPassthrough } = get();
const moduleTypeMap = useModuleSchemaStore.getState().moduleTypeMap;
const config = nodesToConfig(nodes, edges, moduleTypeMap);
// Pass importedPassthrough as originalConfig so that nodesToConfig can restore
// non-visual fields: name, version, _originalKeys, _extraTopLevelKeys, imports,
// requires, platform, infrastructure, sidecars.
const originalConfig: import('../types/workflow.ts').WorkflowConfig | undefined = importedPassthrough
? { modules: [], workflows: {}, triggers: {}, ...importedPassthrough }
: undefined;
const config = nodesToConfig(nodes, edges, moduleTypeMap, originalConfig);
Comment on lines +466 to +474
Copy link

Copilot AI Apr 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

exportToConfig() always reuses importedPassthrough when present, but clearCanvas() does not reset importedPassthrough. This can cause stale name/version/_originalKeys/_extraTopLevelKeys/etc. from a previously imported YAML to leak into subsequent exports after clearing the canvas. Clear importedPassthrough (and potentially sourceMap) in clearCanvas(), or gate passthrough usage on whether a config was actually imported for the current session.

Copilot uses AI. Check for mistakes.
if (Object.keys(config.workflows).length === 0 && Object.keys(importedWorkflows).length > 0) {
config.workflows = importedWorkflows;
}
Expand Down Expand Up @@ -484,13 +500,17 @@ const useWorkflowStore = create<WorkflowStore>()(
get().pushHistory();
const moduleTypeMap = useModuleSchemaStore.getState().moduleTypeMap;
const { nodes, edges } = configToNodes(config, moduleTypeMap, sourceMap);
// Extract passthrough metadata (everything except the visual module/workflow/trigger data)
const { modules: _modules, workflows: _workflows, triggers: _triggers, pipelines: _pipelines, ...passthroughFields } = config;
const importedPassthrough: WorkflowStore['importedPassthrough'] = passthroughFields;
const updates: Partial<WorkflowStore> = {
nodes,
edges,
selectedNodeId: null,
importedWorkflows: config.workflows ?? {},
importedTriggers: config.triggers ?? {},
importedPipelines: config.pipelines ?? {},
importedPassthrough,
};
if (sourceMap) {
updates.sourceMap = sourceMap;
Expand All @@ -501,7 +521,7 @@ const useWorkflowStore = create<WorkflowStore>()(

clearCanvas: () => {
  // Snapshot the current state first so the clear can be undone.
  get().pushHistory();
  // Reset ALL canvas and import-derived state in one atomic set().
  // importedPassthrough must be cleared here: exportToConfig() reuses it
  // unconditionally when present, so a stale value would leak name/version/
  // _originalKeys/_extraTopLevelKeys from a previously imported YAML into
  // exports performed after the canvas was cleared.
  // NOTE(review): sourceMap is reset for the same staleness reason — it maps
  // module name → source file path for a config that no longer exists on the
  // canvas. Confirm no caller relies on it surviving a clear.
  set({
    nodes: [],
    edges: [],
    selectedNodeId: null,
    selectedEdgeId: null,
    nodeCounter: 0,
    importedWorkflows: {},
    importedTriggers: {},
    importedPipelines: {},
    importedPassthrough: null,
    sourceMap: new Map<string, string>(),
  });
},

exportLayout: (): LayoutData => {
Expand Down
2 changes: 2 additions & 0 deletions src/types/workflow.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@ export interface WorkflowConfig {
infrastructure?: Record<string, unknown>;
sidecars?: unknown[];
_originalKeys?: string[];
/** Preserves unknown top-level keys that are not part of the known schema (e.g. engine:, custom config blocks). */
_extraTopLevelKeys?: Record<string, unknown>;
}

// Workflow section types for edge extraction
Expand Down
160 changes: 159 additions & 1 deletion src/utils/serialization-bugs.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { describe, it, expect } from 'vitest';
import { configToNodes, nodesToConfig, configToYaml, parseYamlSafe } from './serialization.ts';
import { configToNodes, nodesToConfig, configToYaml, parseYamlSafe, parseYaml } from './serialization.ts';
import { MODULE_TYPE_MAP } from '../types/workflow.ts';

describe('Bug 1: name and version round-trip', () => {
Expand Down Expand Up @@ -136,3 +136,161 @@ triggers: {}
expect(isPartial).toBe(false);
});
});

describe('Bug 4: unknown top-level keys (e.g. engine:) must not be dropped', () => {
  // NOTE: the YAML fixtures below rely on block-mapping indentation; the
  // assertions (e.g. { validation: { templateRefs: 'warn' } }) require
  // `validation:` to be nested under `engine:` and `templateRefs:` under
  // `validation:`.
  it('parseYaml captures unknown top-level keys in _extraTopLevelKeys', () => {
    const yamlText = `
name: my-service
engine:
  validation:
    templateRefs: warn
modules: []
workflows: {}
`;
    const config = parseYaml(yamlText);
    expect(config._extraTopLevelKeys).toBeDefined();
    expect(config._extraTopLevelKeys!['engine']).toEqual({ validation: { templateRefs: 'warn' } });
  });

  it('parseYamlSafe captures unknown top-level keys in _extraTopLevelKeys', () => {
    const yamlText = `
name: my-service
engine:
  validation:
    templateRefs: warn
modules: []
`;
    const { config } = parseYamlSafe(yamlText);
    expect(config._extraTopLevelKeys).toBeDefined();
    expect(config._extraTopLevelKeys!['engine']).toEqual({ validation: { templateRefs: 'warn' } });
  });

  it('configToYaml emits unknown top-level keys', () => {
    const yamlText = `
name: my-service
engine:
  validation:
    templateRefs: warn
modules: []
workflows: {}
`;
    const config = parseYaml(yamlText);
    const out = configToYaml(config);
    expect(out).toContain('engine:');
    expect(out).toContain('templateRefs: warn');
  });

  it('nodesToConfig passes through _extraTopLevelKeys from originalConfig', () => {
    const yamlText = `
name: my-service
engine:
  validation:
    templateRefs: warn
modules:
  - name: server
    type: http.server
    config:
      address: ":8080"
workflows: {}
`;
    const config = parseYaml(yamlText);
    const { nodes, edges } = configToNodes(config, MODULE_TYPE_MAP);
    // Passing the parsed config as originalConfig lets nodesToConfig restore
    // the non-visual passthrough fields, including _extraTopLevelKeys.
    const exported = nodesToConfig(nodes, edges, MODULE_TYPE_MAP, config);
    expect(exported._extraTopLevelKeys).toBeDefined();
    expect(exported._extraTopLevelKeys!['engine']).toEqual({ validation: { templateRefs: 'warn' } });
    const out = configToYaml(exported);
    expect(out).toContain('engine:');
  });

  it('full round-trip preserves engine block and original key ordering', () => {
    const yamlText = `name: my-service
version: "1.0"
engine:
  validation:
    templateRefs: warn
modules:
  - name: server
    type: http.server
    config:
      address: ':8080'
pipelines:
  health:
    trigger:
      type: http
      method: GET
      path: /healthz
`;
    const config = parseYaml(yamlText);
    const { nodes, edges } = configToNodes(config, MODULE_TYPE_MAP);
    const exported = nodesToConfig(nodes, edges, MODULE_TYPE_MAP, config);
    const out = configToYaml(exported);

    // engine block must be present
    expect(out).toContain('engine:');
    expect(out).toContain('templateRefs: warn');

    // Key ordering: name comes before engine comes before modules
    const nameIdx = out.indexOf('name:');
    const engineIdx = out.indexOf('engine:');
    const modulesIdx = out.indexOf('modules:');
    expect(nameIdx).toBeGreaterThanOrEqual(0);
    expect(engineIdx).toBeGreaterThan(nameIdx);
    expect(modulesIdx).toBeGreaterThan(engineIdx);
  });
});

describe('Bug 5: triggers: {} must not be injected when not in original', () => {
  it('does not add triggers: {} when original YAML had no triggers key', () => {
    // Fixture deliberately uses pipelines (not triggers); exporting must not
    // fabricate an empty `triggers: {}` mapping that the author never wrote.
    const yamlText = `
name: my-service
modules:
  - name: server
    type: http.server
    config:
      address: ":8080"
pipelines:
  health:
    trigger:
      type: http
      method: GET
      path: /healthz
`;
    const config = parseYaml(yamlText);
    // triggers not in original keys
    expect(config._originalKeys).not.toContain('triggers');

    const { nodes, edges } = configToNodes(config, MODULE_TYPE_MAP);
    const exported = nodesToConfig(nodes, edges, MODULE_TYPE_MAP, config);
    const out = configToYaml(exported);

    // triggers: {} must NOT appear in output
    expect(out).not.toMatch(/^triggers:/m);
  });
});

describe('Bug 6: parseYamlSafe is consistent with parseYaml for all fields', () => {
  it('parseYamlSafe preserves imports, requires, platform, infrastructure, sidecars', () => {
    // One fixture exercising every known non-visual top-level field, so a
    // regression in any single passthrough field fails this test.
    const yamlText = `
name: my-service
imports:
  - other.yaml
requires:
  some-service: ">=1.0"
platform:
  target: kubernetes
infrastructure:
  database:
    type: postgres
sidecars:
  - name: proxy
    image: envoy:latest
modules: []
`;
    const { config } = parseYamlSafe(yamlText);
    expect(config.imports).toEqual(['other.yaml']);
    expect(config.requires).toEqual({ 'some-service': '>=1.0' });
    expect(config.platform).toEqual({ target: 'kubernetes' });
    expect(config.infrastructure).toEqual({ database: { type: 'postgres' } });
    expect(config.sidecars).toHaveLength(1);
  });
});
Loading
Loading