From 5771665e03cc0eb58a63edb1dc53974b9be61530 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Wed, 21 Jan 2026 17:36:37 +0000 Subject: [PATCH 1/8] project: support forked_from key --- .changeset/moody-ducks-warn.md | 5 ++ packages/project/src/Project.ts | 1 + packages/project/src/parse/from-fs.ts | 4 ++ packages/project/src/util/config.ts | 5 ++ packages/project/src/util/omit-nil.ts | 6 ++- packages/project/test/parse/from-fs.test.ts | 33 ++++++++++-- packages/project/test/serialize/to-fs.test.ts | 54 +++++++++++++++++++ packages/project/test/util/config.test.ts | 6 +++ 8 files changed, 109 insertions(+), 5 deletions(-) create mode 100644 .changeset/moody-ducks-warn.md diff --git a/.changeset/moody-ducks-warn.md b/.changeset/moody-ducks-warn.md new file mode 100644 index 000000000..7e1a6a94f --- /dev/null +++ b/.changeset/moody-ducks-warn.md @@ -0,0 +1,5 @@ +--- +'@openfn/project': patch +--- + +Support forked_from metadata key in openfn.yaml diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index e1c425ed8..b3aeb1900 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -31,6 +31,7 @@ type UUIDMap = { type CLIMeta = { version?: number; alias?: string; + forked_from?: string; }; export class Project { diff --git a/packages/project/src/parse/from-fs.ts b/packages/project/src/parse/from-fs.ts index d1ff194d2..380641b78 100644 --- a/packages/project/src/parse/from-fs.ts +++ b/packages/project/src/parse/from-fs.ts @@ -12,6 +12,7 @@ import { } from '../util/config'; import { omit } from 'lodash-es'; import { Logger } from '@openfn/logger'; +import omitNil from '../util/omit-nil'; export type FromFsConfig = { root: string; @@ -36,6 +37,9 @@ export const parseProject = async (options: FromFsConfig) => { openfn: omit(context.project, ['id']), config: config, workflows: [], + cli: omitNil({ + forked_from: context.project.forked_from, + }), }; // now find all the workflows diff --git 
a/packages/project/src/util/config.ts b/packages/project/src/util/config.ts index c1d80c433..e30937294 100644 --- a/packages/project/src/util/config.ts +++ b/packages/project/src/util/config.ts @@ -30,6 +30,11 @@ export const extractConfig = (source: Project, format?: 'yaml' | 'json') => { if (source.name) { project.name = source.name; } + + if (source.cli.forked_from) { + project.forked_from = source.cli.forked_from; + } + const workspace = { ...source.config, }; diff --git a/packages/project/src/util/omit-nil.ts b/packages/project/src/util/omit-nil.ts index 40a8974e8..1e05f887f 100644 --- a/packages/project/src/util/omit-nil.ts +++ b/packages/project/src/util/omit-nil.ts @@ -1,8 +1,10 @@ import { omitBy, isNil } from 'lodash-es'; -export const omitNil = (obj: any, key: string) => { - if (obj[key]) { +export const omitNil = (obj: any, key?: string) => { + if (key && obj[key]) { obj[key] = omitBy(obj[key], isNil); + } else { + return omitBy(obj, isNil); } }; export default omitNil; diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index 6a0785f52..87adc29cc 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -22,7 +22,7 @@ function mockFile(path: string, content: string | object) { mock(files); } -test.serial('should include multiple workflows', async (t) => { +test.serial('should include multiple workflows (legacy format)', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); mockFile('/ws/workflows/workflow-1/workflow-1.yaml', { @@ -66,7 +66,7 @@ test.serial('should include multiple workflows', async (t) => { t.is(wf2.name, 'Workflow 2'); }); -test.serial('should load a workflow expression', async (t) => { +test.serial('should load a workflow expression (legacy format)', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { @@ -104,7 +104,7 @@ test.serial( ); test.serial( - 'should load 
a workflow from the file system and expand shorthand links', + 'should load a workflow from the file system and expand shorthand links (legacy format)', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); @@ -139,3 +139,30 @@ test.serial( t.is(typeof wf.steps[1].next.c, 'object'); } ); + +test.serial.only('should track forked_from', async (t) => { + mockFile('/ws/openfn.yaml', { + workspace: buildConfig(), + project: { + uuid: '', + forked_from: 'abcd', + }, + }); + + mockFile('/ws/workflows/workflow-1/workflow-1.yaml', { + id: 'workflow-1', + name: 'Workflow 1', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], + }); + + mockFile('/ws/workflows/workflow-1/job.js', `fn(s => s)`); + + const project = await parseProject({ root: '/ws' }); + + t.is(project.cli.forked_from, 'abcd'); +}); diff --git a/packages/project/test/serialize/to-fs.test.ts b/packages/project/test/serialize/to-fs.test.ts index 63c29c4d2..484a0e244 100644 --- a/packages/project/test/serialize/to-fs.test.ts +++ b/packages/project/test/serialize/to-fs.test.ts @@ -233,4 +233,58 @@ test('toFs: extract a project with 1 workflow and 1 step', (t) => { t.is(files['workflows/my-workflow/step.js'], 'fn(s => s)'); }); +test('toFs: extract a project with forked_from meta', (t) => { + const project = new Project( + { + name: 'My Project', + workflows: [ + { + id: 'my-workflow', + steps: [step], + }, + ], + cli: { + forked_from: 'abcd', + }, + }, + { + formats: { + openfn: 'json', // for easier testing + workflow: 'json', + }, + } + ); + + const files = toFs(project); + + // Ensure that all the right files have been created + t.deepEqual(Object.keys(files), [ + 'openfn.json', + 'workflows/my-workflow/my-workflow.json', + 'workflows/my-workflow/step.js', + ]); + + // rough test on the file contents + // (this should be validated in more detail by each step) + const config = JSON.parse(files['openfn.json']); + t.deepEqual(config, { + workspace: { + credentials: 'credentials.yaml', + formats: { 
openfn: 'json', project: 'yaml', workflow: 'json' }, + dirs: { projects: '.projects', workflows: 'workflows' }, + }, + project: { + id: 'my-project', + name: 'My Project', + forked_from: 'abcd', + }, + }); + + const workflow = JSON.parse(files['workflows/my-workflow/my-workflow.json']); + t.is(workflow.id, 'my-workflow'); + t.is(workflow.steps.length, 1); + + t.is(files['workflows/my-workflow/step.js'], 'fn(s => s)'); +}); + // TODO we need many more tests on this, with options diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index db6837c09..2c2de1609 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -29,6 +29,7 @@ project: env: dev inserted_at: 2025-10-21T17:10:57Z updated_at: 2025-10-21T17:10:57Z + forked_from: abcd `; const result = loadWorkspaceFile(yaml); @@ -51,6 +52,7 @@ project: env: 'dev', inserted_at: '2025-10-21T17:10:57Z', updated_at: '2025-10-21T17:10:57Z', + forked_from: 'abcd', }); }); @@ -161,6 +163,9 @@ test('generate openfn.yaml', (t) => { openfn: { uuid: 1234, }, + cli: { + forked_from: 'abcd', + }, }, { formats: { @@ -176,6 +181,7 @@ test('generate openfn.yaml', (t) => { uuid: 1234 id: my-project name: My Project + forked_from: abcd workspace: credentials: credentials.yaml formats: From 043867203333a53318b4c34391de8786134e19e1 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 14:29:01 +0000 Subject: [PATCH 2/8] update forked_from to a map --- packages/project/src/Project.ts | 2 +- packages/project/src/Workflow.ts | 8 ++++---- packages/project/test/parse/from-fs.test.ts | 8 +++++--- packages/project/test/util/config.test.ts | 7 +++++-- 4 files changed, 15 insertions(+), 10 deletions(-) diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index b3aeb1900..8bb9f7283 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -31,7 +31,7 @@ type UUIDMap = { type CLIMeta = { 
version?: number; alias?: string; - forked_from?: string; + forked_from?: Record; }; export class Project { diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index e1066f981..9b649cc48 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -71,6 +71,10 @@ class Workflow { this.workflow.start = s; } + get history() { + return this.workflow.history ?? []; + } + _buildIndex() { for (const step of this.workflow.steps) { const s = step as any; @@ -191,10 +195,6 @@ class Workflow { this.workflow.history?.push(versionHash); } - get history() { - return this.workflow.history ?? []; - } - // return true if the current workflow can be merged into the target workflow without losing any changes canMergeInto(target: Workflow) { const thisHistory = diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index 87adc29cc..ed8115d35 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -140,12 +140,14 @@ test.serial( } ); -test.serial.only('should track forked_from', async (t) => { +test.serial('should track forked_from', async (t) => { mockFile('/ws/openfn.yaml', { workspace: buildConfig(), project: { uuid: '', - forked_from: 'abcd', + forked_from: { + w1: 'abcd', + }, }, }); @@ -164,5 +166,5 @@ test.serial.only('should track forked_from', async (t) => { const project = await parseProject({ root: '/ws' }); - t.is(project.cli.forked_from, 'abcd'); + t.deepEqual(project.cli.forked_from, { w1: 'abcd' }); }); diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index 2c2de1609..0a1e123c4 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -29,7 +29,8 @@ project: env: dev inserted_at: 2025-10-21T17:10:57Z updated_at: 2025-10-21T17:10:57Z - forked_from: abcd + forked_from: + w1: abcd `; const result = 
loadWorkspaceFile(yaml); @@ -52,7 +53,9 @@ project: env: 'dev', inserted_at: '2025-10-21T17:10:57Z', updated_at: '2025-10-21T17:10:57Z', - forked_from: 'abcd', + forked_from: { + w1: 'abcd', + }, }); }); From 132e15b7f4e5e27c82b29060c89b4ecaacf7ba72 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 14:56:18 +0000 Subject: [PATCH 3/8] ensure history serializes --- packages/project/src/Workflow.ts | 4 ++-- packages/project/test/fixtures/sample-v2-project.ts | 6 ++++-- packages/project/test/parse/from-project.test.ts | 7 +++++-- packages/project/test/serialize/to-project.test.ts | 3 +++ 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index 9b649cc48..07b976a0e 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -27,8 +27,8 @@ class Workflow { this.workflow = clone(workflow); - // history needs to be on workflow object. - this.workflow.history = workflow.history?.length ? workflow.history : []; + // history needs to be on workflow object + this.workflow.history = workflow.history ?? 
[]; const { id, diff --git a/packages/project/test/fixtures/sample-v2-project.ts b/packages/project/test/fixtures/sample-v2-project.ts index b8202ecf9..4029eb90f 100644 --- a/packages/project/test/fixtures/sample-v2-project.ts +++ b/packages/project/test/fixtures/sample-v2-project.ts @@ -33,7 +33,7 @@ export const json: SerializedProject = { name: 'Workflow', id: 'workflow', openfn: { uuid: 1 }, - history: [], + history: ['a', 'b'], start: 'trigger', }, ], @@ -72,7 +72,9 @@ workflows: id: workflow openfn: uuid: 1 - history: [] + history: + - a + - b start: trigger sandbox: parentId: abcd diff --git a/packages/project/test/parse/from-project.test.ts b/packages/project/test/parse/from-project.test.ts index 6a328b1bb..c41b4dca6 100644 --- a/packages/project/test/parse/from-project.test.ts +++ b/packages/project/test/parse/from-project.test.ts @@ -26,6 +26,9 @@ workflows: lock_version: 1 deleted_at: null concurrency: null + version_history: + - a + - b jobs: transform-data: name: Transform data @@ -93,7 +96,7 @@ test('import from a v2 project as JSON', async (t) => { openfn: { uuid: 1, }, - history: [], + history: ['a', 'b'], start: 'trigger', steps: [ { @@ -152,7 +155,7 @@ test('import from a v2 project as YAML', async (t) => { uuid: 1, }, start: 'trigger', - history: [], + history: ['a', 'b'], steps: [ { name: 'b', diff --git a/packages/project/test/serialize/to-project.test.ts b/packages/project/test/serialize/to-project.test.ts index 76eeced78..ea4f1cba5 100644 --- a/packages/project/test/serialize/to-project.test.ts +++ b/packages/project/test/serialize/to-project.test.ts @@ -37,6 +37,9 @@ const createProject = (props: Partial = {}) => { // hack delete proj.workflows[0].steps[0].name; proj.workflows[0].start = 'trigger'; + + // add some history + proj.workflows[0].workflow.history = ['a', 'b']; return proj; }; From 1703deb3eba6eadce209812c05fb9ca4cfa29e56 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 15:44:12 +0000 Subject: [PATCH 4/8] only 
include forked_from if it has values --- packages/project/src/util/config.ts | 2 +- packages/project/test/util/config.test.ts | 37 +++++++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/project/src/util/config.ts b/packages/project/src/util/config.ts index e30937294..17bd59ecc 100644 --- a/packages/project/src/util/config.ts +++ b/packages/project/src/util/config.ts @@ -31,7 +31,7 @@ export const extractConfig = (source: Project, format?: 'yaml' | 'json') => { project.name = source.name; } - if (source.cli.forked_from) { + if (source.cli.forked_from && Object.keys(source.cli.forked_from).length) { project.forked_from = source.cli.forked_from; } diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index 0a1e123c4..02cdb5ffd 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -198,6 +198,43 @@ workspace: ); }); +test("exclude forked_from if it's not set", (t) => { + const proj = new Project( + { + id: 'my-project', + name: 'My Project', + openfn: { + uuid: 1234, + }, + cli: {}, + }, + { + formats: { + openfn: 'yaml', + }, + } + ); + const result = extractConfig(proj); + t.is(result.path, 'openfn.yaml'), + t.deepEqual( + result.content, + `project: + uuid: 1234 + id: my-project + name: My Project +workspace: + credentials: credentials.yaml + formats: + openfn: yaml + project: yaml + workflow: yaml + dirs: + projects: .projects + workflows: workflows +` + ); +}); + test.todo('generate openfn.json'); test('include project name', (t) => { From 1601edc2fe7d7ebbf4ac0bec1514efc26b646672 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 15:48:31 +0000 Subject: [PATCH 5/8] add forked_from on checkkout --- packages/cli/src/projects/checkout.ts | 11 +++++++++ packages/cli/test/projects/checkout.test.ts | 26 ++++++++++++++++++++- packages/lexicon/core.d.ts | 1 + 3 files changed, 37 insertions(+), 1 deletion(-) diff --git 
a/packages/cli/src/projects/checkout.ts b/packages/cli/src/projects/checkout.ts index 33369e34c..467278600 100644 --- a/packages/cli/src/projects/checkout.ts +++ b/packages/cli/src/projects/checkout.ts @@ -69,6 +69,17 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { await tidyWorkflowDir(currentProject!, switchProject); } + // write the forked from map + switchProject.cli.forked_from = switchProject.workflows.reduce( + (obj: any, wf) => { + if (wf.history.length) { + obj[wf.id] = wf.history.at(-1); + } + return obj; + }, + {} + ); + // expand project into directory const files: any = switchProject.serialize('fs'); for (const f in files) { diff --git a/packages/cli/test/projects/checkout.test.ts b/packages/cli/test/projects/checkout.test.ts index 44fcb0fa3..fac0c1e29 100644 --- a/packages/cli/test/projects/checkout.test.ts +++ b/packages/cli/test/projects/checkout.test.ts @@ -3,7 +3,7 @@ import { createMockLogger } from '@openfn/logger'; import { handler as checkoutHandler } from '../../src/projects/checkout'; import mock from 'mock-fs'; import fs from 'fs'; -import { jsonToYaml, Workspace } from '@openfn/project'; +import { jsonToYaml, Workspace, yamlToJson } from '@openfn/project'; test.beforeEach(() => { mock({ @@ -28,6 +28,7 @@ test.beforeEach(() => { { name: 'simple-workflow', id: 'wf-id', + history: ['a'], jobs: [ { name: 'Transform data to FHIR standard', @@ -56,6 +57,7 @@ test.beforeEach(() => { { name: 'another-workflow', id: 'another-id', + history: ['b'], jobs: [ { name: 'Transform data to FHIR standard', @@ -83,6 +85,7 @@ test.beforeEach(() => { }, ], }), + // TODO this is actually a v1 state file for some reason, which is wierd '/ws/.projects/project@app.openfn.org.yaml': jsonToYaml({ id: '', name: 'My Project', @@ -90,6 +93,7 @@ test.beforeEach(() => { { name: 'simple-workflow-main', id: 'wf-id-main', + version_history: ['a'], jobs: [ { name: 'Transform data to FHIR standard', @@ -118,6 +122,7 @@ test.beforeEach(() => { { 
name: 'another-workflow-main', id: 'another-id', + version_history: ['b'], jobs: [ { name: 'Transform data to FHIR standard', @@ -217,6 +222,25 @@ test.serial('checkout: same id as active', async (t) => { ); }); +test.serial( + 'checkout: writes forked_from based on version history', + async (t) => { + const bcheckout = new Workspace('/ws'); + t.is(bcheckout.activeProject!.id, 'my-project'); + + await checkoutHandler( + { command: 'project-checkout', project: 'my-project', workspace: '/ws' }, + logger + ); + + const openfn = yamlToJson(fs.readFileSync('/ws/openfn.yaml', 'utf8')); + t.deepEqual(openfn.project.forked_from, { + 'simple-workflow-main': 'a', + 'another-workflow-main': 'b', + }); + } +); + test.serial('checkout: switching to and back between projects', async (t) => { // before checkout. my-project is active and expanded const bcheckout = new Workspace('/ws'); diff --git a/packages/lexicon/core.d.ts b/packages/lexicon/core.d.ts index 187da091f..07e9b6490 100644 --- a/packages/lexicon/core.d.ts +++ b/packages/lexicon/core.d.ts @@ -118,6 +118,7 @@ export interface ProjectMeta { env?: string; inserted_at?: string; updated_at?: string; + forked_from?: Record; [key: string]: unknown; } From f3fd21e4d9543bcca94f49b35bb02648b66fad95 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 16:40:24 +0000 Subject: [PATCH 6/8] project: omit forked_from from openfn object when loading from fs --- packages/project/src/parse/from-fs.ts | 2 +- packages/project/test/parse/from-fs.test.ts | 1 + .../test/serialize/to-app-state.test.ts | 37 +++++++++++++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) diff --git a/packages/project/src/parse/from-fs.ts b/packages/project/src/parse/from-fs.ts index 380641b78..1ced41426 100644 --- a/packages/project/src/parse/from-fs.ts +++ b/packages/project/src/parse/from-fs.ts @@ -34,7 +34,7 @@ export const parseProject = async (options: FromFsConfig) => { const proj: any = { id: context.project?.id, name: 
context.project?.name, - openfn: omit(context.project, ['id']), + openfn: omit(context.project, ['id', 'forked_from']), config: config, workflows: [], cli: omitNil({ diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index ed8115d35..a3f8f7639 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -167,4 +167,5 @@ test.serial('should track forked_from', async (t) => { const project = await parseProject({ root: '/ws' }); t.deepEqual(project.cli.forked_from, { w1: 'abcd' }); + t.falsy(project.openfn!.forked_from); }); diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index de6e4117e..b6a7fa570 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -262,6 +262,43 @@ test('should handle credentials', (t) => { t.is(step.project_credential_id, 'p'); }); +test('should ignore forked_from', (t) => { + const data = { + id: 'my-project', + workflows: [ + { + id: 'wf', + name: 'wf', + steps: [ + { + id: 'trigger', + type: 'webhook', + next: { + step: {}, + }, + }, + { + id: 'step', + expression: '.', + configuration: 'p', + openfn: { + keychain_credential_id: 'k', + }, + }, + ], + }, + ], + cli: { + forked_from: { wf: 'a' }, + }, + }; + const proj = new Project(data); + console.log(proj); + const state = toAppState(proj, { format: 'json' }); + console.log(state); + t.falsy((state as any).forked_from); +}); + test('should ignore workflow start keys', (t) => { const data = { id: 'my-project', From 45c3e1028053f83fa134e9d503bcd8b182bfa3a9 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 17:35:35 +0000 Subject: [PATCH 7/8] deploy changes against local server --- packages/cli/src/projects/checkout.ts | 13 +--- packages/cli/src/projects/deploy.ts | 77 +++++++++++++++++++---- packages/cli/src/projects/fetch.ts | 2 + 
packages/cli/src/projects/util.ts | 11 ++++ packages/cli/test/projects/deploy.test.ts | 51 +++++++++++++++ packages/project/src/Project.ts | 10 ++- 6 files changed, 140 insertions(+), 24 deletions(-) diff --git a/packages/cli/src/projects/checkout.ts b/packages/cli/src/projects/checkout.ts index 467278600..1bb2886db 100644 --- a/packages/cli/src/projects/checkout.ts +++ b/packages/cli/src/projects/checkout.ts @@ -10,7 +10,7 @@ import * as o from '../options'; import * as po from './options'; import type { Opts } from './options'; -import { tidyWorkflowDir } from './util'; +import { tidyWorkflowDir, updateForkedFrom } from './util'; export type CheckoutOptions = Pick< Opts, @@ -70,17 +70,10 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { } // write the forked from map - switchProject.cli.forked_from = switchProject.workflows.reduce( - (obj: any, wf) => { - if (wf.history.length) { - obj[wf.id] = wf.history.at(-1); - } - return obj; - }, - {} - ); + updateForkedFrom(switchProject); // expand project into directory + // TODO: only write files with a diff const files: any = switchProject.serialize('fs'); for (const f in files) { if (files[f]) { diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 25fa87a93..42bc34e8f 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -1,6 +1,8 @@ import yargs from 'yargs'; import Project from '@openfn/project'; import c from 'chalk'; +import { writeFile } from 'node:fs/promises'; +import path from 'node:path'; import * as o from '../options'; import * as o2 from './options'; @@ -10,6 +12,7 @@ import { fetchProject, serialize, getSerializePath, + updateForkedFrom, } from './util'; import { build, ensure } from '../util/command-builders'; @@ -64,6 +67,34 @@ export const command: yargs.CommandModule = { handler: ensure('project-deploy', options), }; +export const hasRemoteDiverged = ( + local: Project, + remote: Project +): 
string[] | null => { + let diverged: string[] | null = null; + + const refs = local.cli.forked_from ?? {}; + + // for each workflow, check that the local fetched_from is the head of the remote history + for (const wf of local.workflows) { + if (wf.id in refs) { + const forkedVersion = refs[wf.id]; + const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1); + if (forkedVersion !== remoteVersion) { + diverged ??= []; + diverged.push(wf.id); + } + } else { + // TODO what if there's no forked from for this workflow? + // Do we assume divergence because we don't know? Do we warn? + } + } + + // TODO what if a workflow is removed locally? + + return diverged; +}; + export async function handler(options: DeployOptions, logger: Logger) { logger.warn( 'WARNING: the project deploy command is in BETA and may not be stable. Use cautiously on production projects.' @@ -132,32 +163,41 @@ Pass --force to override this error and deploy anyway.`); // Skip divergence testing if the remote has no history in its workflows // (this will only happen on older versions of lightning) + // TODO now maybe skip if there's no forked_from const skipVersionTest = - localProject.workflows.find((wf) => wf.history.length === 0) || + // localProject.workflows.find((wf) => wf.history.length === 0) || remoteProject.workflows.find((wf) => wf.history.length === 0); + // localProject.workflows.forEach((w) => console.log(w.history)); + if (skipVersionTest) { logger.warn( 'Skipping compatibility check as no local version history detected' ); logger.warn('Pushing these changes may overrite changes made to the app'); - } else if (!localProject.canMergeInto(remoteProject!)) { - if (!options.force) { - logger.error(`Error: Projects have diverged! 
+ } else { + const divergentWorkflows = hasRemoteDiverged(localProject, remoteProject!); + if (divergentWorkflows) { + logger.warn( + `The following workflows have diverged: ${divergentWorkflows}` + ); + if (!options.force) { + logger.error(`Error: Projects have diverged! -The remote project has been edited since the local project was branched. Changes may be lost. + The remote project has been edited since the local project was branched. Changes may be lost. -Pass --force to override this error and deploy anyway.`); - return; + Pass --force to override this error and deploy anyway.`); + return; + } else { + logger.warn( + 'Remote project has not diverged from local project! Pushing anyway as -f passed' + ); + } } else { - logger.warn( - 'Remote project has not diverged from local project! Pushing anyway as -f passed' + logger.info( + 'Remote project has not diverged from local project - it is safe to deploy 🎉' ); } - } else { - logger.info( - 'Remote project has not diverged from local project - it is safe to deploy 🎉' - ); } logger.info('Merging changes into remote project'); @@ -180,6 +220,8 @@ Pass --force to override this error and deploy anyway.`); // TODO not totally sold on endpoint handling right now config.endpoint ??= localProject.openfn?.endpoint!; + // TODO: I want to report diff HERE, after the merged state and stuff has been built + if (options.dryRun) { logger.always('dryRun option set: skipping upload step'); } else { @@ -218,6 +260,14 @@ Pass --force to override this error and deploy anyway.`); merged.config ); + // TODO why isn't this right? 
oh, because the output path isn't quite right + updateForkedFrom(finalProject); + const configData = finalProject.generateConfig(); + await writeFile( + path.resolve(options.workspace, configData.path), + configData.content + ); + const finalOutputPath = getSerializePath(localProject, options.workspace!); logger.debug('Updating local project at ', finalOutputPath); await serialize(finalProject, finalOutputPath); diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index 37cf67373..d1d618c79 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -331,6 +331,8 @@ To ignore this error and override the local file, pass --force (-f) options.force || // The user forced the checkout !hasAnyHistory; // the remote project has no history (can happen in old apps) + // TODO temporarily force skip + // TODO canMergeInto needs to return a reason if (!skipVersionCheck && !remoteProject.canMergeInto(localProject!)) { // TODO allow rename throw new Error('Error! 
An incompatible project exists at this location'); diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index be6f8b1a0..d367be7a2 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -216,3 +216,14 @@ export async function tidyWorkflowDir( // Return and sort for testing return toRemove.sort(); } + +export const updateForkedFrom = (proj: Project) => { + proj.cli.forked_from = proj.workflows.reduce((obj: any, wf) => { + if (wf.history.length) { + obj[wf.id] = wf.history.at(-1); + } + return obj; + }, {}); + + return proj; +}; diff --git a/packages/cli/test/projects/deploy.test.ts b/packages/cli/test/projects/deploy.test.ts index 903dabf52..47ddf0d45 100644 --- a/packages/cli/test/projects/deploy.test.ts +++ b/packages/cli/test/projects/deploy.test.ts @@ -10,6 +10,7 @@ import createLightningServer, { import { handler as deployHandler, + hasRemoteDiverged, reportDiff, } from '../../src/projects/deploy'; import { myProject_yaml, myProject_v1 } from './fixtures'; @@ -278,3 +279,53 @@ test.serial.skip( t.truthy(expectedLog); } ); + +test('hasRemoteDiverged: 1 workflow, no diverged', (t) => { + const local = { + workflows: [ + { + id: 'w', + }, + ], + cli: { + forked_from: { + w: 'a', + }, + }, + } as unknown as Project; + + const remote = { + getWorkflow: () => ({ + id: 'w', + history: ['a'], + }), + } as unknown as Project; + + const diverged = hasRemoteDiverged(local, remote); + t.falsy(diverged); +}); + +test('hasRemoteDiverged: 1 workflow, 1 diverged', (t) => { + const local = { + workflows: [ + { + id: 'w', + }, + ], + cli: { + forked_from: { + w: 'w', + }, + }, + } as unknown as Project; + + const remote = { + getWorkflow: () => ({ + id: 'w', + history: ['a', 'b'], + }), + } as unknown as Project; + + const diverged = hasRemoteDiverged(local, remote); + t.deepEqual(diverged, ['w']); +}); diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index 8bb9f7283..e4800fd0b 100644 
--- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -12,7 +12,7 @@ import { getUuidForEdge, getUuidForStep } from './util/uuid'; import { merge, MergeProjectOptions } from './merge/merge-project'; import { diff as projectDiff } from './util/project-diff'; import { Workspace } from './Workspace'; -import { buildConfig } from './util/config'; +import { buildConfig, extractConfig } from './util/config'; import { Provisioner } from '@openfn/lexicon/lightning'; import { SandboxMeta, UUID, WorkspaceConfig } from '@openfn/lexicon'; @@ -256,6 +256,14 @@ export class Project { } return true; } + + /** + * Generates the contents of the openfn.yaml file, + * plus its file path + */ + generateConfig() { + return extractConfig(this); + } } export default Project; From be505e65bf5a164c25ab3176e979b18c279c48d9 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 29 Jan 2026 09:11:06 +0100 Subject: [PATCH 8/8] Update version hash (#1238) version hash now matches Lightning --- integration-tests/cli/test/sync.test.ts | 9 +- packages/cli/CHANGELOG.md | 2 +- packages/cli/src/projects/deploy.ts | 4 +- packages/cli/src/projects/fetch.ts | 8 +- packages/cli/test/projects/fetch.test.ts | 2 +- packages/cli/test/projects/fixtures.ts | 4 +- packages/project/README.md | 2 + packages/project/src/Workflow.ts | 15 +- packages/project/src/gen/generator.ts | 22 +- packages/project/src/gen/workflow.ohm | 2 +- packages/project/src/parse/from-app-state.ts | 2 +- .../project/src/serialize/to-app-state.ts | 11 +- packages/project/src/util/version.ts | 145 +++++-- .../test/fixtures/sample-v2-project.ts | 4 +- packages/project/test/gen/generator.test.ts | 53 ++- .../project/test/parse/from-app-state.test.ts | 34 +- .../test/serialize/to-app-state.test.ts | 39 +- .../test/util/version-workflow.test.ts | 388 +++++++++++++++++- 18 files changed, 662 insertions(+), 84 deletions(-) diff --git a/integration-tests/cli/test/sync.test.ts b/integration-tests/cli/test/sync.test.ts index 
13e407b5a..38179a93e 100644 --- a/integration-tests/cli/test/sync.test.ts +++ b/integration-tests/cli/test/sync.test.ts @@ -30,7 +30,10 @@ const initWorkspace = (t: any) => { }; }; -const gen = (name = 'patients', workflows = ['trigger-job(body="fn()")']) => { +const gen = ( + name = 'patients', + workflows = ['trigger-job(expression="fn()")'] +) => { // generate a project const project = generateProject(name, workflows, { openfnUuid: true, @@ -44,7 +47,7 @@ test('fetch a new project', async (t) => { const { workspace, read } = initWorkspace(t); const project = gen(); - await run( + const { stdout } = await run( `openfn project fetch \ --workspace ${workspace} \ --endpoint ${endpoint} \ @@ -239,7 +242,7 @@ test('pull an update to project', async (t) => { test('checkout by alias', async (t) => { const { workspace, read } = initWorkspace(t); const main = gen(); - const staging = gen('patients-staging', ['trigger-job(body="fn(x)")']); + const staging = gen('patients-staging', ['trigger-job(expression="fn(x)")']); await run( `openfn project fetch \ diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 451eefcab..ad44a9741 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -4,7 +4,7 @@ ### Minor Changes -- 8b9f402: fetch: allow state files to be writtem to JSON with --format +- 8b9f402: fetch: allow state files to be written to JSON with --format ### Patch Changes diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 42bc34e8f..319653f45 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -153,6 +153,8 @@ Pass --force to override this error and deploy anyway.`); return false; } + // this fails now because the local project has no UUIDs + // But should that matter ,actually? 
const diffs = reportDiff(remoteProject!, localProject, logger); if (!diffs.length) { logger.success('Nothing to deploy'); @@ -264,7 +266,7 @@ Pass --force to override this error and deploy anyway.`); updateForkedFrom(finalProject); const configData = finalProject.generateConfig(); await writeFile( - path.resolve(options.workspace, configData.path), + path.resolve(options.workspace!, configData.path), configData.content ); diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index d1d618c79..8ca84ab40 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -335,7 +335,13 @@ To ignore this error and override the local file, pass --force (-f) // TODO canMergeInto needs to return a reason if (!skipVersionCheck && !remoteProject.canMergeInto(localProject!)) { // TODO allow rename - throw new Error('Error! An incompatible project exists at this location'); + const e = new Error( + `Error! An incompatible project exists at this location.` + ); + + delete e.stack; + + throw e; } } } diff --git a/packages/cli/test/projects/fetch.test.ts b/packages/cli/test/projects/fetch.test.ts index 301d656e7..398f9e58a 100644 --- a/packages/cli/test/projects/fetch.test.ts +++ b/packages/cli/test/projects/fetch.test.ts @@ -458,7 +458,7 @@ test.serial( lock_version: 1, }, id: 'my-workflow', - history: ['cli:02582f3bb088'], + history: ['cli:ba19e179317f'], }, ], }; diff --git a/packages/cli/test/projects/fixtures.ts b/packages/cli/test/projects/fixtures.ts index f3d8b9eb9..4b77482d0 100644 --- a/packages/cli/test/projects/fixtures.ts +++ b/packages/cli/test/projects/fixtures.ts @@ -43,7 +43,7 @@ export const myProject_v1: Provisioner.Project = { lock_version: 1, deleted_at: null, version_history: [ - 'cli:02582f3bb088', // alterstate + 'cli:ba19e179317f', // alterstate ], }, }, @@ -94,7 +94,7 @@ workflows: openfn: uuid: a9a3adef-b394-4405-814d-3ac4323f4b4b history: - - cli:02582f3bb088 + - cli:ba19e179317f openfn: uuid: 
72ca3eb0-042c-47a0-a2a1-a545ed4a8406 inserted_at: 2025-04-23T11:19:32Z diff --git a/packages/project/README.md b/packages/project/README.md index 6d0d6d33e..1150b254b 100644 --- a/packages/project/README.md +++ b/packages/project/README.md @@ -91,3 +91,5 @@ Reference: parent(propName=propValue,x=y)-child a-b # can comment here to ``` + +Use special names `webhook` and `cron` to create trigger nodes (when converting into app state, the difference between a step and a trigger becomes important). diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index 07b976a0e..2d323bfbb 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -1,6 +1,6 @@ import * as l from '@openfn/lexicon'; import slugify from './util/slugify'; -import { generateHash } from './util/version'; +import { generateHash, HashOptions } from './util/version'; const clone = (obj: any) => JSON.parse(JSON.stringify(obj)); @@ -113,7 +113,14 @@ class Workflow { // Get properties on any step or edge by id or uuid get(id: string): WithMeta { - const item = this.index.edges[id] || this.index.steps[id]; + // first check if we're passed a UUID - in which case we map it to an id + if (id in this.index.id) { + id = this.index.id[id]; + } + + // now look up the item proper + let item = this.index.edges[id] || this.index.steps[id]; + if (!item) { throw new Error(`step/edge with id "${id}" does not exist in workflow`); } @@ -187,8 +194,8 @@ class Workflow { return this.index.uuid; } - getVersionHash() { - return generateHash(this); + getVersionHash(options?: HashOptions) { + return generateHash(this, options); } pushHistory(versionHash: string) { diff --git a/packages/project/src/gen/generator.ts b/packages/project/src/gen/generator.ts index cf41384a2..9cb09d930 100644 --- a/packages/project/src/gen/generator.ts +++ b/packages/project/src/gen/generator.ts @@ -57,12 +57,19 @@ const initOperations = (options: any = {}) => { if (!nodes[name]) { const id = 
slugify(name); nodes[name] = { - name: name, id, - openfn: { - uuid: uuid(id), - }, }; + if (/^(cron|webhook)$/.test(name)) { + // This sets up the node as a trigger + nodes[name].type = name; + } else { + nodes[name].name = name; + } + if (options.openfnUuid !== false) { + nodes[name].openfn = { + uuid: uuid(id), + }; + } } return nodes[name]; }; @@ -107,11 +114,14 @@ const initOperations = (options: any = {}) => { const n1 = parent.buildWorkflow(); const n2 = child.buildWorkflow(); const e = edge.buildWorkflow(); - e.openfn.uuid = uuid(`${n1.id}-${n2.id}`); + + if (options.openfnUuid !== false) { + e.openfn.uuid = uuid(`${n1.id}-${n2.id}`); + } n1.next ??= {}; - n1.next[n2.name] = e; + n1.next[n2.id ?? slugify(n2.name)] = e; return [n1, n2]; }, diff --git a/packages/project/src/gen/workflow.ohm b/packages/project/src/gen/workflow.ohm index fe2ee2502..a8ab5efa0 100644 --- a/packages/project/src/gen/workflow.ohm +++ b/packages/project/src/gen/workflow.ohm @@ -29,7 +29,7 @@ Workflow { prop = (alnum | "-" | "_")+ "=" propValue - propValue = quoted_prop | bool | int | alnum+ + propValue = quoted_prop | bool | int | alnum+ // TODO we only parse numbers as positive ints right now // fine for tests diff --git a/packages/project/src/parse/from-app-state.ts b/packages/project/src/parse/from-app-state.ts index ef3c355de..2587c8725 100644 --- a/packages/project/src/parse/from-app-state.ts +++ b/packages/project/src/parse/from-app-state.ts @@ -77,7 +77,7 @@ export const mapEdge = (edge: Provisioner.Edge) => { } if (edge.condition_label) { - e.name = edge.condition_label; + e.label = edge.condition_label; } // Do this last so that it serializes last diff --git a/packages/project/src/serialize/to-app-state.ts b/packages/project/src/serialize/to-app-state.ts index 2eb4cb8f4..8d428eb9a 100644 --- a/packages/project/src/serialize/to-app-state.ts +++ b/packages/project/src/serialize/to-app-state.ts @@ -57,7 +57,7 @@ export default function ( return state; } -const mapWorkflow = 
(workflow: Workflow) => { +export const mapWorkflow = (workflow: Workflow) => { if (workflow instanceof Workflow) { // @ts-ignore workflow = workflow.toJSON(); @@ -96,10 +96,10 @@ const mapWorkflow = (workflow: Workflow) => { let isTrigger = false; let node: Provisioner.Job | Provisioner.Trigger; - if (s.type && !s.expression) { + if (s.type) { isTrigger = true; node = { - type: s.type, + type: s.type ?? 'webhook', // this is mostly for tests ...renameKeys(s.openfn, { uuid: 'id' }), } as Provisioner.Trigger; wfState.triggers[node.type] = node; @@ -147,6 +147,11 @@ const mapWorkflow = (workflow: Workflow) => { e.source_job_id = node.id; } + if (rules.label) { + // TODO needs unit test + e.condition_label = rules.label; + } + if (rules.condition) { if (typeof rules.condition === 'boolean') { e.condition_type = rules.condition ? 'always' : 'never'; diff --git a/packages/project/src/util/version.ts b/packages/project/src/util/version.ts index ee73a560d..69ed2e054 100644 --- a/packages/project/src/util/version.ts +++ b/packages/project/src/util/version.ts @@ -1,73 +1,140 @@ -import { ConditionalStepEdge, Job, Trigger, Workflow } from '@openfn/lexicon'; import crypto from 'node:crypto'; +import { get } from 'lodash-es'; +import { mapWorkflow } from '../serialize/to-app-state'; +import Workflow from '../Workflow'; const SHORT_HASH_LENGTH = 12; -export const project = () => {}; - function isDefined(v: any) { return v !== undefined && v !== null; } -export const generateHash = (workflow: Workflow, source = 'cli') => { +export const parse = (version: string) => { + const [source, hash] = version.split(':'); + return { source, hash }; +}; + +export type HashOptions = { + source?: string; + sha?: boolean; +}; + +export const generateHash = ( + workflow: Workflow, + { source = 'cli', sha = true }: HashOptions = {} +) => { const parts: string[] = []; + // convert the workflow into a v1 state object + // this means we can match keys with lightning + // and everything gets cleaner 
+ const wfState = mapWorkflow(workflow); + // These are the keys we hash against - const wfKeys = ['name', 'credentials'].sort() as Array; + const wfKeys = ['name', 'positions'].sort(); + + // These keys are manually sorted to match lightning equivalents const stepKeys = [ 'name', - 'adaptors', - 'adaptor', // there's both adaptor & adaptors key in steps somehow - 'expression', - 'configuration', // assumes a string credential id - 'expression', - - // TODO need to model trigger types in this, which I think are currently ignored - ].sort() as Array; + 'adaptor', + 'keychain_credential_id', + 'project_credential_id', + 'body', + ].sort(); + + const triggerKeys = ['type', 'cron_expression', 'enabled'].sort(); + const edgeKeys = [ - 'condition', + 'name', // generated 'label', - 'disabled', // This feels more like an option - should be excluded? + 'condition_type', + 'condition_label', + 'condition_expression', + 'enabled', ].sort(); wfKeys.forEach((key) => { - if (isDefined(workflow[key])) { - parts.push(key, serializeValue(workflow[key])); + const value = get(workflow, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); } }); - const steps = (workflow.steps || []).slice().sort((a, b) => { - const aName = a.name ?? ''; - const bName = b.name ?? ''; - return aName.localeCompare(bName); + // do the trigger first + for (const triggerId in wfState.triggers) { + const trigger = wfState.triggers[triggerId]; + triggerKeys.forEach((key) => { + const value = get(trigger, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); + } + }); + } + + // Now do all steps + const steps = Object.values(wfState.jobs).sort((a, b) => { + const aName = a.name ?? a.id ?? ''; + const bName = b.name ?? b.id ?? 
''; + return aName.toLowerCase().localeCompare(bName.toLowerCase()); }); + for (const step of steps) { stepKeys.forEach((key) => { - if (isDefined((step as any)[key])) { - parts.push(key, serializeValue((step as any)[key])); + const value = get(step, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); } }); + } + + // this is annoying + const uuidMap: any = {}; + for (const t in wfState.triggers) { + const uuid = wfState.triggers[t].id; + uuidMap[uuid] = wfState.triggers[t]; + // set the type as the trigger name, to get the right value in the map + (wfState.triggers[t] as any).name = wfState.triggers[t].type; + } + for (const j in wfState.jobs) { + const uuid = wfState.jobs[j].id; + uuidMap[uuid] = wfState.jobs[j]; + } + + const edges = Object.values(wfState.edges) + .map((edge) => { + const source = uuidMap[edge.source_trigger_id! ?? edge.source_job_id]; + const target = uuidMap[edge.target_job_id]; + + (edge as any).name = `${source.name ?? source.id}-${ + target.name ?? target.id + }`; + return edge; + }) + .sort((a: any, b: any) => { + // sort edges by name + // where name is sourcename-target name + const aName = a.name ?? ''; + const bName = b.name ?? 
''; + return aName.localeCompare(bName); + }); - if (step.next && Array.isArray(step.next)) { - const steps = step.next.slice() as Array; - steps.slice().sort((a: ConditionalStepEdge, b: ConditionalStepEdge) => { - const aLabel = a.label || ''; - const bLabel = b.label || ''; - return aLabel.localeCompare(bLabel); - }); - for (const edge of step.next) { - edgeKeys.forEach((key) => { - if (isDefined(edge[key])) { - parts.push(key, serializeValue(edge[key])); - } - }); + // now do edges + for (const edge of edges) { + edgeKeys.forEach((key) => { + const value = get(edge, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); } - } + }); } const str = parts.join(''); - const hash = crypto.createHash('sha256').update(str).digest('hex'); - return `${source}:${hash.substring(0, SHORT_HASH_LENGTH)}`; + if (sha) { + const hash = crypto.createHash('sha256').update(str).digest('hex'); + return `${source}:${hash.substring(0, SHORT_HASH_LENGTH)}`; + } else { + return `${source}:${str}`; + } }; function serializeValue(val: unknown) { diff --git a/packages/project/test/fixtures/sample-v2-project.ts b/packages/project/test/fixtures/sample-v2-project.ts index 4029eb90f..6b9cbeb8d 100644 --- a/packages/project/test/fixtures/sample-v2-project.ts +++ b/packages/project/test/fixtures/sample-v2-project.ts @@ -53,8 +53,8 @@ options: color: red workflows: - steps: - - name: b - id: b + - id: b + name: b openfn: uuid: 3 project_credential_id: x diff --git a/packages/project/test/gen/generator.test.ts b/packages/project/test/gen/generator.test.ts index 1ad5c2319..94e522b96 100644 --- a/packages/project/test/gen/generator.test.ts +++ b/packages/project/test/gen/generator.test.ts @@ -4,15 +4,17 @@ import { generateWorkflow, generateProject } from '../../src/gen/generator'; import * as fixtures from './fixtures'; import Workflow from '../../src/Workflow'; +const LOG_OUTPUTS = false; + // Generate a workflow with a fixed UUID seed // Pass test context to log the result -const gen 
= (src: string, t: ExecutionContext, options = {}) => { +const gen = (src: string, t?: ExecutionContext, options = {}) => { const result = generateWorkflow(src, { uuidSeed: 1, printErrors: false, ...options, }); - if (t) { + if (LOG_OUTPUTS && t) { t.log(JSON.stringify(result.toJSON(), null, 2)); } return result.toJSON(); @@ -24,6 +26,24 @@ test('it should generate a simple workflow', (t) => { t.deepEqual(result, fixtures.ab); }); +test('it should generate a simple workflow without UUIDs', (t) => { + const result = gen('a-b', t, { + openfnUuid: false, + }); + + t.log(JSON.stringify(result)); + + t.deepEqual(result, { + steps: [ + { name: 'a', id: 'a', next: { b: { openfn: {} } } }, + { name: 'b', id: 'b' }, + ], + name: 'Workflow', + id: 'workflow', + history: [], + }); +}); + test('it should return a Workflow instance', (t) => { const result = generateWorkflow('a-b'); @@ -125,7 +145,6 @@ test('it should generate a workflow with openfn meta', (t) => { a-b`, t ); - t.log(result); t.deepEqual(result.openfn, { lock_version: 123, concurrency: 3, @@ -395,6 +414,34 @@ test('it should generate several node pairs', (t) => { t.deepEqual(result, expected); }); +test('it should generate a cron trigger', (t) => { + const result = generateWorkflow('cron-a', { uuidSeed: 1 }); + + const [trigger, node] = result.steps; + + t.deepEqual(trigger, { + id: 'cron', + type: 'cron', + openfn: { uuid: 1 }, + next: { a: { openfn: { uuid: 3 } } }, + }); + t.deepEqual(node, { id: 'a', openfn: { uuid: 2 }, name: 'a' }); +}); + +test('it should generate a webhook trigger', (t) => { + const result = generateWorkflow('webhook-a', { uuidSeed: 1 }); + + const [trigger, node] = result.steps; + + t.deepEqual(trigger, { + id: 'webhook', + type: 'webhook', + openfn: { uuid: 1 }, + next: { a: { openfn: { uuid: 3 } } }, + }); + t.deepEqual(node, { id: 'a', openfn: { uuid: 2 }, name: 'a' }); +}); + test('it should generate a node with a prop', (t) => { const result = gen('a(expression=y)-b', t); const 
expected = _.cloneDeep(fixtures.ab); diff --git a/packages/project/test/parse/from-app-state.test.ts b/packages/project/test/parse/from-app-state.test.ts index 8b5f138e2..23fc44782 100644 --- a/packages/project/test/parse/from-app-state.test.ts +++ b/packages/project/test/parse/from-app-state.test.ts @@ -138,7 +138,37 @@ test('should create a Project from prov state with a workflow', (t) => { }); }); -test('mapWorkflow: map a simple trigger', (t) => { +test('mapWorkflow: map a cron trigger', (t) => { + const mapped = mapWorkflow({ + id: 'cron', + name: 'w', + deleted_at: null, + triggers: { + cron: { + id: '1234', + type: 'cron', + cron_expression: '0 1 0 0', + enabled: true, + }, + }, + jobs: {}, + edges: {}, + }); + + const [trigger] = mapped.steps; + t.deepEqual(trigger, { + id: 'cron', + type: 'cron', + next: {}, + openfn: { + enabled: true, + uuid: '1234', + cron_expression: '0 1 0 0', + }, + }); +}); + +test('mapWorkflow: map a webhook trigger', (t) => { const mapped = mapWorkflow(state.workflows['my-workflow']); const [trigger] = mapped.steps; @@ -291,7 +321,7 @@ test('mapEdge: map label', (t) => { } as any); t.deepEqual(e, { disabled: true, - name: 'abc', + label: 'abc', }); }); diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index b6a7fa570..3d4c21cfe 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -262,7 +262,7 @@ test('should handle credentials', (t) => { t.is(step.project_credential_id, 'p'); }); -test.only('should ignore forked_from', (t) => { +test('should ignore forked_from', (t) => { const data = { id: 'my-project', workflows: [ @@ -293,9 +293,7 @@ test.only('should ignore forked_from', (t) => { }, }; const proj = new Project(data); - console.log(proj); const state = toAppState(proj, { format: 'json' }); - console.log(state); t.falsy((state as any).forked_form); }); @@ -332,7 +330,40 @@ test('should 
ignore workflow start keys', (t) => { t.falsy(state.workflows['wf'].start); }); -test.todo('handle edge labels'); +test('should handle edge labels', (t) => { + const data = { + id: 'my-project', + workflows: [ + { + id: 'wf', + name: 'wf', + start: 'step', + steps: [ + { + id: 'trigger', + type: 'webhook', + next: { + step: { + label: 'hello', + }, + }, + }, + { + id: 'step', + expression: '.', + configuration: 'p', + openfn: { + keychain_credential_id: 'k', + }, + }, + ], + }, + ], + }; + + const state = toAppState(new Project(data), { format: 'json' }); + t.is(state.workflows.wf.edges['trigger->step'].condition_label, 'hello'); +}); test('serialize steps and trigger in alphabetical order', (t) => { const wf = `@name wf diff --git a/packages/project/test/util/version-workflow.test.ts b/packages/project/test/util/version-workflow.test.ts index fc9b23dae..ffe612a0c 100644 --- a/packages/project/test/util/version-workflow.test.ts +++ b/packages/project/test/util/version-workflow.test.ts @@ -1,10 +1,76 @@ import test from 'ava'; -import { generateHash } from '../../src/util/version'; -import { generateWorkflow } from '../../src'; +import { generateHash, parse } from '../../src/util/version'; +import Project, { generateWorkflow } from '../../src'; -// TODO just caught a bug with both of these - needs to add tests around this -test.todo('include edge label in hash'); -test.todo('include edge expression in hash'); +// this is an actual lightning workflow state, copied verbatim +// todo already out of data as the version will change soon +// next, update this +const example = { + id: '320157d2-260d-4e32-91c0-db935547c263', + name: 'Turtle Power', + edges: [ + { + enabled: true, + id: 'ed3ebfbf-6fa3-4438-b21d-06f7eec216c1', + condition_type: 'always', + source_trigger_id: 'bf10f31a-cf51-45a2-95a4-756d0a25af53', + target_job_id: '4d18c46b-3bb4-4af1-81e2-07f9aee527fc', + }, + { + enabled: true, + id: '253bf2d7-1a01-44c8-8e2e-ccf50de92dff', + condition_type: 'js_expression', 
+ condition_label: 'always tbh', + condition_expression: 'state.data', + source_job_id: '4d18c46b-3bb4-4af1-81e2-07f9aee527fc', + target_job_id: '40b839bd-5ade-414e-8dde-ed3ae77239ea', + }, + ], + version_history: ['app:211291f6e6d5'], + inserted_at: '2025-12-19T15:26:49Z', + jobs: [ + { + id: '4d18c46b-3bb4-4af1-81e2-07f9aee527fc', + name: 'Transform data', + body: 'fri', + adaptor: '@openfn/language-http@7.2.6', + project_credential_id: 'dd409089-5569-4157-8cf6-528ace283348', + }, + { + id: '40b839bd-5ade-414e-8dde-ed3ae77239ea', + name: 'do something', + body: '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + adaptor: '@openfn/language-http@7.2.6', + project_credential_id: null, + }, + ], + triggers: [ + { + enabled: false, + id: 'bf10f31a-cf51-45a2-95a4-756d0a25af53', + type: 'webhook', + }, + ], + updated_at: '2026-01-23T12:08:47Z', + lock_version: 34, + deleted_at: null, + concurrency: null, +}; + +test('match lightning version', async (t) => { + const [expected] = example.version_history; + + // load the project from v1 state + const proj = await Project.from('state', { + workflows: [example], + }); + + const wf = proj.workflows[0]; + const hash = wf.getVersionHash(); + t.log(expected); + t.log(hash); + t.is(parse(hash).hash, parse(expected).hash); +}); test('generate an 12 character version hash for a basic workflow', (t) => { const workflow = generateWorkflow( @@ -15,31 +81,240 @@ test('generate an 12 character version hash for a basic workflow', (t) => { ` ); const hash = workflow.getVersionHash(); - t.is(hash, 'cli:518f491717a7'); + t.is(hash, 'cli:72aed7c5f224'); +}); + +test('ordering: generate version string with no steps', (t) => { + const workflow = generateWorkflow( + ` + @name a + @id some-id + ` + ); + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:a'); +}); + +test('ordering: generate version string with webook trigger and step', (t) => { 
+ const workflow = generateWorkflow( + ` + @name a + @id some-id + trigger(type=webhook)-x(adaptor=http,expression=fn,project_credential_id=abc) + ` + ); + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:awebhookhttpfnxabctruewebhook-x'); +}); + +test('ordering: multiple steps are sorted alphabetically by name', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + z-x + a-x + m-x + ` + ); + // With step keys sorted: adaptor, body, keychain_credential_id, name, project_credential_id + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfamxztruea-xtruem-xtruez-x'); +}); + +test('ordering: step names are sorted case-insensitively', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + Z-x + a-x + B-x + ` + ); + // Steps should appear in order: a, B, x, Z (case-insensitive sort) + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfaBxZtruea-xtrueB-xtrueZ-x'); +}); + +test('ordering: step keys appear in sorted order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-step(project_credential_id=cred,name=step,expression=code,adaptor=http) + ` + ); + // Step keys sorted: adaptor, body, keychain_credential_id, name, project_credential_id + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfahttpcodestepcredtruea-step'); +}); + +test('ordering: multiple edges are sorted by edge name', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + z-a + a-b + m-n + ` + ); + // Edges sorted by "source-target" name: a-b, m-n, z-a + // Each edge has enabled=true and its generated name + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfabmnztruea-btruem-ntruez-a'); +}); + +test('ordering: edge keys appear in sorted order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-(label=lbl,condition=always,disabled=true)-b + ` + ); + // Edge keys sorted: 
condition_expression, condition_label, condition_type, enabled, label, name + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfablblalwaysfalsea-b'); +}); + +test('ordering: trigger keys appear in sorted order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + t(type=cron,cron_expression="* * *",enabled=false)-x(expression=code) + ` + ); + // Trigger keys sorted: cron_expression, enabled, type + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wf* * *falsecroncodextruecron-x'); +}); + +test('ordering: complete workflow with all elements', (t) => { + const workflow = generateWorkflow( + ` + @name complete + @id some-id + trigger(type=webhook)-step2(adaptor=http,expression=fn2,project_credential_id=c2) + step1(adaptor=common,expression=fn1,project_credential_id=c1)-step2 + ` + ); + const hash = workflow.getVersionHash({ sha: false }); + t.is( + hash, + 'cli:completewebhookcommonfn1step1c1httpfn2step2c2truestep1-step2truewebhook-step2' + ); +}); + +test('ordering: multiple edges from same source are sorted by target', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-z + a-m + a-b + ` + ); + // Edges: a-b, a-m, a-z (sorted by full edge name) + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfabmztruea-btruea-mtruea-z'); +}); + +test('ordering: workflow with webhook trigger connected to step', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + trigger(type=webhook)-step + ` + ); + // Workflow name, trigger type, step name, edge (enabled + name) + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfwebhooksteptruewebhook-step'); +}); + +test('ordering: steps with partial fields maintain sorted key order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-step(name=step,adaptor=http) + ` + ); + // Step keys sorted: adaptor, body, keychain_credential_id, name, 
project_credential_id + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfahttpsteptruea-step'); +}); + +test('ordering: edge with js_expression condition', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-(condition="state.x > 5",label=check)-b + ` + ); + // Edge keys sorted: condition_expression, condition_label, condition_type, enabled, label, name + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfabstate.x > 5checkjs_expressiontruea-b'); +}); + +test('ordering: undefined fields are omitted', (t) => { + const workflow1 = generateWorkflow( + ` + @name wf + @id some-id + a-b(name=b) + ` + ); + + const workflow2 = generateWorkflow( + ` + @name wf + @id some-id + a-b(name=b) + ` + ); + + // Both should produce the same hash + const hash1 = workflow1.getVersionHash({ sha: false }); + const hash2 = workflow2.getVersionHash({ sha: false }); + + t.is(hash1, 'cli:wfabtruea-b'); + t.is(hash1, hash2); }); +// TODO more ordering tests + test('unique hash but different steps order', (t) => { const workflow1 = generateWorkflow( ` @name same-workflow @id id-one a-b - b-c + a-c + a-d ` ); + + // different order of nodes but should generate the same hash const workflow2 = generateWorkflow( ` @name same-workflow @id id-two + a-d a-c - c-b + a-b ` ); - // different order of nodes (b & c changed position) but should generate the same hash // validate second step is actually different t.is(workflow1.steps[1].name, 'b'); - t.is(workflow2.steps[1].name, 'c'); + t.is(workflow2.steps[1].name, 'd'); + // assert that hashes are the same t.is(generateHash(workflow1), generateHash(workflow2)); }); @@ -74,6 +349,81 @@ test('hash changes when workflow name changes', (t) => { t.not(generateHash(wf1), generateHash(wf2)); }); +test('hash a trigger', (t) => { + // check that various changes on a trigger update the hash + const webhook = generateWorkflow( + `@name wf-1 + @id workflow-id + 
t(type=webhook)-x(expression=x) + ` + ); + const cron = generateWorkflow( + `@name wf-1 + @id workflow-id + t(type=cron)-x(expression=x) + ` + ); + + t.not(generateHash(webhook), generateHash(cron)); + + const cronEnabled = generateWorkflow( + `@name wf-1 + @id workflow-id + t(enabled=false)-x + ` + ); + t.not(generateHash(webhook), generateHash(cronEnabled)); + + const cronExpression = generateWorkflow( + `@name wf-1 + @id workflow-id + t(cron_expression="1")-x + ` + ); + t.not(generateHash(webhook), generateHash(cronExpression)); +}); + +test('hash changes across an edge', (t) => { + const basicEdge = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-b + ` + ); + + const withLabel = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-(label=x)-b + ` + ); + + t.not(generateHash(basicEdge), generateHash(withLabel)); + + const withCondition = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-(condition=always)-b + ` + ); + + t.not(generateHash(basicEdge), generateHash(withCondition)); + + const withDisabled = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-(disabled=true)-b + ` + ); + + t.not(generateHash(basicEdge), generateHash(withDisabled)); +}); + +// TODO joe to think more about credential mapping (keychain and project cred keys) // can't get credentials to work in the generator, need to fix that test.skip('hash changes when credentials field changes', (t) => { const wf1 = generateWorkflow( @@ -157,3 +507,21 @@ test('ignored fields do not affect hash', (t) => { ); t.is(generateHash(wf1), generateHash(wf1_ignored)); }); + +// This test is important because when merging, the local workflow +// representation won't have UUIDs in it - and that should be fine, nothing should break +test('works without UUIDs', (t) => { + const workflow = generateWorkflow( + ` + @name a + @id some-id + webhook-transform_data(name="Transform data",expression="fn(s => s)") + `, + { + openfnUuid: false, + } + ); + + const hash = workflow.getVersionHash({ sha: 
false }); + t.is(hash, 'cli:awebhookfn(s => s)Transform datatruewebhook-Transform data'); +});