mirror of
https://github.com/xtr-dev/payload-automation.git
synced 2025-12-10 17:03:22 +00:00
Compare commits
28 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a8ae877039 | |||
| b7b40c400b | |||
| ab5b26c42c | |||
| c47197223c | |||
| 0a036752ea | |||
| 74217d532d | |||
| 04100787d7 | |||
| 253de9b8b0 | |||
| 397559079f | |||
| c352da91fa | |||
| d6aedbc59d | |||
| cd85f90ef1 | |||
| 38fbb1922a | |||
| dfcc5c0fce | |||
| 089e12ac7a | |||
| 8ff65ca7c3 | |||
| bdfc311009 | |||
| 3c54f00f57 | |||
| cbb74206e9 | |||
| 41c4d8bdcb | |||
| 46c9f11534 | |||
| 08a4022a41 | |||
| c24610b3d9 | |||
| 87893ac612 | |||
| a711fbdbea | |||
| 4adc5cbdaa | |||
| f3f18d5b4c | |||
| 6397250045 |
38
CHANGELOG.md
Normal file
38
CHANGELOG.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to the PayloadCMS Automation Plugin will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [0.0.16] - 2025-09-01
|
||||
|
||||
### Fixed
|
||||
- **Critical Bug**: Removed problematic `hooksInitialized` flag that prevented proper hook registration in development environments
|
||||
- **Silent Failures**: Added comprehensive error logging with "AUTOMATION PLUGIN:" prefix for easier debugging
|
||||
- **Hook Execution**: Added try/catch blocks in hook execution to prevent silent failures and ensure workflow execution continues
|
||||
- **Development Mode**: Fixed issue where workflows would not execute even when properly configured due to hook registration being skipped
|
||||
|
||||
### Changed
|
||||
- Enhanced logging throughout the hook execution pipeline for better debugging visibility
|
||||
- Improved error handling to prevent workflow execution failures from breaking other hooks
|
||||
|
||||
### Migration Notes
|
||||
- No breaking changes - this is a critical bug fix release
|
||||
- Existing workflows should now execute properly after updating to this version
|
||||
- Enhanced logging will provide better visibility into workflow execution
|
||||
|
||||
## [0.0.15] - 2025-08-XX
|
||||
|
||||
### Changed
|
||||
- Updated workflow condition evaluation to use JSONPath expressions
|
||||
- Changed step configuration from `type`/`inputs` to `step`/`input`
|
||||
- Updated workflow collection schema for improved flexibility
|
||||
|
||||
## [0.0.14] - 2025-08-XX
|
||||
|
||||
### Added
|
||||
- Initial workflow automation functionality
|
||||
- Collection trigger support
|
||||
- Step execution engine
|
||||
- Basic workflow management
|
||||
187
MIGRATION-v0.0.24.md
Normal file
187
MIGRATION-v0.0.24.md
Normal file
@@ -0,0 +1,187 @@
|
||||
# Migration Guide: v0.0.23 → v0.0.24
|
||||
|
||||
## What's New
|
||||
|
||||
Version 0.0.24 introduces **trigger builder helpers** that dramatically reduce boilerplate when creating custom triggers, plus fixes field name clashing between built-in and external trigger parameters.
|
||||
|
||||
## Breaking Changes
|
||||
|
||||
**None** - This is a fully backward-compatible release. All existing triggers continue to work exactly as before.
|
||||
|
||||
## New Features
|
||||
|
||||
### 1. Trigger Builder Helpers
|
||||
|
||||
New helper functions eliminate 90% of boilerplate when creating custom triggers:
|
||||
|
||||
```bash
|
||||
npm update @xtr-dev/payload-automation
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Import the new helpers
|
||||
import {
|
||||
createTrigger,
|
||||
webhookTrigger,
|
||||
cronTrigger
|
||||
} from '@xtr-dev/payload-automation/helpers'
|
||||
```
|
||||
|
||||
### 2. Fixed Field Name Clashing
|
||||
|
||||
Built-in trigger parameters now use a JSON backing store to prevent conflicts with custom trigger fields.
|
||||
|
||||
## Migration Steps
|
||||
|
||||
### Step 1: Update Package
|
||||
|
||||
```bash
|
||||
npm install @xtr-dev/payload-automation@latest
|
||||
# or
|
||||
pnpm update @xtr-dev/payload-automation
|
||||
```
|
||||
|
||||
### Step 2: (Optional) Modernize Custom Triggers
|
||||
|
||||
**Your existing triggers will continue to work**, but you can optionally migrate to the cleaner syntax:
|
||||
|
||||
#### Before (Still Works)
|
||||
```typescript
|
||||
const customTrigger = {
|
||||
slug: 'order-webhook',
|
||||
inputs: [
|
||||
{
|
||||
name: 'webhookSecret',
|
||||
type: 'text',
|
||||
required: true,
|
||||
virtual: true,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'order-webhook',
|
||||
description: 'Secret for webhook validation'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [({ siblingData }) => siblingData?.parameters?.webhookSecret],
|
||||
beforeChange: [({ value, siblingData }) => {
|
||||
if (!siblingData.parameters) siblingData.parameters = {}
|
||||
siblingData.parameters.webhookSecret = value
|
||||
return undefined
|
||||
}]
|
||||
}
|
||||
}
|
||||
// ... more boilerplate
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### After (Recommended)
|
||||
```typescript
|
||||
import { createTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const orderWebhook = createTrigger('order-webhook').parameters({
|
||||
webhookSecret: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Secret for webhook validation'
|
||||
}
|
||||
}
|
||||
// Add more parameters easily
|
||||
})
|
||||
```
|
||||
|
||||
### Step 3: (Optional) Use Preset Builders
|
||||
|
||||
For common trigger patterns:
|
||||
|
||||
```typescript
|
||||
import { webhookTrigger, cronTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
// Webhook trigger with built-in path, secret, headers parameters
|
||||
const paymentWebhook = webhookTrigger('payment-webhook')
|
||||
.parameter('currency', {
|
||||
type: 'select',
|
||||
options: ['USD', 'EUR', 'GBP']
|
||||
})
|
||||
.build()
|
||||
|
||||
// Cron trigger with built-in expression, timezone parameters
|
||||
const dailyReport = cronTrigger('daily-report')
|
||||
.parameter('format', {
|
||||
type: 'select',
|
||||
options: ['pdf', 'csv']
|
||||
})
|
||||
.build()
|
||||
```
|
||||
|
||||
## Quick Migration Examples
|
||||
|
||||
### Simple Trigger Migration
|
||||
|
||||
```typescript
|
||||
// OLD WAY (still works)
|
||||
{
|
||||
slug: 'user-signup',
|
||||
inputs: [/* 20+ lines of boilerplate per field */]
|
||||
}
|
||||
|
||||
// NEW WAY (recommended)
|
||||
import { createTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const userSignup = createTrigger('user-signup').parameters({
|
||||
source: {
|
||||
type: 'select',
|
||||
options: ['web', 'mobile', 'api'],
|
||||
required: true
|
||||
},
|
||||
userType: {
|
||||
type: 'select',
|
||||
options: ['regular', 'premium'],
|
||||
defaultValue: 'regular'
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
### Webhook Trigger Migration
|
||||
|
||||
```typescript
|
||||
// OLD WAY
|
||||
{
|
||||
slug: 'payment-webhook',
|
||||
inputs: [/* Manual webhookPath field + lots of boilerplate */]
|
||||
}
|
||||
|
||||
// NEW WAY
|
||||
import { webhookTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const paymentWebhook = webhookTrigger('payment-webhook')
|
||||
.parameter('minimumAmount', {
|
||||
type: 'number',
|
||||
min: 0
|
||||
})
|
||||
.build()
|
||||
```
|
||||
|
||||
## Benefits of Migration
|
||||
|
||||
- **90% less code** - Eliminate virtual field boilerplate
|
||||
- **No field name conflicts** - Built-in parameters isolated
|
||||
- **Better TypeScript support** - Full type inference
|
||||
- **Preset patterns** - Common trigger types ready-to-use
|
||||
- **Composable API** - Easy to extend and customize
|
||||
|
||||
## Compatibility
|
||||
|
||||
- ✅ **Existing triggers** continue to work unchanged
|
||||
- ✅ **Mix old and new** trigger styles in same config
|
||||
- ✅ **No database changes** required
|
||||
- ✅ **PayloadCMS field compatibility** maintained
|
||||
|
||||
## Need Help?
|
||||
|
||||
- [View examples](./examples/trigger-builders.ts)
|
||||
- [Read documentation](./examples/README-trigger-builders.md)
|
||||
- [Report issues](https://github.com/xtr-dev/payload-automation/issues)
|
||||
|
||||
---
|
||||
|
||||
**TL;DR**: Update the package, optionally migrate custom triggers to use the new helpers for cleaner code. All existing triggers continue to work without changes.
|
||||
75
README.md
75
README.md
@@ -63,11 +63,82 @@ import type { WorkflowsPluginConfig } from '@xtr-dev/payload-automation'
|
||||
|
||||
## Step Types
|
||||
|
||||
- **HTTP Request** - Make external API calls
|
||||
### HTTP Request
|
||||
Make external API calls with comprehensive error handling and retry logic.
|
||||
|
||||
**Key Features:**
|
||||
- Support for GET, POST, PUT, DELETE, PATCH methods
|
||||
- Authentication: Bearer token, Basic auth, API key headers
|
||||
- Configurable timeouts and retry logic
|
||||
- JSONPath integration for dynamic URLs and request bodies
|
||||
|
||||
**Error Handling:**
|
||||
HTTP Request steps use a **response-based success model** rather than status-code-based failures:
|
||||
|
||||
- ✅ **Successful completion**: All HTTP requests that receive a response (including 4xx/5xx status codes) are marked as "succeeded"
|
||||
- ❌ **Failed execution**: Only network errors, timeouts, DNS failures, and connection issues cause step failure
|
||||
- 📊 **Error information preserved**: HTTP error status codes (404, 500, etc.) are captured in the step output for workflow conditional logic
|
||||
|
||||
**Example workflow logic:**
|
||||
```typescript
|
||||
// Step outputs for a 404 response:
|
||||
{
|
||||
"status": 404,
|
||||
"statusText": "Not Found",
|
||||
"body": "Resource not found",
|
||||
"headers": {...},
|
||||
"duration": 1200
|
||||
}
|
||||
|
||||
// Use in workflow conditions:
|
||||
// "$.steps.apiRequest.output.status >= 400" to handle errors
|
||||
```
|
||||
|
||||
This design allows workflows to handle HTTP errors gracefully rather than failing completely, enabling robust error handling and retry logic.
|
||||
|
||||
**Enhanced Error Tracking:**
|
||||
For network failures (timeouts, DNS errors, connection failures), the plugin provides detailed error information through an independent storage system that bypasses PayloadCMS's output limitations:
|
||||
|
||||
```typescript
|
||||
// Timeout error details preserved in workflow context:
|
||||
{
|
||||
"steps": {
|
||||
"httpStep": {
|
||||
"state": "failed",
|
||||
"error": "Task handler returned a failed state",
|
||||
"errorDetails": {
|
||||
"errorType": "timeout",
|
||||
"duration": 2006,
|
||||
"attempts": 1,
|
||||
"finalError": "Request timeout after 2000ms",
|
||||
"context": {
|
||||
"url": "https://api.example.com/data",
|
||||
"method": "GET",
|
||||
"timeout": 2000
|
||||
}
|
||||
},
|
||||
"executionInfo": {
|
||||
"completed": true,
|
||||
"success": false,
|
||||
"executedAt": "2025-09-04T15:16:10.000Z",
|
||||
"duration": 2006
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Access in workflow conditions:
|
||||
// "$.steps.httpStep.errorDetails.errorType == 'timeout'"
|
||||
// "$.steps.httpStep.errorDetails.duration > 5000"
|
||||
```
|
||||
|
||||
### Document Operations
|
||||
- **Create Document** - Create PayloadCMS documents
|
||||
- **Read Document** - Query documents with filters
|
||||
- **Update Document** - Modify existing documents
|
||||
- **Update Document** - Modify existing documents
|
||||
- **Delete Document** - Remove documents
|
||||
|
||||
### Communication
|
||||
- **Send Email** - Send notifications via PayloadCMS email
|
||||
|
||||
## Data Resolution
|
||||
|
||||
122
dev/app/test-trigger/route.ts
Normal file
122
dev/app/test-trigger/route.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
import { getPayload } from 'payload'
|
||||
import config from '../../payload.config'
|
||||
|
||||
export async function GET() {
|
||||
console.log('Starting workflow trigger test...')
|
||||
|
||||
// Get payload instance
|
||||
const payload = await getPayload({ config })
|
||||
|
||||
try {
|
||||
// Create a test user
|
||||
const user = await payload.create({
|
||||
collection: 'users',
|
||||
data: {
|
||||
email: `test-${Date.now()}@example.com`,
|
||||
password: 'password123'
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Created test user:', user.id)
|
||||
|
||||
// Create a workflow with collection trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Post Creation Workflow',
|
||||
description: 'Triggers when a post is created',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'log-post',
|
||||
taskSlug: 'http-request-step',
|
||||
input: JSON.stringify({
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: {
|
||||
message: 'Post created',
|
||||
postId: '$.trigger.doc.id',
|
||||
postTitle: '$.trigger.doc.title'
|
||||
}
|
||||
})
|
||||
}
|
||||
]
|
||||
},
|
||||
user: user.id
|
||||
})
|
||||
|
||||
console.log('Created workflow:', workflow.id, workflow.name)
|
||||
console.log('Workflow triggers:', JSON.stringify(workflow.triggers, null, 2))
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
console.log('Creating post to trigger workflow...')
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
title: 'Test Post',
|
||||
content: 'This should trigger the workflow',
|
||||
_status: 'published'
|
||||
},
|
||||
user: user.id
|
||||
})
|
||||
|
||||
console.log('Created post:', post.id)
|
||||
|
||||
// Wait a bit for workflow to execute
|
||||
await new Promise(resolve => setTimeout(resolve, 3000))
|
||||
|
||||
// Check for workflow runs
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Workflow runs found:', runs.totalDocs)
|
||||
|
||||
const result = {
|
||||
success: runs.totalDocs > 0,
|
||||
workflowId: workflow.id,
|
||||
postId: post.id,
|
||||
runsFound: runs.totalDocs,
|
||||
runs: runs.docs.map(r => ({
|
||||
id: r.id,
|
||||
status: r.status,
|
||||
triggeredBy: r.triggeredBy,
|
||||
startedAt: r.startedAt,
|
||||
completedAt: r.completedAt,
|
||||
error: r.error
|
||||
}))
|
||||
}
|
||||
|
||||
if (runs.totalDocs > 0) {
|
||||
console.log('✅ SUCCESS: Workflow was triggered!')
|
||||
console.log('Run status:', runs.docs[0].status)
|
||||
console.log('Run context:', JSON.stringify(runs.docs[0].context, null, 2))
|
||||
} else {
|
||||
console.log('❌ FAILURE: Workflow was not triggered')
|
||||
}
|
||||
|
||||
return NextResponse.json(result)
|
||||
|
||||
} catch (error) {
|
||||
console.error('Test failed:', error)
|
||||
return NextResponse.json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, { status: 500 })
|
||||
}
|
||||
}
|
||||
113
dev/condition-fix.spec.ts
Normal file
113
dev/condition-fix.spec.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
|
||||
describe('Workflow Condition Fix Test', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
it('should correctly evaluate trigger conditions with $.trigger.doc path', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with a condition using the correct JSONPath
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Condition Evaluation',
|
||||
description: 'Tests that $.trigger.doc.content conditions work',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create',
|
||||
condition: '$.trigger.doc.content == "TRIGGER_ME"'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'audit-step',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
post: '$.trigger.doc.id',
|
||||
message: 'Condition was met and workflow triggered'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Created workflow with condition: $.trigger.doc.content == "TRIGGER_ME"')
|
||||
|
||||
// Create a post that SHOULD NOT trigger
|
||||
const post1 = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'This should not trigger'
|
||||
}
|
||||
})
|
||||
|
||||
// Create a post that SHOULD trigger
|
||||
const post2 = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'TRIGGER_ME'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
// Check workflow runs - should have exactly 1
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log(`Found ${runs.totalDocs} workflow runs`)
|
||||
if (runs.totalDocs > 0) {
|
||||
console.log('Run statuses:', runs.docs.map(r => r.status))
|
||||
}
|
||||
|
||||
// Should have exactly 1 run for the matching condition
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
|
||||
// Check audit logs - should only have one for post2
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post2.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (runs.docs[0].status === 'completed') {
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
expect(auditLogs.docs[0].message).toBe('Condition was met and workflow triggered')
|
||||
}
|
||||
|
||||
// Verify no audit log for the first post
|
||||
const noAuditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post1.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(noAuditLogs.totalDocs).toBe(0)
|
||||
|
||||
console.log('✅ Condition evaluation working with $.trigger.doc path!')
|
||||
}, 30000)
|
||||
})
|
||||
519
dev/error-scenarios.spec.ts
Normal file
519
dev/error-scenarios.spec.ts
Normal file
@@ -0,0 +1,519 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
import { mockHttpBin, testFixtures } from './test-helpers.js'
|
||||
|
||||
describe('Error Scenarios and Edge Cases', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
// Set up comprehensive mocks for all error scenarios
|
||||
mockHttpBin.mockAllErrorScenarios()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
mockHttpBin.cleanup()
|
||||
})
|
||||
|
||||
it('should handle HTTP timeout errors gracefully', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Clear existing mocks and set up a proper timeout mock
|
||||
mockHttpBin.cleanup()
|
||||
mockHttpBin.mockTimeout()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - HTTP Timeout',
|
||||
description: 'Tests HTTP request timeout handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
...testFixtures.httpRequestStep('https://httpbin.org/delay/10'),
|
||||
name: 'timeout-request',
|
||||
method: 'GET',
|
||||
timeout: 2000, // 2 second timeout
|
||||
body: null
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Timeout Post'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for workflow execution (should timeout)
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Either failed due to timeout or completed (depending on network speed)
|
||||
expect(['failed', 'completed']).toContain(runs.docs[0].status)
|
||||
|
||||
// Verify that detailed error information is preserved via new independent storage system
|
||||
const context = runs.docs[0].context
|
||||
const stepContext = context.steps['timeout-request']
|
||||
|
||||
// Check that independent execution info was recorded
|
||||
expect(stepContext.executionInfo).toBeDefined()
|
||||
expect(stepContext.executionInfo.completed).toBe(true)
|
||||
|
||||
// Check that detailed error information was preserved (new feature!)
|
||||
if (runs.docs[0].status === 'failed' && stepContext.errorDetails) {
|
||||
expect(stepContext.errorDetails.errorType).toBe('timeout')
|
||||
expect(stepContext.errorDetails.duration).toBeGreaterThan(2000)
|
||||
expect(stepContext.errorDetails.attempts).toBe(1)
|
||||
expect(stepContext.errorDetails.context.url).toBe('https://httpbin.org/delay/10')
|
||||
expect(stepContext.errorDetails.context.timeout).toBe(2000)
|
||||
console.log('✅ Detailed timeout error information preserved:', {
|
||||
errorType: stepContext.errorDetails.errorType,
|
||||
duration: stepContext.errorDetails.duration,
|
||||
attempts: stepContext.errorDetails.attempts
|
||||
})
|
||||
} else if (runs.docs[0].status === 'failed') {
|
||||
console.log('✅ Timeout error handled:', runs.docs[0].error)
|
||||
} else {
|
||||
console.log('✅ Request completed within timeout')
|
||||
}
|
||||
}, 15000)
|
||||
|
||||
it('should handle invalid JSON responses', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Invalid JSON',
|
||||
description: 'Tests invalid JSON response handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'invalid-json-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/html', // Returns HTML, not JSON
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Invalid JSON Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed') // Should complete but with HTML body
|
||||
expect(runs.docs[0].context.steps['invalid-json-request'].output.body).toContain('<html>')
|
||||
|
||||
console.log('✅ Non-JSON response handled correctly')
|
||||
}, 25000)
|
||||
|
||||
it('should handle circular reference in JSONPath resolution', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// This test creates a scenario where JSONPath might encounter circular references
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Circular Reference',
|
||||
description: 'Tests circular reference handling in JSONPath',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'circular-test',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
// This creates a deep reference that could cause issues
|
||||
triggerData: '$.trigger',
|
||||
stepData: '$.steps',
|
||||
nestedRef: '$.trigger.doc'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Circular Reference Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Should either succeed with safe serialization or fail gracefully
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
console.log('✅ Circular reference handled:', runs.docs[0].status)
|
||||
}, 20000)
|
||||
|
||||
it('should handle malformed workflow configurations', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// This test should expect the workflow creation to fail due to validation
|
||||
let creationFailed = false
|
||||
let workflow: any = null
|
||||
|
||||
try {
|
||||
// Create workflow with missing required fields for create-document
|
||||
workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Malformed Config',
|
||||
description: 'Tests malformed workflow configuration',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'malformed-step',
|
||||
step: 'create-document',
|
||||
// Missing required collectionSlug
|
||||
data: {
|
||||
message: 'This should fail'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
} catch (error) {
|
||||
creationFailed = true
|
||||
expect(error).toBeDefined()
|
||||
console.log('✅ Workflow creation failed as expected:', error instanceof Error ? error.message : error)
|
||||
}
|
||||
|
||||
// If creation failed, that's the expected behavior
|
||||
if (creationFailed) {
|
||||
return
|
||||
}
|
||||
|
||||
// If somehow the workflow was created, test execution failure
|
||||
if (workflow) {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Malformed Config Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 3000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('failed')
|
||||
expect(runs.docs[0].error).toBeDefined()
|
||||
|
||||
console.log('✅ Malformed config caused execution failure:', runs.docs[0].error)
|
||||
}
|
||||
}, 15000)
|
||||
|
||||
it('should handle HTTP 4xx and 5xx errors properly', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - HTTP Errors',
|
||||
description: 'Tests HTTP error status handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'not-found-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/status/404',
|
||||
method: 'GET'
|
||||
},
|
||||
{
|
||||
name: 'server-error-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/status/500',
|
||||
method: 'GET',
|
||||
dependencies: ['not-found-request']
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error HTTP Status Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 8000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed') // Workflow should complete successfully
|
||||
|
||||
// Check that both steps completed with HTTP error outputs
|
||||
const context = runs.docs[0].context
|
||||
expect(context.steps['not-found-request'].state).toBe('succeeded') // HTTP request completed
|
||||
expect(context.steps['not-found-request'].output.status).toBe(404) // But with error status
|
||||
|
||||
console.log('✅ HTTP error statuses handled correctly')
|
||||
}, 25000)
|
||||
|
||||
it('should handle retry logic for transient failures', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Retry Logic',
|
||||
description: 'Tests retry logic for HTTP requests',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'retry-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/status/503', // Service unavailable
|
||||
method: 'GET',
|
||||
retries: 3,
|
||||
retryDelay: 1000
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Retry Logic Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 10000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed') // Workflow should complete with HTTP error output
|
||||
|
||||
// The step should have succeeded but with error status
|
||||
const stepContext = runs.docs[0].context.steps['retry-request']
|
||||
expect(stepContext.state).toBe('succeeded')
|
||||
expect(stepContext.output.status).toBe(503)
|
||||
|
||||
console.log('✅ Retry logic executed correctly')
|
||||
}, 25000)
|
||||
|
||||
it('should handle extremely large workflow contexts', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Large Context',
|
||||
description: 'Tests handling of large workflow contexts',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'large-response-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/base64/SFRUUEJJTiBpcyBhd2Vzb21l', // Returns base64 decoded text
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Large Context Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Should handle large contexts without memory issues
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
console.log('✅ Large context handled:', runs.docs[0].status)
|
||||
}, 20000)
|
||||
|
||||
it('should handle undefined and null values in JSONPath', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Null Values',
|
||||
description: 'Tests null/undefined values in JSONPath expressions',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'null-value-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
nonexistentField: '$.trigger.doc.nonexistent',
|
||||
nullField: '$.trigger.doc.null',
|
||||
undefinedField: '$.trigger.doc.undefined'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Null Values Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Should handle null/undefined values gracefully
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
if (runs.docs[0].status === 'completed') {
|
||||
const stepOutput = runs.docs[0].context.steps['null-value-request'].output
|
||||
expect(stepOutput.status).toBe(200) // httpbin should accept the request
|
||||
console.log('✅ Null values handled gracefully')
|
||||
} else {
|
||||
console.log('✅ Null values caused expected failure:', runs.docs[0].error)
|
||||
}
|
||||
}, 20000)
|
||||
})
|
||||
392
dev/hook-reliability.spec.ts
Normal file
392
dev/hook-reliability.spec.ts
Normal file
@@ -0,0 +1,392 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
import { mockHttpBin, testFixtures } from './test-helpers.js'
|
||||
|
||||
describe('Hook Execution Reliability Tests', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
mockHttpBin.cleanup()
|
||||
})
|
||||
|
||||
it('should reliably execute hooks when collections are created', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with collection trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Reliability - Create',
|
||||
description: 'Tests hook execution on post creation',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
...testFixtures.createDocumentStep('auditLog'),
|
||||
name: 'create-audit-log',
|
||||
data: {
|
||||
message: 'Post was created via workflow trigger',
|
||||
post: '$.trigger.doc.id'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
expect(workflow).toBeDefined()
|
||||
expect(workflow.id).toBeDefined()
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Reliability Post'
|
||||
}
|
||||
})
|
||||
|
||||
expect(post).toBeDefined()
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
// Verify workflow run was created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Either succeeded or failed, but should have executed
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
console.log('✅ Hook execution status:', runs.docs[0].status)
|
||||
|
||||
// Verify audit log was created only if the workflow succeeded
|
||||
if (runs.docs[0].status === 'completed') {
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBeGreaterThan(0)
|
||||
expect(auditLogs.docs[0].message).toContain('workflow trigger')
|
||||
} else {
|
||||
// If workflow failed, just log the error but don't fail the test
|
||||
console.log('⚠️ Workflow failed:', runs.docs[0].error)
|
||||
// The important thing is that a workflow run was created
|
||||
}
|
||||
}, 30000)
|
||||
|
||||
it('should handle hook execution errors gracefully', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Mock network error for invalid URL
|
||||
mockHttpBin.mockNetworkError('invalid-url-that-will-fail')
|
||||
|
||||
// Create a workflow with invalid step configuration
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Error Handling',
|
||||
description: 'Tests error handling in hook execution',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'invalid-http-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://invalid-url-that-will-fail',
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Error Handling Post'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
// Verify a failed workflow run was created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('failed')
|
||||
expect(runs.docs[0].error).toBeDefined()
|
||||
// Check that the error mentions either the URL or the task failure
|
||||
const errorMessage = runs.docs[0].error.toLowerCase()
|
||||
const hasRelevantError = errorMessage.includes('url') ||
|
||||
errorMessage.includes('invalid-url') ||
|
||||
errorMessage.includes('network') ||
|
||||
errorMessage.includes('failed')
|
||||
expect(hasRelevantError).toBe(true)
|
||||
|
||||
console.log('✅ Error handling working:', runs.docs[0].error)
|
||||
}, 30000)
|
||||
|
||||
it('should create failed workflow runs when executor is unavailable', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// This test simulates the executor being unavailable
|
||||
// We'll create a workflow and then simulate a hook execution without proper executor
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Executor Unavailable',
|
||||
description: 'Tests handling when executor is not available',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'simple-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/get'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Temporarily disable the executor by setting it to null
|
||||
// This simulates the initialization issue
|
||||
const global = globalThis as any
|
||||
const originalExecutor = global.__workflowExecutor
|
||||
global.__workflowExecutor = null
|
||||
|
||||
try {
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Executor Unavailable Post'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for hook execution attempt
|
||||
await new Promise(resolve => setTimeout(resolve, 3000))
|
||||
|
||||
// Verify a failed workflow run was created for executor unavailability
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
if (runs.totalDocs > 0) {
|
||||
expect(runs.docs[0].error).toBeDefined()
|
||||
console.log('✅ Executor unavailable error captured:', runs.docs[0].error)
|
||||
} else {
|
||||
console.log('⚠️ No workflow run created - this indicates the hook may not have executed')
|
||||
}
|
||||
} finally {
|
||||
// Restore the original executor
|
||||
global.__workflowExecutor = originalExecutor
|
||||
}
|
||||
}, 30000)
|
||||
|
||||
it('should handle workflow conditions properly', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with a condition that should prevent execution
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Conditional Execution',
|
||||
description: 'Tests conditional workflow execution',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create',
|
||||
condition: '$.trigger.doc.content == "TRIGGER_CONDITION"'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'conditional-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
post: '$.trigger.doc.id',
|
||||
message: 'Conditional trigger executed'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Create a post that SHOULD NOT trigger the workflow
|
||||
const post1 = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Conditional - Should Not Trigger'
|
||||
}
|
||||
})
|
||||
|
||||
// Create a post that SHOULD trigger the workflow
|
||||
const post2 = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'TRIGGER_CONDITION'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
// Check workflow runs
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Should have exactly 1 run (only for the matching condition)
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Either succeeded or failed, but should have executed
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
// Verify audit log was created only for the correct post
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post2.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
|
||||
// Verify no audit log for the first post
|
||||
const noAuditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post1.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(noAuditLogs.totalDocs).toBe(0)
|
||||
|
||||
console.log('✅ Conditional execution working correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle multiple concurrent hook executions', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Concurrent Execution',
|
||||
description: 'Tests handling multiple concurrent hook executions',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'concurrent-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
post: '$.trigger.doc.id',
|
||||
message: 'Concurrent execution test'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Create multiple posts concurrently
|
||||
const concurrentCreations = Array.from({ length: 5 }, (_, i) =>
|
||||
payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: `Test Hook Concurrent Post ${i + 1}`
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
const posts = await Promise.all(concurrentCreations)
|
||||
expect(posts).toHaveLength(5)
|
||||
|
||||
// Wait for all workflow executions
|
||||
await new Promise(resolve => setTimeout(resolve, 8000))
|
||||
|
||||
// Verify all workflow runs were created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(5)
|
||||
|
||||
// Verify all runs completed successfully
|
||||
const failedRuns = runs.docs.filter(run => run.status === 'failed')
|
||||
expect(failedRuns).toHaveLength(0)
|
||||
|
||||
console.log('✅ Concurrent executions completed:', {
|
||||
totalRuns: runs.totalDocs,
|
||||
statuses: runs.docs.map(run => run.status)
|
||||
})
|
||||
}, 45000)
|
||||
})
|
||||
@@ -217,7 +217,7 @@ export interface Workflow {
|
||||
/**
|
||||
* Collection that triggers the workflow
|
||||
*/
|
||||
collection?: 'posts' | null;
|
||||
collectionSlug?: ('posts' | 'media') | null;
|
||||
/**
|
||||
* Collection operation that triggers the workflow
|
||||
*/
|
||||
@@ -242,6 +242,10 @@ export interface Workflow {
|
||||
* Timezone for cron execution (e.g., "America/New_York", "Europe/London"). Defaults to UTC.
|
||||
*/
|
||||
timezone?: string | null;
|
||||
/**
|
||||
* JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.doc.status == 'published'")
|
||||
*/
|
||||
condition?: string | null;
|
||||
id?: string | null;
|
||||
}[]
|
||||
| null;
|
||||
@@ -262,6 +266,10 @@ export interface Workflow {
|
||||
* Step names that must complete before this step can run
|
||||
*/
|
||||
dependencies?: string[] | null;
|
||||
/**
|
||||
* JSONPath expression that must evaluate to true for this step to execute (e.g., "$.trigger.doc.status == 'published'")
|
||||
*/
|
||||
condition?: string | null;
|
||||
id?: string | null;
|
||||
}[]
|
||||
| null;
|
||||
@@ -584,13 +592,14 @@ export interface WorkflowsSelect<T extends boolean = true> {
|
||||
| T
|
||||
| {
|
||||
type?: T;
|
||||
collection?: T;
|
||||
collectionSlug?: T;
|
||||
operation?: T;
|
||||
webhookPath?: T;
|
||||
global?: T;
|
||||
globalOperation?: T;
|
||||
cronExpression?: T;
|
||||
timezone?: T;
|
||||
condition?: T;
|
||||
id?: T;
|
||||
};
|
||||
steps?:
|
||||
@@ -600,6 +609,7 @@ export interface WorkflowsSelect<T extends boolean = true> {
|
||||
name?: T;
|
||||
input?: T;
|
||||
dependencies?: T;
|
||||
condition?: T;
|
||||
id?: T;
|
||||
};
|
||||
updatedAt?: T;
|
||||
@@ -741,7 +751,7 @@ export interface TaskCreateDocument {
|
||||
/**
|
||||
* The collection slug to create a document in
|
||||
*/
|
||||
collection: string;
|
||||
collectionSlug: string;
|
||||
/**
|
||||
* The document data to create
|
||||
*/
|
||||
|
||||
@@ -22,17 +22,9 @@ if (!process.env.ROOT_DIR) {
|
||||
}
|
||||
|
||||
const buildConfigWithMemoryDB = async () => {
|
||||
if (process.env.NODE_ENV === 'test') {
|
||||
const memoryDB = await MongoMemoryReplSet.create({
|
||||
replSet: {
|
||||
count: 3,
|
||||
dbName: 'payloadmemory',
|
||||
},
|
||||
})
|
||||
|
||||
process.env.DATABASE_URI = `${memoryDB.getUri()}&retryWrites=true`
|
||||
}
|
||||
|
||||
// Use MongoDB adapter for testing instead of SQLite
|
||||
const { mongooseAdapter } = await import('@payloadcms/db-mongodb')
|
||||
|
||||
return buildConfig({
|
||||
admin: {
|
||||
importMap: {
|
||||
@@ -77,10 +69,8 @@ const buildConfigWithMemoryDB = async () => {
|
||||
]
|
||||
}
|
||||
],
|
||||
db: sqliteAdapter({
|
||||
client: {
|
||||
url: `file:${path.resolve(dirname, 'payload.db')}`,
|
||||
},
|
||||
db: mongooseAdapter({
|
||||
url: process.env.DATABASE_URI || 'mongodb://localhost:27017/payload-test',
|
||||
}),
|
||||
editor: lexicalEditor(),
|
||||
email: testEmailAdapter,
|
||||
@@ -103,7 +93,8 @@ const buildConfigWithMemoryDB = async () => {
|
||||
plugins: [
|
||||
workflowsPlugin<CollectionSlug>({
|
||||
collectionTriggers: {
|
||||
posts: true
|
||||
posts: true,
|
||||
media: true
|
||||
},
|
||||
steps: [
|
||||
HttpRequestStepTask,
|
||||
|
||||
94
dev/simple-trigger.spec.ts
Normal file
94
dev/simple-trigger.spec.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
import { mockHttpBin, testFixtures } from './test-helpers.js'
|
||||
|
||||
describe('Workflow Trigger Test', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
// Set up HTTP mocks
|
||||
const expectedRequestData = {
|
||||
message: 'Post created',
|
||||
postId: expect.any(String), // MongoDB ObjectId
|
||||
postTitle: 'Test post content for workflow trigger'
|
||||
}
|
||||
mockHttpBin.mockPost(expectedRequestData)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
mockHttpBin.cleanup()
|
||||
})
|
||||
|
||||
it('should create a workflow run when a post is created', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Use test fixtures for consistent data
|
||||
const testWorkflow = {
|
||||
...testFixtures.basicWorkflow,
|
||||
name: 'Test Post Creation Workflow',
|
||||
description: 'Triggers when a post is created',
|
||||
steps: [
|
||||
{
|
||||
...testFixtures.httpRequestStep(),
|
||||
name: 'log-post',
|
||||
body: {
|
||||
message: 'Post created',
|
||||
postId: '$.trigger.doc.id',
|
||||
postTitle: '$.trigger.doc.content'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
// Create a workflow with collection trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: testWorkflow
|
||||
})
|
||||
|
||||
expect(workflow).toBeDefined()
|
||||
expect(workflow.id).toBeDefined()
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: testFixtures.testPost
|
||||
})
|
||||
|
||||
expect(post).toBeDefined()
|
||||
expect(post.id).toBeDefined()
|
||||
|
||||
// Wait a bit for workflow to execute
|
||||
await new Promise(resolve => setTimeout(resolve, 3000))
|
||||
|
||||
// Check for workflow runs
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 10
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBeGreaterThan(0)
|
||||
|
||||
// Check if workflow is an object or ID
|
||||
const workflowRef = runs.docs[0].workflow
|
||||
const workflowId = typeof workflowRef === 'object' && workflowRef !== null
|
||||
? (workflowRef as any).id
|
||||
: workflowRef
|
||||
|
||||
expect(workflowId).toBe(workflow.id) // Should reference the workflow ID
|
||||
|
||||
console.log('✅ Workflow run created successfully!')
|
||||
console.log(`Run status: ${runs.docs[0].status}`)
|
||||
console.log(`Run ID: ${runs.docs[0].id}`)
|
||||
|
||||
if (runs.docs[0].status === 'failed' && runs.docs[0].error) {
|
||||
console.log(`Error: ${runs.docs[0].error}`)
|
||||
}
|
||||
}, 30000)
|
||||
})
|
||||
201
dev/test-helpers.ts
Normal file
201
dev/test-helpers.ts
Normal file
@@ -0,0 +1,201 @@
|
||||
import nock from 'nock'
|
||||
|
||||
/**
|
||||
* Mock HTTP requests to httpbin.org for testing
|
||||
*/
|
||||
export const mockHttpBin = {
|
||||
/**
|
||||
* Mock a successful POST request to httpbin.org/post
|
||||
*/
|
||||
mockPost: (expectedData?: any) => {
|
||||
return nock('https://httpbin.org')
|
||||
.post('/post')
|
||||
.reply(200, {
|
||||
args: {},
|
||||
data: JSON.stringify(expectedData || {}),
|
||||
files: {},
|
||||
form: {},
|
||||
headers: {
|
||||
'Accept': '*/*',
|
||||
'Accept-Encoding': 'br, gzip, deflate',
|
||||
'Accept-Language': '*',
|
||||
'Content-Type': 'application/json',
|
||||
'Host': 'httpbin.org',
|
||||
'Sec-Fetch-Mode': 'cors',
|
||||
'User-Agent': 'PayloadCMS-Automation/1.0'
|
||||
},
|
||||
json: expectedData || {},
|
||||
origin: '127.0.0.1',
|
||||
url: 'https://httpbin.org/post'
|
||||
}, {
|
||||
'Content-Type': 'application/json',
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Credentials': 'true'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock a GET request to httpbin.org/get
|
||||
*/
|
||||
mockGet: () => {
|
||||
return nock('https://httpbin.org')
|
||||
.get('/get')
|
||||
.reply(200, {
|
||||
args: {},
|
||||
headers: {
|
||||
'Accept': '*/*',
|
||||
'Host': 'httpbin.org',
|
||||
'User-Agent': 'PayloadCMS-Automation/1.0'
|
||||
},
|
||||
origin: '127.0.0.1',
|
||||
url: 'https://httpbin.org/get'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock HTTP timeout
|
||||
*/
|
||||
mockTimeout: (path: string = '/delay/10') => {
|
||||
return nock('https://httpbin.org')
|
||||
.get(path)
|
||||
.replyWithError({
|
||||
code: 'ECONNABORTED',
|
||||
message: 'timeout of 2000ms exceeded'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock HTTP error responses
|
||||
*/
|
||||
mockError: (status: number, path: string = '/status/' + status) => {
|
||||
return nock('https://httpbin.org')
|
||||
.get(path)
|
||||
.reply(status, {
|
||||
error: `HTTP ${status} Error`,
|
||||
message: `Mock ${status} response`
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock invalid URL to simulate network errors
|
||||
*/
|
||||
mockNetworkError: (url: string = 'invalid-url-that-will-fail') => {
|
||||
return nock('https://' + url)
|
||||
.get('/')
|
||||
.replyWithError({
|
||||
code: 'ENOTFOUND',
|
||||
message: `getaddrinfo ENOTFOUND ${url}`
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock HTML response (non-JSON)
|
||||
*/
|
||||
mockHtml: () => {
|
||||
return nock('https://httpbin.org')
|
||||
.get('/html')
|
||||
.reply(200, '<!DOCTYPE html><html><head><title>Test</title></head><body>Test HTML</body></html>', {
|
||||
'Content-Type': 'text/html'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock all common endpoints for error scenarios
|
||||
*/
|
||||
mockAllErrorScenarios: () => {
|
||||
// HTML response for invalid JSON test
|
||||
nock('https://httpbin.org')
|
||||
.get('/html')
|
||||
.reply(200, '<!DOCTYPE html><html><head><title>Test</title></head><body>Test HTML</body></html>', {
|
||||
'Content-Type': 'text/html'
|
||||
})
|
||||
|
||||
// 404 error
|
||||
nock('https://httpbin.org')
|
||||
.get('/status/404')
|
||||
.reply(404, {
|
||||
error: 'Not Found',
|
||||
message: 'The requested resource was not found'
|
||||
})
|
||||
|
||||
// 500 error
|
||||
nock('https://httpbin.org')
|
||||
.get('/status/500')
|
||||
.reply(500, {
|
||||
error: 'Internal Server Error',
|
||||
message: 'Server encountered an error'
|
||||
})
|
||||
|
||||
// 503 error for retry tests
|
||||
nock('https://httpbin.org')
|
||||
.get('/status/503')
|
||||
.times(3) // Allow 3 retries
|
||||
.reply(503, {
|
||||
error: 'Service Unavailable',
|
||||
message: 'Service is temporarily unavailable'
|
||||
})
|
||||
|
||||
// POST endpoint for circular reference and other POST tests
|
||||
nock('https://httpbin.org')
|
||||
.post('/post')
|
||||
.times(5) // Allow multiple POST requests
|
||||
.reply(200, (uri, requestBody) => ({
|
||||
args: {},
|
||||
data: JSON.stringify(requestBody),
|
||||
json: requestBody,
|
||||
url: 'https://httpbin.org/post'
|
||||
}))
|
||||
},
|
||||
|
||||
/**
|
||||
* Clean up all nock mocks
|
||||
*/
|
||||
cleanup: () => {
|
||||
nock.cleanAll()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test fixtures for common workflow configurations
|
||||
*/
|
||||
export const testFixtures = {
|
||||
basicWorkflow: {
|
||||
name: 'Test Basic Workflow',
|
||||
description: 'Basic workflow for testing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger' as const,
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create' as const
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
httpRequestStep: (url: string = 'https://httpbin.org/post', expectedData?: any) => ({
|
||||
name: 'http-request',
|
||||
step: 'http-request-step',
|
||||
url,
|
||||
method: 'POST' as const,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: expectedData || {
|
||||
message: 'Test request',
|
||||
data: '$.trigger.doc'
|
||||
}
|
||||
}),
|
||||
|
||||
createDocumentStep: (collectionSlug: string = 'auditLog') => ({
|
||||
name: 'create-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug,
|
||||
data: {
|
||||
message: 'Test document created',
|
||||
sourceId: '$.trigger.doc.id'
|
||||
}
|
||||
}),
|
||||
|
||||
testPost: {
|
||||
content: 'Test post content for workflow trigger'
|
||||
}
|
||||
}
|
||||
125
dev/test-setup.ts
Normal file
125
dev/test-setup.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { MongoMemoryReplSet } from 'mongodb-memory-server'
|
||||
import { getPayload } from 'payload'
|
||||
import type { Payload } from 'payload'
|
||||
import nock from 'nock'
|
||||
import config from './payload.config.js'
|
||||
|
||||
// Configure nock to intercept fetch requests properly in Node.js 22
|
||||
nock.disableNetConnect()
|
||||
nock.enableNetConnect('127.0.0.1')
|
||||
|
||||
// Set global fetch to use undici for proper nock interception
|
||||
import { fetch } from 'undici'
|
||||
global.fetch = fetch
|
||||
|
||||
let mongod: MongoMemoryReplSet | null = null
|
||||
let payload: Payload | null = null
|
||||
|
||||
// Global test setup - runs once for all tests
|
||||
beforeAll(async () => {
|
||||
// Start MongoDB in-memory replica set
|
||||
mongod = await MongoMemoryReplSet.create({
|
||||
replSet: {
|
||||
count: 1,
|
||||
dbName: 'payload-test',
|
||||
},
|
||||
})
|
||||
|
||||
const mongoUri = mongod.getUri()
|
||||
process.env.DATABASE_URI = mongoUri
|
||||
|
||||
console.log('🚀 MongoDB in-memory server started:', mongoUri)
|
||||
|
||||
// Initialize Payload with test config
|
||||
payload = await getPayload({
|
||||
config: await config,
|
||||
local: true
|
||||
})
|
||||
|
||||
console.log('✅ Payload initialized for testing')
|
||||
}, 60000)
|
||||
|
||||
// Global test teardown - runs once after all tests
|
||||
afterAll(async () => {
|
||||
if (payload) {
|
||||
console.log('🛑 Shutting down Payload...')
|
||||
// Payload doesn't have a shutdown method, but we can clear the cache
|
||||
delete (global as any).payload
|
||||
payload = null
|
||||
}
|
||||
|
||||
if (mongod) {
|
||||
console.log('🛑 Stopping MongoDB in-memory server...')
|
||||
await mongod.stop()
|
||||
mongod = null
|
||||
}
|
||||
}, 30000)
|
||||
|
||||
// Export payload instance for tests
|
||||
export const getTestPayload = () => {
|
||||
if (!payload) {
|
||||
throw new Error('Payload not initialized. Make sure test setup has run.')
|
||||
}
|
||||
return payload
|
||||
}
|
||||
|
||||
// Helper to clean all collections
|
||||
export const cleanDatabase = async () => {
|
||||
if (!payload) return
|
||||
|
||||
try {
|
||||
// Clean up workflow runs first (child records)
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const run of runs.docs) {
|
||||
await payload.delete({
|
||||
collection: 'workflow-runs',
|
||||
id: run.id
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up workflows
|
||||
const workflows = await payload.find({
|
||||
collection: 'workflows',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
await payload.delete({
|
||||
collection: 'workflows',
|
||||
id: workflow.id
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up audit logs
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const log of auditLogs.docs) {
|
||||
await payload.delete({
|
||||
collection: 'auditLog',
|
||||
id: log.id
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up posts
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const post of posts.docs) {
|
||||
await payload.delete({
|
||||
collection: 'posts',
|
||||
id: post.id
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Database cleanup failed:', error)
|
||||
}
|
||||
}
|
||||
104
dev/test-trigger.ts
Normal file
104
dev/test-trigger.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import type { Payload } from 'payload'
|
||||
import { getPayload } from 'payload'
|
||||
import config from './payload.config'
|
||||
|
||||
async function testWorkflowTrigger() {
|
||||
console.log('Starting workflow trigger test...')
|
||||
|
||||
// Get payload instance
|
||||
const payload = await getPayload({ config })
|
||||
|
||||
try {
|
||||
// Create a test user
|
||||
const user = await payload.create({
|
||||
collection: 'users',
|
||||
data: {
|
||||
email: 'test@example.com',
|
||||
password: 'password123'
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Created test user:', user.id)
|
||||
|
||||
// Create a workflow with collection trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Post Creation Workflow',
|
||||
description: 'Triggers when a post is created',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'log-post',
|
||||
step: 'http-request-step',
|
||||
input: {
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: {
|
||||
message: 'Post created',
|
||||
postId: '$.trigger.doc.id',
|
||||
postTitle: '$.trigger.doc.title'
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
user: user.id
|
||||
})
|
||||
|
||||
console.log('Created workflow:', workflow.id)
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
console.log('Creating post to trigger workflow...')
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
title: 'Test Post',
|
||||
content: 'This should trigger the workflow',
|
||||
_status: 'published'
|
||||
},
|
||||
user: user.id
|
||||
})
|
||||
|
||||
console.log('Created post:', post.id)
|
||||
|
||||
// Wait a bit for workflow to execute
|
||||
await new Promise(resolve => setTimeout(resolve, 2000))
|
||||
|
||||
// Check for workflow runs
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Workflow runs found:', runs.totalDocs)
|
||||
|
||||
if (runs.totalDocs > 0) {
|
||||
console.log('✅ SUCCESS: Workflow was triggered!')
|
||||
console.log('Run status:', runs.docs[0].status)
|
||||
console.log('Run context:', JSON.stringify(runs.docs[0].context, null, 2))
|
||||
} else {
|
||||
console.log('❌ FAILURE: Workflow was not triggered')
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Test failed:', error)
|
||||
} finally {
|
||||
await payload.shutdown()
|
||||
}
|
||||
}
|
||||
|
||||
testWorkflowTrigger().catch(console.error)
|
||||
483
dev/webhook-triggers.spec.ts
Normal file
483
dev/webhook-triggers.spec.ts
Normal file
@@ -0,0 +1,483 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
|
||||
describe('Webhook Trigger Testing', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
it('should trigger workflow via webhook endpoint simulation', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with webhook trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Basic Trigger',
|
||||
description: 'Tests basic webhook triggering',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-basic'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'create-webhook-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Webhook triggered successfully',
|
||||
user: '$.trigger.data.userId'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
expect(workflow).toBeDefined()
|
||||
|
||||
// Directly execute the workflow with webhook-like data
|
||||
const executor = (globalThis as any).__workflowExecutor
|
||||
if (!executor) {
|
||||
console.warn('⚠️ Workflow executor not available, skipping webhook execution')
|
||||
return
|
||||
}
|
||||
|
||||
// Simulate webhook trigger by directly executing the workflow
|
||||
const webhookData = {
|
||||
userId: 'webhook-test-user',
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
|
||||
const mockReq = {
|
||||
payload,
|
||||
user: null,
|
||||
headers: {}
|
||||
}
|
||||
|
||||
await executor.execute({
|
||||
workflow,
|
||||
trigger: {
|
||||
type: 'webhook',
|
||||
path: 'test-basic',
|
||||
data: webhookData,
|
||||
headers: {}
|
||||
},
|
||||
req: mockReq as any,
|
||||
payload
|
||||
})
|
||||
|
||||
console.log('✅ Workflow executed directly')
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 2000))
|
||||
|
||||
// Verify workflow run was created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).not.toBe('failed')
|
||||
|
||||
// Verify audit log was created
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'Webhook triggered'
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
console.log('✅ Webhook audit log created')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook with complex data', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Complex Data',
|
||||
description: 'Tests webhook with complex JSON data',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-complex'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'echo-webhook-data',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
originalData: '$.trigger.data',
|
||||
headers: '$.trigger.headers',
|
||||
path: '$.trigger.path'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const complexData = {
|
||||
user: {
|
||||
id: 123,
|
||||
name: 'Test User',
|
||||
permissions: ['read', 'write']
|
||||
},
|
||||
event: {
|
||||
type: 'user_action',
|
||||
timestamp: new Date().toISOString(),
|
||||
metadata: {
|
||||
source: 'webhook-test',
|
||||
version: '1.0.0'
|
||||
}
|
||||
},
|
||||
nested: {
|
||||
deeply: {
|
||||
nested: {
|
||||
value: 'deep-test-value'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const response = await makeWebhookRequest('test-complex', complexData)
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed')
|
||||
|
||||
// Verify the complex data was properly passed through
|
||||
const stepOutput = runs.docs[0].context.steps['echo-webhook-data'].output
|
||||
expect(stepOutput.status).toBe(200)
|
||||
|
||||
const responseBody = JSON.parse(stepOutput.body)
|
||||
expect(responseBody.json.originalData.user.name).toBe('Test User')
|
||||
expect(responseBody.json.originalData.nested.deeply.nested.value).toBe('deep-test-value')
|
||||
|
||||
console.log('✅ Complex webhook data processed correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook conditions', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Conditional',
|
||||
description: 'Tests conditional webhook execution',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-conditional',
|
||||
condition: '$.data.action == "important"'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'conditional-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Webhook condition met - important action'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// First request - should NOT trigger (condition not met)
|
||||
const response1 = await makeWebhookRequest('test-conditional', {
|
||||
action: 'normal',
|
||||
data: 'test'
|
||||
})
|
||||
expect(response1.status).toBe(200)
|
||||
|
||||
// Second request - SHOULD trigger (condition met)
|
||||
const response2 = await makeWebhookRequest('test-conditional', {
|
||||
action: 'important',
|
||||
priority: 'high'
|
||||
})
|
||||
expect(response2.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Should have exactly 1 run (only for the matching condition)
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).not.toBe('failed')
|
||||
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'condition met'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
console.log('✅ Webhook conditional execution working')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook authentication headers', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Headers',
|
||||
description: 'Tests webhook header processing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-headers'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'process-headers',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
receivedHeaders: '$.trigger.headers',
|
||||
authorization: '$.trigger.headers.authorization',
|
||||
userAgent: '$.trigger.headers.user-agent'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Make webhook request with custom headers
|
||||
const webhookUrl = `${baseUrl}/api/workflows/webhook/test-headers`
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': 'Bearer test-token-123',
|
||||
'User-Agent': 'Webhook-Test-Client/1.0',
|
||||
'X-Custom-Header': 'custom-value'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
test: 'header processing'
|
||||
})
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed')
|
||||
|
||||
// Verify headers were captured and processed
|
||||
const stepOutput = runs.docs[0].context.steps['process-headers'].output
|
||||
const responseBody = JSON.parse(stepOutput.body)
|
||||
|
||||
expect(responseBody.json.authorization).toBe('Bearer test-token-123')
|
||||
expect(responseBody.json.userAgent).toBe('Webhook-Test-Client/1.0')
|
||||
|
||||
console.log('✅ Webhook headers processed correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle multiple concurrent webhook requests', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Concurrent',
|
||||
description: 'Tests concurrent webhook processing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-concurrent'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'concurrent-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Concurrent webhook execution',
|
||||
requestId: '$.trigger.data.requestId'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Make multiple concurrent webhook requests
|
||||
const concurrentRequests = Array.from({ length: 5 }, (_, i) =>
|
||||
makeWebhookRequest('test-concurrent', {
|
||||
requestId: `concurrent-${i + 1}`,
|
||||
timestamp: new Date().toISOString()
|
||||
})
|
||||
)
|
||||
|
||||
const responses = await Promise.all(concurrentRequests)
|
||||
responses.forEach(response => {
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
|
||||
// Wait for all workflow executions
|
||||
await new Promise(resolve => setTimeout(resolve, 8000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(5)
|
||||
|
||||
// Verify all runs completed successfully
|
||||
const failedRuns = runs.docs.filter(run => run.status === 'failed')
|
||||
expect(failedRuns).toHaveLength(0)
|
||||
|
||||
// Verify all audit logs were created
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'Concurrent webhook'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(5)
|
||||
console.log('✅ Concurrent webhook requests processed successfully')
|
||||
}, 35000)
|
||||
|
||||
it('should handle non-existent webhook paths gracefully', async () => {
|
||||
// Test that workflows with non-matching webhook paths don't get triggered
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Non-existent Path',
|
||||
description: 'Should not be triggered by different path',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'specific-path'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'create-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'This should not be created'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Simulate trying to trigger with wrong path - should not execute workflow
|
||||
const initialRuns = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(initialRuns.totalDocs).toBe(0)
|
||||
console.log('✅ Non-existent webhook path handled: no workflow runs created')
|
||||
}, 10000)
|
||||
|
||||
it('should handle malformed webhook JSON', async () => {
|
||||
const webhookUrl = `${baseUrl}/api/workflows/webhook/test-malformed`
|
||||
|
||||
// First create a workflow to receive the malformed request
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Malformed JSON',
|
||||
description: 'Tests malformed JSON handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-malformed'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'malformed-test',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Processed malformed request'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Send malformed JSON
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: '{"malformed": json, "missing": quotes}'
|
||||
})
|
||||
|
||||
// Should handle malformed JSON gracefully
|
||||
expect([400, 422]).toContain(response.status)
|
||||
console.log('✅ Malformed JSON handled:', response.status)
|
||||
}, 15000)
|
||||
})
|
||||
218
examples/README-trigger-builders.md
Normal file
218
examples/README-trigger-builders.md
Normal file
@@ -0,0 +1,218 @@
|
||||
# Trigger Builder Examples
|
||||
|
||||
The new trigger builder API dramatically reduces boilerplate when creating custom triggers.
|
||||
|
||||
## Before vs After
|
||||
|
||||
### Before (Manual Approach)
|
||||
```typescript
|
||||
const customTrigger = {
|
||||
slug: 'order-webhook',
|
||||
inputs: [
|
||||
{
|
||||
name: 'webhookSecret',
|
||||
type: 'text',
|
||||
required: true,
|
||||
virtual: true,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'order-webhook',
|
||||
description: 'Secret for webhook validation'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [({ siblingData }) => siblingData?.parameters?.webhookSecret],
|
||||
beforeChange: [({ value, siblingData }) => {
|
||||
if (!siblingData.parameters) siblingData.parameters = {}
|
||||
siblingData.parameters.webhookSecret = value
|
||||
return undefined
|
||||
}]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'orderStatuses',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['pending', 'processing', 'completed'],
|
||||
defaultValue: ['completed'],
|
||||
virtual: true,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'order-webhook',
|
||||
description: 'Order statuses that trigger the workflow'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [({ siblingData }) => siblingData?.parameters?.orderStatuses || ['completed']],
|
||||
beforeChange: [({ value, siblingData }) => {
|
||||
if (!siblingData.parameters) siblingData.parameters = {}
|
||||
siblingData.parameters.orderStatuses = value
|
||||
return undefined
|
||||
}]
|
||||
}
|
||||
}
|
||||
// ... imagine more fields with similar boilerplate
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### After (Builder Approach)
|
||||
```typescript
|
||||
import { createTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const orderWebhook = createTrigger('order-webhook').parameters({
|
||||
webhookSecret: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Secret for webhook validation'
|
||||
}
|
||||
},
|
||||
orderStatuses: {
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['pending', 'processing', 'completed'],
|
||||
defaultValue: ['completed'],
|
||||
admin: {
|
||||
description: 'Order statuses that trigger the workflow'
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
## Built-in Trigger Presets
|
||||
|
||||
### Webhook Trigger
|
||||
```typescript
|
||||
import { webhookTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const paymentWebhook = webhookTrigger('payment-webhook')
|
||||
.parameter('currency', {
|
||||
type: 'select',
|
||||
options: ['USD', 'EUR', 'GBP'],
|
||||
defaultValue: 'USD'
|
||||
})
|
||||
.build()
|
||||
```
|
||||
|
||||
### Scheduled/Cron Trigger
|
||||
```typescript
|
||||
import { cronTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const dailyReport = cronTrigger('daily-report')
|
||||
.parameter('reportFormat', {
|
||||
type: 'select',
|
||||
options: ['pdf', 'csv', 'json'],
|
||||
defaultValue: 'pdf'
|
||||
})
|
||||
.build()
|
||||
```
|
||||
|
||||
### Manual Trigger (No Parameters)
|
||||
```typescript
|
||||
import { manualTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const backupTrigger = manualTrigger('manual-backup')
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Extending Common Parameters
|
||||
```typescript
|
||||
import { createAdvancedTrigger, webhookParameters } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const advancedWebhook = createAdvancedTrigger('advanced-webhook')
|
||||
.extend(webhookParameters) // Includes path, secret, headers
|
||||
.parameter('retryAttempts', {
|
||||
type: 'number',
|
||||
min: 0,
|
||||
max: 5,
|
||||
defaultValue: 3
|
||||
})
|
||||
.parameter('timeout', {
|
||||
type: 'number',
|
||||
min: 1000,
|
||||
max: 30000,
|
||||
defaultValue: 5000,
|
||||
admin: {
|
||||
description: 'Timeout in milliseconds'
|
||||
}
|
||||
})
|
||||
.build()
|
||||
```
|
||||
|
||||
### Custom Validation
|
||||
```typescript
|
||||
const validatedTrigger = createTrigger('validated-trigger').parameters({
|
||||
email: {
|
||||
type: 'email',
|
||||
required: true,
|
||||
validate: (value) => {
|
||||
if (value?.endsWith('@spam.com')) {
|
||||
return 'Spam domains not allowed'
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
webhookUrl: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
validate: (value) => {
|
||||
try {
|
||||
const url = new URL(value)
|
||||
if (!['http:', 'https:'].includes(url.protocol)) {
|
||||
return 'Only HTTP/HTTPS URLs allowed'
|
||||
}
|
||||
} catch {
|
||||
return 'Please enter a valid URL'
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
## Usage in Plugin Configuration
|
||||
|
||||
```typescript
|
||||
import { workflowsPlugin } from '@xtr-dev/payload-automation'
|
||||
import {
|
||||
createTrigger,
|
||||
webhookTrigger,
|
||||
cronTrigger
|
||||
} from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
workflowsPlugin({
|
||||
triggers: [
|
||||
// Mix different trigger types
|
||||
createTrigger('user-signup').parameters({
|
||||
source: {
|
||||
type: 'select',
|
||||
options: ['web', 'mobile', 'api'],
|
||||
required: true
|
||||
}
|
||||
}),
|
||||
|
||||
webhookTrigger('payment-received')
|
||||
.parameter('minimumAmount', { type: 'number', min: 0 })
|
||||
.build(),
|
||||
|
||||
cronTrigger('weekly-cleanup')
|
||||
.parameter('deleteOlderThan', {
|
||||
type: 'number',
|
||||
defaultValue: 30,
|
||||
admin: { description: 'Delete records older than N days' }
|
||||
})
|
||||
.build()
|
||||
]
|
||||
})
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
## Benefits
|
||||
|
||||
- **90% less boilerplate** - No manual hooks, conditions, or virtual field setup
|
||||
- **Type safety** - Full TypeScript support
|
||||
- **Reusable patterns** - Common trigger types as presets
|
||||
- **Composable** - Mix builders with manual fields
|
||||
- **Backward compatible** - Existing triggers continue to work
|
||||
- **Validation built-in** - Parameter validation handled automatically
|
||||
300
examples/trigger-builders.ts
Normal file
300
examples/trigger-builders.ts
Normal file
@@ -0,0 +1,300 @@
|
||||
/**
|
||||
* Examples demonstrating the new trigger builder API
|
||||
* This shows the before/after comparison and various usage patterns
|
||||
*/
|
||||
|
||||
import {
|
||||
createTrigger,
|
||||
createAdvancedTrigger,
|
||||
webhookTrigger,
|
||||
cronTrigger,
|
||||
eventTrigger,
|
||||
manualTrigger,
|
||||
apiTrigger,
|
||||
webhookParameters,
|
||||
cronParameters
|
||||
} from '../src/exports/helpers.js'
|
||||
|
||||
/**
|
||||
* BEFORE: Manual trigger definition with lots of boilerplate
|
||||
*/
|
||||
const oldWayTrigger = {
|
||||
slug: 'order-webhook',
|
||||
inputs: [
|
||||
{
|
||||
name: 'webhookSecret',
|
||||
type: 'text',
|
||||
required: true,
|
||||
virtual: true,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'order-webhook',
|
||||
description: 'Secret for webhook validation'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [({ siblingData }) => siblingData?.parameters?.webhookSecret],
|
||||
beforeChange: [({ value, siblingData }) => {
|
||||
if (!siblingData.parameters) siblingData.parameters = {}
|
||||
siblingData.parameters.webhookSecret = value
|
||||
return undefined
|
||||
}]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'orderStatuses',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['pending', 'processing', 'completed'],
|
||||
defaultValue: ['completed'],
|
||||
virtual: true,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'order-webhook',
|
||||
description: 'Order statuses that trigger the workflow'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [({ siblingData }) => siblingData?.parameters?.orderStatuses || ['completed']],
|
||||
beforeChange: [({ value, siblingData }) => {
|
||||
if (!siblingData.parameters) siblingData.parameters = {}
|
||||
siblingData.parameters.orderStatuses = value
|
||||
return undefined
|
||||
}]
|
||||
}
|
||||
}
|
||||
// ... imagine more fields with similar boilerplate
|
||||
]
|
||||
} as const
|
||||
|
||||
/**
|
||||
* AFTER: Clean trigger definition using builders
|
||||
*/
|
||||
|
||||
// 1. Simple trigger with parameters
|
||||
const orderWebhook = createTrigger('order-webhook').parameters({
|
||||
webhookSecret: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Secret for webhook validation'
|
||||
}
|
||||
},
|
||||
orderStatuses: {
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['pending', 'processing', 'completed'],
|
||||
defaultValue: ['completed'],
|
||||
admin: {
|
||||
description: 'Order statuses that trigger the workflow'
|
||||
}
|
||||
},
|
||||
minimumAmount: {
|
||||
type: 'number',
|
||||
min: 0,
|
||||
admin: {
|
||||
description: 'Minimum order amount to trigger workflow'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// 2. Using preset webhook builder
|
||||
const paymentWebhook = webhookTrigger('payment-webhook')
|
||||
.parameter('currency', {
|
||||
type: 'select',
|
||||
options: ['USD', 'EUR', 'GBP'],
|
||||
defaultValue: 'USD'
|
||||
})
|
||||
.parameter('paymentMethods', {
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['credit_card', 'paypal', 'bank_transfer']
|
||||
})
|
||||
.build()
|
||||
|
||||
// 3. Scheduled trigger using cron builder
|
||||
const dailyReport = cronTrigger('daily-report')
|
||||
.parameter('reportFormat', {
|
||||
type: 'select',
|
||||
options: [
|
||||
{ label: 'PDF Report', value: 'pdf' },
|
||||
{ label: 'CSV Export', value: 'csv' },
|
||||
{ label: 'JSON Data', value: 'json' }
|
||||
],
|
||||
defaultValue: 'pdf'
|
||||
})
|
||||
.parameter('includeCharts', {
|
||||
type: 'checkbox',
|
||||
defaultValue: true,
|
||||
admin: {
|
||||
description: 'Include visual charts in the report'
|
||||
}
|
||||
})
|
||||
.build()
|
||||
|
||||
// 4. Event-driven trigger
|
||||
const userActivity = eventTrigger('user-activity')
|
||||
.parameter('actionTypes', {
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['login', 'logout', 'profile_update', 'password_change'],
|
||||
admin: {
|
||||
description: 'User actions that should trigger this workflow'
|
||||
}
|
||||
})
|
||||
.parameter('userRoles', {
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['admin', 'editor', 'user'],
|
||||
admin: {
|
||||
description: 'Only trigger for users with these roles'
|
||||
}
|
||||
})
|
||||
.build()
|
||||
|
||||
// 5. Simple manual trigger (no parameters)
|
||||
const manualBackup = manualTrigger('manual-backup')
|
||||
|
||||
// 6. API trigger with authentication
|
||||
const externalApi = apiTrigger('external-api')
|
||||
.parameter('allowedOrigins', {
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Comma-separated list of allowed origins'
|
||||
},
|
||||
validate: (value) => {
|
||||
if (value && typeof value === 'string') {
|
||||
const origins = value.split(',').map(s => s.trim())
|
||||
const validOrigins = origins.every(origin => {
|
||||
try {
|
||||
new URL(origin)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
})
|
||||
if (!validOrigins) {
|
||||
return 'All origins must be valid URLs'
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
})
|
||||
.build()
|
||||
|
||||
// 7. Complex trigger extending common parameters
|
||||
const advancedWebhook = createAdvancedTrigger('advanced-webhook')
|
||||
.extend(webhookParameters) // Start with webhook basics
|
||||
.parameter('retryConfig', {
|
||||
type: 'group',
|
||||
fields: [
|
||||
{
|
||||
name: 'maxRetries',
|
||||
type: 'number',
|
||||
min: 0,
|
||||
max: 10,
|
||||
defaultValue: 3
|
||||
},
|
||||
{
|
||||
name: 'retryDelay',
|
||||
type: 'number',
|
||||
min: 1000,
|
||||
max: 60000,
|
||||
defaultValue: 5000,
|
||||
admin: {
|
||||
description: 'Delay between retries in milliseconds'
|
||||
}
|
||||
}
|
||||
]
|
||||
})
|
||||
.parameter('filters', {
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'field',
|
||||
type: 'text',
|
||||
required: true
|
||||
},
|
||||
{
|
||||
name: 'operator',
|
||||
type: 'select',
|
||||
options: ['equals', 'not_equals', 'contains', 'greater_than'],
|
||||
required: true
|
||||
},
|
||||
{
|
||||
name: 'value',
|
||||
type: 'text',
|
||||
required: true
|
||||
}
|
||||
]
|
||||
})
|
||||
.build()
|
||||
|
||||
// 8. Custom parameter validation
|
||||
const validatedTrigger = createTrigger('validated-trigger').parameters({
|
||||
email: {
|
||||
type: 'email',
|
||||
required: true,
|
||||
validate: (value) => {
|
||||
if (value && typeof value === 'string') {
|
||||
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
|
||||
if (!emailRegex.test(value)) {
|
||||
return 'Please enter a valid email address'
|
||||
}
|
||||
// Custom business logic validation
|
||||
if (value.endsWith('@example.com')) {
|
||||
return 'Example.com emails are not allowed'
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
webhookUrl: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
validate: (value) => {
|
||||
if (value && typeof value === 'string') {
|
||||
try {
|
||||
const url = new URL(value)
|
||||
if (!['http:', 'https:'].includes(url.protocol)) {
|
||||
return 'URL must use HTTP or HTTPS protocol'
|
||||
}
|
||||
if (url.hostname === 'localhost') {
|
||||
return 'Localhost URLs are not allowed in production'
|
||||
}
|
||||
} catch {
|
||||
return 'Please enter a valid URL'
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* Export all triggers for use in plugin configuration
|
||||
*/
|
||||
export const exampleTriggers = [
|
||||
orderWebhook,
|
||||
paymentWebhook,
|
||||
dailyReport,
|
||||
userActivity,
|
||||
manualBackup,
|
||||
externalApi,
|
||||
advancedWebhook,
|
||||
validatedTrigger
|
||||
]
|
||||
|
||||
/**
|
||||
* Usage in payload.config.ts:
|
||||
*
|
||||
* ```typescript
|
||||
* import { workflowsPlugin } from '@xtr-dev/payload-automation'
|
||||
* import { exampleTriggers } from './examples/trigger-builders'
|
||||
*
|
||||
* export default buildConfig({
|
||||
* plugins: [
|
||||
* workflowsPlugin({
|
||||
* triggers: exampleTriggers,
|
||||
* // ... other config
|
||||
* })
|
||||
* ]
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@xtr-dev/payload-workflows",
|
||||
"version": "0.0.13",
|
||||
"version": "0.0.26",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@xtr-dev/payload-workflows",
|
||||
"version": "0.0.13",
|
||||
"version": "0.0.26",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"jsonpath-plus": "^10.3.0",
|
||||
|
||||
11
package.json
11
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@xtr-dev/payload-automation",
|
||||
"version": "0.0.13",
|
||||
"version": "0.0.26",
|
||||
"description": "PayloadCMS Automation Plugin - Comprehensive workflow automation system with visual workflow building, execution tracking, and step types",
|
||||
"license": "MIT",
|
||||
"type": "module",
|
||||
@@ -34,6 +34,11 @@
|
||||
"import": "./dist/exports/server.js",
|
||||
"types": "./dist/exports/server.d.ts",
|
||||
"default": "./dist/exports/server.js"
|
||||
},
|
||||
"./helpers": {
|
||||
"import": "./dist/exports/helpers.js",
|
||||
"types": "./dist/exports/helpers.d.ts",
|
||||
"default": "./dist/exports/helpers.js"
|
||||
}
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
@@ -70,6 +75,7 @@
|
||||
"@payloadcms/ui": "3.45.0",
|
||||
"@playwright/test": "^1.52.0",
|
||||
"@swc/cli": "0.6.0",
|
||||
"@types/nock": "^11.1.0",
|
||||
"@types/node": "^22.5.4",
|
||||
"@types/node-cron": "^3.0.11",
|
||||
"@types/react": "19.1.8",
|
||||
@@ -80,12 +86,15 @@
|
||||
"graphql": "^16.8.1",
|
||||
"mongodb-memory-server": "10.1.4",
|
||||
"next": "15.4.4",
|
||||
"nock": "^14.0.10",
|
||||
"payload": "3.45.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"rimraf": "3.0.2",
|
||||
"sharp": "0.34.3",
|
||||
"tsx": "^4.20.5",
|
||||
"typescript": "5.7.3",
|
||||
"undici": "^7.15.0",
|
||||
"vitest": "^3.1.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
94
pnpm-lock.yaml
generated
94
pnpm-lock.yaml
generated
@@ -45,6 +45,9 @@ importers:
|
||||
'@swc/cli':
|
||||
specifier: 0.6.0
|
||||
version: 0.6.0(@swc/core@1.13.4)
|
||||
'@types/nock':
|
||||
specifier: ^11.1.0
|
||||
version: 11.1.0
|
||||
'@types/node':
|
||||
specifier: ^22.5.4
|
||||
version: 22.17.2
|
||||
@@ -75,6 +78,9 @@ importers:
|
||||
next:
|
||||
specifier: 15.4.4
|
||||
version: 15.4.4(@playwright/test@1.55.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(sass@1.77.4)
|
||||
nock:
|
||||
specifier: ^14.0.10
|
||||
version: 14.0.10
|
||||
payload:
|
||||
specifier: 3.45.0
|
||||
version: 3.45.0(graphql@16.11.0)(typescript@5.7.3)
|
||||
@@ -90,9 +96,15 @@ importers:
|
||||
sharp:
|
||||
specifier: 0.34.3
|
||||
version: 0.34.3
|
||||
tsx:
|
||||
specifier: ^4.20.5
|
||||
version: 4.20.5
|
||||
typescript:
|
||||
specifier: 5.7.3
|
||||
version: 5.7.3
|
||||
undici:
|
||||
specifier: ^7.15.0
|
||||
version: 7.15.0
|
||||
vitest:
|
||||
specifier: ^3.1.2
|
||||
version: 3.2.4(@types/debug@4.1.12)(@types/node@22.17.2)(jiti@2.5.1)(sass@1.77.4)(tsx@4.20.5)
|
||||
@@ -1100,6 +1112,10 @@ packages:
|
||||
'@mongodb-js/saslprep@1.3.0':
|
||||
resolution: {integrity: sha512-zlayKCsIjYb7/IdfqxorK5+xUMyi4vOKcFy10wKJYc63NSdKI8mNME+uJqfatkPmOSMMUiojrL58IePKBm3gvQ==}
|
||||
|
||||
'@mswjs/interceptors@0.39.6':
|
||||
resolution: {integrity: sha512-bndDP83naYYkfayr/qhBHMhk0YGwS1iv6vaEGcr0SQbO0IZtbOPqjKjds/WcG+bJA+1T5vCx6kprKOzn5Bg+Vw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@napi-rs/nice-android-arm-eabi@1.1.1':
|
||||
resolution: {integrity: sha512-kjirL3N6TnRPv5iuHw36wnucNqXAO46dzK9oPb0wj076R5Xm8PfUVA9nAFB5ZNMmfJQJVKACAPd/Z2KYMppthw==}
|
||||
engines: {node: '>= 10'}
|
||||
@@ -1275,6 +1291,15 @@ packages:
|
||||
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
|
||||
engines: {node: '>= 8'}
|
||||
|
||||
'@open-draft/deferred-promise@2.2.0':
|
||||
resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==}
|
||||
|
||||
'@open-draft/logger@0.3.0':
|
||||
resolution: {integrity: sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==}
|
||||
|
||||
'@open-draft/until@2.1.0':
|
||||
resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==}
|
||||
|
||||
'@payloadcms/db-mongodb@3.45.0':
|
||||
resolution: {integrity: sha512-Oahk6LJatrQW2+DG0OoSoaWnXSiJ2iBL+2l5WLD2xvRHOlJ3Ls1gUZCrsDItDe8veqwVGSLrMc7gxDwDaMICvg==}
|
||||
peerDependencies:
|
||||
@@ -1593,6 +1618,10 @@ packages:
|
||||
'@types/ms@2.1.0':
|
||||
resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==}
|
||||
|
||||
'@types/nock@11.1.0':
|
||||
resolution: {integrity: sha512-jI/ewavBQ7X5178262JQR0ewicPAcJhXS/iFaNJl0VHLfyosZ/kwSrsa6VNQNSO8i9d8SqdRgOtZSOKJ/+iNMw==}
|
||||
deprecated: This is a stub types definition. nock provides its own type definitions, so you do not need this installed.
|
||||
|
||||
'@types/node-cron@3.0.11':
|
||||
resolution: {integrity: sha512-0ikrnug3/IyneSHqCBeslAhlK2aBfYek1fGo4bP4QnZPmiqSGRK+Oy7ZMisLWkesffJvQ1cqAcBnJC+8+nxIAg==}
|
||||
|
||||
@@ -3045,6 +3074,9 @@ packages:
|
||||
resolution: {integrity: sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
is-node-process@1.2.0:
|
||||
resolution: {integrity: sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==}
|
||||
|
||||
is-number-object@1.1.1:
|
||||
resolution: {integrity: sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -3169,6 +3201,9 @@ packages:
|
||||
json-stable-stringify-without-jsonify@1.0.1:
|
||||
resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
|
||||
|
||||
json-stringify-safe@5.0.1:
|
||||
resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==}
|
||||
|
||||
jsonpath-plus@10.3.0:
|
||||
resolution: {integrity: sha512-8TNmfeTCk2Le33A3vRRwtuworG/L5RrgMvdjhKZxvyShO+mBu2fP50OWUjRLNtvw344DdDarFh9buFAZs5ujeA==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
@@ -3524,6 +3559,10 @@ packages:
|
||||
sass:
|
||||
optional: true
|
||||
|
||||
nock@14.0.10:
|
||||
resolution: {integrity: sha512-Q7HjkpyPeLa0ZVZC5qpxBt5EyLczFJ91MEewQiIi9taWuA0KB/MDJlUWtON+7dGouVdADTQsf9RA7TZk6D8VMw==}
|
||||
engines: {node: '>=18.20.0 <20 || >=20.12.1'}
|
||||
|
||||
node-cron@4.2.1:
|
||||
resolution: {integrity: sha512-lgimEHPE/QDgFlywTd8yTR61ptugX3Qer29efeyWw2rv259HtGBNn1vZVmp8lB9uo9wC0t/AT4iGqXxia+CJFg==}
|
||||
engines: {node: '>=6.0.0'}
|
||||
@@ -3597,6 +3636,9 @@ packages:
|
||||
resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
|
||||
outvariant@1.4.3:
|
||||
resolution: {integrity: sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==}
|
||||
|
||||
own-keys@1.0.1:
|
||||
resolution: {integrity: sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -3850,6 +3892,10 @@ packages:
|
||||
prop-types@15.8.1:
|
||||
resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==}
|
||||
|
||||
propagate@2.0.1:
|
||||
resolution: {integrity: sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==}
|
||||
engines: {node: '>= 8'}
|
||||
|
||||
pump@3.0.3:
|
||||
resolution: {integrity: sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==}
|
||||
|
||||
@@ -4194,6 +4240,9 @@ packages:
|
||||
streamx@2.22.1:
|
||||
resolution: {integrity: sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==}
|
||||
|
||||
strict-event-emitter@0.5.1:
|
||||
resolution: {integrity: sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==}
|
||||
|
||||
string-ts@2.2.1:
|
||||
resolution: {integrity: sha512-Q2u0gko67PLLhbte5HmPfdOjNvUKbKQM+mCNQae6jE91DmoFHY6HH9GcdqCeNx87DZ2KKjiFxmA0R/42OneGWw==}
|
||||
|
||||
@@ -4441,6 +4490,10 @@ packages:
|
||||
resolution: {integrity: sha512-u5otvFBOBZvmdjWLVW+5DAc9Nkq8f24g0O9oY7qw2JVIF1VocIFoyz9JFkuVOS2j41AufeO0xnlweJ2RLT8nGw==}
|
||||
engines: {node: '>=20.18.1'}
|
||||
|
||||
undici@7.15.0:
|
||||
resolution: {integrity: sha512-7oZJCPvvMvTd0OlqWsIxTuItTpJBpU1tcbVl24FMn3xt3+VSunwUasmfPJRE57oNO1KsZ4PgA1xTdAX4hq8NyQ==}
|
||||
engines: {node: '>=20.18.1'}
|
||||
|
||||
unist-util-is@6.0.0:
|
||||
resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==}
|
||||
|
||||
@@ -5622,6 +5675,15 @@ snapshots:
|
||||
dependencies:
|
||||
sparse-bitfield: 3.0.3
|
||||
|
||||
'@mswjs/interceptors@0.39.6':
|
||||
dependencies:
|
||||
'@open-draft/deferred-promise': 2.2.0
|
||||
'@open-draft/logger': 0.3.0
|
||||
'@open-draft/until': 2.1.0
|
||||
is-node-process: 1.2.0
|
||||
outvariant: 1.4.3
|
||||
strict-event-emitter: 0.5.1
|
||||
|
||||
'@napi-rs/nice-android-arm-eabi@1.1.1':
|
||||
optional: true
|
||||
|
||||
@@ -5736,6 +5798,15 @@ snapshots:
|
||||
'@nodelib/fs.scandir': 2.1.5
|
||||
fastq: 1.19.1
|
||||
|
||||
'@open-draft/deferred-promise@2.2.0': {}
|
||||
|
||||
'@open-draft/logger@0.3.0':
|
||||
dependencies:
|
||||
is-node-process: 1.2.0
|
||||
outvariant: 1.4.3
|
||||
|
||||
'@open-draft/until@2.1.0': {}
|
||||
|
||||
'@payloadcms/db-mongodb@3.45.0(payload@3.45.0(graphql@16.11.0)(typescript@5.7.3))':
|
||||
dependencies:
|
||||
mongoose: 8.15.1
|
||||
@@ -6261,6 +6332,10 @@ snapshots:
|
||||
|
||||
'@types/ms@2.1.0': {}
|
||||
|
||||
'@types/nock@11.1.0':
|
||||
dependencies:
|
||||
nock: 14.0.10
|
||||
|
||||
'@types/node-cron@3.0.11': {}
|
||||
|
||||
'@types/node@22.17.2':
|
||||
@@ -8068,6 +8143,8 @@ snapshots:
|
||||
|
||||
is-negative-zero@2.0.3: {}
|
||||
|
||||
is-node-process@1.2.0: {}
|
||||
|
||||
is-number-object@1.1.1:
|
||||
dependencies:
|
||||
call-bound: 1.0.4
|
||||
@@ -8173,6 +8250,8 @@ snapshots:
|
||||
|
||||
json-stable-stringify-without-jsonify@1.0.1: {}
|
||||
|
||||
json-stringify-safe@5.0.1: {}
|
||||
|
||||
jsonpath-plus@10.3.0:
|
||||
dependencies:
|
||||
'@jsep-plugin/assignment': 1.3.0(jsep@1.4.0)
|
||||
@@ -8650,6 +8729,12 @@ snapshots:
|
||||
- '@babel/core'
|
||||
- babel-plugin-macros
|
||||
|
||||
nock@14.0.10:
|
||||
dependencies:
|
||||
'@mswjs/interceptors': 0.39.6
|
||||
json-stringify-safe: 5.0.1
|
||||
propagate: 2.0.1
|
||||
|
||||
node-cron@4.2.1: {}
|
||||
|
||||
node-domexception@1.0.0: {}
|
||||
@@ -8725,6 +8810,8 @@ snapshots:
|
||||
type-check: 0.4.0
|
||||
word-wrap: 1.2.5
|
||||
|
||||
outvariant@1.4.3: {}
|
||||
|
||||
own-keys@1.0.1:
|
||||
dependencies:
|
||||
get-intrinsic: 1.3.0
|
||||
@@ -9013,6 +9100,8 @@ snapshots:
|
||||
object-assign: 4.1.1
|
||||
react-is: 16.13.1
|
||||
|
||||
propagate@2.0.1: {}
|
||||
|
||||
pump@3.0.3:
|
||||
dependencies:
|
||||
end-of-stream: 1.4.5
|
||||
@@ -9419,6 +9508,8 @@ snapshots:
|
||||
optionalDependencies:
|
||||
bare-events: 2.6.1
|
||||
|
||||
strict-event-emitter@0.5.1: {}
|
||||
|
||||
string-ts@2.2.1: {}
|
||||
|
||||
string-width@4.2.3:
|
||||
@@ -9617,7 +9708,6 @@ snapshots:
|
||||
get-tsconfig: 4.10.1
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
optional: true
|
||||
|
||||
type-check@0.4.0:
|
||||
dependencies:
|
||||
@@ -9689,6 +9779,8 @@ snapshots:
|
||||
|
||||
undici@7.10.0: {}
|
||||
|
||||
undici@7.15.0: {}
|
||||
|
||||
unist-util-is@6.0.0:
|
||||
dependencies:
|
||||
'@types/unist': 3.0.3
|
||||
|
||||
@@ -36,6 +36,16 @@ export const createWorkflowCollection: <T extends string>(options: WorkflowsPlug
|
||||
description: 'Optional description of what this workflow does',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'executionStatus',
|
||||
type: 'ui',
|
||||
admin: {
|
||||
components: {
|
||||
Field: '@/components/WorkflowExecutionStatus'
|
||||
},
|
||||
condition: (data) => !!data?.id // Only show for existing workflows
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'triggers',
|
||||
type: 'array',
|
||||
@@ -52,79 +62,181 @@ export const createWorkflowCollection: <T extends string>(options: WorkflowsPlug
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'collectionSlug',
|
||||
name: 'parameters',
|
||||
type: 'json',
|
||||
admin: {
|
||||
hidden: true,
|
||||
},
|
||||
defaultValue: {}
|
||||
},
|
||||
// Virtual fields for collection trigger
|
||||
{
|
||||
name: '__builtin_collectionSlug',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'collection-trigger',
|
||||
description: 'Collection that triggers the workflow',
|
||||
},
|
||||
options: Object.keys(collectionTriggers || {})
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.collectionSlug || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.collectionSlug = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
options: Object.keys(collectionTriggers || {}),
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: 'operation',
|
||||
name: '__builtin_operation',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'collection-trigger',
|
||||
description: 'Collection operation that triggers the workflow',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.operation || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.operation = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
options: [
|
||||
'create',
|
||||
'delete',
|
||||
'read',
|
||||
'update',
|
||||
]
|
||||
],
|
||||
virtual: true,
|
||||
},
|
||||
// Virtual fields for webhook trigger
|
||||
{
|
||||
name: 'webhookPath',
|
||||
name: '__builtin_webhookPath',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'webhook-trigger',
|
||||
description: 'URL path for the webhook (e.g., "my-webhook"). Full URL will be /api/workflows/webhook/my-webhook',
|
||||
description: 'URL path for the webhook (e.g., "my-webhook"). Full URL will be /api/workflows-webhook/my-webhook',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.webhookPath || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.webhookPath = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'webhook-trigger' && !value) {
|
||||
if (siblingData?.type === 'webhook-trigger' && !value && !siblingData?.parameters?.webhookPath) {
|
||||
return 'Webhook path is required for webhook triggers'
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
virtual: true,
|
||||
},
|
||||
// Virtual fields for global trigger
|
||||
{
|
||||
name: 'global',
|
||||
name: '__builtin_global',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'global-trigger',
|
||||
description: 'Global that triggers the workflow',
|
||||
},
|
||||
options: [] // Will be populated dynamically based on available globals
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.global || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.global = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
options: [], // Will be populated dynamically based on available globals
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: 'globalOperation',
|
||||
name: '__builtin_globalOperation',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'global-trigger',
|
||||
description: 'Global operation that triggers the workflow',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.globalOperation || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.globalOperation = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
options: [
|
||||
'update'
|
||||
]
|
||||
],
|
||||
virtual: true,
|
||||
},
|
||||
// Virtual fields for cron trigger
|
||||
{
|
||||
name: 'cronExpression',
|
||||
name: '__builtin_cronExpression',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'cron-trigger',
|
||||
description: 'Cron expression for scheduled execution (e.g., "0 0 * * *" for daily at midnight)',
|
||||
placeholder: '0 0 * * *'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.cronExpression || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.cronExpression = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'cron-trigger' && !value) {
|
||||
const cronValue = value || siblingData?.parameters?.cronExpression
|
||||
if (siblingData?.type === 'cron-trigger' && !cronValue) {
|
||||
return 'Cron expression is required for cron triggers'
|
||||
}
|
||||
|
||||
// Validate cron expression format if provided
|
||||
if (siblingData?.type === 'cron-trigger' && value) {
|
||||
if (siblingData?.type === 'cron-trigger' && cronValue) {
|
||||
// Basic format validation - should be 5 parts separated by spaces
|
||||
const cronParts = value.trim().split(/\s+/)
|
||||
const cronParts = cronValue.trim().split(/\s+/)
|
||||
if (cronParts.length !== 5) {
|
||||
return 'Invalid cron expression format. Expected 5 parts: "minute hour day month weekday" (e.g., "0 9 * * 1")'
|
||||
}
|
||||
@@ -134,10 +246,11 @@ export const createWorkflowCollection: <T extends string>(options: WorkflowsPlug
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
},
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: 'timezone',
|
||||
name: '__builtin_timezone',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'cron-trigger',
|
||||
@@ -145,29 +258,48 @@ export const createWorkflowCollection: <T extends string>(options: WorkflowsPlug
|
||||
placeholder: 'UTC'
|
||||
},
|
||||
defaultValue: 'UTC',
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.timezone || 'UTC'
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.timezone = value || 'UTC'
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'cron-trigger' && value) {
|
||||
const tzValue = value || siblingData?.parameters?.timezone
|
||||
if (siblingData?.type === 'cron-trigger' && tzValue) {
|
||||
try {
|
||||
// Test if timezone is valid by trying to create a date with it
|
||||
new Intl.DateTimeFormat('en', {timeZone: value})
|
||||
new Intl.DateTimeFormat('en', {timeZone: tzValue})
|
||||
return true
|
||||
} catch {
|
||||
return `Invalid timezone: ${value}. Please use a valid IANA timezone identifier (e.g., "America/New_York", "Europe/London")`
|
||||
return `Invalid timezone: ${tzValue}. Please use a valid IANA timezone identifier (e.g., "America/New_York", "Europe/London")`
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.doc.status == \'published\'")'
|
||||
description: 'JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
...(triggers || []).flatMap(t => (t.inputs || []).map(f => ({
|
||||
// Virtual fields for custom triggers
|
||||
...(triggers || []).flatMap(t => (t.inputs || []).filter(f => 'name' in f && f.name).map(f => ({
|
||||
...f,
|
||||
// Prefix field name with trigger slug to avoid conflicts
|
||||
name: `__${t.slug}_${(f as any).name}`,
|
||||
admin: {
|
||||
...(f.admin || {}),
|
||||
condition: (...args) => args[1]?.type === t.slug && (
|
||||
@@ -176,6 +308,21 @@ export const createWorkflowCollection: <T extends string>(options: WorkflowsPlug
|
||||
true
|
||||
),
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.[(f as any).name] || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters[(f as any).name] = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
virtual: true,
|
||||
} as Field)))
|
||||
]
|
||||
},
|
||||
@@ -197,11 +344,17 @@ export const createWorkflowCollection: <T extends string>(options: WorkflowsPlug
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'input',
|
||||
type: 'json',
|
||||
required: false
|
||||
},
|
||||
...(steps || []).flatMap(step => (step.inputSchema || []).map(field => ({
|
||||
...field,
|
||||
admin: {
|
||||
...(field.admin || {}),
|
||||
condition: (...args) => args[1]?.step === step.slug && (
|
||||
field.admin?.condition ?
|
||||
field.admin.condition.call(this, ...args) :
|
||||
true
|
||||
),
|
||||
},
|
||||
} as Field))),
|
||||
{
|
||||
name: 'dependencies',
|
||||
type: 'text',
|
||||
|
||||
@@ -39,27 +39,30 @@ export const WorkflowRunsCollection: CollectionConfig = {
|
||||
type: 'select',
|
||||
admin: {
|
||||
description: 'Current execution status',
|
||||
components: {
|
||||
Cell: '@/components/StatusCell'
|
||||
}
|
||||
},
|
||||
defaultValue: 'pending',
|
||||
options: [
|
||||
{
|
||||
label: 'Pending',
|
||||
label: '⏳ Pending',
|
||||
value: 'pending',
|
||||
},
|
||||
{
|
||||
label: 'Running',
|
||||
label: '🔄 Running',
|
||||
value: 'running',
|
||||
},
|
||||
{
|
||||
label: 'Completed',
|
||||
label: '✅ Completed',
|
||||
value: 'completed',
|
||||
},
|
||||
{
|
||||
label: 'Failed',
|
||||
label: '❌ Failed',
|
||||
value: 'failed',
|
||||
},
|
||||
{
|
||||
label: 'Cancelled',
|
||||
label: '⏹️ Cancelled',
|
||||
value: 'cancelled',
|
||||
},
|
||||
],
|
||||
@@ -136,6 +139,10 @@ export const WorkflowRunsCollection: CollectionConfig = {
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Error message if workflow execution failed',
|
||||
condition: (_, siblingData) => siblingData?.status === 'failed',
|
||||
components: {
|
||||
Field: '@/components/ErrorDisplay'
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
|
||||
262
src/components/ErrorDisplay.tsx.disabled
Normal file
262
src/components/ErrorDisplay.tsx.disabled
Normal file
@@ -0,0 +1,262 @@
|
||||
'use client'
|
||||
|
||||
import React, { useState } from 'react'
|
||||
import { Button } from '@payloadcms/ui'
|
||||
|
||||
interface ErrorDisplayProps {
|
||||
value?: string
|
||||
onChange?: (value: string) => void
|
||||
readOnly?: boolean
|
||||
path?: string
|
||||
}
|
||||
|
||||
export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
value,
|
||||
onChange,
|
||||
readOnly = false
|
||||
}) => {
|
||||
const [expanded, setExpanded] = useState(false)
|
||||
|
||||
if (!value) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Parse common error patterns
|
||||
const parseError = (error: string) => {
|
||||
// Check for different error types and provide user-friendly messages
|
||||
if (error.includes('Request timeout')) {
|
||||
return {
|
||||
type: 'timeout',
|
||||
title: 'Request Timeout',
|
||||
message: 'The HTTP request took too long to complete. Consider increasing the timeout value or checking the target server.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
if (error.includes('Network error') || error.includes('fetch')) {
|
||||
return {
|
||||
type: 'network',
|
||||
title: 'Network Error',
|
||||
message: 'Unable to connect to the target server. Please check the URL and network connectivity.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
if (error.includes('Hook execution failed')) {
|
||||
return {
|
||||
type: 'hook',
|
||||
title: 'Workflow Hook Failed',
|
||||
message: 'The workflow trigger hook encountered an error. This may be due to PayloadCMS initialization issues.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
if (error.includes('Executor not available')) {
|
||||
return {
|
||||
type: 'executor',
|
||||
title: 'Workflow Engine Unavailable',
|
||||
message: 'The workflow execution engine is not properly initialized. Try restarting the server.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
if (error.includes('Collection slug is required') || error.includes('Document data is required')) {
|
||||
return {
|
||||
type: 'validation',
|
||||
title: 'Invalid Input Data',
|
||||
message: 'Required fields are missing from the workflow step configuration. Please check your step inputs.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
if (error.includes('status') && error.includes('4')) {
|
||||
return {
|
||||
type: 'client',
|
||||
title: 'Client Error (4xx)',
|
||||
message: 'The request was rejected by the server. Check your API credentials and request format.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
if (error.includes('status') && error.includes('5')) {
|
||||
return {
|
||||
type: 'server',
|
||||
title: 'Server Error (5xx)',
|
||||
message: 'The target server encountered an error. This is usually temporary - try again later.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
// Generic error
|
||||
return {
|
||||
type: 'generic',
|
||||
title: 'Workflow Error',
|
||||
message: 'An error occurred during workflow execution. See technical details below.',
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
const errorInfo = parseError(value)
|
||||
|
||||
const getErrorIcon = (type: string) => {
|
||||
switch (type) {
|
||||
case 'timeout': return '⏰'
|
||||
case 'network': return '🌐'
|
||||
case 'hook': return '🔗'
|
||||
case 'executor': return '⚙️'
|
||||
case 'validation': return '📋'
|
||||
case 'client': return '🚫'
|
||||
case 'server': return '🔥'
|
||||
default: return '❗'
|
||||
}
|
||||
}
|
||||
|
||||
const getErrorColor = (type: string) => {
|
||||
switch (type) {
|
||||
case 'timeout': return '#F59E0B'
|
||||
case 'network': return '#EF4444'
|
||||
case 'hook': return '#8B5CF6'
|
||||
case 'executor': return '#6B7280'
|
||||
case 'validation': return '#F59E0B'
|
||||
case 'client': return '#EF4444'
|
||||
case 'server': return '#DC2626'
|
||||
default: return '#EF4444'
|
||||
}
|
||||
}
|
||||
|
||||
const errorColor = getErrorColor(errorInfo.type)
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
border: `2px solid ${errorColor}30`,
|
||||
borderRadius: '8px',
|
||||
backgroundColor: `${errorColor}08`,
|
||||
padding: '16px',
|
||||
marginTop: '8px'
|
||||
}}>
|
||||
{/* Error Header */}
|
||||
<div style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: '12px',
|
||||
marginBottom: '12px'
|
||||
}}>
|
||||
<span style={{ fontSize: '24px' }}>
|
||||
{getErrorIcon(errorInfo.type)}
|
||||
</span>
|
||||
<div>
|
||||
<h4 style={{
|
||||
margin: 0,
|
||||
color: errorColor,
|
||||
fontSize: '16px',
|
||||
fontWeight: '600'
|
||||
}}>
|
||||
{errorInfo.title}
|
||||
</h4>
|
||||
<p style={{
|
||||
margin: '4px 0 0 0',
|
||||
color: '#6B7280',
|
||||
fontSize: '14px',
|
||||
lineHeight: '1.4'
|
||||
}}>
|
||||
{errorInfo.message}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Technical Details Toggle */}
|
||||
<div>
|
||||
<Button
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
style={{ marginBottom: expanded ? '12px' : '0' }}
|
||||
>
|
||||
{expanded ? 'Hide' : 'Show'} Technical Details
|
||||
</Button>
|
||||
|
||||
{expanded && (
|
||||
<div style={{
|
||||
backgroundColor: '#F8F9FA',
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '6px',
|
||||
padding: '12px',
|
||||
fontFamily: 'monospace',
|
||||
fontSize: '13px',
|
||||
color: '#374151',
|
||||
whiteSpace: 'pre-wrap',
|
||||
overflowX: 'auto'
|
||||
}}>
|
||||
{errorInfo.technical}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Quick Actions */}
|
||||
<div style={{
|
||||
marginTop: '12px',
|
||||
padding: '12px',
|
||||
backgroundColor: `${errorColor}10`,
|
||||
borderRadius: '6px',
|
||||
fontSize: '13px'
|
||||
}}>
|
||||
<strong>💡 Quick fixes:</strong>
|
||||
<ul style={{ margin: '8px 0 0 0', paddingLeft: '20px' }}>
|
||||
{errorInfo.type === 'timeout' && (
|
||||
<>
|
||||
<li>Increase the timeout value in step configuration</li>
|
||||
<li>Check if the target server is responding slowly</li>
|
||||
</>
|
||||
)}
|
||||
{errorInfo.type === 'network' && (
|
||||
<>
|
||||
<li>Verify the URL is correct and accessible</li>
|
||||
<li>Check firewall and network connectivity</li>
|
||||
</>
|
||||
)}
|
||||
{errorInfo.type === 'hook' && (
|
||||
<>
|
||||
<li>Restart the PayloadCMS server</li>
|
||||
<li>Check server logs for initialization errors</li>
|
||||
</>
|
||||
)}
|
||||
{errorInfo.type === 'executor' && (
|
||||
<>
|
||||
<li>Restart the PayloadCMS application</li>
|
||||
<li>Verify the automation plugin is properly configured</li>
|
||||
</>
|
||||
)}
|
||||
{errorInfo.type === 'validation' && (
|
||||
<>
|
||||
<li>Check all required fields are filled in the workflow step</li>
|
||||
<li>Verify JSONPath expressions in step inputs</li>
|
||||
</>
|
||||
)}
|
||||
{(errorInfo.type === 'client' || errorInfo.type === 'server') && (
|
||||
<>
|
||||
<li>Check API credentials and permissions</li>
|
||||
<li>Verify the request format matches API expectations</li>
|
||||
<li>Try the request manually to test the endpoint</li>
|
||||
</>
|
||||
)}
|
||||
{errorInfo.type === 'generic' && (
|
||||
<>
|
||||
<li>Check the workflow configuration</li>
|
||||
<li>Review server logs for more details</li>
|
||||
<li>Try running the workflow again</li>
|
||||
</>
|
||||
)}
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
{/* Hidden textarea for editing if needed */}
|
||||
{!readOnly && onChange && (
|
||||
<textarea
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={{ display: 'none' }}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
45
src/components/StatusCell.tsx
Normal file
45
src/components/StatusCell.tsx
Normal file
@@ -0,0 +1,45 @@
|
||||
'use client'
|
||||
|
||||
import React from 'react'
|
||||
|
||||
interface StatusCellProps {
|
||||
cellData: string
|
||||
}
|
||||
|
||||
export const StatusCell: React.FC<StatusCellProps> = ({ cellData }) => {
|
||||
const getStatusDisplay = (status: string) => {
|
||||
switch (status) {
|
||||
case 'pending':
|
||||
return { icon: '⏳', color: '#6B7280', label: 'Pending' }
|
||||
case 'running':
|
||||
return { icon: '🔄', color: '#3B82F6', label: 'Running' }
|
||||
case 'completed':
|
||||
return { icon: '✅', color: '#10B981', label: 'Completed' }
|
||||
case 'failed':
|
||||
return { icon: '❌', color: '#EF4444', label: 'Failed' }
|
||||
case 'cancelled':
|
||||
return { icon: '⏹️', color: '#F59E0B', label: 'Cancelled' }
|
||||
default:
|
||||
return { icon: '❓', color: '#6B7280', label: status || 'Unknown' }
|
||||
}
|
||||
}
|
||||
|
||||
const { icon, color, label } = getStatusDisplay(cellData)
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: '8px',
|
||||
padding: '4px 8px',
|
||||
borderRadius: '6px',
|
||||
backgroundColor: `${color}15`,
|
||||
border: `1px solid ${color}30`,
|
||||
fontSize: '14px',
|
||||
fontWeight: '500'
|
||||
}}>
|
||||
<span style={{ fontSize: '16px' }}>{icon}</span>
|
||||
<span style={{ color }}>{label}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
231
src/components/WorkflowExecutionStatus.tsx
Normal file
231
src/components/WorkflowExecutionStatus.tsx
Normal file
@@ -0,0 +1,231 @@
|
||||
'use client'
|
||||
|
||||
import React, { useState, useEffect } from 'react'
|
||||
import { Button } from '@payloadcms/ui'
|
||||
|
||||
interface WorkflowRun {
|
||||
id: string
|
||||
status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'
|
||||
startedAt: string
|
||||
completedAt?: string
|
||||
error?: string
|
||||
triggeredBy: string
|
||||
}
|
||||
|
||||
interface WorkflowExecutionStatusProps {
|
||||
workflowId: string | number
|
||||
}
|
||||
|
||||
export const WorkflowExecutionStatus: React.FC<WorkflowExecutionStatusProps> = ({ workflowId }) => {
|
||||
const [runs, setRuns] = useState<WorkflowRun[]>([])
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [expanded, setExpanded] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const fetchRecentRuns = async () => {
|
||||
try {
|
||||
const response = await fetch(`/api/workflow-runs?where[workflow][equals]=${workflowId}&limit=5&sort=-startedAt`)
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setRuns(data.docs || [])
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to fetch workflow runs:', error)
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
fetchRecentRuns()
|
||||
}, [workflowId])
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div style={{ padding: '16px', color: '#6B7280' }}>
|
||||
Loading execution history...
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (runs.length === 0) {
|
||||
return (
|
||||
<div style={{
|
||||
padding: '16px',
|
||||
backgroundColor: '#F9FAFB',
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '8px',
|
||||
color: '#6B7280',
|
||||
textAlign: 'center'
|
||||
}}>
|
||||
📋 No execution history yet
|
||||
<br />
|
||||
<small>This workflow hasn't been triggered yet.</small>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const getStatusIcon = (status: string) => {
|
||||
switch (status) {
|
||||
case 'pending': return '⏳'
|
||||
case 'running': return '🔄'
|
||||
case 'completed': return '✅'
|
||||
case 'failed': return '❌'
|
||||
case 'cancelled': return '⏹️'
|
||||
default: return '❓'
|
||||
}
|
||||
}
|
||||
|
||||
const getStatusColor = (status: string) => {
|
||||
switch (status) {
|
||||
case 'pending': return '#6B7280'
|
||||
case 'running': return '#3B82F6'
|
||||
case 'completed': return '#10B981'
|
||||
case 'failed': return '#EF4444'
|
||||
case 'cancelled': return '#F59E0B'
|
||||
default: return '#6B7280'
|
||||
}
|
||||
}
|
||||
|
||||
const formatDate = (dateString: string) => {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffMs = now.getTime() - date.getTime()
|
||||
|
||||
if (diffMs < 60000) { // Less than 1 minute
|
||||
return 'Just now'
|
||||
} else if (diffMs < 3600000) { // Less than 1 hour
|
||||
return `${Math.floor(diffMs / 60000)} min ago`
|
||||
} else if (diffMs < 86400000) { // Less than 1 day
|
||||
return `${Math.floor(diffMs / 3600000)} hrs ago`
|
||||
} else {
|
||||
return date.toLocaleDateString()
|
||||
}
|
||||
}
|
||||
|
||||
const getDuration = (startedAt: string, completedAt?: string) => {
|
||||
const start = new Date(startedAt)
|
||||
const end = completedAt ? new Date(completedAt) : new Date()
|
||||
const diffMs = end.getTime() - start.getTime()
|
||||
|
||||
if (diffMs < 1000) return '<1s'
|
||||
if (diffMs < 60000) return `${Math.floor(diffMs / 1000)}s`
|
||||
if (diffMs < 3600000) return `${Math.floor(diffMs / 60000)}m ${Math.floor((diffMs % 60000) / 1000)}s`
|
||||
return `${Math.floor(diffMs / 3600000)}h ${Math.floor((diffMs % 3600000) / 60000)}m`
|
||||
}
|
||||
|
||||
const recentRun = runs[0]
|
||||
const recentStatus = getStatusIcon(recentRun.status)
|
||||
const recentColor = getStatusColor(recentRun.status)
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '8px',
|
||||
backgroundColor: '#FAFAFA'
|
||||
}}>
|
||||
{/* Summary Header */}
|
||||
<div style={{
|
||||
padding: '16px',
|
||||
borderBottom: expanded ? '1px solid #E5E7EB' : 'none',
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '12px' }}>
|
||||
<span style={{ fontSize: '20px' }}>{recentStatus}</span>
|
||||
<div>
|
||||
<div style={{ fontWeight: '600', color: recentColor }}>
|
||||
Last run: {recentRun.status}
|
||||
</div>
|
||||
<div style={{ fontSize: '13px', color: '#6B7280' }}>
|
||||
{formatDate(recentRun.startedAt)} • Duration: {getDuration(recentRun.startedAt, recentRun.completedAt)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
{expanded ? 'Hide' : 'Show'} History ({runs.length})
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Detailed History */}
|
||||
{expanded && (
|
||||
<div style={{ padding: '16px' }}>
|
||||
<h4 style={{ margin: '0 0 12px 0', fontSize: '14px', fontWeight: '600' }}>
|
||||
Recent Executions
|
||||
</h4>
|
||||
|
||||
{runs.map((run, index) => (
|
||||
<div
|
||||
key={run.id}
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
padding: '8px 12px',
|
||||
marginBottom: index < runs.length - 1 ? '8px' : '0',
|
||||
backgroundColor: 'white',
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '6px'
|
||||
}}
|
||||
>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '10px' }}>
|
||||
<span style={{ fontSize: '16px' }}>
|
||||
{getStatusIcon(run.status)}
|
||||
</span>
|
||||
|
||||
<div>
|
||||
<div style={{
|
||||
fontSize: '13px',
|
||||
fontWeight: '500',
|
||||
color: getStatusColor(run.status)
|
||||
}}>
|
||||
{run.status.charAt(0).toUpperCase() + run.status.slice(1)}
|
||||
</div>
|
||||
<div style={{ fontSize: '12px', color: '#6B7280' }}>
|
||||
{formatDate(run.startedAt)} • {run.triggeredBy}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style={{
|
||||
fontSize: '12px',
|
||||
color: '#6B7280',
|
||||
textAlign: 'right'
|
||||
}}>
|
||||
<div>
|
||||
{getDuration(run.startedAt, run.completedAt)}
|
||||
</div>
|
||||
{run.error && (
|
||||
<div style={{ color: '#EF4444', marginTop: '2px' }}>
|
||||
Error
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
||||
<div style={{
|
||||
marginTop: '12px',
|
||||
textAlign: 'center'
|
||||
}}>
|
||||
<Button
|
||||
onClick={() => {
|
||||
// Navigate to workflow runs filtered by this workflow
|
||||
window.location.href = `/admin/collections/workflow-runs?where[workflow][equals]=${workflowId}`
|
||||
}}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
View All Runs →
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { initializeLogger } from '../plugin/logger.js'
|
||||
import { type Workflow, WorkflowExecutor } from './workflow-executor.js'
|
||||
import { type PayloadWorkflow, WorkflowExecutor } from './workflow-executor.js'
|
||||
|
||||
export interface CustomTriggerOptions {
|
||||
/**
|
||||
@@ -142,7 +142,7 @@ export async function triggerCustomWorkflow(
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as Workflow, context, workflowReq)
|
||||
await executor.execute(workflow as PayloadWorkflow, context, workflowReq)
|
||||
|
||||
// Get the latest run for this workflow to get the run ID
|
||||
const runs = await payload.find({
|
||||
@@ -255,7 +255,7 @@ export async function triggerWorkflowById(
|
||||
|
||||
// Create executor and execute
|
||||
const executor = new WorkflowExecutor(payload, logger)
|
||||
await executor.execute(workflow as Workflow, context, workflowReq)
|
||||
await executor.execute(workflow as PayloadWorkflow, context, workflowReq)
|
||||
|
||||
// Get the latest run to get the run ID
|
||||
const runs = await payload.find({
|
||||
|
||||
@@ -1,31 +1,47 @@
|
||||
import type { Payload, PayloadRequest } from 'payload'
|
||||
|
||||
// We need to reference the generated types dynamically since they're not available at build time
|
||||
// Using generic types and casting where necessary
|
||||
export type PayloadWorkflow = {
|
||||
id: number
|
||||
name: string
|
||||
description?: string | null
|
||||
triggers?: Array<{
|
||||
type?: string | null
|
||||
condition?: string | null
|
||||
parameters?: {
|
||||
collectionSlug?: string | null
|
||||
operation?: string | null
|
||||
webhookPath?: string | null
|
||||
cronExpression?: string | null
|
||||
timezone?: string | null
|
||||
global?: string | null
|
||||
globalOperation?: string | null
|
||||
[key: string]: unknown
|
||||
} | null
|
||||
[key: string]: unknown
|
||||
}> | null
|
||||
steps?: Array<{
|
||||
step?: string | null
|
||||
name?: string | null
|
||||
input?: unknown
|
||||
dependencies?: string[] | null
|
||||
condition?: string | null
|
||||
[key: string]: unknown
|
||||
}> | null
|
||||
[key: string]: unknown
|
||||
}
|
||||
|
||||
import { JSONPath } from 'jsonpath-plus'
|
||||
|
||||
export type Workflow = {
|
||||
_version?: number
|
||||
id: string
|
||||
name: string
|
||||
steps: WorkflowStep[]
|
||||
triggers: WorkflowTrigger[]
|
||||
// Helper type to extract workflow step data from the generated types
|
||||
export type WorkflowStep = NonNullable<PayloadWorkflow['steps']>[0] & {
|
||||
name: string // Ensure name is always present for our execution logic
|
||||
}
|
||||
|
||||
export type WorkflowStep = {
|
||||
condition?: string
|
||||
dependencies?: string[]
|
||||
input?: null | Record<string, unknown>
|
||||
name: string
|
||||
step: string
|
||||
}
|
||||
|
||||
export interface WorkflowTrigger {
|
||||
collection?: string
|
||||
condition?: string
|
||||
global?: string
|
||||
globalOperation?: string
|
||||
operation?: string
|
||||
type: string
|
||||
webhookPath?: string
|
||||
// Helper type to extract workflow trigger data from the generated types
|
||||
export type WorkflowTrigger = NonNullable<PayloadWorkflow['triggers']>[0] & {
|
||||
type: string // Ensure type is always present for our execution logic
|
||||
}
|
||||
|
||||
export interface ExecutionContext {
|
||||
@@ -34,6 +50,30 @@ export interface ExecutionContext {
|
||||
input: unknown
|
||||
output: unknown
|
||||
state: 'failed' | 'pending' | 'running' | 'succeeded'
|
||||
_startTime?: number
|
||||
executionInfo?: {
|
||||
completed: boolean
|
||||
success: boolean
|
||||
executedAt: string
|
||||
duration: number
|
||||
failureReason?: string
|
||||
}
|
||||
errorDetails?: {
|
||||
stepId: string
|
||||
errorType: string
|
||||
duration: number
|
||||
attempts: number
|
||||
finalError: string
|
||||
context: {
|
||||
url?: string
|
||||
method?: string
|
||||
timeout?: number
|
||||
statusCode?: number
|
||||
headers?: Record<string, string>
|
||||
[key: string]: any
|
||||
}
|
||||
timestamp: string
|
||||
}
|
||||
}>
|
||||
trigger: {
|
||||
collection?: string
|
||||
@@ -146,15 +186,27 @@ export class WorkflowExecutor {
|
||||
error: undefined,
|
||||
input: undefined,
|
||||
output: undefined,
|
||||
state: 'running'
|
||||
state: 'running',
|
||||
_startTime: Date.now() // Track execution start time for independent duration tracking
|
||||
}
|
||||
|
||||
// Move taskSlug declaration outside try block so it's accessible in catch
|
||||
const taskSlug = step.step // Use the 'step' field for task type
|
||||
|
||||
try {
|
||||
// Extract input data from step - PayloadCMS flattens inputSchema fields to step level
|
||||
const inputFields: Record<string, unknown> = {}
|
||||
|
||||
// Get all fields except the core step fields
|
||||
const coreFields = ['step', 'name', 'dependencies', 'condition']
|
||||
for (const [key, value] of Object.entries(step)) {
|
||||
if (!coreFields.includes(key)) {
|
||||
inputFields[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve input data using JSONPath
|
||||
const resolvedInput = this.resolveStepInput(step.input || {}, context)
|
||||
const resolvedInput = this.resolveStepInput(inputFields, context)
|
||||
context.steps[stepName].input = resolvedInput
|
||||
|
||||
if (!taskSlug) {
|
||||
@@ -174,11 +226,21 @@ export class WorkflowExecutor {
|
||||
task: taskSlug
|
||||
})
|
||||
|
||||
// Run the job immediately
|
||||
await this.payload.jobs.run({
|
||||
limit: 1,
|
||||
// Run the specific job immediately and wait for completion
|
||||
this.logger.info({ jobId: job.id }, 'Running job immediately using runByID')
|
||||
const runResults = await this.payload.jobs.runByID({
|
||||
id: job.id,
|
||||
req
|
||||
})
|
||||
|
||||
this.logger.info({
|
||||
jobId: job.id,
|
||||
runResult: runResults,
|
||||
hasResult: !!runResults
|
||||
}, 'Job run completed')
|
||||
|
||||
// Give a small delay to ensure job is fully processed
|
||||
await new Promise(resolve => setTimeout(resolve, 100))
|
||||
|
||||
// Get the job result
|
||||
const completedJob = await this.payload.findByID({
|
||||
@@ -187,6 +249,13 @@ export class WorkflowExecutor {
|
||||
req
|
||||
})
|
||||
|
||||
this.logger.info({
|
||||
jobId: job.id,
|
||||
totalTried: completedJob.totalTried,
|
||||
hasError: completedJob.hasError,
|
||||
taskStatus: completedJob.taskStatus ? Object.keys(completedJob.taskStatus) : 'null'
|
||||
}, 'Retrieved job results')
|
||||
|
||||
const taskStatus = completedJob.taskStatus?.[completedJob.taskSlug]?.[completedJob.totalTried]
|
||||
const isComplete = taskStatus?.complete === true
|
||||
const hasError = completedJob.hasError || !isComplete
|
||||
@@ -205,9 +274,37 @@ export class WorkflowExecutor {
|
||||
errorMessage = completedJob.error.message || completedJob.error
|
||||
}
|
||||
|
||||
// Final fallback to generic message
|
||||
// Try to get error from task output if available
|
||||
if (!errorMessage && taskStatus?.output?.error) {
|
||||
errorMessage = taskStatus.output.error
|
||||
}
|
||||
|
||||
// Check if task handler returned with state='failed'
|
||||
if (!errorMessage && taskStatus?.state === 'failed') {
|
||||
errorMessage = 'Task handler returned a failed state'
|
||||
// Try to get more specific error from output
|
||||
if (taskStatus.output?.error) {
|
||||
errorMessage = taskStatus.output.error
|
||||
}
|
||||
}
|
||||
|
||||
// Check for network errors in the job data
|
||||
if (!errorMessage && completedJob.result) {
|
||||
const result = completedJob.result
|
||||
if (result.error) {
|
||||
errorMessage = result.error
|
||||
}
|
||||
}
|
||||
|
||||
// Final fallback to generic message with more detail
|
||||
if (!errorMessage) {
|
||||
errorMessage = `Task ${taskSlug} failed without detailed error information`
|
||||
const jobDetails = {
|
||||
taskSlug,
|
||||
hasError: completedJob.hasError,
|
||||
taskStatus: taskStatus?.complete,
|
||||
totalTried: completedJob.totalTried
|
||||
}
|
||||
errorMessage = `Task ${taskSlug} failed without detailed error information. Job details: ${JSON.stringify(jobDetails)}`
|
||||
}
|
||||
}
|
||||
|
||||
@@ -228,6 +325,30 @@ export class WorkflowExecutor {
|
||||
context.steps[stepName].error = result.error
|
||||
}
|
||||
|
||||
// Independent execution tracking (not dependent on PayloadCMS task status)
|
||||
context.steps[stepName].executionInfo = {
|
||||
completed: true, // Step execution completed (regardless of success/failure)
|
||||
success: result.state === 'succeeded',
|
||||
executedAt: new Date().toISOString(),
|
||||
duration: Date.now() - (context.steps[stepName]._startTime || Date.now())
|
||||
}
|
||||
|
||||
// For failed steps, try to extract detailed error information from the job logs
|
||||
// This approach is more reliable than external storage and persists with the workflow
|
||||
if (result.state === 'failed') {
|
||||
const errorDetails = this.extractErrorDetailsFromJob(completedJob, context.steps[stepName], stepName)
|
||||
if (errorDetails) {
|
||||
context.steps[stepName].errorDetails = errorDetails
|
||||
|
||||
this.logger.info({
|
||||
stepName,
|
||||
errorType: errorDetails.errorType,
|
||||
duration: errorDetails.duration,
|
||||
attempts: errorDetails.attempts
|
||||
}, 'Extracted detailed error information for failed step')
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug({context}, 'Step execution context')
|
||||
|
||||
if (result.state !== 'succeeded') {
|
||||
@@ -249,6 +370,15 @@ export class WorkflowExecutor {
|
||||
context.steps[stepName].state = 'failed'
|
||||
context.steps[stepName].error = errorMessage
|
||||
|
||||
// Independent execution tracking for failed steps
|
||||
context.steps[stepName].executionInfo = {
|
||||
completed: true, // Execution attempted and completed (even if it failed)
|
||||
success: false,
|
||||
executedAt: new Date().toISOString(),
|
||||
duration: Date.now() - (context.steps[stepName]._startTime || Date.now()),
|
||||
failureReason: errorMessage
|
||||
}
|
||||
|
||||
this.logger.error({
|
||||
error: errorMessage,
|
||||
input: context.steps[stepName].input,
|
||||
@@ -398,6 +528,128 @@ export class WorkflowExecutor {
|
||||
return resolved
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely serialize an object, handling circular references and non-serializable values
|
||||
*/
|
||||
private safeSerialize(obj: unknown): unknown {
|
||||
const seen = new WeakSet()
|
||||
|
||||
const serialize = (value: unknown): unknown => {
|
||||
if (value === null || typeof value !== 'object') {
|
||||
return value
|
||||
}
|
||||
|
||||
if (seen.has(value as object)) {
|
||||
return '[Circular Reference]'
|
||||
}
|
||||
|
||||
seen.add(value as object)
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(serialize)
|
||||
}
|
||||
|
||||
const result: Record<string, unknown> = {}
|
||||
for (const [key, val] of Object.entries(value as Record<string, unknown>)) {
|
||||
try {
|
||||
// Skip non-serializable properties that are likely internal database objects
|
||||
if (key === 'table' || key === 'schema' || key === '_' || key === '__') {
|
||||
continue
|
||||
}
|
||||
result[key] = serialize(val)
|
||||
} catch {
|
||||
// Skip properties that can't be accessed or serialized
|
||||
result[key] = '[Non-serializable]'
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
return serialize(obj)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts detailed error information from job logs and input
|
||||
*/
|
||||
private extractErrorDetailsFromJob(job: any, stepContext: any, stepName: string) {
|
||||
try {
|
||||
// Get error information from multiple sources
|
||||
const input = stepContext.input || {}
|
||||
const logs = job.log || []
|
||||
const latestLog = logs[logs.length - 1]
|
||||
|
||||
// Extract error message from job error or log
|
||||
const errorMessage = job.error?.message || latestLog?.error?.message || 'Unknown error'
|
||||
|
||||
// For timeout scenarios, check if it's a timeout based on duration and timeout setting
|
||||
let errorType = this.classifyErrorType(errorMessage)
|
||||
|
||||
// Special handling for HTTP timeouts - if task failed and duration exceeds timeout, it's likely a timeout
|
||||
if (errorType === 'unknown' && input.timeout && stepContext.executionInfo?.duration) {
|
||||
const timeoutMs = parseInt(input.timeout) || 30000
|
||||
const actualDuration = stepContext.executionInfo.duration
|
||||
|
||||
// If execution duration is close to or exceeds timeout, classify as timeout
|
||||
if (actualDuration >= (timeoutMs * 0.9)) { // 90% of timeout threshold
|
||||
errorType = 'timeout'
|
||||
this.logger.debug({
|
||||
timeoutMs,
|
||||
actualDuration,
|
||||
stepName
|
||||
}, 'Classified error as timeout based on duration analysis')
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate duration from execution info if available
|
||||
const duration = stepContext.executionInfo?.duration || 0
|
||||
|
||||
// Extract attempt count from logs
|
||||
const attempts = job.totalTried || 1
|
||||
|
||||
return {
|
||||
stepId: `${stepName}-${Date.now()}`,
|
||||
errorType,
|
||||
duration,
|
||||
attempts,
|
||||
finalError: errorMessage,
|
||||
context: {
|
||||
url: input.url,
|
||||
method: input.method,
|
||||
timeout: input.timeout,
|
||||
statusCode: latestLog?.output?.status,
|
||||
headers: input.headers
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stepName
|
||||
}, 'Failed to extract error details from job')
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Classifies error types based on error messages
|
||||
*/
|
||||
private classifyErrorType(errorMessage: string): string {
|
||||
if (errorMessage.includes('timeout') || errorMessage.includes('ETIMEDOUT')) {
|
||||
return 'timeout'
|
||||
}
|
||||
if (errorMessage.includes('ENOTFOUND') || errorMessage.includes('getaddrinfo')) {
|
||||
return 'dns'
|
||||
}
|
||||
if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('ECONNRESET')) {
|
||||
return 'connection'
|
||||
}
|
||||
if (errorMessage.includes('network') || errorMessage.includes('fetch')) {
|
||||
return 'network'
|
||||
}
|
||||
return 'unknown'
|
||||
}
|
||||
|
||||
/**
|
||||
* Update workflow run with current context
|
||||
*/
|
||||
@@ -407,14 +659,14 @@ export class WorkflowExecutor {
|
||||
req: PayloadRequest
|
||||
): Promise<void> {
|
||||
const serializeContext = () => ({
|
||||
steps: context.steps,
|
||||
steps: this.safeSerialize(context.steps),
|
||||
trigger: {
|
||||
type: context.trigger.type,
|
||||
collection: context.trigger.collection,
|
||||
data: context.trigger.data,
|
||||
doc: context.trigger.doc,
|
||||
data: this.safeSerialize(context.trigger.data),
|
||||
doc: this.safeSerialize(context.trigger.doc),
|
||||
operation: context.trigger.operation,
|
||||
previousDoc: context.trigger.previousDoc,
|
||||
previousDoc: this.safeSerialize(context.trigger.previousDoc),
|
||||
triggeredAt: context.trigger.triggeredAt,
|
||||
user: context.trigger.req?.user
|
||||
}
|
||||
@@ -431,7 +683,7 @@ export class WorkflowExecutor {
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate a condition using JSONPath
|
||||
* Evaluate a condition using JSONPath and comparison operators
|
||||
*/
|
||||
public evaluateCondition(condition: string, context: ExecutionContext): boolean {
|
||||
this.logger.debug({
|
||||
@@ -443,34 +695,94 @@ export class WorkflowExecutor {
|
||||
}, 'Starting condition evaluation')
|
||||
|
||||
try {
|
||||
const result = JSONPath({
|
||||
json: context,
|
||||
path: condition,
|
||||
wrap: false
|
||||
})
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
result,
|
||||
resultType: Array.isArray(result) ? 'array' : typeof result,
|
||||
resultLength: Array.isArray(result) ? result.length : undefined
|
||||
}, 'JSONPath evaluation result')
|
||||
|
||||
// Handle different result types
|
||||
let finalResult: boolean
|
||||
if (Array.isArray(result)) {
|
||||
finalResult = result.length > 0 && Boolean(result[0])
|
||||
// Check if this is a comparison expression
|
||||
const comparisonMatch = condition.match(/^(.+?)\s*(==|!=|>|<|>=|<=)\s*(.+)$/)
|
||||
|
||||
if (comparisonMatch) {
|
||||
const [, leftExpr, operator, rightExpr] = comparisonMatch
|
||||
|
||||
// Evaluate left side (should be JSONPath)
|
||||
const leftValue = this.resolveJSONPathValue(leftExpr.trim(), context)
|
||||
|
||||
// Parse right side (could be string, number, boolean, or JSONPath)
|
||||
const rightValue = this.parseConditionValue(rightExpr.trim(), context)
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
leftExpr: leftExpr.trim(),
|
||||
leftValue,
|
||||
operator,
|
||||
rightExpr: rightExpr.trim(),
|
||||
rightValue,
|
||||
leftType: typeof leftValue,
|
||||
rightType: typeof rightValue
|
||||
}, 'Evaluating comparison condition')
|
||||
|
||||
// Perform comparison
|
||||
let result: boolean
|
||||
switch (operator) {
|
||||
case '==':
|
||||
result = leftValue === rightValue
|
||||
break
|
||||
case '!=':
|
||||
result = leftValue !== rightValue
|
||||
break
|
||||
case '>':
|
||||
result = Number(leftValue) > Number(rightValue)
|
||||
break
|
||||
case '<':
|
||||
result = Number(leftValue) < Number(rightValue)
|
||||
break
|
||||
case '>=':
|
||||
result = Number(leftValue) >= Number(rightValue)
|
||||
break
|
||||
case '<=':
|
||||
result = Number(leftValue) <= Number(rightValue)
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unknown comparison operator: ${operator}`)
|
||||
}
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
result,
|
||||
leftValue,
|
||||
rightValue,
|
||||
operator
|
||||
}, 'Comparison condition evaluation completed')
|
||||
|
||||
return result
|
||||
} else {
|
||||
finalResult = Boolean(result)
|
||||
// Treat as simple JSONPath boolean evaluation
|
||||
const result = JSONPath({
|
||||
json: context,
|
||||
path: condition,
|
||||
wrap: false
|
||||
})
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
result,
|
||||
resultType: Array.isArray(result) ? 'array' : typeof result,
|
||||
resultLength: Array.isArray(result) ? result.length : undefined
|
||||
}, 'JSONPath boolean evaluation result')
|
||||
|
||||
// Handle different result types
|
||||
let finalResult: boolean
|
||||
if (Array.isArray(result)) {
|
||||
finalResult = result.length > 0 && Boolean(result[0])
|
||||
} else {
|
||||
finalResult = Boolean(result)
|
||||
}
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
finalResult,
|
||||
originalResult: result
|
||||
}, 'Boolean condition evaluation completed')
|
||||
|
||||
return finalResult
|
||||
}
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
finalResult,
|
||||
originalResult: result
|
||||
}, 'Condition evaluation completed')
|
||||
|
||||
return finalResult
|
||||
} catch (error) {
|
||||
this.logger.warn({
|
||||
condition,
|
||||
@@ -482,47 +794,119 @@ export class WorkflowExecutor {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a JSONPath value from the context
|
||||
*/
|
||||
private resolveJSONPathValue(expr: string, context: ExecutionContext): any {
|
||||
if (expr.startsWith('$')) {
|
||||
const result = JSONPath({
|
||||
json: context,
|
||||
path: expr,
|
||||
wrap: false
|
||||
})
|
||||
// Return first result if array, otherwise the result itself
|
||||
return Array.isArray(result) && result.length > 0 ? result[0] : result
|
||||
}
|
||||
return expr
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a condition value (string literal, number, boolean, or JSONPath)
|
||||
*/
|
||||
private parseConditionValue(expr: string, context: ExecutionContext): any {
|
||||
// Handle string literals
|
||||
if ((expr.startsWith('"') && expr.endsWith('"')) || (expr.startsWith("'") && expr.endsWith("'"))) {
|
||||
return expr.slice(1, -1) // Remove quotes
|
||||
}
|
||||
|
||||
// Handle boolean literals
|
||||
if (expr === 'true') return true
|
||||
if (expr === 'false') return false
|
||||
|
||||
// Handle number literals
|
||||
if (/^-?\d+(\.\d+)?$/.test(expr)) {
|
||||
return Number(expr)
|
||||
}
|
||||
|
||||
// Handle JSONPath expressions
|
||||
if (expr.startsWith('$')) {
|
||||
return this.resolveJSONPathValue(expr, context)
|
||||
}
|
||||
|
||||
// Return as string if nothing else matches
|
||||
return expr
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a workflow with the given context
|
||||
*/
|
||||
async execute(workflow: Workflow, context: ExecutionContext, req: PayloadRequest): Promise<void> {
|
||||
async execute(workflow: PayloadWorkflow, context: ExecutionContext, req: PayloadRequest): Promise<void> {
|
||||
this.logger.info({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Starting workflow execution')
|
||||
|
||||
const serializeContext = () => ({
|
||||
steps: context.steps,
|
||||
steps: this.safeSerialize(context.steps),
|
||||
trigger: {
|
||||
type: context.trigger.type,
|
||||
collection: context.trigger.collection,
|
||||
data: context.trigger.data,
|
||||
doc: context.trigger.doc,
|
||||
data: this.safeSerialize(context.trigger.data),
|
||||
doc: this.safeSerialize(context.trigger.doc),
|
||||
operation: context.trigger.operation,
|
||||
previousDoc: context.trigger.previousDoc,
|
||||
previousDoc: this.safeSerialize(context.trigger.previousDoc),
|
||||
triggeredAt: context.trigger.triggeredAt,
|
||||
user: context.trigger.req?.user
|
||||
}
|
||||
})
|
||||
|
||||
this.logger.info({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
contextSummary: {
|
||||
triggerType: context.trigger.type,
|
||||
triggerCollection: context.trigger.collection,
|
||||
triggerOperation: context.trigger.operation,
|
||||
hasDoc: !!context.trigger.doc,
|
||||
userEmail: context.trigger.req?.user?.email
|
||||
}
|
||||
}, 'About to create workflow run record')
|
||||
|
||||
// Create a workflow run record
|
||||
const workflowRun = await this.payload.create({
|
||||
collection: 'workflow-runs',
|
||||
data: {
|
||||
context: serializeContext(),
|
||||
startedAt: new Date().toISOString(),
|
||||
status: 'running',
|
||||
triggeredBy: context.trigger.req?.user?.email || 'system',
|
||||
workflow: workflow.id,
|
||||
workflowVersion: workflow._version || 1
|
||||
},
|
||||
req
|
||||
})
|
||||
let workflowRun;
|
||||
try {
|
||||
workflowRun = await this.payload.create({
|
||||
collection: 'workflow-runs',
|
||||
data: {
|
||||
context: serializeContext(),
|
||||
startedAt: new Date().toISOString(),
|
||||
status: 'running',
|
||||
triggeredBy: context.trigger.req?.user?.email || 'system',
|
||||
workflow: workflow.id,
|
||||
workflowVersion: 1 // Default version since generated type doesn't have _version field
|
||||
},
|
||||
req
|
||||
})
|
||||
|
||||
this.logger.info({
|
||||
workflowRunId: workflowRun.id,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Workflow run record created successfully')
|
||||
} catch (error) {
|
||||
this.logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
errorStack: error instanceof Error ? error.stack : undefined,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Failed to create workflow run record')
|
||||
throw error
|
||||
}
|
||||
|
||||
try {
|
||||
// Resolve execution order based on dependencies
|
||||
const executionBatches = this.resolveExecutionOrder(workflow.steps)
|
||||
const executionBatches = this.resolveExecutionOrder(workflow.steps as WorkflowStep[] || [])
|
||||
|
||||
this.logger.info({
|
||||
batchSizes: executionBatches.map(batch => batch.length),
|
||||
@@ -606,6 +990,13 @@ export class WorkflowExecutor {
|
||||
previousDoc: unknown,
|
||||
req: PayloadRequest
|
||||
): Promise<void> {
|
||||
console.log('🚨 EXECUTOR: executeTriggeredWorkflows called!')
|
||||
console.log('🚨 EXECUTOR: Collection =', collection)
|
||||
console.log('🚨 EXECUTOR: Operation =', operation)
|
||||
console.log('🚨 EXECUTOR: Doc ID =', (doc as any)?.id)
|
||||
console.log('🚨 EXECUTOR: Has payload?', !!this.payload)
|
||||
console.log('🚨 EXECUTOR: Has logger?', !!this.logger)
|
||||
|
||||
this.logger.info({
|
||||
collection,
|
||||
operation,
|
||||
@@ -628,11 +1019,14 @@ export class WorkflowExecutor {
|
||||
for (const workflow of workflows.docs) {
|
||||
// Check if this workflow has a matching trigger
|
||||
const triggers = workflow.triggers as Array<{
|
||||
collection?: string
|
||||
collectionSlug?: string
|
||||
condition?: string
|
||||
operation: string
|
||||
type: string
|
||||
parameters?: {
|
||||
collection?: string
|
||||
collectionSlug?: string
|
||||
operation?: string
|
||||
[key: string]: any
|
||||
}
|
||||
}>
|
||||
|
||||
this.logger.debug({
|
||||
@@ -641,16 +1035,16 @@ export class WorkflowExecutor {
|
||||
triggerCount: triggers?.length || 0,
|
||||
triggers: triggers?.map(t => ({
|
||||
type: t.type,
|
||||
collection: t.collection,
|
||||
collectionSlug: t.collectionSlug,
|
||||
operation: t.operation
|
||||
collection: t.parameters?.collection,
|
||||
collectionSlug: t.parameters?.collectionSlug,
|
||||
operation: t.parameters?.operation
|
||||
}))
|
||||
}, 'Checking workflow triggers')
|
||||
|
||||
const matchingTriggers = triggers?.filter(trigger =>
|
||||
trigger.type === 'collection-trigger' &&
|
||||
(trigger.collection === collection || trigger.collectionSlug === collection) &&
|
||||
trigger.operation === operation
|
||||
(trigger.parameters?.collection === collection || trigger.parameters?.collectionSlug === collection) &&
|
||||
trigger.parameters?.operation === operation
|
||||
) || []
|
||||
|
||||
this.logger.info({
|
||||
@@ -662,6 +1056,18 @@ export class WorkflowExecutor {
|
||||
}, 'Matching triggers found')
|
||||
|
||||
for (const trigger of matchingTriggers) {
|
||||
this.logger.info({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
triggerDetails: {
|
||||
type: trigger.type,
|
||||
collection: trigger.parameters?.collection,
|
||||
collectionSlug: trigger.parameters?.collectionSlug,
|
||||
operation: trigger.parameters?.operation,
|
||||
hasCondition: !!trigger.condition
|
||||
}
|
||||
}, 'Processing matching trigger - about to execute workflow')
|
||||
|
||||
// Create execution context for condition evaluation
|
||||
const context: ExecutionContext = {
|
||||
steps: {},
|
||||
@@ -720,7 +1126,7 @@ export class WorkflowExecutor {
|
||||
}, 'Triggering workflow')
|
||||
|
||||
// Execute the workflow
|
||||
await this.execute(workflow as Workflow, context, req)
|
||||
await this.execute(workflow as PayloadWorkflow, context, req)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@@ -2,6 +2,9 @@
|
||||
// These are separated to avoid CSS import errors during Node.js type generation
|
||||
|
||||
export { TriggerWorkflowButton } from '../components/TriggerWorkflowButton.js'
|
||||
export { StatusCell } from '../components/StatusCell.js'
|
||||
// export { ErrorDisplay } from '../components/ErrorDisplay.js' // Temporarily disabled
|
||||
export { WorkflowExecutionStatus } from '../components/WorkflowExecutionStatus.js'
|
||||
|
||||
// Future client components can be added here:
|
||||
// export { default as WorkflowDashboard } from '../components/WorkflowDashboard/index.js'
|
||||
|
||||
47
src/exports/helpers.ts
Normal file
47
src/exports/helpers.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
/**
|
||||
* Trigger builder helpers for creating custom triggers with less boilerplate
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { createTrigger, webhookTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
*
|
||||
* // Simple trigger
|
||||
* const myTrigger = createTrigger('my-trigger').parameters({
|
||||
* apiKey: { type: 'text', required: true },
|
||||
* timeout: { type: 'number', defaultValue: 30 }
|
||||
* })
|
||||
*
|
||||
* // Webhook trigger with presets
|
||||
* const orderWebhook = webhookTrigger('order-webhook')
|
||||
* .parameter('orderTypes', {
|
||||
* type: 'select',
|
||||
* hasMany: true,
|
||||
* options: ['regular', 'subscription']
|
||||
* })
|
||||
* .build()
|
||||
* ```
|
||||
*/
|
||||
|
||||
// Core helpers
|
||||
export {
|
||||
createTriggerParameter,
|
||||
createTriggerParameters,
|
||||
createTrigger,
|
||||
createAdvancedTrigger
|
||||
} from '../utils/trigger-helpers.js'
|
||||
|
||||
// Preset builders
|
||||
export {
|
||||
webhookTrigger,
|
||||
cronTrigger,
|
||||
eventTrigger,
|
||||
manualTrigger,
|
||||
apiTrigger
|
||||
} from '../utils/trigger-presets.js'
|
||||
|
||||
// Common parameter sets for extending
|
||||
export {
|
||||
webhookParameters,
|
||||
cronParameters,
|
||||
eventParameters
|
||||
} from '../utils/trigger-presets.js'
|
||||
11
src/index.ts
11
src/index.ts
@@ -1,14 +1,17 @@
|
||||
// Main export contains only types and client-safe utilities
|
||||
// Server-side functions are exported via '@xtr-dev/payload-automation/server'
|
||||
|
||||
export type {
|
||||
PayloadWorkflow as Workflow,
|
||||
WorkflowStep,
|
||||
WorkflowTrigger
|
||||
} from './core/workflow-executor.js'
|
||||
|
||||
// Pure types only - completely safe for client bundling
|
||||
export type {
|
||||
CustomTriggerOptions,
|
||||
TriggerResult,
|
||||
ExecutionContext,
|
||||
Workflow,
|
||||
WorkflowStep,
|
||||
WorkflowTrigger,
|
||||
TriggerResult,
|
||||
WorkflowsPluginConfig
|
||||
} from './types/index.js'
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import type {Config, Payload, TaskConfig} from 'payload'
|
||||
|
||||
import cron from 'node-cron'
|
||||
|
||||
import {type Workflow, WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {type PayloadWorkflow, WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {getConfigLogger} from './logger.js'
|
||||
|
||||
/**
|
||||
@@ -54,14 +54,17 @@ export function generateCronTasks(config: Config): void {
|
||||
// Find the matching cron trigger and check its condition if present
|
||||
const triggers = workflow.triggers as Array<{
|
||||
condition?: string
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
parameters?: {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
[key: string]: any
|
||||
}
|
||||
type: string
|
||||
}>
|
||||
|
||||
const matchingTrigger = triggers?.find(trigger =>
|
||||
trigger.type === 'cron-trigger' &&
|
||||
trigger.cronExpression === cronExpression
|
||||
trigger.parameters?.cronExpression === cronExpression
|
||||
)
|
||||
|
||||
// Check trigger condition if present
|
||||
@@ -101,7 +104,7 @@ export function generateCronTasks(config: Config): void {
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as Workflow, context, req)
|
||||
await executor.execute(workflow as PayloadWorkflow, context, req)
|
||||
|
||||
// Re-queue the job for the next scheduled execution if cronExpression is provided
|
||||
if (cronExpression) {
|
||||
@@ -183,8 +186,11 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
const triggers = workflow.triggers as Array<{
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
parameters?: {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
[key: string]: any
|
||||
}
|
||||
type: string
|
||||
}>
|
||||
|
||||
@@ -192,12 +198,12 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger
|
||||
const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || []
|
||||
|
||||
for (const trigger of cronTriggers) {
|
||||
if (trigger.cronExpression) {
|
||||
if (trigger.parameters?.cronExpression) {
|
||||
try {
|
||||
// Validate cron expression before queueing
|
||||
if (!validateCronExpression(trigger.cronExpression)) {
|
||||
if (!validateCronExpression(trigger.parameters.cronExpression)) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid cron expression format')
|
||||
@@ -205,13 +211,13 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger
|
||||
}
|
||||
|
||||
// Validate timezone if provided
|
||||
if (trigger.timezone) {
|
||||
if (trigger.parameters?.timezone) {
|
||||
try {
|
||||
// Test if timezone is valid by trying to create a date with it
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.timezone })
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.parameters.timezone })
|
||||
} catch {
|
||||
logger.error({
|
||||
timezone: trigger.timezone,
|
||||
timezone: trigger.parameters.timezone,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid timezone specified')
|
||||
@@ -220,27 +226,27 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger
|
||||
}
|
||||
|
||||
// Calculate next execution time
|
||||
const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone)
|
||||
const nextExecution = getNextCronTime(trigger.parameters.cronExpression, trigger.parameters?.timezone)
|
||||
|
||||
// Queue the job
|
||||
await payload.jobs.queue({
|
||||
input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId: workflow.id },
|
||||
input: { cronExpression: trigger.parameters.cronExpression, timezone: trigger.parameters?.timezone, workflowId: workflow.id },
|
||||
task: 'workflow-cron-executor',
|
||||
waitUntil: nextExecution
|
||||
})
|
||||
|
||||
logger.info({
|
||||
cronExpression: trigger.cronExpression,
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
nextExecution: nextExecution.toISOString(),
|
||||
timezone: trigger.timezone || 'UTC',
|
||||
timezone: trigger.parameters?.timezone || 'UTC',
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Queued initial cron job for workflow')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
timezone: trigger.timezone,
|
||||
timezone: trigger.parameters?.timezone,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Failed to queue cron job')
|
||||
@@ -508,8 +514,11 @@ export async function updateWorkflowCronJobs(
|
||||
}
|
||||
|
||||
const triggers = workflow.triggers as Array<{
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
parameters?: {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
[key: string]: any
|
||||
}
|
||||
type: string
|
||||
}>
|
||||
|
||||
@@ -524,12 +533,12 @@ export async function updateWorkflowCronJobs(
|
||||
let scheduledJobs = 0
|
||||
|
||||
for (const trigger of cronTriggers) {
|
||||
if (trigger.cronExpression) {
|
||||
if (trigger.parameters?.cronExpression) {
|
||||
try {
|
||||
// Validate cron expression before queueing
|
||||
if (!validateCronExpression(trigger.cronExpression)) {
|
||||
if (!validateCronExpression(trigger.parameters.cronExpression)) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid cron expression format')
|
||||
@@ -537,12 +546,12 @@ export async function updateWorkflowCronJobs(
|
||||
}
|
||||
|
||||
// Validate timezone if provided
|
||||
if (trigger.timezone) {
|
||||
if (trigger.parameters?.timezone) {
|
||||
try {
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.timezone })
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.parameters.timezone })
|
||||
} catch {
|
||||
logger.error({
|
||||
timezone: trigger.timezone,
|
||||
timezone: trigger.parameters.timezone,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid timezone specified')
|
||||
@@ -551,11 +560,11 @@ export async function updateWorkflowCronJobs(
|
||||
}
|
||||
|
||||
// Calculate next execution time
|
||||
const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone)
|
||||
const nextExecution = getNextCronTime(trigger.parameters.cronExpression, trigger.parameters?.timezone)
|
||||
|
||||
// Queue the job
|
||||
await payload.jobs.queue({
|
||||
input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId },
|
||||
input: { cronExpression: trigger.parameters.cronExpression, timezone: trigger.parameters?.timezone, workflowId },
|
||||
task: 'workflow-cron-executor',
|
||||
waitUntil: nextExecution
|
||||
})
|
||||
@@ -563,17 +572,17 @@ export async function updateWorkflowCronJobs(
|
||||
scheduledJobs++
|
||||
|
||||
logger.info({
|
||||
cronExpression: trigger.cronExpression,
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
nextExecution: nextExecution.toISOString(),
|
||||
timezone: trigger.timezone || 'UTC',
|
||||
timezone: trigger.parameters?.timezone || 'UTC',
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Scheduled cron job for workflow')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
cronExpression: trigger.parameters?.cronExpression,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
timezone: trigger.timezone,
|
||||
timezone: trigger.parameters?.timezone,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Failed to schedule cron job')
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type {Config} from 'payload'
|
||||
|
||||
import type {WorkflowsPluginConfig} from "./config-types.js"
|
||||
import type {CollectionTriggerConfigCrud, WorkflowsPluginConfig} from "./config-types.js"
|
||||
|
||||
import {createWorkflowCollection} from '../collections/Workflow.js'
|
||||
import {WorkflowRunsCollection} from '../collections/WorkflowRuns.js'
|
||||
@@ -15,6 +15,108 @@ import {getConfigLogger, initializeLogger} from './logger.js'
|
||||
|
||||
export {getLogger} from './logger.js'
|
||||
|
||||
// Improved executor registry with proper error handling and logging
|
||||
interface ExecutorRegistry {
|
||||
executor: null | WorkflowExecutor
|
||||
isInitialized: boolean
|
||||
logger: any | null
|
||||
}
|
||||
|
||||
const executorRegistry: ExecutorRegistry = {
|
||||
executor: null,
|
||||
isInitialized: false,
|
||||
logger: null
|
||||
}
|
||||
|
||||
const setWorkflowExecutor = (executor: WorkflowExecutor, logger: any) => {
|
||||
executorRegistry.executor = executor
|
||||
executorRegistry.logger = logger
|
||||
executorRegistry.isInitialized = true
|
||||
|
||||
logger.info('Workflow executor initialized and registered successfully')
|
||||
}
|
||||
|
||||
const getExecutorRegistry = (): ExecutorRegistry => {
|
||||
return executorRegistry
|
||||
}
|
||||
|
||||
// Helper function to create failed workflow runs for tracking errors
|
||||
const createFailedWorkflowRun = async (args: any, errorMessage: string, logger: any) => {
|
||||
try {
|
||||
// Only create failed workflow runs if we have enough context
|
||||
if (!args?.req?.payload || !args?.collection?.slug) {
|
||||
return
|
||||
}
|
||||
|
||||
// Find workflows that should have been triggered
|
||||
const workflows = await args.req.payload.find({
|
||||
collection: 'workflows',
|
||||
limit: 10,
|
||||
req: args.req,
|
||||
where: {
|
||||
'triggers.collectionSlug': {
|
||||
equals: args.collection.slug
|
||||
},
|
||||
'triggers.operation': {
|
||||
equals: args.operation
|
||||
},
|
||||
'triggers.type': {
|
||||
equals: 'collection-trigger'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Create failed workflow runs for each matching workflow
|
||||
for (const workflow of workflows.docs) {
|
||||
await args.req.payload.create({
|
||||
collection: 'workflow-runs',
|
||||
data: {
|
||||
completedAt: new Date().toISOString(),
|
||||
context: {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'collection',
|
||||
collection: args.collection.slug,
|
||||
doc: args.doc,
|
||||
operation: args.operation,
|
||||
previousDoc: args.previousDoc,
|
||||
triggeredAt: new Date().toISOString()
|
||||
}
|
||||
},
|
||||
error: `Hook execution failed: ${errorMessage}`,
|
||||
inputs: {},
|
||||
logs: [{
|
||||
level: 'error',
|
||||
message: `Hook execution failed: ${errorMessage}`,
|
||||
timestamp: new Date().toISOString()
|
||||
}],
|
||||
outputs: {},
|
||||
startedAt: new Date().toISOString(),
|
||||
status: 'failed',
|
||||
steps: [],
|
||||
triggeredBy: args?.req?.user?.email || 'system',
|
||||
workflow: workflow.id,
|
||||
workflowVersion: 1
|
||||
},
|
||||
req: args.req
|
||||
})
|
||||
}
|
||||
|
||||
if (workflows.docs.length > 0) {
|
||||
logger.info({
|
||||
errorMessage,
|
||||
workflowCount: workflows.docs.length
|
||||
}, 'Created failed workflow runs for hook execution error')
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
// Don't let workflow run creation failures break the original operation
|
||||
logger.warn({
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 'Failed to create failed workflow run record')
|
||||
}
|
||||
}
|
||||
|
||||
const applyCollectionsConfig = <T extends string>(pluginOptions: WorkflowsPluginConfig<T>, config: Config) => {
|
||||
// Add workflow collections
|
||||
if (!config.collections) {
|
||||
@@ -27,8 +129,8 @@ const applyCollectionsConfig = <T extends string>(pluginOptions: WorkflowsPlugin
|
||||
)
|
||||
}
|
||||
|
||||
// Track if hooks have been initialized to prevent double registration
|
||||
let hooksInitialized = false
|
||||
// Removed config-phase hook registration - user collections don't exist during config phase
|
||||
|
||||
|
||||
export const workflowsPlugin =
|
||||
<TSlug extends string>(pluginOptions: WorkflowsPluginConfig<TSlug>) =>
|
||||
@@ -40,6 +142,113 @@ export const workflowsPlugin =
|
||||
|
||||
applyCollectionsConfig<TSlug>(pluginOptions, config)
|
||||
|
||||
// CRITICAL: Modify existing collection configs BEFORE PayloadCMS processes them
|
||||
// This is the ONLY time we can add hooks that will actually work
|
||||
const logger = getConfigLogger()
|
||||
logger.info('Attempting to modify collection configs before PayloadCMS initialization...')
|
||||
|
||||
if (config.collections && pluginOptions.collectionTriggers) {
|
||||
for (const [triggerSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
|
||||
if (!triggerConfig) {continue}
|
||||
|
||||
// Find the collection config that matches
|
||||
const collectionIndex = config.collections.findIndex(c => c.slug === triggerSlug)
|
||||
if (collectionIndex === -1) {
|
||||
logger.warn(`Collection '${triggerSlug}' not found in config.collections`)
|
||||
continue
|
||||
}
|
||||
|
||||
const collection = config.collections[collectionIndex]
|
||||
logger.info(`Found collection '${triggerSlug}' - modifying its hooks...`)
|
||||
|
||||
// Initialize hooks if needed
|
||||
if (!collection.hooks) {
|
||||
collection.hooks = {}
|
||||
}
|
||||
if (!collection.hooks.afterChange) {
|
||||
collection.hooks.afterChange = []
|
||||
}
|
||||
|
||||
// Create a reliable hook function with proper dependency injection
|
||||
const automationHook = Object.assign(
|
||||
async function payloadAutomationHook(args: any) {
|
||||
const registry = getExecutorRegistry()
|
||||
|
||||
// Use proper logger if available, fallback to args.req.payload.logger
|
||||
const logger = registry.logger || args?.req?.payload?.logger || console
|
||||
|
||||
try {
|
||||
logger.info({
|
||||
collection: args?.collection?.slug,
|
||||
docId: args?.doc?.id,
|
||||
hookType: 'automation',
|
||||
operation: args?.operation
|
||||
}, 'Collection automation hook triggered')
|
||||
|
||||
if (!registry.isInitialized) {
|
||||
logger.warn('Workflow executor not yet initialized, skipping execution')
|
||||
return undefined
|
||||
}
|
||||
|
||||
if (!registry.executor) {
|
||||
logger.error('Workflow executor is null despite being marked as initialized')
|
||||
// Create a failed workflow run to track this issue
|
||||
await createFailedWorkflowRun(args, 'Executor not available', logger)
|
||||
return undefined
|
||||
}
|
||||
|
||||
logger.debug('Executing triggered workflows...')
|
||||
await registry.executor.executeTriggeredWorkflows(
|
||||
args.collection.slug,
|
||||
args.operation,
|
||||
args.doc,
|
||||
args.previousDoc,
|
||||
args.req
|
||||
)
|
||||
|
||||
logger.info({
|
||||
collection: args?.collection?.slug,
|
||||
docId: args?.doc?.id,
|
||||
operation: args?.operation
|
||||
}, 'Workflow execution completed successfully')
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
|
||||
logger.error({
|
||||
collection: args?.collection?.slug,
|
||||
docId: args?.doc?.id,
|
||||
error: errorMessage,
|
||||
errorStack: error instanceof Error ? error.stack : undefined,
|
||||
operation: args?.operation
|
||||
}, 'Hook execution failed')
|
||||
|
||||
// Create a failed workflow run to track this error
|
||||
try {
|
||||
await createFailedWorkflowRun(args, errorMessage, logger)
|
||||
} catch (createError) {
|
||||
logger.error({
|
||||
error: createError instanceof Error ? createError.message : 'Unknown error'
|
||||
}, 'Failed to create workflow run for hook error')
|
||||
}
|
||||
|
||||
// Don't throw to prevent breaking the original operation
|
||||
}
|
||||
|
||||
return undefined
|
||||
},
|
||||
{
|
||||
__isAutomationHook: true,
|
||||
__version: '0.0.22'
|
||||
}
|
||||
)
|
||||
|
||||
// Add the hook to the collection config
|
||||
collection.hooks.afterChange.push(automationHook)
|
||||
logger.info(`Added automation hook to '${triggerSlug}' - hook count: ${collection.hooks.afterChange.length}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (!config.jobs) {
|
||||
config.jobs = {tasks: []}
|
||||
}
|
||||
@@ -65,14 +274,8 @@ export const workflowsPlugin =
|
||||
// Set up onInit to register collection hooks and initialize features
|
||||
const incomingOnInit = config.onInit
|
||||
config.onInit = async (payload) => {
|
||||
configLogger.info(`onInit called - hooks already initialized: ${hooksInitialized}, collections: ${Object.keys(payload.collections).length}`)
|
||||
|
||||
// Prevent double initialization in dev mode
|
||||
if (hooksInitialized) {
|
||||
configLogger.warn('Hooks already initialized, skipping to prevent duplicate registration')
|
||||
return
|
||||
}
|
||||
|
||||
configLogger.info(`onInit called - collections: ${Object.keys(payload.collections).length}`)
|
||||
|
||||
// Execute any existing onInit functions first
|
||||
if (incomingOnInit) {
|
||||
configLogger.debug('Executing existing onInit function')
|
||||
@@ -87,18 +290,23 @@ export const workflowsPlugin =
|
||||
logger.info(`Plugin configuration: ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers, ${pluginOptions.steps?.length || 0} steps`)
|
||||
|
||||
// Create workflow executor instance
|
||||
console.log('🚨 CREATING WORKFLOW EXECUTOR INSTANCE')
|
||||
const executor = new WorkflowExecutor(payload, logger)
|
||||
console.log('🚨 EXECUTOR CREATED:', typeof executor)
|
||||
console.log('🚨 EXECUTOR METHODS:', Object.getOwnPropertyNames(Object.getPrototypeOf(executor)))
|
||||
|
||||
// Register executor with proper dependency injection
|
||||
setWorkflowExecutor(executor, logger)
|
||||
|
||||
// Hooks are now registered during config phase - just log status
|
||||
logger.info('Hooks were registered during config phase - executor now available')
|
||||
|
||||
// Initialize hooks
|
||||
logger.info('Initializing collection hooks...')
|
||||
initCollectionHooks(pluginOptions, payload, logger, executor)
|
||||
|
||||
logger.info('Initializing global hooks...')
|
||||
initGlobalHooks(payload, logger, executor)
|
||||
|
||||
|
||||
logger.info('Initializing workflow hooks...')
|
||||
initWorkflowHooks(payload, logger)
|
||||
|
||||
|
||||
logger.info('Initializing step tasks...')
|
||||
initStepTasks(pluginOptions, payload, logger)
|
||||
|
||||
@@ -107,7 +315,6 @@ export const workflowsPlugin =
|
||||
await registerCronJobs(payload, logger)
|
||||
|
||||
logger.info('Plugin initialized successfully - all hooks registered')
|
||||
hooksInitialized = true
|
||||
}
|
||||
|
||||
return config
|
||||
|
||||
@@ -39,19 +39,55 @@ export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPl
|
||||
collection.config.hooks.afterChange = collection.config.hooks.afterChange || []
|
||||
collection.config.hooks.afterChange.push(async (change) => {
|
||||
const operation = change.operation as 'create' | 'update'
|
||||
logger.debug({
|
||||
|
||||
// AGGRESSIVE LOGGING - this should ALWAYS appear
|
||||
console.log('🚨 AUTOMATION PLUGIN HOOK CALLED! 🚨')
|
||||
console.log('Collection:', change.collection.slug)
|
||||
console.log('Operation:', operation)
|
||||
console.log('Doc ID:', change.doc?.id)
|
||||
console.log('Has executor?', !!executor)
|
||||
console.log('Executor type:', typeof executor)
|
||||
|
||||
logger.info({
|
||||
slug: change.collection.slug,
|
||||
operation,
|
||||
}, 'Collection hook triggered')
|
||||
docId: change.doc?.id,
|
||||
previousDocId: change.previousDoc?.id,
|
||||
hasExecutor: !!executor,
|
||||
executorType: typeof executor
|
||||
}, 'AUTOMATION PLUGIN: Collection hook triggered')
|
||||
|
||||
// Execute workflows for this trigger
|
||||
await executor.executeTriggeredWorkflows(
|
||||
change.collection.slug,
|
||||
operation,
|
||||
change.doc,
|
||||
change.previousDoc,
|
||||
change.req
|
||||
)
|
||||
try {
|
||||
console.log('🚨 About to call executeTriggeredWorkflows')
|
||||
|
||||
// Execute workflows for this trigger
|
||||
await executor.executeTriggeredWorkflows(
|
||||
change.collection.slug,
|
||||
operation,
|
||||
change.doc,
|
||||
change.previousDoc,
|
||||
change.req
|
||||
)
|
||||
|
||||
console.log('🚨 executeTriggeredWorkflows completed without error')
|
||||
|
||||
logger.info({
|
||||
slug: change.collection.slug,
|
||||
operation,
|
||||
docId: change.doc?.id
|
||||
}, 'AUTOMATION PLUGIN: executeTriggeredWorkflows completed successfully')
|
||||
} catch (error) {
|
||||
console.log('🚨 AUTOMATION PLUGIN ERROR:', error)
|
||||
|
||||
logger.error({
|
||||
slug: change.collection.slug,
|
||||
operation,
|
||||
docId: change.doc?.id,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : undefined
|
||||
}, 'AUTOMATION PLUGIN: executeTriggeredWorkflows failed')
|
||||
// Don't re-throw to avoid breaking other hooks
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { Payload, PayloadRequest } from "payload"
|
||||
import type { Logger } from "pino"
|
||||
|
||||
import type { WorkflowExecutor, Workflow } from "../core/workflow-executor.js"
|
||||
import type { WorkflowExecutor, PayloadWorkflow } from "../core/workflow-executor.js"
|
||||
|
||||
export function initGlobalHooks(payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) {
|
||||
// Get all globals from the config
|
||||
@@ -100,7 +100,7 @@ async function executeTriggeredGlobalWorkflows(
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as Workflow, context, req)
|
||||
await executor.execute(workflow as PayloadWorkflow, context, req)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
|
||||
@@ -4,6 +4,17 @@ import type {Logger} from "pino"
|
||||
import type {WorkflowsPluginConfig} from "./config-types.js"
|
||||
|
||||
export function initStepTasks<T extends string>(pluginOptions: WorkflowsPluginConfig<T>, payload: Payload, logger: Payload['logger']) {
|
||||
logger.info({ stepCount: pluginOptions.steps.length, steps: pluginOptions.steps.map(s => s.slug) }, 'Initializing step tasks')
|
||||
logger.info({ stepCount: pluginOptions.steps.length, steps: pluginOptions.steps.map(s => s.slug) }, 'Step tasks were registered during config phase')
|
||||
|
||||
// Verify that the tasks are available in the job system
|
||||
const availableTasks = payload.config.jobs?.tasks?.map(t => t.slug) || []
|
||||
const pluginTasks = pluginOptions.steps.map(s => s.slug)
|
||||
|
||||
pluginTasks.forEach(taskSlug => {
|
||||
if (availableTasks.includes(taskSlug)) {
|
||||
logger.info({ taskSlug }, 'Step task confirmed available in job system')
|
||||
} else {
|
||||
logger.error({ taskSlug }, 'Step task not found in job system - this will cause execution failures')
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type {Config, PayloadRequest} from 'payload'
|
||||
|
||||
import {type Workflow, WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {type PayloadWorkflow, WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {getConfigLogger, initializeLogger} from './logger.js'
|
||||
|
||||
export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'): void {
|
||||
@@ -67,12 +67,15 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'):
|
||||
const triggers = workflow.triggers as Array<{
|
||||
condition?: string
|
||||
type: string
|
||||
webhookPath?: string
|
||||
parameters?: {
|
||||
webhookPath?: string
|
||||
[key: string]: any
|
||||
}
|
||||
}>
|
||||
|
||||
const matchingTrigger = triggers?.find(trigger =>
|
||||
trigger.type === 'webhook-trigger' &&
|
||||
trigger.webhookPath === path
|
||||
trigger.parameters?.webhookPath === path
|
||||
)
|
||||
|
||||
// Check trigger condition if present
|
||||
@@ -110,7 +113,7 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'):
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as Workflow, context, req)
|
||||
await executor.execute(workflow as PayloadWorkflow, context, req)
|
||||
|
||||
return { status: 'triggered', workflowId: workflow.id }
|
||||
} catch (error) {
|
||||
|
||||
@@ -18,7 +18,7 @@ export const CreateDocumentStepTask = {
|
||||
name: 'data',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'The document data to create'
|
||||
description: 'The document data to create. Use JSONPath to reference trigger data (e.g., {"title": "$.trigger.doc.title", "author": "$.trigger.doc.author"})'
|
||||
},
|
||||
required: true
|
||||
},
|
||||
|
||||
@@ -18,14 +18,14 @@ export const DeleteDocumentStepTask = {
|
||||
name: 'id',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'The ID of a specific document to delete (leave empty to delete multiple)'
|
||||
description: 'The ID of a specific document to delete. Use JSONPath (e.g., "$.trigger.doc.id"). Leave empty to delete multiple.'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'where',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Query conditions to find documents to delete (used when ID is not provided)'
|
||||
description: 'Query conditions to find documents to delete when ID is not provided. Use JSONPath in values (e.g., {"author": "$.trigger.doc.author"})'
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
@@ -1,14 +1,280 @@
|
||||
import type {TaskHandler} from "payload"
|
||||
|
||||
export const httpStepHandler: TaskHandler<'http-request-step'> = async ({input}) => {
|
||||
if (!input) {
|
||||
throw new Error('No input provided')
|
||||
interface HttpRequestInput {
|
||||
url: string
|
||||
method?: string
|
||||
headers?: Record<string, string>
|
||||
body?: any
|
||||
timeout?: number
|
||||
authentication?: {
|
||||
type?: 'none' | 'bearer' | 'basic' | 'apikey'
|
||||
token?: string
|
||||
username?: string
|
||||
password?: string
|
||||
headerName?: string
|
||||
headerValue?: string
|
||||
}
|
||||
const response = await fetch(input.url)
|
||||
return {
|
||||
output: {
|
||||
response: await response.text()
|
||||
},
|
||||
state: response.ok ? 'succeeded' : undefined
|
||||
retries?: number
|
||||
retryDelay?: number
|
||||
}
|
||||
|
||||
export const httpStepHandler: TaskHandler<'http-request-step'> = async ({input, req}) => {
|
||||
const startTime = Date.now() // Move startTime to outer scope
|
||||
|
||||
try {
|
||||
if (!input || !input.url) {
|
||||
return {
|
||||
output: {
|
||||
status: 0,
|
||||
statusText: 'Invalid Input',
|
||||
headers: {},
|
||||
body: '',
|
||||
data: null,
|
||||
duration: 0,
|
||||
error: 'URL is required for HTTP request'
|
||||
},
|
||||
state: 'failed'
|
||||
}
|
||||
}
|
||||
|
||||
const typedInput = input as HttpRequestInput
|
||||
|
||||
// Validate URL
|
||||
try {
|
||||
new URL(typedInput.url)
|
||||
} catch (error) {
|
||||
return {
|
||||
output: {
|
||||
status: 0,
|
||||
statusText: 'Invalid URL',
|
||||
headers: {},
|
||||
body: '',
|
||||
data: null,
|
||||
duration: 0,
|
||||
error: `Invalid URL: ${typedInput.url}`
|
||||
},
|
||||
state: 'failed'
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare request options
|
||||
const method = (typedInput.method || 'GET').toUpperCase()
|
||||
const timeout = typedInput.timeout || 30000
|
||||
const headers: Record<string, string> = {
|
||||
'User-Agent': 'PayloadCMS-Automation/1.0',
|
||||
...typedInput.headers
|
||||
}
|
||||
|
||||
// Handle authentication
|
||||
if (typedInput.authentication) {
|
||||
switch (typedInput.authentication.type) {
|
||||
case 'bearer':
|
||||
if (typedInput.authentication.token) {
|
||||
headers['Authorization'] = `Bearer ${typedInput.authentication.token}`
|
||||
}
|
||||
break
|
||||
case 'basic':
|
||||
if (typedInput.authentication.username && typedInput.authentication.password) {
|
||||
const credentials = btoa(`${typedInput.authentication.username}:${typedInput.authentication.password}`)
|
||||
headers['Authorization'] = `Basic ${credentials}`
|
||||
}
|
||||
break
|
||||
case 'apikey':
|
||||
if (typedInput.authentication.headerName && typedInput.authentication.headerValue) {
|
||||
headers[typedInput.authentication.headerName] = typedInput.authentication.headerValue
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare request body
|
||||
let requestBody: string | undefined
|
||||
if (['POST', 'PUT', 'PATCH'].includes(method) && typedInput.body) {
|
||||
if (typeof typedInput.body === 'string') {
|
||||
requestBody = typedInput.body
|
||||
} else {
|
||||
requestBody = JSON.stringify(typedInput.body)
|
||||
if (!headers['Content-Type']) {
|
||||
headers['Content-Type'] = 'application/json'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create abort controller for timeout
|
||||
const abortController = new AbortController()
|
||||
const timeoutId = setTimeout(() => abortController.abort(), timeout)
|
||||
|
||||
// Retry logic
|
||||
const maxRetries = Math.min(Math.max(typedInput.retries || 0, 0), 5)
|
||||
const retryDelay = Math.max(typedInput.retryDelay || 1000, 100)
|
||||
|
||||
let lastError: Error | null = null
|
||||
|
||||
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
// Add delay for retry attempts
|
||||
if (attempt > 0) {
|
||||
req?.payload?.logger?.info({
|
||||
attempt: attempt + 1,
|
||||
maxRetries: maxRetries + 1,
|
||||
url: typedInput.url,
|
||||
delay: retryDelay
|
||||
}, 'HTTP request retry attempt')
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, retryDelay))
|
||||
}
|
||||
|
||||
const response = await fetch(typedInput.url, {
|
||||
method,
|
||||
headers,
|
||||
body: requestBody,
|
||||
signal: abortController.signal
|
||||
})
|
||||
|
||||
clearTimeout(timeoutId)
|
||||
const duration = Date.now() - startTime
|
||||
|
||||
// Parse response
|
||||
const responseText = await response.text()
|
||||
let parsedData: any = null
|
||||
|
||||
try {
|
||||
const contentType = response.headers.get('content-type') || ''
|
||||
if (contentType.includes('application/json') || contentType.includes('text/json')) {
|
||||
parsedData = JSON.parse(responseText)
|
||||
}
|
||||
} catch (parseError) {
|
||||
// Not JSON, that's fine
|
||||
}
|
||||
|
||||
// Convert headers to plain object
|
||||
const responseHeaders: Record<string, string> = {}
|
||||
response.headers.forEach((value, key) => {
|
||||
responseHeaders[key] = value
|
||||
})
|
||||
|
||||
const output = {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
headers: responseHeaders,
|
||||
body: responseText,
|
||||
data: parsedData,
|
||||
duration
|
||||
}
|
||||
|
||||
req?.payload?.logger?.info({
|
||||
url: typedInput.url,
|
||||
method,
|
||||
status: response.status,
|
||||
duration,
|
||||
attempt: attempt + 1
|
||||
}, 'HTTP request completed')
|
||||
|
||||
return {
|
||||
output,
|
||||
// Always return 'succeeded' for completed HTTP requests, even with error status codes (4xx/5xx).
|
||||
// This preserves error information in the output for workflow conditional logic.
|
||||
// Only network errors, timeouts, and connection failures should result in 'failed' state.
|
||||
// This design allows workflows to handle HTTP errors gracefully rather than failing completely.
|
||||
state: 'succeeded'
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error('Unknown error')
|
||||
|
||||
// Handle specific error types
|
||||
if (error instanceof Error) {
|
||||
if (error.name === 'AbortError') {
|
||||
lastError = new Error(`Request timeout after ${timeout}ms`)
|
||||
} else if (error.message.includes('fetch')) {
|
||||
lastError = new Error(`Network error: ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
req?.payload?.logger?.warn({
|
||||
url: typedInput.url,
|
||||
method,
|
||||
attempt: attempt + 1,
|
||||
maxRetries: maxRetries + 1,
|
||||
error: lastError.message
|
||||
}, 'HTTP request attempt failed')
|
||||
|
||||
// Don't retry on certain errors
|
||||
if (lastError.message.includes('Invalid URL') ||
|
||||
lastError.message.includes('TypeError') ||
|
||||
attempt >= maxRetries) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
clearTimeout(timeoutId)
|
||||
const duration = Date.now() - startTime
|
||||
|
||||
// All retries exhausted
|
||||
const finalError = lastError || new Error('HTTP request failed')
|
||||
|
||||
req?.payload?.logger?.error({
|
||||
url: typedInput.url,
|
||||
method,
|
||||
totalAttempts: maxRetries + 1,
|
||||
duration,
|
||||
error: finalError.message
|
||||
}, 'HTTP request failed after all retries')
|
||||
|
||||
// Include detailed error information in the output
|
||||
// Even though PayloadCMS will discard this for failed tasks,
|
||||
// we include it here for potential future PayloadCMS improvements
|
||||
const errorDetails = {
|
||||
errorType: finalError.message.includes('timeout') ? 'timeout' :
|
||||
finalError.message.includes('ENOTFOUND') ? 'dns' :
|
||||
finalError.message.includes('ECONNREFUSED') ? 'connection' : 'network',
|
||||
duration,
|
||||
attempts: maxRetries + 1,
|
||||
finalError: finalError.message,
|
||||
context: {
|
||||
url: typedInput.url,
|
||||
method,
|
||||
timeout: typedInput.timeout,
|
||||
headers: typedInput.headers
|
||||
}
|
||||
}
|
||||
|
||||
// Return comprehensive output (PayloadCMS will discard it for failed state, but we try anyway)
|
||||
return {
|
||||
output: {
|
||||
status: 0,
|
||||
statusText: 'Request Failed',
|
||||
headers: {},
|
||||
body: '',
|
||||
data: null,
|
||||
duration,
|
||||
error: finalError.message,
|
||||
errorDetails // Include detailed error info (will be discarded by PayloadCMS)
|
||||
},
|
||||
state: 'failed'
|
||||
}
|
||||
} catch (unexpectedError) {
|
||||
// Handle any unexpected errors that weren't caught above
|
||||
const error = unexpectedError instanceof Error ? unexpectedError : new Error('Unexpected error')
|
||||
|
||||
req?.payload?.logger?.error({
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
input: (input as any)?.url || 'unknown'
|
||||
}, 'Unexpected error in HTTP request handler')
|
||||
|
||||
return {
|
||||
output: {
|
||||
status: 0,
|
||||
statusText: 'Handler Error',
|
||||
headers: {},
|
||||
body: '',
|
||||
data: null,
|
||||
duration: Date.now() - startTime,
|
||||
error: `HTTP request handler error: ${error.message}`
|
||||
},
|
||||
state: 'failed'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,12 +9,171 @@ export const HttpRequestStepTask = {
|
||||
{
|
||||
name: 'url',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'The URL to make the HTTP request to'
|
||||
},
|
||||
required: true
|
||||
},
|
||||
{
|
||||
name: 'method',
|
||||
type: 'select',
|
||||
options: [
|
||||
{ label: 'GET', value: 'GET' },
|
||||
{ label: 'POST', value: 'POST' },
|
||||
{ label: 'PUT', value: 'PUT' },
|
||||
{ label: 'DELETE', value: 'DELETE' },
|
||||
{ label: 'PATCH', value: 'PATCH' }
|
||||
],
|
||||
defaultValue: 'GET',
|
||||
admin: {
|
||||
description: 'HTTP method to use'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'headers',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'HTTP headers as JSON object (e.g., {"Content-Type": "application/json"})'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'body',
|
||||
type: 'json',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.method !== 'GET' && siblingData?.method !== 'DELETE',
|
||||
description: 'Request body data. Use JSONPath to reference values (e.g., {"postId": "$.trigger.doc.id", "title": "$.trigger.doc.title"})'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'timeout',
|
||||
type: 'number',
|
||||
defaultValue: 30000,
|
||||
admin: {
|
||||
description: 'Request timeout in milliseconds (default: 30000)'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'authentication',
|
||||
type: 'group',
|
||||
fields: [
|
||||
{
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: [
|
||||
{ label: 'None', value: 'none' },
|
||||
{ label: 'Bearer Token', value: 'bearer' },
|
||||
{ label: 'Basic Auth', value: 'basic' },
|
||||
{ label: 'API Key Header', value: 'apikey' }
|
||||
],
|
||||
defaultValue: 'none',
|
||||
admin: {
|
||||
description: 'Authentication method'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'token',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'bearer',
|
||||
description: 'Bearer token value'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'username',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'basic',
|
||||
description: 'Basic auth username'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'password',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'basic',
|
||||
description: 'Basic auth password'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'headerName',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'apikey',
|
||||
description: 'API key header name (e.g., "X-API-Key")'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'headerValue',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'apikey',
|
||||
description: 'API key value'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'retries',
|
||||
type: 'number',
|
||||
defaultValue: 0,
|
||||
min: 0,
|
||||
max: 5,
|
||||
admin: {
|
||||
description: 'Number of retry attempts on failure (max: 5)'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'retryDelay',
|
||||
type: 'number',
|
||||
defaultValue: 1000,
|
||||
admin: {
|
||||
condition: (_, siblingData) => (siblingData?.retries || 0) > 0,
|
||||
description: 'Delay between retries in milliseconds'
|
||||
}
|
||||
}
|
||||
],
|
||||
outputSchema: [
|
||||
{
|
||||
name: 'response',
|
||||
name: 'status',
|
||||
type: 'number',
|
||||
admin: {
|
||||
description: 'HTTP status code'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'statusText',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'HTTP status text'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'headers',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Response headers'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'body',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Response body'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'data',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Parsed response data (if JSON)'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'duration',
|
||||
type: 'number',
|
||||
admin: {
|
||||
description: 'Request duration in milliseconds'
|
||||
}
|
||||
}
|
||||
]
|
||||
} satisfies TaskConfig<'http-request-step'>
|
||||
|
||||
@@ -18,14 +18,14 @@ export const ReadDocumentStepTask = {
|
||||
name: 'id',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'The ID of a specific document to read (leave empty to find multiple)'
|
||||
description: 'The ID of a specific document to read. Use JSONPath (e.g., "$.trigger.doc.relatedId"). Leave empty to find multiple.'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'where',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Query conditions to find documents (used when ID is not provided)'
|
||||
description: 'Query conditions to find documents when ID is not provided. Use JSONPath in values (e.g., {"category": "$.trigger.doc.category", "status": "published"})'
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -10,7 +10,7 @@ export const SendEmailStepTask = {
|
||||
name: 'to',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Recipient email address'
|
||||
description: 'Recipient email address. Use JSONPath for dynamic values (e.g., "$.trigger.doc.email" or "$.trigger.user.email")'
|
||||
},
|
||||
required: true
|
||||
},
|
||||
@@ -18,14 +18,14 @@ export const SendEmailStepTask = {
|
||||
name: 'from',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Sender email address (optional, uses default if not provided)'
|
||||
description: 'Sender email address. Use JSONPath if needed (e.g., "$.trigger.doc.senderEmail"). Uses default if not provided.'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'subject',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Email subject line'
|
||||
description: 'Email subject line. Can include JSONPath references (e.g., "Order #$.trigger.doc.orderNumber received")'
|
||||
},
|
||||
required: true
|
||||
},
|
||||
@@ -33,14 +33,14 @@ export const SendEmailStepTask = {
|
||||
name: 'text',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Plain text email content'
|
||||
description: 'Plain text email content. Use JSONPath to include dynamic content (e.g., "Dear $.trigger.doc.customerName, your order #$.trigger.doc.id has been received.")'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'html',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'HTML email content (optional)'
|
||||
description: 'HTML email content. Use JSONPath for dynamic values (e.g., "<h1>Order #$.trigger.doc.orderNumber</h1>")'
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -18,7 +18,7 @@ export const UpdateDocumentStepTask = {
|
||||
name: 'id',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'The ID of the document to update'
|
||||
description: 'The ID of the document to update. Use JSONPath to reference IDs (e.g., "$.trigger.doc.id" or "$.steps.previousStep.output.id")'
|
||||
},
|
||||
required: true
|
||||
},
|
||||
@@ -26,7 +26,7 @@ export const UpdateDocumentStepTask = {
|
||||
name: 'data',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'The data to update the document with'
|
||||
description: 'The data to update the document with. Use JSONPath to reference values (e.g., {"status": "$.trigger.doc.status", "updatedBy": "$.trigger.user.id"})'
|
||||
},
|
||||
required: true
|
||||
},
|
||||
|
||||
356
src/test/create-document-step.test.ts
Normal file
356
src/test/create-document-step.test.ts
Normal file
@@ -0,0 +1,356 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest'
|
||||
import { createDocumentHandler } from '../steps/create-document-handler.js'
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
describe('CreateDocumentStepHandler', () => {
|
||||
let mockPayload: Payload
|
||||
let mockReq: any
|
||||
|
||||
beforeEach(() => {
|
||||
mockPayload = {
|
||||
create: vi.fn()
|
||||
} as any
|
||||
|
||||
mockReq = {
|
||||
payload: mockPayload,
|
||||
user: { id: 'user-123', email: 'test@example.com' }
|
||||
}
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Document creation', () => {
|
||||
it('should create document successfully', async () => {
|
||||
const createdDoc = {
|
||||
id: 'doc-123',
|
||||
title: 'Test Document',
|
||||
content: 'Test content'
|
||||
}
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: {
|
||||
title: 'Test Document',
|
||||
content: 'Test content'
|
||||
},
|
||||
stepName: 'test-create-step'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(result.output.id).toBe('doc-123')
|
||||
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
title: 'Test Document',
|
||||
content: 'Test content'
|
||||
},
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
|
||||
it('should create document with relationship fields', async () => {
|
||||
const createdDoc = {
|
||||
id: 'doc-456',
|
||||
title: 'Related Document',
|
||||
author: 'user-123',
|
||||
category: 'cat-789'
|
||||
}
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'articles',
|
||||
data: {
|
||||
title: 'Related Document',
|
||||
author: 'user-123',
|
||||
category: 'cat-789'
|
||||
},
|
||||
stepName: 'test-create-with-relations'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'articles',
|
||||
data: {
|
||||
title: 'Related Document',
|
||||
author: 'user-123',
|
||||
category: 'cat-789'
|
||||
},
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
|
||||
it('should create document with complex nested data', async () => {
|
||||
const complexData = {
|
||||
title: 'Complex Document',
|
||||
metadata: {
|
||||
tags: ['tag1', 'tag2'],
|
||||
settings: {
|
||||
featured: true,
|
||||
priority: 5
|
||||
}
|
||||
},
|
||||
blocks: [
|
||||
{ type: 'text', content: 'Text block' },
|
||||
{ type: 'image', src: 'image.jpg', alt: 'Test image' }
|
||||
]
|
||||
}
|
||||
|
||||
const createdDoc = { id: 'doc-complex', ...complexData }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'pages',
|
||||
data: complexData,
|
||||
stepName: 'test-create-complex'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'pages',
|
||||
data: complexData,
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error handling', () => {
|
||||
it('should handle PayloadCMS validation errors', async () => {
|
||||
const validationError = new Error('Validation failed')
|
||||
;(validationError as any).data = [
|
||||
{
|
||||
message: 'Title is required',
|
||||
path: 'title',
|
||||
value: undefined
|
||||
}
|
||||
]
|
||||
;(mockPayload.create as any).mockRejectedValue(validationError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: {
|
||||
content: 'Missing title'
|
||||
},
|
||||
stepName: 'test-validation-error'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Validation failed')
|
||||
})
|
||||
|
||||
it('should handle permission errors', async () => {
|
||||
const permissionError = new Error('Insufficient permissions')
|
||||
;(permissionError as any).status = 403
|
||||
;(mockPayload.create as any).mockRejectedValue(permissionError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'admin-only',
|
||||
data: {
|
||||
secret: 'confidential data'
|
||||
},
|
||||
stepName: 'test-permission-error'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Insufficient permissions')
|
||||
})
|
||||
|
||||
it('should handle database connection errors', async () => {
|
||||
const dbError = new Error('Database connection failed')
|
||||
;(mockPayload.create as any).mockRejectedValue(dbError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Test' },
|
||||
stepName: 'test-db-error'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Database connection failed')
|
||||
})
|
||||
|
||||
it('should handle unknown collection errors', async () => {
|
||||
const collectionError = new Error('Collection "unknown" not found')
|
||||
;(mockPayload.create as any).mockRejectedValue(collectionError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'unknown-collection',
|
||||
data: { title: 'Test' },
|
||||
stepName: 'test-unknown-collection'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Collection "unknown" not found')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Input validation', () => {
|
||||
it('should validate required collection slug', async () => {
|
||||
const input = {
|
||||
data: { title: 'Test' },
|
||||
stepName: 'test-missing-collection'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Collection slug is required')
|
||||
})
|
||||
|
||||
it('should validate required data field', async () => {
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
stepName: 'test-missing-data'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Data is required')
|
||||
})
|
||||
|
||||
it('should validate data is an object', async () => {
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: 'invalid-data-type',
|
||||
stepName: 'test-invalid-data-type'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Data must be an object')
|
||||
})
|
||||
|
||||
it('should handle empty data object', async () => {
|
||||
const createdDoc = { id: 'empty-doc' }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: {},
|
||||
stepName: 'test-empty-data'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'posts',
|
||||
data: {},
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Request context', () => {
|
||||
it('should pass user context from request', async () => {
|
||||
const createdDoc = { id: 'user-doc', title: 'User Document' }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'User Document' },
|
||||
stepName: 'test-user-context'
|
||||
}
|
||||
|
||||
await createDocumentStepHandler({ input, req: mockReq })
|
||||
|
||||
const createCall = (mockPayload.create as any).mock.calls[0][0]
|
||||
expect(createCall.req).toBe(mockReq)
|
||||
expect(createCall.req.user).toEqual({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com'
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle requests without user context', async () => {
|
||||
const reqWithoutUser = {
|
||||
payload: mockPayload,
|
||||
user: null
|
||||
}
|
||||
|
||||
const createdDoc = { id: 'anonymous-doc' }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Anonymous Document' },
|
||||
stepName: 'test-anonymous'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: reqWithoutUser })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'posts',
|
||||
data: { title: 'Anonymous Document' },
|
||||
req: reqWithoutUser
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Output structure', () => {
|
||||
it('should return correct output structure on success', async () => {
|
||||
const createdDoc = {
|
||||
id: 'output-test-doc',
|
||||
title: 'Output Test',
|
||||
createdAt: '2024-01-01T00:00:00.000Z',
|
||||
updatedAt: '2024-01-01T00:00:00.000Z'
|
||||
}
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Output Test' },
|
||||
stepName: 'test-output-structure'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result).toEqual({
|
||||
state: 'succeeded',
|
||||
output: {
|
||||
document: createdDoc,
|
||||
id: 'output-test-doc'
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it('should return correct error structure on failure', async () => {
|
||||
const error = new Error('Test error')
|
||||
;(mockPayload.create as any).mockRejectedValue(error)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Error Test' },
|
||||
stepName: 'test-error-structure'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result).toEqual({
|
||||
state: 'failed',
|
||||
error: 'Test error'
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
348
src/test/http-request-step.test.ts
Normal file
348
src/test/http-request-step.test.ts
Normal file
@@ -0,0 +1,348 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest'
|
||||
import { httpRequestStepHandler } from '../steps/http-request-handler.js'
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
// Mock fetch globally
|
||||
global.fetch = vi.fn()
|
||||
|
||||
describe('HttpRequestStepHandler', () => {
|
||||
let mockPayload: Payload
|
||||
let mockReq: any
|
||||
|
||||
beforeEach(() => {
|
||||
mockPayload = {} as Payload
|
||||
mockReq = {
|
||||
payload: mockPayload,
|
||||
user: null
|
||||
}
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('GET requests', () => {
|
||||
it('should handle successful GET request', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'application/json' }),
|
||||
text: vi.fn().mockResolvedValue('{"success": true}')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/data',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-get-step'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.status).toBe(200)
|
||||
expect(result.output.statusText).toBe('OK')
|
||||
expect(result.output.body).toBe('{"success": true}')
|
||||
expect(result.output.headers).toEqual({ 'content-type': 'application/json' })
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/data', {
|
||||
method: 'GET',
|
||||
headers: {},
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle GET request with custom headers', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('success')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/data',
|
||||
method: 'GET' as const,
|
||||
headers: {
|
||||
'Authorization': 'Bearer token123',
|
||||
'User-Agent': 'PayloadCMS-Workflow/1.0'
|
||||
},
|
||||
stepName: 'test-get-with-headers'
|
||||
}
|
||||
|
||||
await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/data', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': 'Bearer token123',
|
||||
'User-Agent': 'PayloadCMS-Workflow/1.0'
|
||||
},
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('POST requests', () => {
|
||||
it('should handle POST request with JSON body', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 201,
|
||||
statusText: 'Created',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('{"id": "123"}')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/posts',
|
||||
method: 'POST' as const,
|
||||
body: { title: 'Test Post', content: 'Test content' },
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
stepName: 'test-post-step'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.status).toBe(201)
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/posts', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ title: 'Test Post', content: 'Test content' }),
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle POST request with string body', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('OK')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/webhook',
|
||||
method: 'POST' as const,
|
||||
body: 'plain text data',
|
||||
headers: { 'Content-Type': 'text/plain' },
|
||||
stepName: 'test-post-string'
|
||||
}
|
||||
|
||||
await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/webhook', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'text/plain' },
|
||||
body: 'plain text data',
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error handling', () => {
|
||||
it('should handle network errors', async () => {
|
||||
;(global.fetch as any).mockRejectedValue(new Error('Network error'))
|
||||
|
||||
const input = {
|
||||
url: 'https://invalid-url.example.com',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-network-error'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Network error')
|
||||
})
|
||||
|
||||
it('should handle HTTP error status codes', async () => {
|
||||
const mockResponse = {
|
||||
ok: false,
|
||||
status: 404,
|
||||
statusText: 'Not Found',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('Page not found')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/nonexistent',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-404-error'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('HTTP 404')
|
||||
expect(result.output.status).toBe(404)
|
||||
expect(result.output.statusText).toBe('Not Found')
|
||||
})
|
||||
|
||||
it('should handle timeout errors', async () => {
|
||||
const abortError = new Error('The operation was aborted')
|
||||
abortError.name = 'AbortError'
|
||||
;(global.fetch as any).mockRejectedValue(abortError)
|
||||
|
||||
const input = {
|
||||
url: 'https://slow-api.example.com',
|
||||
method: 'GET' as const,
|
||||
timeout: 1000,
|
||||
stepName: 'test-timeout'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('timeout')
|
||||
})
|
||||
|
||||
it('should handle invalid JSON response parsing', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'application/json' }),
|
||||
text: vi.fn().mockResolvedValue('invalid json {')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/invalid-json',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-invalid-json'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
// Should still succeed but with raw text body
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.body).toBe('invalid json {')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Request validation', () => {
|
||||
it('should validate required URL field', async () => {
|
||||
const input = {
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-missing-url'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('URL is required')
|
||||
})
|
||||
|
||||
it('should validate HTTP method', async () => {
|
||||
const input = {
|
||||
url: 'https://api.example.com',
|
||||
method: 'INVALID' as any,
|
||||
stepName: 'test-invalid-method'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Invalid HTTP method')
|
||||
})
|
||||
|
||||
it('should validate URL format', async () => {
|
||||
const input = {
|
||||
url: 'not-a-valid-url',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-invalid-url'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Invalid URL')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Response processing', () => {
|
||||
it('should parse JSON responses automatically', async () => {
|
||||
const responseData = { id: 123, name: 'Test Item' }
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'application/json' }),
|
||||
text: vi.fn().mockResolvedValue(JSON.stringify(responseData))
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/item/123',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-json-parsing'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(typeof result.output.body).toBe('string')
|
||||
// Should contain the JSON as string for safe storage
|
||||
expect(result.output.body).toBe(JSON.stringify(responseData))
|
||||
})
|
||||
|
||||
it('should handle non-JSON responses', async () => {
|
||||
const htmlContent = '<html><body>Hello World</body></html>'
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'text/html' }),
|
||||
text: vi.fn().mockResolvedValue(htmlContent)
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://example.com/page',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-html-response'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.body).toBe(htmlContent)
|
||||
})
|
||||
|
||||
it('should capture response headers', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({
|
||||
'content-type': 'application/json',
|
||||
'x-rate-limit': '100',
|
||||
'x-custom-header': 'custom-value'
|
||||
}),
|
||||
text: vi.fn().mockResolvedValue('{}')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/data',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-response-headers'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.headers).toEqual({
|
||||
'content-type': 'application/json',
|
||||
'x-rate-limit': '100',
|
||||
'x-custom-header': 'custom-value'
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
472
src/test/workflow-executor.test.ts
Normal file
472
src/test/workflow-executor.test.ts
Normal file
@@ -0,0 +1,472 @@
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest'
|
||||
import { WorkflowExecutor } from '../core/workflow-executor.js'
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
describe('WorkflowExecutor', () => {
|
||||
let mockPayload: Payload
|
||||
let mockLogger: any
|
||||
let executor: WorkflowExecutor
|
||||
|
||||
beforeEach(() => {
|
||||
mockLogger = {
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn()
|
||||
}
|
||||
|
||||
mockPayload = {
|
||||
jobs: {
|
||||
queue: vi.fn().mockResolvedValue({ id: 'job-123' }),
|
||||
run: vi.fn().mockResolvedValue(undefined)
|
||||
},
|
||||
create: vi.fn(),
|
||||
update: vi.fn(),
|
||||
find: vi.fn()
|
||||
} as any
|
||||
|
||||
executor = new WorkflowExecutor(mockPayload, mockLogger)
|
||||
})
|
||||
|
||||
describe('resolveJSONPathValue', () => {
|
||||
it('should resolve simple JSONPath expressions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { id: 'test-id', title: 'Test Title' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveJSONPathValue('$.trigger.doc.id', context)
|
||||
expect(result).toBe('test-id')
|
||||
})
|
||||
|
||||
it('should resolve nested JSONPath expressions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: {
|
||||
id: 'test-id',
|
||||
nested: { value: 'nested-value' }
|
||||
}
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveJSONPathValue('$.trigger.doc.nested.value', context)
|
||||
expect(result).toBe('nested-value')
|
||||
})
|
||||
|
||||
it('should return original value for non-JSONPath strings', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).resolveJSONPathValue('plain-string', context)
|
||||
expect(result).toBe('plain-string')
|
||||
})
|
||||
|
||||
it('should handle missing JSONPath gracefully', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).resolveJSONPathValue('$.trigger.missing.field', context)
|
||||
expect(result).toBe('$.trigger.missing.field') // Should return original if resolution fails
|
||||
})
|
||||
})
|
||||
|
||||
describe('resolveStepInput', () => {
|
||||
it('should resolve all JSONPath expressions in step config', () => {
|
||||
const config = {
|
||||
url: '$.trigger.webhook.url',
|
||||
message: 'Static message',
|
||||
data: {
|
||||
id: '$.trigger.doc.id',
|
||||
title: '$.trigger.doc.title'
|
||||
}
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { id: 'doc-123', title: 'Doc Title' },
|
||||
webhook: { url: 'https://example.com/webhook' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveStepInput(config, context)
|
||||
|
||||
expect(result).toEqual({
|
||||
url: 'https://example.com/webhook',
|
||||
message: 'Static message',
|
||||
data: {
|
||||
id: 'doc-123',
|
||||
title: 'Doc Title'
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle arrays with JSONPath expressions', () => {
|
||||
const config = {
|
||||
items: ['$.trigger.doc.id', 'static-value', '$.trigger.doc.title']
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { id: 'doc-123', title: 'Doc Title' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveStepInput(config, context)
|
||||
|
||||
expect(result).toEqual({
|
||||
items: ['doc-123', 'static-value', 'Doc Title']
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('resolveExecutionOrder', () => {
|
||||
it('should handle steps without dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request' },
|
||||
{ name: 'step2', step: 'create-document' },
|
||||
{ name: 'step3', step: 'http-request' }
|
||||
]
|
||||
|
||||
const result = (executor as any).resolveExecutionOrder(steps)
|
||||
|
||||
expect(result).toHaveLength(1) // All in one batch
|
||||
expect(result[0]).toHaveLength(3) // All steps in first batch
|
||||
})
|
||||
|
||||
it('should handle steps with dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request' },
|
||||
{ name: 'step2', step: 'create-document', dependencies: ['step1'] },
|
||||
{ name: 'step3', step: 'http-request', dependencies: ['step2'] }
|
||||
]
|
||||
|
||||
const result = (executor as any).resolveExecutionOrder(steps)
|
||||
|
||||
expect(result).toHaveLength(3) // Three batches
|
||||
expect(result[0]).toHaveLength(1) // step1 first
|
||||
expect(result[1]).toHaveLength(1) // step2 second
|
||||
expect(result[2]).toHaveLength(1) // step3 third
|
||||
})
|
||||
|
||||
it('should handle parallel execution with partial dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request' },
|
||||
{ name: 'step2', step: 'create-document' },
|
||||
{ name: 'step3', step: 'http-request', dependencies: ['step1'] },
|
||||
{ name: 'step4', step: 'create-document', dependencies: ['step1'] }
|
||||
]
|
||||
|
||||
const result = (executor as any).resolveExecutionOrder(steps)
|
||||
|
||||
expect(result).toHaveLength(2) // Two batches
|
||||
expect(result[0]).toHaveLength(2) // step1 and step2 in parallel
|
||||
expect(result[1]).toHaveLength(2) // step3 and step4 in parallel
|
||||
})
|
||||
|
||||
it('should detect circular dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request', dependencies: ['step2'] },
|
||||
{ name: 'step2', step: 'create-document', dependencies: ['step1'] }
|
||||
]
|
||||
|
||||
expect(() => {
|
||||
(executor as any).resolveExecutionOrder(steps)
|
||||
}).toThrow('Circular dependency detected')
|
||||
})
|
||||
})
|
||||
|
||||
describe('evaluateCondition', () => {
|
||||
it('should evaluate simple equality conditions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { status: 'published' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).evaluateCondition('$.trigger.doc.status == "published"', context)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it('should evaluate inequality conditions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { count: 5 }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).evaluateCondition('$.trigger.doc.count > 3', context)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it('should return false for invalid conditions', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).evaluateCondition('invalid condition syntax', context)
|
||||
expect(result).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle missing context gracefully', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).evaluateCondition('$.trigger.doc.status == "published"', context)
|
||||
expect(result).toBe(false) // Missing values should fail condition
|
||||
})
|
||||
})
|
||||
|
||||
describe('safeSerialize', () => {
|
||||
it('should serialize simple objects', () => {
|
||||
const obj = { name: 'test', value: 123 }
|
||||
const result = (executor as any).safeSerialize(obj)
|
||||
expect(result).toBe('{"name":"test","value":123}')
|
||||
})
|
||||
|
||||
it('should handle circular references', () => {
|
||||
const obj: any = { name: 'test' }
|
||||
obj.self = obj // Create circular reference
|
||||
|
||||
const result = (executor as any).safeSerialize(obj)
|
||||
expect(result).toContain('"name":"test"')
|
||||
expect(result).toContain('"self":"[Circular]"')
|
||||
})
|
||||
|
||||
it('should handle undefined and null values', () => {
|
||||
const obj = {
|
||||
defined: 'value',
|
||||
undefined: undefined,
|
||||
null: null
|
||||
}
|
||||
|
||||
const result = (executor as any).safeSerialize(obj)
|
||||
const parsed = JSON.parse(result)
|
||||
expect(parsed.defined).toBe('value')
|
||||
expect(parsed.null).toBe(null)
|
||||
expect(parsed).not.toHaveProperty('undefined') // undefined props are omitted
|
||||
})
|
||||
})
|
||||
|
||||
describe('executeWorkflow', () => {
|
||||
it('should execute workflow with single step', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
steps: [
|
||||
{
|
||||
name: 'test-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: { doc: { id: 'test-doc' } },
|
||||
steps: {}
|
||||
}
|
||||
|
||||
// Mock step task
|
||||
const mockStepTask = {
|
||||
taskSlug: 'http-request-step',
|
||||
handler: vi.fn().mockResolvedValue({
|
||||
output: { status: 200, body: 'success' },
|
||||
state: 'succeeded'
|
||||
})
|
||||
}
|
||||
|
||||
// Mock the step tasks registry
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = await (executor as any).executeWorkflow(workflow, context)
|
||||
|
||||
expect(result.status).toBe('completed')
|
||||
expect(result.context.steps['test-step']).toBeDefined()
|
||||
expect(result.context.steps['test-step'].state).toBe('succeeded')
|
||||
expect(mockStepTask.handler).toHaveBeenCalledOnce()
|
||||
|
||||
// Restore original step tasks
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
|
||||
it('should handle step execution failures', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
steps: [
|
||||
{
|
||||
name: 'failing-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://invalid-url',
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: { doc: { id: 'test-doc' } },
|
||||
steps: {}
|
||||
}
|
||||
|
||||
// Mock failing step task
|
||||
const mockStepTask = {
|
||||
taskSlug: 'http-request-step',
|
||||
handler: vi.fn().mockRejectedValue(new Error('Network error'))
|
||||
}
|
||||
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = await (executor as any).executeWorkflow(workflow, context)
|
||||
|
||||
expect(result.status).toBe('failed')
|
||||
expect(result.error).toContain('Network error')
|
||||
expect(result.context.steps['failing-step']).toBeDefined()
|
||||
expect(result.context.steps['failing-step'].state).toBe('failed')
|
||||
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
|
||||
it('should execute steps with dependencies in correct order', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com/1',
|
||||
method: 'GET'
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com/2',
|
||||
method: 'GET',
|
||||
dependencies: ['step1']
|
||||
},
|
||||
{
|
||||
name: 'step3',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com/3',
|
||||
method: 'GET',
|
||||
dependencies: ['step1']
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: { doc: { id: 'test-doc' } },
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const executionOrder: string[] = []
|
||||
const mockStepTask = {
|
||||
taskSlug: 'http-request-step',
|
||||
handler: vi.fn().mockImplementation(async ({ input }) => {
|
||||
executionOrder.push(input.stepName)
|
||||
return {
|
||||
output: { status: 200, body: 'success' },
|
||||
state: 'succeeded'
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = await (executor as any).executeWorkflow(workflow, context)
|
||||
|
||||
expect(result.status).toBe('completed')
|
||||
expect(executionOrder[0]).toBe('step1') // First step executed first
|
||||
expect(executionOrder.slice(1)).toContain('step2') // Dependent steps after
|
||||
expect(executionOrder.slice(1)).toContain('step3')
|
||||
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
})
|
||||
|
||||
describe('findStepTask', () => {
|
||||
it('should find registered step task by slug', () => {
|
||||
const mockStepTask = {
|
||||
taskSlug: 'test-step',
|
||||
handler: vi.fn()
|
||||
}
|
||||
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = (executor as any).findStepTask('test-step')
|
||||
expect(result).toBe(mockStepTask)
|
||||
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
|
||||
it('should return undefined for unknown step type', () => {
|
||||
const result = (executor as any).findStepTask('unknown-step')
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateStepConfiguration', () => {
|
||||
it('should validate step with required fields', () => {
|
||||
const step = {
|
||||
name: 'valid-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
(executor as any).validateStepConfiguration(step)
|
||||
}).not.toThrow()
|
||||
})
|
||||
|
||||
it('should throw error for step without name', () => {
|
||||
const step = {
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
(executor as any).validateStepConfiguration(step)
|
||||
}).toThrow('Step name is required')
|
||||
})
|
||||
|
||||
it('should throw error for step without type', () => {
|
||||
const step = {
|
||||
name: 'test-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
(executor as any).validateStepConfiguration(step)
|
||||
}).toThrow('Step type is required')
|
||||
})
|
||||
})
|
||||
|
||||
describe('createExecutionContext', () => {
|
||||
it('should create context with trigger data', () => {
|
||||
const triggerContext = {
|
||||
operation: 'create',
|
||||
doc: { id: 'test-id', title: 'Test Doc' },
|
||||
collection: 'posts'
|
||||
}
|
||||
|
||||
const result = (executor as any).createExecutionContext(triggerContext)
|
||||
|
||||
expect(result.trigger).toEqual(triggerContext)
|
||||
expect(result.steps).toEqual({})
|
||||
expect(result.metadata).toBeDefined()
|
||||
expect(result.metadata.startedAt).toBeDefined()
|
||||
})
|
||||
|
||||
it('should include metadata in context', () => {
|
||||
const triggerContext = { doc: { id: 'test' } }
|
||||
const result = (executor as any).createExecutionContext(triggerContext)
|
||||
|
||||
expect(result.metadata).toHaveProperty('startedAt')
|
||||
expect(result.metadata).toHaveProperty('executionId')
|
||||
expect(typeof result.metadata.executionId).toBe('string')
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -27,30 +27,9 @@ export interface ExecutionContext {
|
||||
req: any // PayloadRequest
|
||||
}
|
||||
|
||||
export interface WorkflowStep {
|
||||
id: string
|
||||
type: string
|
||||
input: Record<string, any>
|
||||
dependencies?: string[]
|
||||
}
|
||||
|
||||
export interface WorkflowTrigger {
|
||||
type: 'collection' | 'global' | 'webhook' | 'cron' | 'manual'
|
||||
collection?: string
|
||||
global?: string
|
||||
event?: 'create' | 'update' | 'delete' | 'read'
|
||||
path?: string
|
||||
cron?: string
|
||||
}
|
||||
|
||||
export interface Workflow {
|
||||
id: string
|
||||
name: string
|
||||
description?: string
|
||||
active: boolean
|
||||
triggers: WorkflowTrigger[]
|
||||
steps: WorkflowStep[]
|
||||
}
|
||||
// NOTE: Workflow, WorkflowStep, and WorkflowTrigger types are now imported from the generated PayloadCMS types
|
||||
// These interfaces have been removed to avoid duplication and inconsistencies
|
||||
// Import them from 'payload' or the generated payload-types.ts file instead
|
||||
|
||||
export interface WorkflowsPluginConfig {
|
||||
collections?: string[]
|
||||
|
||||
138
src/utils/trigger-helpers.ts
Normal file
138
src/utils/trigger-helpers.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
import type { Field } from 'payload'
|
||||
import type { CustomTriggerConfig } from '../plugin/config-types.js'
|
||||
|
||||
/**
|
||||
* Helper function to create a virtual trigger parameter field
|
||||
* Handles the boilerplate for storing/reading from the parameters JSON field
|
||||
*/
|
||||
export function createTriggerParameter(
|
||||
name: string,
|
||||
fieldConfig: any, // Use any to allow flexible field configurations
|
||||
triggerSlug: string
|
||||
): Field {
|
||||
// Create a unique field name by prefixing with trigger slug
|
||||
const uniqueFieldName = `__trigger_${triggerSlug}_${name}`
|
||||
|
||||
return {
|
||||
...fieldConfig,
|
||||
name: uniqueFieldName,
|
||||
virtual: true,
|
||||
admin: {
|
||||
...fieldConfig.admin,
|
||||
condition: (_, siblingData) => siblingData?.type === triggerSlug && (
|
||||
fieldConfig.admin?.condition ?
|
||||
fieldConfig.admin.condition(_, siblingData) :
|
||||
true
|
||||
)
|
||||
},
|
||||
hooks: {
|
||||
...fieldConfig.hooks,
|
||||
afterRead: [
|
||||
...(fieldConfig.hooks?.afterRead || []),
|
||||
({ siblingData }) => siblingData?.parameters?.[name] || fieldConfig.defaultValue
|
||||
],
|
||||
beforeChange: [
|
||||
...(fieldConfig.hooks?.beforeChange || []),
|
||||
({ value, siblingData }) => {
|
||||
if (!siblingData.parameters) siblingData.parameters = {}
|
||||
siblingData.parameters[name] = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
validate: fieldConfig.validate || fieldConfig.required ?
|
||||
(value: any, args: any) => {
|
||||
const paramValue = value ?? args.siblingData?.parameters?.[name]
|
||||
|
||||
// Check required
|
||||
if (fieldConfig.required && args.siblingData?.type === triggerSlug && !paramValue) {
|
||||
return `${fieldConfig.admin?.description || name} is required for ${triggerSlug}`
|
||||
}
|
||||
|
||||
// Run original validation if present
|
||||
return fieldConfig.validate?.(paramValue, args) ?? true
|
||||
} :
|
||||
undefined
|
||||
} as Field
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to create multiple trigger parameter fields at once
|
||||
*/
|
||||
export function createTriggerParameters(
|
||||
triggerSlug: string,
|
||||
parameters: Record<string, any>
|
||||
): Field[] {
|
||||
return Object.entries(parameters).map(([name, fieldConfig]) =>
|
||||
createTriggerParameter(name, fieldConfig, triggerSlug)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Main trigger builder function that creates a fluent API for defining triggers
|
||||
*/
|
||||
export function createTrigger<TSlug extends string>(slug: TSlug) {
|
||||
return {
|
||||
/**
|
||||
* Define parameters for this trigger using a clean object syntax
|
||||
* @param paramConfig - Object where keys are parameter names and values are Field configs
|
||||
* @returns Complete CustomTriggerConfig ready for use
|
||||
*/
|
||||
parameters(paramConfig: Record<string, any>): CustomTriggerConfig {
|
||||
return {
|
||||
slug,
|
||||
inputs: Object.entries(paramConfig).map(([name, fieldConfig]) =>
|
||||
createTriggerParameter(name, fieldConfig, slug)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Advanced trigger builder with chainable methods for more complex scenarios
|
||||
*/
|
||||
export function createAdvancedTrigger<TSlug extends string>(slug: TSlug) {
|
||||
const builder = {
|
||||
slug,
|
||||
_parameters: {} as Record<string, any>,
|
||||
|
||||
/**
|
||||
* Set all parameters at once
|
||||
*/
|
||||
parameters(paramConfig: Record<string, any>) {
|
||||
this._parameters = paramConfig
|
||||
return this
|
||||
},
|
||||
|
||||
/**
|
||||
* Add a single parameter
|
||||
*/
|
||||
parameter(name: string, fieldConfig: any) {
|
||||
this._parameters[name] = fieldConfig
|
||||
return this
|
||||
},
|
||||
|
||||
/**
|
||||
* Extend with existing parameter sets (useful for common patterns)
|
||||
*/
|
||||
extend(baseParameters: Record<string, any>) {
|
||||
this._parameters = { ...baseParameters, ...this._parameters }
|
||||
return this
|
||||
},
|
||||
|
||||
/**
|
||||
* Build the final trigger configuration
|
||||
*/
|
||||
build(): CustomTriggerConfig {
|
||||
return {
|
||||
slug: this.slug,
|
||||
inputs: Object.entries(this._parameters).map(([name, fieldConfig]) =>
|
||||
createTriggerParameter(name, fieldConfig, this.slug)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return builder
|
||||
}
|
||||
156
src/utils/trigger-presets.ts
Normal file
156
src/utils/trigger-presets.ts
Normal file
@@ -0,0 +1,156 @@
|
||||
import { createAdvancedTrigger } from './trigger-helpers.js'
|
||||
|
||||
/**
|
||||
* Common parameter sets for reuse across different triggers
|
||||
*/
|
||||
|
||||
export const webhookParameters: Record<string, any> = {
|
||||
path: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'URL path for the webhook endpoint (e.g., "my-webhook")'
|
||||
},
|
||||
validate: (value: any) => {
|
||||
if (typeof value === 'string' && value.includes(' ')) {
|
||||
return 'Webhook path cannot contain spaces'
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
secret: {
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Secret key for webhook signature validation (optional but recommended)'
|
||||
}
|
||||
},
|
||||
headers: {
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Expected HTTP headers for validation (JSON object)'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const cronParameters: Record<string, any> = {
|
||||
expression: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Cron expression for scheduling (e.g., "0 9 * * 1" for every Monday at 9 AM)',
|
||||
placeholder: '0 9 * * 1'
|
||||
}
|
||||
},
|
||||
timezone: {
|
||||
type: 'text',
|
||||
defaultValue: 'UTC',
|
||||
admin: {
|
||||
description: 'Timezone for cron execution (e.g., "America/New_York", "Europe/London")',
|
||||
placeholder: 'UTC'
|
||||
},
|
||||
validate: (value: any) => {
|
||||
if (value) {
|
||||
try {
|
||||
new Intl.DateTimeFormat('en', { timeZone: value as string })
|
||||
return true
|
||||
} catch {
|
||||
return `Invalid timezone: ${value}. Please use a valid IANA timezone identifier`
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const eventParameters: Record<string, any> = {
|
||||
eventTypes: {
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: [
|
||||
{ label: 'User Created', value: 'user.created' },
|
||||
{ label: 'User Updated', value: 'user.updated' },
|
||||
{ label: 'Document Published', value: 'document.published' },
|
||||
{ label: 'Payment Completed', value: 'payment.completed' }
|
||||
],
|
||||
admin: {
|
||||
description: 'Event types that should trigger this workflow'
|
||||
}
|
||||
},
|
||||
filters: {
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'JSON filters to apply to event data (e.g., {"status": "active"})'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Preset trigger builders for common patterns
|
||||
*/
|
||||
|
||||
/**
|
||||
* Create a webhook trigger with common webhook parameters pre-configured
|
||||
*/
|
||||
export function webhookTrigger<TSlug extends string>(slug: TSlug) {
|
||||
return createAdvancedTrigger(slug).extend(webhookParameters)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a scheduled/cron trigger with timing parameters pre-configured
|
||||
*/
|
||||
export function cronTrigger<TSlug extends string>(slug: TSlug) {
|
||||
return createAdvancedTrigger(slug).extend(cronParameters)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an event-driven trigger with event filtering parameters
|
||||
*/
|
||||
export function eventTrigger<TSlug extends string>(slug: TSlug) {
|
||||
return createAdvancedTrigger(slug).extend(eventParameters)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a simple manual trigger (no parameters needed)
|
||||
*/
|
||||
export function manualTrigger<TSlug extends string>(slug: TSlug) {
|
||||
return {
|
||||
slug,
|
||||
inputs: []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an API trigger for external systems to call
|
||||
*/
|
||||
export function apiTrigger<TSlug extends string>(slug: TSlug) {
|
||||
return createAdvancedTrigger(slug).extend({
|
||||
endpoint: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'API endpoint path (e.g., "/api/triggers/my-trigger")'
|
||||
}
|
||||
},
|
||||
method: {
|
||||
type: 'select',
|
||||
options: ['GET', 'POST', 'PUT', 'PATCH'],
|
||||
defaultValue: 'POST',
|
||||
admin: {
|
||||
description: 'HTTP method for the API endpoint'
|
||||
}
|
||||
},
|
||||
authentication: {
|
||||
type: 'select',
|
||||
options: [
|
||||
{ label: 'None', value: 'none' },
|
||||
{ label: 'API Key', value: 'api-key' },
|
||||
{ label: 'Bearer Token', value: 'bearer' },
|
||||
{ label: 'Basic Auth', value: 'basic' }
|
||||
],
|
||||
defaultValue: 'api-key',
|
||||
admin: {
|
||||
description: 'Authentication method for the API endpoint'
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
4
test-results/.last-run.json
Normal file
4
test-results/.last-run.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"status": "failed",
|
||||
"failedTests": []
|
||||
}
|
||||
@@ -31,4 +31,8 @@
|
||||
"./src/**/*.tsx",
|
||||
"./dev/next-env.d.ts",
|
||||
],
|
||||
"exclude": [
|
||||
"./src/test",
|
||||
"./test-results"
|
||||
]
|
||||
}
|
||||
@@ -4,5 +4,14 @@ export default defineConfig({
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'node',
|
||||
threads: false, // Prevent port/DB conflicts
|
||||
pool: 'forks',
|
||||
poolOptions: {
|
||||
forks: {
|
||||
singleFork: true
|
||||
}
|
||||
},
|
||||
testTimeout: 30000, // 30 second timeout for integration tests
|
||||
setupFiles: ['./dev/test-setup.ts']
|
||||
},
|
||||
})
|
||||
Reference in New Issue
Block a user