Mirror of https://github.com/xtr-dev/payload-automation.git (synced 2025-12-10 17:03:22 +00:00)
Compare commits
38 Commits
| SHA1 |
|---|
| 2d0b83cfa3 |
| 067b96a5a7 |
| cfc716fc78 |
| de7589d431 |
| 5be4f82ebb |
| 13968904c0 |
| e3b79710ba |
| 718f5fe16b |
| 10a4ca1b35 |
| 4c513aa730 |
| f29f08972b |
| 9c75b28cd7 |
| 243bff2de3 |
| 705ed331fa |
| e0b13d3515 |
| 0da87dbda7 |
| 508f4c418a |
| 069de012ea |
| acdfa411e4 |
| 0f741acf73 |
| 435f9b0c69 |
| cda349846a |
| b18e2eaf49 |
| 9a3b94ef60 |
| 8f0ee4bcef |
| 449b80e162 |
| 25d42b4653 |
| 73c8c20c4b |
| e138176878 |
| 6245a71516 |
| 59a97e519e |
| b3d2877f0a |
| c050ee835a |
| 1f80028042 |
| 14d1ecf036 |
| 3749881d5f |
| c46b58f43e |
| 398a2d160e |
58 .claude/agents/source-docs-generator.md (Normal file)
@@ -0,0 +1,58 @@
---
name: source-docs-generator
description: Use this agent when you need to generate or update documentation files for source code. Examples: <example>Context: User wants to document their codebase by creating .md files for each source file. user: 'I need documentation for all my source files in the src directory' assistant: 'I'll use the source-docs-generator agent to create documentation files for your source code' <commentary>The user is requesting documentation generation for source files, which is exactly what the source-docs-generator agent is designed for.</commentary></example> <example>Context: User has added new source files and wants documentation updated. user: 'Can you update the docs for the new files I added to src/components?' assistant: 'I'll use the source-docs-generator agent to check for new or updated source files and generate corresponding documentation' <commentary>The agent will check existing docs and only update what's needed.</commentary></example>
model: sonnet
---

You are a Source Code Documentation Generator, an expert technical writer specializing in creating clear, comprehensive documentation for source code files. Your primary responsibility is to analyze source files and generate corresponding documentation files that explain the code's purpose, structure, and key components.

Your process for each task:

1. **Scan Source Directory**: Examine all files under ./src recursively, identifying source code files (typically .ts, .tsx, .js, .jsx, .py, etc.)

2. **Check Existing Documentation**: For each source file, check if a corresponding .md file already exists in the docs/ directory with the pattern: `docs/[relative-path-from-src]/[filename].[extension].md`

3. **Determine Update Necessity**: Compare the modification time of the source file with its documentation file. Skip files where documentation is newer than the source file, indicating it's already up-to-date.

4. **Analyze Source Code**: For files requiring documentation, thoroughly analyze:
   - Main purpose and functionality
   - Key classes, functions, or components
   - Important interfaces, types, or data structures
   - Dependencies and relationships
   - Notable patterns or architectural decisions
   - Public APIs and exports

5. **Generate Documentation**: Create well-structured markdown files with:
   - Clear title indicating the source file path
   - Brief summary of the file's main purpose
   - Detailed breakdown of major components
   - Code examples when helpful for understanding
   - Notes about dependencies or relationships to other files
   - Any important implementation details or patterns

6. **Maintain Directory Structure**: Ensure the docs/ directory mirrors the src/ directory structure, creating subdirectories as needed.

7. **Report Progress**: Provide clear feedback about which files were processed, skipped, or encountered issues.

Documentation Style Guidelines:
- Use clear, concise language accessible to developers
- Structure content with appropriate headings (##, ###)
- Include code snippets when they clarify functionality
- Focus on 'what' and 'why' rather than just 'how'
- Highlight key architectural decisions or patterns
- Note any complex logic or algorithms
- Document public interfaces and their usage

Quality Standards:
- Ensure accuracy by carefully reading and understanding the source code
- Make documentation self-contained and understandable without reading the source
- Keep explanations at an appropriate technical level for the intended audience
- Use consistent formatting and structure across all documentation files

Error Handling:
- Skip binary files, generated files, or files that cannot be meaningfully documented
- Handle permission errors gracefully
- Report any files that couldn't be processed and why
- Continue processing other files even if some fail

You will work systematically through the entire src/ directory, ensuring comprehensive documentation coverage while respecting existing up-to-date documentation to avoid unnecessary work.
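The freshness check in step 3 boils down to a modification-time comparison. A minimal sketch of that check, assuming Node's `fs/promises` API and the `docs/[relative-path-from-src]` mapping from step 2 (illustrative only; not part of the agent file):

```typescript
import { stat } from 'fs/promises'
import { join, relative } from 'path'

// Map a source file to its documentation path, e.g.
// src/components/Button.tsx -> docs/components/Button.tsx.md
const docPathFor = (srcFile: string): string =>
  join('docs', relative('src', srcFile) + '.md')

// Regenerate only when the source is newer than its doc (or the doc is missing).
const needsUpdate = async (srcFile: string): Promise<boolean> => {
  try {
    const [src, doc] = await Promise.all([stat(srcFile), stat(docPathFor(srcFile))])
    return src.mtimeMs > doc.mtimeMs
  } catch {
    return true // missing or unreadable doc: generate it
  }
}
```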
78 .github/workflows/claude-code-review.yml (vendored, Normal file)
@@ -0,0 +1,78 @@
```yaml
name: Claude Code Review

on:
  pull_request:
    types: [opened, synchronize]
    # Optional: Only run on specific file changes
    # paths:
    #   - "src/**/*.ts"
    #   - "src/**/*.tsx"
    #   - "src/**/*.js"
    #   - "src/**/*.jsx"

jobs:
  claude-review:
    # Optional: Filter by PR author
    # if: |
    #   github.event.pull_request.user.login == 'external-contributor' ||
    #   github.event.pull_request.user.login == 'new-developer' ||
    #   github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'

    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code Review
        id: claude-review
        uses: anthropics/claude-code-action@beta
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}

          # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4.1)
          # model: "claude-opus-4-1-20250805"

          # Direct prompt for automated review (no @claude mention needed)
          direct_prompt: |
            Please review this pull request and provide feedback on:
            - Code quality and best practices
            - Potential bugs or issues
            - Performance considerations
            - Security concerns
            - Test coverage

            Be constructive and helpful in your feedback.

          # Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
          # use_sticky_comment: true

          # Optional: Customize review based on file types
          # direct_prompt: |
          #   Review this PR focusing on:
          #   - For TypeScript files: Type safety and proper interface usage
          #   - For API endpoints: Security, input validation, and error handling
          #   - For React components: Performance, accessibility, and best practices
          #   - For tests: Coverage, edge cases, and test quality

          # Optional: Different prompts for different authors
          # direct_prompt: |
          #   ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' &&
          #   'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' ||
          #   'Please provide a thorough code review focusing on our coding standards and best practices.' }}

          # Optional: Add specific tools for running tests or linting
          # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"

          # Optional: Skip review for certain conditions
          # if: |
          #   !contains(github.event.pull_request.title, '[skip-review]') &&
          #   !contains(github.event.pull_request.title, '[WIP]')
```
64 .github/workflows/claude.yml (vendored, Normal file)
@@ -0,0 +1,64 @@
```yaml
name: Claude Code

on:
  issue_comment:
    types: [created]
  pull_request_review_comment:
    types: [created]
  issues:
    types: [opened, assigned]
  pull_request_review:
    types: [submitted]

jobs:
  claude:
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write
      actions: read # Required for Claude to read CI results on PRs
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code
        id: claude
        uses: anthropics/claude-code-action@beta
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}

          # This is an optional setting that allows Claude to read CI results on PRs
          additional_permissions: |
            actions: read

          # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4.1)
          # model: "claude-opus-4-1-20250805"

          # Optional: Customize the trigger phrase (default: @claude)
          # trigger_phrase: "/claude"

          # Optional: Trigger when specific user is assigned to an issue
          # assignee_trigger: "claude-bot"

          # Optional: Allow Claude to run specific commands
          # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)"

          # Optional: Add custom instructions for Claude to customize its behavior for your project
          # custom_instructions: |
          #   Follow our coding standards
          #   Ensure all new code has tests
          #   Use TypeScript for new files

          # Optional: Custom environment variables for Claude
          # claude_env: |
          #   NODE_ENV: test
```
52 CHANGELOG.md
@@ -5,6 +5,58 @@ All notable changes to the PayloadCMS Automation Plugin will be documented in th
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.0.39] - 2025-09-11

### Changed
- **Breaking Change**: Replaced JSONPath with Handlebars template system for better string interpolation
- Automatic type conversion for numeric and boolean fields based on field names
- Enhanced condition evaluation with Handlebars template support
- Simplified data resolution syntax: `{{steps.stepName.output.field}}` instead of `$.steps.stepName.output.field`

### Removed
- **Breaking Change**: Removed JSONPath dependency (`jsonpath-plus`) and all backward compatibility
- Removed `resolveJSONPathValue` and `parseConditionValue` methods

### Added
- Handlebars template engine for dynamic data interpolation
- Smart type conversion: strings to numbers/booleans based on field patterns
- Enhanced template examples and documentation
- Support for complex string building: `"Post {{trigger.doc.title}} was updated"`

### Migration Notes
- Update all workflow configurations to use Handlebars syntax:
  - `$.steps.stepName.output.id` → `{{steps.stepName.output.id}}`
  - `$.trigger.doc.status == 'published'` → `{{trigger.doc.status}} == 'published'`
- String interpolation now works naturally: `"Message: {{steps.step1.output.result}}"`
- Numeric fields (`timeout`, `retries`, etc.) are automatically converted from strings to numbers
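As an illustration of the syntax change, a hypothetical HTTP step written against v0.0.38 and its v0.0.39 equivalent (the step and field names are examples, modeled on the HTTP request step used elsewhere in this repo):

```typescript
// v0.0.38 and earlier: JSONPath expressions
const before = {
  name: 'notify',
  step: 'http-request-step',
  url: 'https://api.example.com/posts',
  body: { postId: '$.steps.createPost.output.id' },
}

// v0.0.39: Handlebars templates, with natural string interpolation
const after = {
  name: 'notify',
  step: 'http-request-step',
  url: 'https://api.example.com/posts/{{steps.createPost.output.id}}',
  body: { message: 'Post {{trigger.doc.title}} was updated' },
}
```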
## [0.0.38] - 2025-09-10

### Changed
- Updated dependencies to PayloadCMS 3.45.0
- Enhanced plugin configuration and stability

## [0.0.37] - 2025-09-XX

### Removed
- **Breaking Change**: Removed built-in cron trigger implementation in favor of webhook-based scheduling
- Removed unused plugin modules and associated tests
- Removed `initCollectionHooks` and associated migration guides

### Changed
- Refactored triggers to TriggerConfig pattern
- Simplified executor architecture by removing executorRegistry pattern
- Updated to on-demand workflow execution creation

### Added
- Migration guide for v0.0.37 (MIGRATION-v0.0.37.md)
- Enhanced parameter field configuration

### Migration Notes
- Built-in cron triggers are no longer supported. Use webhook triggers with external cron services (GitHub Actions, Vercel Cron, etc.)
- Update trigger configurations to use the new TriggerConfig pattern
- See MIGRATION-v0.0.37.md for detailed migration steps

## [0.0.16] - 2025-09-01

### Fixed
16 CLAUDE.md
@@ -15,9 +15,9 @@ A local copy of the PayloadCMS documentation is available at `./payload-docs/` f
### Essential Commands
- `pnpm dev` - Start development server with Next.js (runs on http://localhost:3000, fallback ports used if occupied)
- `pnpm build` - Build the plugin for production (runs copyfiles, build:types, build:swc)
- `pnpm test` - Run all tests (integration + e2e)
- `pnpm test:int` - Run integration tests with Vitest
- `pnpm test:e2e` - Run end-to-end tests with Playwright
- `pnpm test` - Run tests (currently configured for integration and e2e)
- `pnpm test:int` - Run integration tests
- `pnpm test:e2e` - Run end-to-end tests
- `pnpm lint` - Run ESLint
- `pnpm lint:fix` - Auto-fix ESLint issues

@@ -111,14 +111,8 @@ Steps support a `dependencies` field (array of step names) that:

### Database Configuration
- **Development**: SQLite adapter for simplicity
- **Testing**: MongoDB Memory Server for isolation
- Database selection in `dev/payload.config.ts` (an illustrative selection sketch follows this list)
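A sketch of what that selection could look like, assuming the standard Payload adapter packages and an environment-variable switch; the `DATABASE_URI` variable name is an assumption, and the actual `dev/payload.config.ts` may differ:

```typescript
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { sqliteAdapter } from '@payloadcms/db-sqlite'

// Use the MongoDB URI provided by the test setup (MongoDB Memory Server) when present,
// otherwise fall back to a local SQLite file for development.
const db = process.env.DATABASE_URI?.startsWith('mongodb')
  ? mongooseAdapter({ url: process.env.DATABASE_URI })
  : sqliteAdapter({ client: { url: 'file:./dev.db' } })
```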
### Testing Strategy
- **Integration Tests** (`dev/int.spec.ts`): Vitest with 30-second timeouts
- **E2E Tests** (`dev/e2e.spec.ts`): Playwright testing against development server
- **Test Database**: MongoDB Memory Server for isolated testing

### Plugin Development Pattern
- Uses spread syntax to extend the existing PayloadCMS config (see the sketch below)
- Maintains database schema consistency when plugin is disabled
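A minimal sketch of that spread pattern (hedged: the collection definitions are placeholders, not the plugin's actual internals; the `workflows` and `workflow-runs` slugs match the collections referenced elsewhere in this diff):

```typescript
import type { CollectionConfig, Config, Plugin } from 'payload'

// Placeholder collections standing in for the plugin's real ones.
const Workflows: CollectionConfig = { slug: 'workflows', fields: [] }
const WorkflowRuns: CollectionConfig = { slug: 'workflow-runs', fields: [] }

export const workflowsPlugin =
  (options: { enabled?: boolean } = {}): Plugin =>
  (incomingConfig: Config): Config => ({
    ...incomingConfig,
    // Collections stay registered even when the plugin is disabled,
    // so the database schema remains consistent.
    collections: [...(incomingConfig.collections ?? []), Workflows, WorkflowRuns],
  })
```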
@@ -169,7 +163,6 @@ The plugin registers hooks for collections and globals specified in the plugin c

### Key Development Dependencies
- Next.js 15.4.4 for development server
- Vitest + Playwright for testing
- SWC for fast transpilation
- Various PayloadCMS adapters (SQLite, MongoDB, PostgreSQL)

@@ -178,5 +171,4 @@ The plugin registers hooks for collections and globals specified in the plugin c
- `src/plugin/index.ts` - Main plugin configuration and extension logic
- `src/core/workflow-executor.ts` - Core execution engine with dependency resolution
- `src/collections/Workflow.ts` - Workflow collection schema and configuration
- `dev/payload.config.ts` - Development configuration showing plugin integration
- `dev/int.spec.ts` and `dev/e2e.spec.ts` - Testing patterns and setup
- `dev/payload.config.ts` - Development configuration showing plugin integration
@@ -1,187 +0,0 @@
|
||||
# Migration Guide: v0.0.23 → v0.0.24
|
||||
|
||||
## What's New
|
||||
|
||||
Version 0.0.24 introduces **trigger builder helpers** that dramatically reduce boilerplate when creating custom triggers, plus fixes field name clashing between built-in and external trigger parameters.
|
||||
|
||||
## Breaking Changes
|
||||
|
||||
**None** - This is a fully backward-compatible release. All existing triggers continue to work exactly as before.
|
||||
|
||||
## New Features
|
||||
|
||||
### 1. Trigger Builder Helpers
|
||||
|
||||
New helper functions eliminate 90% of boilerplate when creating custom triggers:
|
||||
|
||||
```bash
|
||||
npm update @xtr-dev/payload-automation
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Import the new helpers
|
||||
import {
|
||||
createTrigger,
|
||||
webhookTrigger,
|
||||
cronTrigger
|
||||
} from '@xtr-dev/payload-automation/helpers'
|
||||
```
|
||||
|
||||
### 2. Fixed Field Name Clashing
|
||||
|
||||
Built-in trigger parameters now use a JSON backing store to prevent conflicts with custom trigger fields.
|
||||
|
||||
## Migration Steps
|
||||
|
||||
### Step 1: Update Package
|
||||
|
||||
```bash
|
||||
npm install @xtr-dev/payload-automation@latest
|
||||
# or
|
||||
pnpm update @xtr-dev/payload-automation
|
||||
```
|
||||
|
||||
### Step 2: (Optional) Modernize Custom Triggers
|
||||
|
||||
**Your existing triggers will continue to work**, but you can optionally migrate to the cleaner syntax:
|
||||
|
||||
#### Before (Still Works)
|
||||
```typescript
|
||||
const customTrigger = {
|
||||
slug: 'order-webhook',
|
||||
inputs: [
|
||||
{
|
||||
name: 'webhookSecret',
|
||||
type: 'text',
|
||||
required: true,
|
||||
virtual: true,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'order-webhook',
|
||||
description: 'Secret for webhook validation'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [({ siblingData }) => siblingData?.parameters?.webhookSecret],
|
||||
beforeChange: [({ value, siblingData }) => {
|
||||
if (!siblingData.parameters) siblingData.parameters = {}
|
||||
siblingData.parameters.webhookSecret = value
|
||||
return undefined
|
||||
}]
|
||||
}
|
||||
}
|
||||
// ... more boilerplate
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### After (Recommended)
|
||||
```typescript
|
||||
import { createTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const orderWebhook = createTrigger('order-webhook').parameters({
|
||||
webhookSecret: {
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Secret for webhook validation'
|
||||
}
|
||||
}
|
||||
// Add more parameters easily
|
||||
})
|
||||
```
|
||||
|
||||
### Step 3: (Optional) Use Preset Builders
|
||||
|
||||
For common trigger patterns:
|
||||
|
||||
```typescript
|
||||
import { webhookTrigger, cronTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
// Webhook trigger with built-in path, secret, headers parameters
|
||||
const paymentWebhook = webhookTrigger('payment-webhook')
|
||||
.parameter('currency', {
|
||||
type: 'select',
|
||||
options: ['USD', 'EUR', 'GBP']
|
||||
})
|
||||
.build()
|
||||
|
||||
// Cron trigger with built-in expression, timezone parameters
|
||||
const dailyReport = cronTrigger('daily-report')
|
||||
.parameter('format', {
|
||||
type: 'select',
|
||||
options: ['pdf', 'csv']
|
||||
})
|
||||
.build()
|
||||
```
|
||||
|
||||
## Quick Migration Examples
|
||||
|
||||
### Simple Trigger Migration
|
||||
|
||||
```typescript
|
||||
// OLD WAY (still works)
|
||||
{
|
||||
slug: 'user-signup',
|
||||
inputs: [/* 20+ lines of boilerplate per field */]
|
||||
}
|
||||
|
||||
// NEW WAY (recommended)
|
||||
import { createTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const userSignup = createTrigger('user-signup').parameters({
|
||||
source: {
|
||||
type: 'select',
|
||||
options: ['web', 'mobile', 'api'],
|
||||
required: true
|
||||
},
|
||||
userType: {
|
||||
type: 'select',
|
||||
options: ['regular', 'premium'],
|
||||
defaultValue: 'regular'
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
### Webhook Trigger Migration
|
||||
|
||||
```typescript
|
||||
// OLD WAY
|
||||
{
|
||||
slug: 'payment-webhook',
|
||||
inputs: [/* Manual webhookPath field + lots of boilerplate */]
|
||||
}
|
||||
|
||||
// NEW WAY
|
||||
import { webhookTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
|
||||
const paymentWebhook = webhookTrigger('payment-webhook')
|
||||
.parameter('minimumAmount', {
|
||||
type: 'number',
|
||||
min: 0
|
||||
})
|
||||
.build()
|
||||
```
|
||||
|
||||
## Benefits of Migration
|
||||
|
||||
- **90% less code** - Eliminate virtual field boilerplate
|
||||
- **No field name conflicts** - Built-in parameters isolated
|
||||
- **Better TypeScript support** - Full type inference
|
||||
- **Preset patterns** - Common trigger types ready-to-use
|
||||
- **Composable API** - Easy to extend and customize
|
||||
|
||||
## Compatibility
|
||||
|
||||
- ✅ **Existing triggers** continue to work unchanged
|
||||
- ✅ **Mix old and new** trigger styles in same config
|
||||
- ✅ **No database changes** required
|
||||
- ✅ **PayloadCMS field compatibility** maintained
|
||||
|
||||
## Need Help?
|
||||
|
||||
- [View examples](./examples/trigger-builders.ts)
|
||||
- [Read documentation](./examples/README-trigger-builders.md)
|
||||
- [Report issues](https://github.com/xtr-dev/payload-automation/issues)
|
||||
|
||||
---
|
||||
|
||||
**TL;DR**: Update the package, optionally migrate custom triggers to use the new helpers for cleaner code. All existing triggers continue to work without changes.
|
||||
@@ -1,68 +0,0 @@
# Steps and Triggers Not Implementing

This document lists workflow steps and triggers that are intentionally **not** being implemented in the core plugin. These are either better suited as custom user implementations or fall outside the plugin's scope.

## Steps Not Implementing

### Workflow Orchestration
- **Stop Workflow** - Can be achieved through conditional logic
- **Run Workflow** - Adds complexity to execution tracking and circular dependency management
- **Parallel Fork/Join** - Current dependency system already enables parallel execution

### External Service Integrations
- **GraphQL Query** - Better as custom HTTP request step
- **S3/Cloud Storage** - Too provider-specific
- **Message Queue** (Kafka, RabbitMQ, SQS) - Infrastructure-specific
- **SMS** (Twilio, etc.) - Requires external accounts
- **Push Notifications** - Platform-specific implementation
- **Slack/Discord/Teams** - Better as custom HTTP webhooks
- **Calendar Integration** - Too many providers to support

### AI/ML Operations
- **AI Prompt** (OpenAI, Claude, etc.) - Requires API keys, better as custom implementation
- **Text Analysis** - Too many variations and providers
- **Image Processing** - Better handled by dedicated services

### Specialized Data Operations
- **Database Query** (Direct SQL/NoSQL) - Security concerns, bypasses Payload
- **File Operations** - Complex permission and security implications
- **Hash/Encrypt** - Security-sensitive, needs careful implementation
- **RSS/Feed Processing** - Too specific for core plugin

## Triggers Not Implementing

### Workflow Events
- **Workflow Complete/Failed** - Adds circular dependency complexity
- **Step Failed** - Complicates error handling flow

### System Events
- **File Upload** - Can use collection hooks on media collections
- **User Authentication** (Login/Logout) - Security implications
- **Server Start/Stop** - Lifecycle management complexity
- **Cache Clear** - Too implementation-specific
- **Migration/Backup Events** - Infrastructure-specific

### External Monitoring
- **Email Received** (IMAP/POP3) - Requires mail server setup
- **Git Webhooks** - Better as standard webhook triggers
- **Performance Alerts** - Requires monitoring infrastructure
- **Error Events** - Better handled by dedicated error tracking

### Advanced Time-Based
- **Recurring Patterns** (e.g., "every 2nd Tuesday") - Complex parsing and timezone handling
- **Date Range Triggers** - Can be achieved with conditional logic in workflows

## Why These Aren't Core Features

1. **Maintainability**: Each external integration requires ongoing maintenance as APIs change
2. **Security**: Many features have security implications that are better handled by users who understand their specific requirements
3. **Flexibility**: Users can implement these as custom steps/triggers tailored to their needs
4. **Scope**: The plugin focuses on being a solid workflow engine, not an everything-integration platform
5. **Dependencies**: Avoiding external service dependencies keeps the plugin lightweight

## What Users Can Do Instead

- Implement custom steps using the plugin's TaskConfig interface
- Use the HTTP Request step for most external integrations (see the sketch after this list)
- Create custom triggers through Payload hooks
- Build specialized workflow packages on top of this plugin
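For instance, a Slack notification — listed above as out of scope — can be expressed as an ordinary HTTP Request step instead of a built-in integration (a sketch; the field names follow the step shape used in this repo's tests, and the webhook URL is a placeholder):

```typescript
const notifySlack = {
  name: 'notify-slack',
  step: 'http-request-step',
  url: 'https://hooks.slack.com/services/T000/B000/XXXX', // placeholder webhook URL
  method: 'POST',
  body: {
    text: 'Post {{trigger.doc.title}} was published',
  },
}
```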
133 README.md
@@ -1,17 +1,17 @@
# @xtr-dev/payload-automation

A comprehensive workflow automation plugin for PayloadCMS 3.x that enables visual workflow building, execution tracking, and parallel processing.
A workflow automation plugin for PayloadCMS 3.x. Run steps in workflows triggered by document changes or webhooks.

⚠️ **Pre-release Warning**: This package is currently in active development (v0.0.x). Breaking changes may occur before v1.0.0. Not recommended for production use.

## Features

- 🔄 **Visual Workflow Builder** - Create complex workflows with drag-and-drop interface
- ⚡ **Parallel Execution** - Smart dependency resolution for optimal performance
- 🎯 **Multiple Triggers** - Collection hooks, webhooks, manual execution
- 📊 **Execution Tracking** - Complete history and monitoring of workflow runs
- 🔧 **Extensible Steps** - HTTP requests, document CRUD, email notifications
- 🔍 **JSONPath Integration** - Dynamic data interpolation and transformation
- 🔄 Visual workflow builder in PayloadCMS admin
- ⚡ Run workflows when documents are created/updated/deleted
- 🎯 Trigger workflows via webhooks
- 📊 Track workflow execution history
- 🔧 HTTP requests, document operations, email sending
- 🔗 Use data from previous steps in templates

## Installation

@@ -46,91 +46,25 @@ export default buildConfig({
})
```

## Import Structure

The plugin uses separate exports to avoid bundling server-side code in client bundles:
## Imports

```typescript
// Server-side plugin and functions
// Server plugin
import { workflowsPlugin } from '@xtr-dev/payload-automation/server'

// Client-side components
import { TriggerWorkflowButton } from '@xtr-dev/payload-automation/client'
// Client components
import { StatusCell, ErrorDisplay } from '@xtr-dev/payload-automation/client'

// Types only (safe for both server and client)
// Types
import type { WorkflowsPluginConfig } from '@xtr-dev/payload-automation'
```

## Step Types

### HTTP Request
Make external API calls with comprehensive error handling and retry logic.
Call external APIs. Supports GET, POST, PUT, DELETE, PATCH. Uses Bearer tokens, API keys, or basic auth.

**Key Features:**
- Support for GET, POST, PUT, DELETE, PATCH methods
- Authentication: Bearer token, Basic auth, API key headers
- Configurable timeouts and retry logic
- JSONPath integration for dynamic URLs and request bodies

**Error Handling:**
HTTP Request steps use a **response-based success model** rather than status-code-based failures:

- ✅ **Successful completion**: All HTTP requests that receive a response (including 4xx/5xx status codes) are marked as "succeeded"
- ❌ **Failed execution**: Only network errors, timeouts, DNS failures, and connection issues cause step failure
- 📊 **Error information preserved**: HTTP error status codes (404, 500, etc.) are captured in the step output for workflow conditional logic

**Example workflow logic:**
```typescript
// Step outputs for a 404 response:
{
  "status": 404,
  "statusText": "Not Found",
  "body": "Resource not found",
  "headers": {...},
  "duration": 1200
}

// Use in workflow conditions:
// "$.steps.apiRequest.output.status >= 400" to handle errors
```

This design allows workflows to handle HTTP errors gracefully rather than failing completely, enabling robust error handling and retry logic.

**Enhanced Error Tracking:**
For network failures (timeouts, DNS errors, connection failures), the plugin provides detailed error information through an independent storage system that bypasses PayloadCMS's output limitations:

```typescript
// Timeout error details preserved in workflow context:
{
  "steps": {
    "httpStep": {
      "state": "failed",
      "error": "Task handler returned a failed state",
      "errorDetails": {
        "errorType": "timeout",
        "duration": 2006,
        "attempts": 1,
        "finalError": "Request timeout after 2000ms",
        "context": {
          "url": "https://api.example.com/data",
          "method": "GET",
          "timeout": 2000
        }
      },
      "executionInfo": {
        "completed": true,
        "success": false,
        "executedAt": "2025-09-04T15:16:10.000Z",
        "duration": 2006
      }
    }
  }
}

// Access in workflow conditions:
// "$.steps.httpStep.errorDetails.errorType == 'timeout'"
// "$.steps.httpStep.errorDetails.duration > 5000"
```
HTTP steps succeed even with 4xx/5xx status codes. Only network errors (timeouts, DNS failures) cause step failure. Check `{{steps.stepName.output.status}}` for error handling.
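For example, a follow-up step can react to an error status from an earlier request (a sketch: the step names are illustrative, and the per-step `condition` shown here is an assumption modeled on the template conditions under Templates below):

```typescript
const logFailure = {
  name: 'log-failure',
  step: 'create-document',
  collectionSlug: 'auditLog',
  data: {
    message: 'Request failed with status {{steps.apiRequest.output.status}}',
  },
  dependencies: ['apiRequest'],
  // Assumed per-step condition, using the template syntax from the Templates section.
  condition: '{{steps.apiRequest.output.status}} >= 400',
}
```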

### Document Operations
- **Create Document** - Create PayloadCMS documents

@@ -141,13 +75,20 @@ For network failures (timeouts, DNS errors, connection failures), the plugin pro

### Communication
- **Send Email** - Send notifications via PayloadCMS email

## Data Resolution
## Templates

Use JSONPath to access workflow data:
Use `{{}}` to insert data:

- `$.trigger.doc.id` - Access trigger document
- `$.steps.stepName.output` - Use previous step outputs
- `$.context` - Access workflow context
- `{{trigger.doc.id}}` - Data from the document that triggered the workflow
- `{{steps.stepName.output}}` - Data from previous steps

Example:
```json
{
  "url": "https://api.example.com/posts/{{steps.createPost.output.id}}",
  "condition": "{{trigger.doc.status}} == 'published'"
}
```

## Requirements

@@ -155,10 +96,26 @@ Use JSONPath to access workflow data:
- Node.js ^18.20.2 || >=20.9.0
- pnpm ^9 || ^10

## Documentation
## Logging

Full documentation coming soon. For now, explore the development environment in the repository for examples and patterns.
Set `PAYLOAD_AUTOMATION_LOG_LEVEL` to control logs:
- `silent`, `error`, `warn` (default), `info`, `debug`, `trace`

```bash
PAYLOAD_AUTOMATION_LOG_LEVEL=debug npm run dev
```

## Scheduled Workflows

Use webhook triggers with external cron services:

```bash
# Call workflow webhook from cron
curl -X POST https://your-app.com/api/workflows-webhook/daily-report
```

Built-in cron triggers were removed in v0.0.37+.

## License

MIT
MIT
@@ -1,9 +1,7 @@
import { WorkflowExecutionStatus as WorkflowExecutionStatus_6f365a93b6cb4b34ad564b391e21db6f } from '@xtr-dev/payload-automation/client'
import { StatusCell as StatusCell_6f365a93b6cb4b34ad564b391e21db6f } from '@xtr-dev/payload-automation/client'
import { ErrorDisplay as ErrorDisplay_6f365a93b6cb4b34ad564b391e21db6f } from '@xtr-dev/payload-automation/client'

export const importMap = {
  "@xtr-dev/payload-automation/client#WorkflowExecutionStatus": WorkflowExecutionStatus_6f365a93b6cb4b34ad564b391e21db6f,
  "@xtr-dev/payload-automation/client#StatusCell": StatusCell_6f365a93b6cb4b34ad564b391e21db6f,
  "@xtr-dev/payload-automation/client#ErrorDisplay": ErrorDisplay_6f365a93b6cb4b34ad564b391e21db6f
}
@@ -1,113 +0,0 @@
```typescript
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
import { getTestPayload, cleanDatabase } from './test-setup.js'

describe('Workflow Condition Fix Test', () => {

  beforeEach(async () => {
    await cleanDatabase()
  })

  afterEach(async () => {
    await cleanDatabase()
  })

  it('should correctly evaluate trigger conditions with $.trigger.doc path', async () => {
    const payload = getTestPayload()

    // Create a workflow with a condition using the correct JSONPath
    const workflow = await payload.create({
      collection: 'workflows',
      data: {
        name: 'Test Condition Evaluation',
        description: 'Tests that $.trigger.doc.content conditions work',
        triggers: [
          {
            type: 'collection-trigger',
            collectionSlug: 'posts',
            operation: 'create',
            condition: '$.trigger.doc.content == "TRIGGER_ME"'
          }
        ],
        steps: [
          {
            name: 'audit-step',
            step: 'create-document',
            collectionSlug: 'auditLog',
            data: {
              post: '$.trigger.doc.id',
              message: 'Condition was met and workflow triggered'
            }
          }
        ]
      }
    })

    console.log('Created workflow with condition: $.trigger.doc.content == "TRIGGER_ME"')

    // Create a post that SHOULD NOT trigger
    const post1 = await payload.create({
      collection: 'posts',
      data: {
        content: 'This should not trigger'
      }
    })

    // Create a post that SHOULD trigger
    const post2 = await payload.create({
      collection: 'posts',
      data: {
        content: 'TRIGGER_ME'
      }
    })

    // Wait for workflow execution
    await new Promise(resolve => setTimeout(resolve, 5000))

    // Check workflow runs - should have exactly 1
    const runs = await payload.find({
      collection: 'workflow-runs',
      where: {
        workflow: {
          equals: workflow.id
        }
      }
    })

    console.log(`Found ${runs.totalDocs} workflow runs`)
    if (runs.totalDocs > 0) {
      console.log('Run statuses:', runs.docs.map(r => r.status))
    }

    // Should have exactly 1 run for the matching condition
    expect(runs.totalDocs).toBe(1)

    // Check audit logs - should only have one for post2
    const auditLogs = await payload.find({
      collection: 'auditLog',
      where: {
        post: {
          equals: post2.id
        }
      }
    })

    if (runs.docs[0].status === 'completed') {
      expect(auditLogs.totalDocs).toBe(1)
      expect(auditLogs.docs[0].message).toBe('Condition was met and workflow triggered')
    }

    // Verify no audit log for the first post
    const noAuditLogs = await payload.find({
      collection: 'auditLog',
      where: {
        post: {
          equals: post1.id
        }
      }
    })

    expect(noAuditLogs.totalDocs).toBe(0)

    console.log('✅ Condition evaluation working with $.trigger.doc path!')
  }, 30000)
})
```
@@ -1,519 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
import { mockHttpBin, testFixtures } from './test-helpers.js'
|
||||
|
||||
describe('Error Scenarios and Edge Cases', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
// Set up comprehensive mocks for all error scenarios
|
||||
mockHttpBin.mockAllErrorScenarios()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
mockHttpBin.cleanup()
|
||||
})
|
||||
|
||||
it('should handle HTTP timeout errors gracefully', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Clear existing mocks and set up a proper timeout mock
|
||||
mockHttpBin.cleanup()
|
||||
mockHttpBin.mockTimeout()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - HTTP Timeout',
|
||||
description: 'Tests HTTP request timeout handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
...testFixtures.httpRequestStep('https://httpbin.org/delay/10'),
|
||||
name: 'timeout-request',
|
||||
method: 'GET',
|
||||
timeout: 2000, // 2 second timeout
|
||||
body: null
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Timeout Post'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for workflow execution (should timeout)
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Either failed due to timeout or completed (depending on network speed)
|
||||
expect(['failed', 'completed']).toContain(runs.docs[0].status)
|
||||
|
||||
// Verify that detailed error information is preserved via new independent storage system
|
||||
const context = runs.docs[0].context
|
||||
const stepContext = context.steps['timeout-request']
|
||||
|
||||
// Check that independent execution info was recorded
|
||||
expect(stepContext.executionInfo).toBeDefined()
|
||||
expect(stepContext.executionInfo.completed).toBe(true)
|
||||
|
||||
// Check that detailed error information was preserved (new feature!)
|
||||
if (runs.docs[0].status === 'failed' && stepContext.errorDetails) {
|
||||
expect(stepContext.errorDetails.errorType).toBe('timeout')
|
||||
expect(stepContext.errorDetails.duration).toBeGreaterThan(2000)
|
||||
expect(stepContext.errorDetails.attempts).toBe(1)
|
||||
expect(stepContext.errorDetails.context.url).toBe('https://httpbin.org/delay/10')
|
||||
expect(stepContext.errorDetails.context.timeout).toBe(2000)
|
||||
console.log('✅ Detailed timeout error information preserved:', {
|
||||
errorType: stepContext.errorDetails.errorType,
|
||||
duration: stepContext.errorDetails.duration,
|
||||
attempts: stepContext.errorDetails.attempts
|
||||
})
|
||||
} else if (runs.docs[0].status === 'failed') {
|
||||
console.log('✅ Timeout error handled:', runs.docs[0].error)
|
||||
} else {
|
||||
console.log('✅ Request completed within timeout')
|
||||
}
|
||||
}, 15000)
|
||||
|
||||
it('should handle invalid JSON responses', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Invalid JSON',
|
||||
description: 'Tests invalid JSON response handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'invalid-json-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/html', // Returns HTML, not JSON
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Invalid JSON Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed') // Should complete but with HTML body
|
||||
expect(runs.docs[0].context.steps['invalid-json-request'].output.body).toContain('<html>')
|
||||
|
||||
console.log('✅ Non-JSON response handled correctly')
|
||||
}, 25000)
|
||||
|
||||
it('should handle circular reference in JSONPath resolution', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// This test creates a scenario where JSONPath might encounter circular references
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Circular Reference',
|
||||
description: 'Tests circular reference handling in JSONPath',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'circular-test',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
// This creates a deep reference that could cause issues
|
||||
triggerData: '$.trigger',
|
||||
stepData: '$.steps',
|
||||
nestedRef: '$.trigger.doc'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Circular Reference Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Should either succeed with safe serialization or fail gracefully
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
console.log('✅ Circular reference handled:', runs.docs[0].status)
|
||||
}, 20000)
|
||||
|
||||
it('should handle malformed workflow configurations', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// This test should expect the workflow creation to fail due to validation
|
||||
let creationFailed = false
|
||||
let workflow: any = null
|
||||
|
||||
try {
|
||||
// Create workflow with missing required fields for create-document
|
||||
workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Malformed Config',
|
||||
description: 'Tests malformed workflow configuration',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'malformed-step',
|
||||
step: 'create-document',
|
||||
// Missing required collectionSlug
|
||||
data: {
|
||||
message: 'This should fail'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
} catch (error) {
|
||||
creationFailed = true
|
||||
expect(error).toBeDefined()
|
||||
console.log('✅ Workflow creation failed as expected:', error instanceof Error ? error.message : error)
|
||||
}
|
||||
|
||||
// If creation failed, that's the expected behavior
|
||||
if (creationFailed) {
|
||||
return
|
||||
}
|
||||
|
||||
// If somehow the workflow was created, test execution failure
|
||||
if (workflow) {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Malformed Config Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 3000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('failed')
|
||||
expect(runs.docs[0].error).toBeDefined()
|
||||
|
||||
console.log('✅ Malformed config caused execution failure:', runs.docs[0].error)
|
||||
}
|
||||
}, 15000)
|
||||
|
||||
it('should handle HTTP 4xx and 5xx errors properly', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - HTTP Errors',
|
||||
description: 'Tests HTTP error status handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'not-found-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/status/404',
|
||||
method: 'GET'
|
||||
},
|
||||
{
|
||||
name: 'server-error-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/status/500',
|
||||
method: 'GET',
|
||||
dependencies: ['not-found-request']
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error HTTP Status Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 8000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed') // Workflow should complete successfully
|
||||
|
||||
// Check that both steps completed with HTTP error outputs
|
||||
const context = runs.docs[0].context
|
||||
expect(context.steps['not-found-request'].state).toBe('succeeded') // HTTP request completed
|
||||
expect(context.steps['not-found-request'].output.status).toBe(404) // But with error status
|
||||
|
||||
console.log('✅ HTTP error statuses handled correctly')
|
||||
}, 25000)
|
||||
|
||||
it('should handle retry logic for transient failures', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Retry Logic',
|
||||
description: 'Tests retry logic for HTTP requests',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'retry-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/status/503', // Service unavailable
|
||||
method: 'GET',
|
||||
retries: 3,
|
||||
retryDelay: 1000
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Retry Logic Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 10000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed') // Workflow should complete with HTTP error output
|
||||
|
||||
// The step should have succeeded but with error status
|
||||
const stepContext = runs.docs[0].context.steps['retry-request']
|
||||
expect(stepContext.state).toBe('succeeded')
|
||||
expect(stepContext.output.status).toBe(503)
|
||||
|
||||
console.log('✅ Retry logic executed correctly')
|
||||
}, 25000)
|
||||
|
||||
it('should handle extremely large workflow contexts', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Large Context',
|
||||
description: 'Tests handling of large workflow contexts',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'large-response-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/base64/SFRUUEJJTiBpcyBhd2Vzb21l', // Returns base64 decoded text
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Large Context Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Should handle large contexts without memory issues
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
console.log('✅ Large context handled:', runs.docs[0].status)
|
||||
}, 20000)
|
||||
|
||||
it('should handle undefined and null values in JSONPath', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Error - Null Values',
|
||||
description: 'Tests null/undefined values in JSONPath expressions',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'null-value-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
nonexistentField: '$.trigger.doc.nonexistent',
|
||||
nullField: '$.trigger.doc.null',
|
||||
undefinedField: '$.trigger.doc.undefined'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Error Null Values Post'
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Should handle null/undefined values gracefully
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
if (runs.docs[0].status === 'completed') {
|
||||
const stepOutput = runs.docs[0].context.steps['null-value-request'].output
|
||||
expect(stepOutput.status).toBe(200) // httpbin should accept the request
|
||||
console.log('✅ Null values handled gracefully')
|
||||
} else {
|
||||
console.log('✅ Null values caused expected failure:', runs.docs[0].error)
|
||||
}
|
||||
}, 20000)
|
||||
})
|
||||
@@ -1,392 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
import { mockHttpBin, testFixtures } from './test-helpers.js'
|
||||
|
||||
describe('Hook Execution Reliability Tests', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
mockHttpBin.cleanup()
|
||||
})
|
||||
|
||||
it('should reliably execute hooks when collections are created', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with collection trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Reliability - Create',
|
||||
description: 'Tests hook execution on post creation',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
...testFixtures.createDocumentStep('auditLog'),
|
||||
name: 'create-audit-log',
|
||||
data: {
|
||||
message: 'Post was created via workflow trigger',
|
||||
post: '$.trigger.doc.id'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
expect(workflow).toBeDefined()
|
||||
expect(workflow.id).toBeDefined()
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Reliability Post'
|
||||
}
|
||||
})
|
||||
|
||||
expect(post).toBeDefined()
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
// Verify workflow run was created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Either succeeded or failed, but should have executed
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
console.log('✅ Hook execution status:', runs.docs[0].status)
|
||||
|
||||
// Verify audit log was created only if the workflow succeeded
|
||||
if (runs.docs[0].status === 'completed') {
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBeGreaterThan(0)
|
||||
expect(auditLogs.docs[0].message).toContain('workflow trigger')
|
||||
} else {
|
||||
// If workflow failed, just log the error but don't fail the test
|
||||
console.log('⚠️ Workflow failed:', runs.docs[0].error)
|
||||
// The important thing is that a workflow run was created
|
||||
}
|
||||
}, 30000)
|
||||
|
||||
it('should handle hook execution errors gracefully', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Mock network error for invalid URL
|
||||
mockHttpBin.mockNetworkError('invalid-url-that-will-fail')
|
||||
|
||||
// Create a workflow with invalid step configuration
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Error Handling',
|
||||
description: 'Tests error handling in hook execution',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'invalid-http-request',
|
||||
step: 'http-request-step',
|
||||
url: 'https://invalid-url-that-will-fail',
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Error Handling Post'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
// Verify a failed workflow run was created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('failed')
|
||||
expect(runs.docs[0].error).toBeDefined()
|
||||
// Check that the error mentions either the URL or the task failure
|
||||
const errorMessage = runs.docs[0].error.toLowerCase()
|
||||
const hasRelevantError = errorMessage.includes('url') ||
|
||||
errorMessage.includes('invalid-url') ||
|
||||
errorMessage.includes('network') ||
|
||||
errorMessage.includes('failed')
|
||||
expect(hasRelevantError).toBe(true)
|
||||
|
||||
console.log('✅ Error handling working:', runs.docs[0].error)
|
||||
}, 30000)
|
||||
|
||||
it('should create failed workflow runs when executor is unavailable', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// This test simulates the executor being unavailable
|
||||
// We'll create a workflow and then simulate a hook execution without proper executor
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Executor Unavailable',
|
||||
description: 'Tests handling when executor is not available',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'simple-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/get'
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Temporarily disable the executor by setting it to null
|
||||
// This simulates the initialization issue
|
||||
const global = globalThis as any
|
||||
const originalExecutor = global.__workflowExecutor
|
||||
global.__workflowExecutor = null
|
||||
|
||||
try {
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Executor Unavailable Post'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for hook execution attempt
|
||||
await new Promise(resolve => setTimeout(resolve, 3000))
|
||||
|
||||
// Verify a failed workflow run was created for executor unavailability
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
if (runs.totalDocs > 0) {
|
||||
expect(runs.docs[0].error).toBeDefined()
|
||||
console.log('✅ Executor unavailable error captured:', runs.docs[0].error)
|
||||
} else {
|
||||
console.log('⚠️ No workflow run created - this indicates the hook may not have executed')
|
||||
}
|
||||
} finally {
|
||||
// Restore the original executor
|
||||
global.__workflowExecutor = originalExecutor
|
||||
}
|
||||
}, 30000)
|
||||
|
||||
it('should handle workflow conditions properly', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with a condition that should prevent execution
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Conditional Execution',
|
||||
description: 'Tests conditional workflow execution',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create',
|
||||
condition: '$.trigger.doc.content == "TRIGGER_CONDITION"'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'conditional-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
post: '$.trigger.doc.id',
|
||||
message: 'Conditional trigger executed'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Create a post that SHOULD NOT trigger the workflow
|
||||
const post1 = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'Test Hook Conditional - Should Not Trigger'
|
||||
}
|
||||
})
|
||||
|
||||
// Create a post that SHOULD trigger the workflow
|
||||
const post2 = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: 'TRIGGER_CONDITION'
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
// Check workflow runs
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Should have exactly 1 run (only for the matching condition)
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
// Either succeeded or failed, but should have executed
|
||||
expect(['completed', 'failed']).toContain(runs.docs[0].status)
|
||||
|
||||
// Verify audit log was created only for the correct post
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post2.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
|
||||
// Verify no audit log for the first post
|
||||
const noAuditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
post: {
|
||||
equals: post1.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(noAuditLogs.totalDocs).toBe(0)
|
||||
|
||||
console.log('✅ Conditional execution working correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle multiple concurrent hook executions', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Hook Concurrent Execution',
|
||||
description: 'Tests handling multiple concurrent hook executions',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'concurrent-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
post: '$.trigger.doc.id',
|
||||
message: 'Concurrent execution test'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Create multiple posts concurrently
|
||||
const concurrentCreations = Array.from({ length: 5 }, (_, i) =>
|
||||
payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
content: `Test Hook Concurrent Post ${i + 1}`
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
const posts = await Promise.all(concurrentCreations)
|
||||
expect(posts).toHaveLength(5)
|
||||
|
||||
// Wait for all workflow executions
|
||||
await new Promise(resolve => setTimeout(resolve, 8000))
|
||||
|
||||
// Verify all workflow runs were created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(5)
|
||||
|
||||
// Verify all runs completed successfully
|
||||
const failedRuns = runs.docs.filter(run => run.status === 'failed')
|
||||
expect(failedRuns).toHaveLength(0)
|
||||
|
||||
console.log('✅ Concurrent executions completed:', {
|
||||
totalRuns: runs.totalDocs,
|
||||
statuses: runs.docs.map(run => run.status)
|
||||
})
|
||||
}, 45000)
|
||||
})
|
||||
@@ -1,16 +1,13 @@
import type {CollectionSlug, TypedJobs} from 'payload';
import type {CollectionSlug} from 'payload';

import {sqliteAdapter} from "@payloadcms/db-sqlite"
import { lexicalEditor } from '@payloadcms/richtext-lexical'
import { MongoMemoryReplSet } from 'mongodb-memory-server'
import path from 'path'
import {buildConfig} from 'payload'
import sharp from 'sharp'
import { fileURLToPath } from 'url'

import {workflowsPlugin} from "../src/plugin/index.js"
import {HttpRequestStepTask} from "../src/steps/http-request.js"
import {CreateDocumentStepTask} from "../src/steps/index.js"
import {CreateDocumentStepTask,HttpRequestStepTask} from "../src/steps/index.js"
import { testEmailAdapter } from './helpers/testEmailAdapter.js'
import { seed } from './seed.js'

@@ -24,13 +21,24 @@ if (!process.env.ROOT_DIR) {
const buildConfigWithMemoryDB = async () => {
// Use MongoDB adapter for testing instead of SQLite
const { mongooseAdapter } = await import('@payloadcms/db-mongodb')


return buildConfig({
admin: {
importMap: {
baseDir: path.resolve(dirname, '..'),
},
},
globals: [
{
slug: 'settings',
fields: [
{
name: 'siteName',
type: 'text'
}
]
}
],
collections: [
{
slug: 'posts',
@@ -96,14 +104,13 @@ const buildConfigWithMemoryDB = async () => {
posts: true,
media: true
},
globalTriggers: {
settings: true
},
steps: [
HttpRequestStepTask,
CreateDocumentStepTask
],
triggers: [

],
webhookPrefix: '/workflows-webhook'
}),
],
secret: process.env.PAYLOAD_SECRET || 'test-secret_key',

@@ -1,94 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
import { mockHttpBin, testFixtures } from './test-helpers.js'
|
||||
|
||||
describe('Workflow Trigger Test', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
// Set up HTTP mocks
|
||||
const expectedRequestData = {
|
||||
message: 'Post created',
|
||||
postId: expect.any(String), // MongoDB ObjectId
|
||||
postTitle: 'Test post content for workflow trigger'
|
||||
}
|
||||
mockHttpBin.mockPost(expectedRequestData)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
mockHttpBin.cleanup()
|
||||
})
|
||||
|
||||
it('should create a workflow run when a post is created', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Use test fixtures for consistent data
|
||||
const testWorkflow = {
|
||||
...testFixtures.basicWorkflow,
|
||||
name: 'Test Post Creation Workflow',
|
||||
description: 'Triggers when a post is created',
|
||||
steps: [
|
||||
{
|
||||
...testFixtures.httpRequestStep(),
|
||||
name: 'log-post',
|
||||
body: {
|
||||
message: 'Post created',
|
||||
postId: '$.trigger.doc.id',
|
||||
postTitle: '$.trigger.doc.content'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
// Create a workflow with collection trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: testWorkflow
|
||||
})
|
||||
|
||||
expect(workflow).toBeDefined()
|
||||
expect(workflow.id).toBeDefined()
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: testFixtures.testPost
|
||||
})
|
||||
|
||||
expect(post).toBeDefined()
|
||||
expect(post.id).toBeDefined()
|
||||
|
||||
// Wait a bit for workflow to execute
|
||||
await new Promise(resolve => setTimeout(resolve, 3000))
|
||||
|
||||
// Check for workflow runs
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 10
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBeGreaterThan(0)
|
||||
|
||||
// Check if workflow is an object or ID
|
||||
const workflowRef = runs.docs[0].workflow
|
||||
const workflowId = typeof workflowRef === 'object' && workflowRef !== null
|
||||
? (workflowRef as any).id
|
||||
: workflowRef
|
||||
|
||||
expect(workflowId).toBe(workflow.id) // Should reference the workflow ID
|
||||
|
||||
console.log('✅ Workflow run created successfully!')
|
||||
console.log(`Run status: ${runs.docs[0].status}`)
|
||||
console.log(`Run ID: ${runs.docs[0].id}`)
|
||||
|
||||
if (runs.docs[0].status === 'failed' && runs.docs[0].error) {
|
||||
console.log(`Error: ${runs.docs[0].error}`)
|
||||
}
|
||||
}, 30000)
|
||||
})
|
||||
@@ -1,201 +0,0 @@
|
||||
import nock from 'nock'
|
||||
|
||||
/**
|
||||
* Mock HTTP requests to httpbin.org for testing
|
||||
*/
|
||||
export const mockHttpBin = {
|
||||
/**
|
||||
* Mock a successful POST request to httpbin.org/post
|
||||
*/
|
||||
mockPost: (expectedData?: any) => {
|
||||
return nock('https://httpbin.org')
|
||||
.post('/post')
|
||||
.reply(200, {
|
||||
args: {},
|
||||
data: JSON.stringify(expectedData || {}),
|
||||
files: {},
|
||||
form: {},
|
||||
headers: {
|
||||
'Accept': '*/*',
|
||||
'Accept-Encoding': 'br, gzip, deflate',
|
||||
'Accept-Language': '*',
|
||||
'Content-Type': 'application/json',
|
||||
'Host': 'httpbin.org',
|
||||
'Sec-Fetch-Mode': 'cors',
|
||||
'User-Agent': 'PayloadCMS-Automation/1.0'
|
||||
},
|
||||
json: expectedData || {},
|
||||
origin: '127.0.0.1',
|
||||
url: 'https://httpbin.org/post'
|
||||
}, {
|
||||
'Content-Type': 'application/json',
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Credentials': 'true'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock a GET request to httpbin.org/get
|
||||
*/
|
||||
mockGet: () => {
|
||||
return nock('https://httpbin.org')
|
||||
.get('/get')
|
||||
.reply(200, {
|
||||
args: {},
|
||||
headers: {
|
||||
'Accept': '*/*',
|
||||
'Host': 'httpbin.org',
|
||||
'User-Agent': 'PayloadCMS-Automation/1.0'
|
||||
},
|
||||
origin: '127.0.0.1',
|
||||
url: 'https://httpbin.org/get'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock HTTP timeout
|
||||
*/
|
||||
mockTimeout: (path: string = '/delay/10') => {
|
||||
return nock('https://httpbin.org')
|
||||
.get(path)
|
||||
.replyWithError({
|
||||
code: 'ECONNABORTED',
|
||||
message: 'timeout of 2000ms exceeded'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock HTTP error responses
|
||||
*/
|
||||
mockError: (status: number, path: string = '/status/' + status) => {
|
||||
return nock('https://httpbin.org')
|
||||
.get(path)
|
||||
.reply(status, {
|
||||
error: `HTTP ${status} Error`,
|
||||
message: `Mock ${status} response`
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock invalid URL to simulate network errors
|
||||
*/
|
||||
mockNetworkError: (url: string = 'invalid-url-that-will-fail') => {
|
||||
return nock('https://' + url)
|
||||
.get('/')
|
||||
.replyWithError({
|
||||
code: 'ENOTFOUND',
|
||||
message: `getaddrinfo ENOTFOUND ${url}`
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock HTML response (non-JSON)
|
||||
*/
|
||||
mockHtml: () => {
|
||||
return nock('https://httpbin.org')
|
||||
.get('/html')
|
||||
.reply(200, '<!DOCTYPE html><html><head><title>Test</title></head><body>Test HTML</body></html>', {
|
||||
'Content-Type': 'text/html'
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Mock all common endpoints for error scenarios
|
||||
*/
|
||||
mockAllErrorScenarios: () => {
|
||||
// HTML response for invalid JSON test
|
||||
nock('https://httpbin.org')
|
||||
.get('/html')
|
||||
.reply(200, '<!DOCTYPE html><html><head><title>Test</title></head><body>Test HTML</body></html>', {
|
||||
'Content-Type': 'text/html'
|
||||
})
|
||||
|
||||
// 404 error
|
||||
nock('https://httpbin.org')
|
||||
.get('/status/404')
|
||||
.reply(404, {
|
||||
error: 'Not Found',
|
||||
message: 'The requested resource was not found'
|
||||
})
|
||||
|
||||
// 500 error
|
||||
nock('https://httpbin.org')
|
||||
.get('/status/500')
|
||||
.reply(500, {
|
||||
error: 'Internal Server Error',
|
||||
message: 'Server encountered an error'
|
||||
})
|
||||
|
||||
// 503 error for retry tests
|
||||
nock('https://httpbin.org')
|
||||
.get('/status/503')
|
||||
.times(3) // Allow 3 retries
|
||||
.reply(503, {
|
||||
error: 'Service Unavailable',
|
||||
message: 'Service is temporarily unavailable'
|
||||
})
|
||||
|
||||
// POST endpoint for circular reference and other POST tests
|
||||
nock('https://httpbin.org')
|
||||
.post('/post')
|
||||
.times(5) // Allow multiple POST requests
|
||||
.reply(200, (uri, requestBody) => ({
|
||||
args: {},
|
||||
data: JSON.stringify(requestBody),
|
||||
json: requestBody,
|
||||
url: 'https://httpbin.org/post'
|
||||
}))
|
||||
},
|
||||
|
||||
/**
|
||||
* Clean up all nock mocks
|
||||
*/
|
||||
cleanup: () => {
|
||||
nock.cleanAll()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test fixtures for common workflow configurations
|
||||
*/
|
||||
export const testFixtures = {
|
||||
basicWorkflow: {
|
||||
name: 'Test Basic Workflow',
|
||||
description: 'Basic workflow for testing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger' as const,
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create' as const
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
httpRequestStep: (url: string = 'https://httpbin.org/post', expectedData?: any) => ({
|
||||
name: 'http-request',
|
||||
step: 'http-request-step',
|
||||
url,
|
||||
method: 'POST' as const,
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: expectedData || {
|
||||
message: 'Test request',
|
||||
data: '$.trigger.doc'
|
||||
}
|
||||
}),
|
||||
|
||||
createDocumentStep: (collectionSlug: string = 'auditLog') => ({
|
||||
name: 'create-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug,
|
||||
data: {
|
||||
message: 'Test document created',
|
||||
sourceId: '$.trigger.doc.id'
|
||||
}
|
||||
}),
|
||||
|
||||
testPost: {
|
||||
content: 'Test post content for workflow trigger'
|
||||
}
|
||||
}
|
||||
@@ -1,125 +0,0 @@
|
||||
import { MongoMemoryReplSet } from 'mongodb-memory-server'
|
||||
import { getPayload } from 'payload'
|
||||
import type { Payload } from 'payload'
|
||||
import nock from 'nock'
|
||||
import config from './payload.config.js'
|
||||
|
||||
// Configure nock to intercept fetch requests properly in Node.js 22
|
||||
nock.disableNetConnect()
|
||||
nock.enableNetConnect('127.0.0.1')
|
||||
|
||||
// Set global fetch to use undici for proper nock interception
|
||||
import { fetch } from 'undici'
|
||||
global.fetch = fetch
|
||||
|
||||
let mongod: MongoMemoryReplSet | null = null
|
||||
let payload: Payload | null = null
|
||||
|
||||
// Global test setup - runs once for all tests
|
||||
beforeAll(async () => {
|
||||
// Start MongoDB in-memory replica set
|
||||
mongod = await MongoMemoryReplSet.create({
|
||||
replSet: {
|
||||
count: 1,
|
||||
dbName: 'payload-test',
|
||||
},
|
||||
})
|
||||
|
||||
const mongoUri = mongod.getUri()
|
||||
process.env.DATABASE_URI = mongoUri
|
||||
|
||||
console.log('🚀 MongoDB in-memory server started:', mongoUri)
|
||||
|
||||
// Initialize Payload with test config
|
||||
payload = await getPayload({
|
||||
config: await config,
|
||||
local: true
|
||||
})
|
||||
|
||||
console.log('✅ Payload initialized for testing')
|
||||
}, 60000)
|
||||
|
||||
// Global test teardown - runs once after all tests
|
||||
afterAll(async () => {
|
||||
if (payload) {
|
||||
console.log('🛑 Shutting down Payload...')
|
||||
// Payload doesn't have a shutdown method, but we can clear the cache
|
||||
delete (global as any).payload
|
||||
payload = null
|
||||
}
|
||||
|
||||
if (mongod) {
|
||||
console.log('🛑 Stopping MongoDB in-memory server...')
|
||||
await mongod.stop()
|
||||
mongod = null
|
||||
}
|
||||
}, 30000)
|
||||
|
||||
// Export payload instance for tests
|
||||
export const getTestPayload = () => {
|
||||
if (!payload) {
|
||||
throw new Error('Payload not initialized. Make sure test setup has run.')
|
||||
}
|
||||
return payload
|
||||
}
|
||||
|
||||
// Helper to clean all collections
|
||||
export const cleanDatabase = async () => {
|
||||
if (!payload) return
|
||||
|
||||
try {
|
||||
// Clean up workflow runs first (child records)
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const run of runs.docs) {
|
||||
await payload.delete({
|
||||
collection: 'workflow-runs',
|
||||
id: run.id
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up workflows
|
||||
const workflows = await payload.find({
|
||||
collection: 'workflows',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
await payload.delete({
|
||||
collection: 'workflows',
|
||||
id: workflow.id
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up audit logs
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const log of auditLogs.docs) {
|
||||
await payload.delete({
|
||||
collection: 'auditLog',
|
||||
id: log.id
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up posts
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
limit: 1000
|
||||
})
|
||||
|
||||
for (const post of posts.docs) {
|
||||
await payload.delete({
|
||||
collection: 'posts',
|
||||
id: post.id
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Database cleanup failed:', error)
|
||||
}
|
||||
}
|
||||
@@ -1,104 +0,0 @@
|
||||
import type { Payload } from 'payload'
|
||||
import { getPayload } from 'payload'
|
||||
import config from './payload.config'
|
||||
|
||||
async function testWorkflowTrigger() {
|
||||
console.log('Starting workflow trigger test...')
|
||||
|
||||
// Get payload instance
|
||||
const payload = await getPayload({ config })
|
||||
|
||||
try {
|
||||
// Create a test user
|
||||
const user = await payload.create({
|
||||
collection: 'users',
|
||||
data: {
|
||||
email: 'test@example.com',
|
||||
password: 'password123'
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Created test user:', user.id)
|
||||
|
||||
// Create a workflow with collection trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Post Creation Workflow',
|
||||
description: 'Triggers when a post is created',
|
||||
triggers: [
|
||||
{
|
||||
type: 'collection-trigger',
|
||||
collectionSlug: 'posts',
|
||||
operation: 'create'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'log-post',
|
||||
step: 'http-request-step',
|
||||
input: {
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: {
|
||||
message: 'Post created',
|
||||
postId: '$.trigger.doc.id',
|
||||
postTitle: '$.trigger.doc.title'
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
user: user.id
|
||||
})
|
||||
|
||||
console.log('Created workflow:', workflow.id)
|
||||
|
||||
// Create a post to trigger the workflow
|
||||
console.log('Creating post to trigger workflow...')
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
title: 'Test Post',
|
||||
content: 'This should trigger the workflow',
|
||||
_status: 'published'
|
||||
},
|
||||
user: user.id
|
||||
})
|
||||
|
||||
console.log('Created post:', post.id)
|
||||
|
||||
// Wait a bit for workflow to execute
|
||||
await new Promise(resolve => setTimeout(resolve, 2000))
|
||||
|
||||
// Check for workflow runs
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Workflow runs found:', runs.totalDocs)
|
||||
|
||||
if (runs.totalDocs > 0) {
|
||||
console.log('✅ SUCCESS: Workflow was triggered!')
|
||||
console.log('Run status:', runs.docs[0].status)
|
||||
console.log('Run context:', JSON.stringify(runs.docs[0].context, null, 2))
|
||||
} else {
|
||||
console.log('❌ FAILURE: Workflow was not triggered')
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Test failed:', error)
|
||||
} finally {
|
||||
await payload.shutdown()
|
||||
}
|
||||
}
|
||||
|
||||
testWorkflowTrigger().catch(console.error)
|
||||
@@ -1,483 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
|
||||
describe('Webhook Trigger Testing', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
it('should trigger workflow via webhook endpoint simulation', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with webhook trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Basic Trigger',
|
||||
description: 'Tests basic webhook triggering',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-basic'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'create-webhook-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Webhook triggered successfully',
|
||||
user: '$.trigger.data.userId'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
expect(workflow).toBeDefined()
|
||||
|
||||
// Directly execute the workflow with webhook-like data
|
||||
const executor = (globalThis as any).__workflowExecutor
|
||||
if (!executor) {
|
||||
console.warn('⚠️ Workflow executor not available, skipping webhook execution')
|
||||
return
|
||||
}
|
||||
|
||||
// Simulate webhook trigger by directly executing the workflow
|
||||
const webhookData = {
|
||||
userId: 'webhook-test-user',
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
|
||||
const mockReq = {
|
||||
payload,
|
||||
user: null,
|
||||
headers: {}
|
||||
}
|
||||
|
||||
await executor.execute({
|
||||
workflow,
|
||||
trigger: {
|
||||
type: 'webhook',
|
||||
path: 'test-basic',
|
||||
data: webhookData,
|
||||
headers: {}
|
||||
},
|
||||
req: mockReq as any,
|
||||
payload
|
||||
})
|
||||
|
||||
console.log('✅ Workflow executed directly')
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 2000))
|
||||
|
||||
// Verify workflow run was created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).not.toBe('failed')
|
||||
|
||||
// Verify audit log was created
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'Webhook triggered'
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
console.log('✅ Webhook audit log created')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook with complex data', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Complex Data',
|
||||
description: 'Tests webhook with complex JSON data',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-complex'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'echo-webhook-data',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
originalData: '$.trigger.data',
|
||||
headers: '$.trigger.headers',
|
||||
path: '$.trigger.path'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const complexData = {
|
||||
user: {
|
||||
id: 123,
|
||||
name: 'Test User',
|
||||
permissions: ['read', 'write']
|
||||
},
|
||||
event: {
|
||||
type: 'user_action',
|
||||
timestamp: new Date().toISOString(),
|
||||
metadata: {
|
||||
source: 'webhook-test',
|
||||
version: '1.0.0'
|
||||
}
|
||||
},
|
||||
nested: {
|
||||
deeply: {
|
||||
nested: {
|
||||
value: 'deep-test-value'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const response = await makeWebhookRequest('test-complex', complexData)
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed')
|
||||
|
||||
// Verify the complex data was properly passed through
|
||||
const stepOutput = runs.docs[0].context.steps['echo-webhook-data'].output
|
||||
expect(stepOutput.status).toBe(200)
|
||||
|
||||
const responseBody = JSON.parse(stepOutput.body)
|
||||
expect(responseBody.json.originalData.user.name).toBe('Test User')
|
||||
expect(responseBody.json.originalData.nested.deeply.nested.value).toBe('deep-test-value')
|
||||
|
||||
console.log('✅ Complex webhook data processed correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook conditions', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Conditional',
|
||||
description: 'Tests conditional webhook execution',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-conditional',
|
||||
condition: '$.data.action == "important"'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'conditional-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Webhook condition met - important action'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// First request - should NOT trigger (condition not met)
|
||||
const response1 = await makeWebhookRequest('test-conditional', {
|
||||
action: 'normal',
|
||||
data: 'test'
|
||||
})
|
||||
expect(response1.status).toBe(200)
|
||||
|
||||
// Second request - SHOULD trigger (condition met)
|
||||
const response2 = await makeWebhookRequest('test-conditional', {
|
||||
action: 'important',
|
||||
priority: 'high'
|
||||
})
|
||||
expect(response2.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Should have exactly 1 run (only for the matching condition)
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).not.toBe('failed')
|
||||
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'condition met'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
console.log('✅ Webhook conditional execution working')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook authentication headers', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Headers',
|
||||
description: 'Tests webhook header processing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-headers'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'process-headers',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
receivedHeaders: '$.trigger.headers',
|
||||
authorization: '$.trigger.headers.authorization',
|
||||
userAgent: '$.trigger.headers.user-agent'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Make webhook request with custom headers
|
||||
const webhookUrl = `${baseUrl}/api/workflows/webhook/test-headers`
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': 'Bearer test-token-123',
|
||||
'User-Agent': 'Webhook-Test-Client/1.0',
|
||||
'X-Custom-Header': 'custom-value'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
test: 'header processing'
|
||||
})
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed')
|
||||
|
||||
// Verify headers were captured and processed
|
||||
const stepOutput = runs.docs[0].context.steps['process-headers'].output
|
||||
const responseBody = JSON.parse(stepOutput.body)
|
||||
|
||||
expect(responseBody.json.authorization).toBe('Bearer test-token-123')
|
||||
expect(responseBody.json.userAgent).toBe('Webhook-Test-Client/1.0')
|
||||
|
||||
console.log('✅ Webhook headers processed correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle multiple concurrent webhook requests', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Concurrent',
|
||||
description: 'Tests concurrent webhook processing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-concurrent'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'concurrent-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Concurrent webhook execution',
|
||||
requestId: '$.trigger.data.requestId'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Make multiple concurrent webhook requests
|
||||
const concurrentRequests = Array.from({ length: 5 }, (_, i) =>
|
||||
makeWebhookRequest('test-concurrent', {
|
||||
requestId: `concurrent-${i + 1}`,
|
||||
timestamp: new Date().toISOString()
|
||||
})
|
||||
)
|
||||
|
||||
const responses = await Promise.all(concurrentRequests)
|
||||
responses.forEach(response => {
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
|
||||
// Wait for all workflow executions
|
||||
await new Promise(resolve => setTimeout(resolve, 8000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(5)
|
||||
|
||||
// Verify all runs completed successfully
|
||||
const failedRuns = runs.docs.filter(run => run.status === 'failed')
|
||||
expect(failedRuns).toHaveLength(0)
|
||||
|
||||
// Verify all audit logs were created
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'Concurrent webhook'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(5)
|
||||
console.log('✅ Concurrent webhook requests processed successfully')
|
||||
}, 35000)
|
||||
|
||||
it('should handle non-existent webhook paths gracefully', async () => {
|
||||
// Test that workflows with non-matching webhook paths don't get triggered
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Non-existent Path',
|
||||
description: 'Should not be triggered by different path',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'specific-path'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'create-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'This should not be created'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Simulate trying to trigger with wrong path - should not execute workflow
|
||||
const initialRuns = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(initialRuns.totalDocs).toBe(0)
|
||||
console.log('✅ Non-existent webhook path handled: no workflow runs created')
|
||||
}, 10000)
|
||||
|
||||
it('should handle malformed webhook JSON', async () => {
|
||||
const webhookUrl = `${baseUrl}/api/workflows/webhook/test-malformed`
|
||||
|
||||
// First create a workflow to receive the malformed request
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Malformed JSON',
|
||||
description: 'Tests malformed JSON handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-malformed'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'malformed-test',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Processed malformed request'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Send malformed JSON
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: '{"malformed": json, "missing": quotes}'
|
||||
})
|
||||
|
||||
// Should handle malformed JSON gracefully
|
||||
expect([400, 422]).toContain(response.status)
|
||||
console.log('✅ Malformed JSON handled:', response.status)
|
||||
}, 15000)
|
||||
})
|
||||
@@ -28,6 +28,12 @@ export default [
rules: {
'no-restricted-exports': 'off',
'no-console': 'off',
'perfectionist/sort-object-types': 'off',
'perfectionist/sort-objects': 'off',
'perfectionist/sort-exports': 'off',
'perfectionist/sort-imports': 'off',
'perfectionist/sort-switch-case': 'off',
'perfectionist/sort-interfaces': 'off'
},
},
{

@@ -1,218 +0,0 @@
# Trigger Builder Examples

The new trigger builder API dramatically reduces boilerplate when creating custom triggers.

## Before vs After

### Before (Manual Approach)
```typescript
const customTrigger = {
  slug: 'order-webhook',
  inputs: [
    {
      name: 'webhookSecret',
      type: 'text',
      required: true,
      virtual: true,
      admin: {
        condition: (_, siblingData) => siblingData?.type === 'order-webhook',
        description: 'Secret for webhook validation'
      },
      hooks: {
        afterRead: [({ siblingData }) => siblingData?.parameters?.webhookSecret],
        beforeChange: [({ value, siblingData }) => {
          if (!siblingData.parameters) siblingData.parameters = {}
          siblingData.parameters.webhookSecret = value
          return undefined
        }]
      }
    },
    {
      name: 'orderStatuses',
      type: 'select',
      hasMany: true,
      options: ['pending', 'processing', 'completed'],
      defaultValue: ['completed'],
      virtual: true,
      admin: {
        condition: (_, siblingData) => siblingData?.type === 'order-webhook',
        description: 'Order statuses that trigger the workflow'
      },
      hooks: {
        afterRead: [({ siblingData }) => siblingData?.parameters?.orderStatuses || ['completed']],
        beforeChange: [({ value, siblingData }) => {
          if (!siblingData.parameters) siblingData.parameters = {}
          siblingData.parameters.orderStatuses = value
          return undefined
        }]
      }
    }
    // ... imagine more fields with similar boilerplate
  ]
}
```

### After (Builder Approach)
```typescript
import { createTrigger } from '@xtr-dev/payload-automation/helpers'

const orderWebhook = createTrigger('order-webhook').parameters({
  webhookSecret: {
    type: 'text',
    required: true,
    admin: {
      description: 'Secret for webhook validation'
    }
  },
  orderStatuses: {
    type: 'select',
    hasMany: true,
    options: ['pending', 'processing', 'completed'],
    defaultValue: ['completed'],
    admin: {
      description: 'Order statuses that trigger the workflow'
    }
  }
})
```

## Built-in Trigger Presets

### Webhook Trigger
```typescript
import { webhookTrigger } from '@xtr-dev/payload-automation/helpers'

const paymentWebhook = webhookTrigger('payment-webhook')
  .parameter('currency', {
    type: 'select',
    options: ['USD', 'EUR', 'GBP'],
    defaultValue: 'USD'
  })
  .build()
```

### Scheduled/Cron Trigger
```typescript
import { cronTrigger } from '@xtr-dev/payload-automation/helpers'

const dailyReport = cronTrigger('daily-report')
  .parameter('reportFormat', {
    type: 'select',
    options: ['pdf', 'csv', 'json'],
    defaultValue: 'pdf'
  })
  .build()
```

### Manual Trigger (No Parameters)
```typescript
import { manualTrigger } from '@xtr-dev/payload-automation/helpers'

const backupTrigger = manualTrigger('manual-backup')
```

## Advanced Usage

### Extending Common Parameters
```typescript
import { createAdvancedTrigger, webhookParameters } from '@xtr-dev/payload-automation/helpers'

const advancedWebhook = createAdvancedTrigger('advanced-webhook')
  .extend(webhookParameters) // Includes path, secret, headers
  .parameter('retryAttempts', {
    type: 'number',
    min: 0,
    max: 5,
    defaultValue: 3
  })
  .parameter('timeout', {
    type: 'number',
    min: 1000,
    max: 30000,
    defaultValue: 5000,
    admin: {
      description: 'Timeout in milliseconds'
    }
  })
  .build()
```

### Custom Validation
```typescript
const validatedTrigger = createTrigger('validated-trigger').parameters({
  email: {
    type: 'email',
    required: true,
    validate: (value) => {
      if (value?.endsWith('@spam.com')) {
        return 'Spam domains not allowed'
      }
      return true
    }
  },
  webhookUrl: {
    type: 'text',
    required: true,
    validate: (value) => {
      try {
        const url = new URL(value)
        if (!['http:', 'https:'].includes(url.protocol)) {
          return 'Only HTTP/HTTPS URLs allowed'
        }
      } catch {
        return 'Please enter a valid URL'
      }
      return true
    }
  }
})
```

## Usage in Plugin Configuration

```typescript
import { workflowsPlugin } from '@xtr-dev/payload-automation'
import {
  createTrigger,
  webhookTrigger,
  cronTrigger
} from '@xtr-dev/payload-automation/helpers'

export default buildConfig({
  plugins: [
    workflowsPlugin({
      triggers: [
        // Mix different trigger types
        createTrigger('user-signup').parameters({
          source: {
            type: 'select',
            options: ['web', 'mobile', 'api'],
            required: true
          }
        }),

        webhookTrigger('payment-received')
          .parameter('minimumAmount', { type: 'number', min: 0 })
          .build(),

        cronTrigger('weekly-cleanup')
          .parameter('deleteOlderThan', {
            type: 'number',
            defaultValue: 30,
            admin: { description: 'Delete records older than N days' }
          })
          .build()
      ]
    })
  ]
})
```

## Benefits

- **90% less boilerplate** - No manual hooks, conditions, or virtual field setup
- **Type safety** - Full TypeScript support
- **Reusable patterns** - Common trigger types as presets
- **Composable** - Mix builders with manual fields
- **Backward compatible** - Existing triggers continue to work
- **Validation built-in** - Parameter validation handled automatically

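As a rough illustration of the "composable" point above, a builder-produced trigger and a hand-written `{ slug, inputs }` trigger (the shape used elsewhere in this repository) can sit side by side in the same plugin config. The slugs and fields in this sketch are made up:

```typescript
import { workflowsPlugin } from '@xtr-dev/payload-automation'
import { webhookTrigger } from '@xtr-dev/payload-automation/helpers'

workflowsPlugin({
  triggers: [
    // Built with the preset builder
    webhookTrigger('invoice-paid').build(),

    // Defined manually, without the builder
    {
      slug: 'legacy-import',
      inputs: [
        {
          name: 'batchSize',
          type: 'number',
          defaultValue: 100
        }
      ]
    }
  ]
})
```
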
@@ -1,68 +0,0 @@
# PayloadCMS Workflows Plugin Examples

This directory contains example code demonstrating how to use the PayloadCMS Workflows plugin.

## Manual Trigger Example

The `manual-trigger-example.ts` file shows how to:
- Create a workflow with a manual trigger button in the admin UI
- Trigger workflows programmatically using custom triggers
- Access trigger data in workflow steps using JSONPath

### Setting up a Manual Trigger Workflow

1. Configure the plugin with a custom trigger:
```typescript
workflowsPlugin({
  triggers: [
    {
      slug: 'manual-trigger',
      inputs: [] // No inputs needed for simple manual triggers
    }
  ],
  // ... other config
})
```

2. Create a workflow with the manual trigger:
```typescript
await payload.create({
  collection: 'workflows',
  data: {
    name: 'My Manual Workflow',
    triggers: [
      {
        type: 'manual-trigger'
      }
    ],
    steps: [
      // Your workflow steps here
    ]
  }
})
```

3. The workflow will now have a "Trigger Workflow" button in the admin UI

### Triggering Workflows Programmatically

```typescript
import { triggerCustomWorkflow } from '@xtr-dev/payload-automation'

// Trigger all workflows with 'manual-trigger'
const results = await triggerCustomWorkflow(payload, {
  slug: 'manual-trigger',
  data: {
    // Custom data to pass to the workflow
    source: 'api',
    timestamp: new Date().toISOString()
  }
})
```

### Accessing Trigger Data in Steps

Use JSONPath expressions to access trigger data in your workflow steps:
- `$.trigger.data.source` - Access custom data fields
- `$.trigger.type` - The trigger type
- `$.trigger.triggeredAt` - When the trigger was activated

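For example, a step that records these values could look roughly like the sketch below. It reuses the `create-document` step and the `auditLog` collection from this repository's test config; the `source` and `triggeredAt` fields are illustrative and would need to exist on whatever collection you write to:

```typescript
await payload.create({
  collection: 'workflows',
  data: {
    name: 'Record Manual Triggers',
    triggers: [
      {
        type: 'manual-trigger'
      }
    ],
    steps: [
      {
        name: 'record-trigger',
        step: 'create-document',
        collectionSlug: 'auditLog',
        data: {
          message: 'Workflow triggered manually', // static value
          source: '$.trigger.data.source',        // custom data field (illustrative)
          triggeredAt: '$.trigger.triggeredAt'    // trigger timestamp (illustrative)
        }
      }
    ]
  }
})
```
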
@@ -1,274 +0,0 @@
|
||||
import { buildConfig } from 'payload'
|
||||
import { workflowsPlugin, triggerCustomWorkflow } from '@xtr-dev/payload-automation'
|
||||
import type { Field } from 'payload'
|
||||
|
||||
// Example: Data import trigger with custom fields
|
||||
const dataImportFields: Field[] = [
|
||||
{
|
||||
name: 'sourceUrl',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'URL of the data source to import from'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'format',
|
||||
type: 'select',
|
||||
options: ['json', 'csv', 'xml'],
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Format of the data to import'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'mapping',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Field mapping configuration'
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
// Example: Manual review trigger with approval fields
|
||||
const manualReviewFields: Field[] = [
|
||||
{
|
||||
name: 'reviewerId',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'ID of the reviewer'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'reviewNotes',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Notes from the review'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'approved',
|
||||
type: 'checkbox',
|
||||
defaultValue: false,
|
||||
admin: {
|
||||
description: 'Whether the item was approved'
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
export default buildConfig({
|
||||
// ... other config
|
||||
|
||||
plugins: [
|
||||
workflowsPlugin({
|
||||
collectionTriggers: {
|
||||
posts: true, // Enable all CRUD triggers for posts
|
||||
products: { // Selective triggers for products
|
||||
create: true,
|
||||
update: true
|
||||
}
|
||||
},
|
||||
|
||||
// Define custom triggers that will appear in the workflow UI
|
||||
triggers: [
|
||||
{
|
||||
slug: 'data-import',
|
||||
inputs: dataImportFields
|
||||
},
|
||||
{
|
||||
slug: 'manual-review',
|
||||
inputs: manualReviewFields
|
||||
},
|
||||
{
|
||||
slug: 'scheduled-report',
|
||||
inputs: [
|
||||
{
|
||||
name: 'reportType',
|
||||
type: 'select',
|
||||
options: ['daily', 'weekly', 'monthly'],
|
||||
required: true
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
|
||||
steps: [
|
||||
// ... your workflow steps
|
||||
]
|
||||
})
|
||||
],
|
||||
|
||||
onInit: async (payload) => {
|
||||
// Example 1: Trigger workflow from external data source
|
||||
// This could be called from a webhook, scheduled job, or any other event
|
||||
const handleDataImport = async (sourceUrl: string, format: string) => {
|
||||
const results = await triggerCustomWorkflow(payload, {
|
||||
slug: 'data-import',
|
||||
data: {
|
||||
sourceUrl,
|
||||
format,
|
||||
mapping: {
|
||||
title: 'name',
|
||||
description: 'summary'
|
||||
},
|
||||
importedAt: new Date().toISOString()
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Data import workflows triggered:', results)
|
||||
}
|
||||
|
||||
// Example 2: Trigger workflow after custom business logic
|
||||
const handleDocumentReview = async (documentId: string, reviewerId: string, approved: boolean) => {
|
||||
// Perform your custom review logic here
|
||||
const reviewData = {
|
||||
documentId,
|
||||
reviewerId,
|
||||
reviewNotes: approved ? 'Document meets all requirements' : 'Needs revision',
|
||||
approved,
|
||||
reviewedAt: new Date().toISOString()
|
||||
}
|
||||
|
||||
// Trigger workflows that listen for manual review
|
||||
const results = await triggerCustomWorkflow(payload, {
|
||||
slug: 'manual-review',
|
||||
data: reviewData,
|
||||
user: {
|
||||
id: reviewerId,
|
||||
email: 'reviewer@example.com'
|
||||
}
|
||||
})
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// Example 3: Integrate with external services
|
||||
// You could set up listeners for external events
|
||||
if (process.env.ENABLE_EXTERNAL_SYNC) {
|
||||
// Listen to external service events (example with a hypothetical event emitter)
|
||||
// externalService.on('data-ready', async (event) => {
|
||||
// await triggerCustomWorkflow(payload, {
|
||||
// slug: 'data-import',
|
||||
// data: event.data
|
||||
// })
|
||||
// })
|
||||
}
|
||||
|
||||
// Example 4: Create scheduled reports using node-cron or similar
|
||||
// This shows how you might trigger a custom workflow on a schedule
|
||||
// without using the built-in cron trigger
|
||||
const scheduleReports = async () => {
|
||||
// This could be called by a cron job or scheduled task
|
||||
await triggerCustomWorkflow(payload, {
|
||||
slug: 'scheduled-report',
|
||||
data: {
|
||||
reportType: 'daily',
|
||||
generatedAt: new Date().toISOString(),
|
||||
metrics: {
|
||||
totalUsers: 1000,
|
||||
activeUsers: 750,
|
||||
newSignups: 25
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Example 5: Hook into collection operations for complex logic
|
||||
const postsCollection = payload.collections.posts
|
||||
if (postsCollection) {
|
||||
postsCollection.config.hooks = postsCollection.config.hooks || {}
|
||||
postsCollection.config.hooks.afterChange = postsCollection.config.hooks.afterChange || []
|
||||
|
||||
postsCollection.config.hooks.afterChange.push(async ({ doc, operation, req }) => {
|
||||
// Custom logic to determine if we should trigger a workflow
|
||||
if (operation === 'create' && doc.status === 'published') {
|
||||
// Trigger a custom workflow for newly published posts
|
||||
await triggerCustomWorkflow(payload, {
|
||||
slug: 'manual-review',
|
||||
data: {
|
||||
documentId: doc.id,
|
||||
documentType: 'post',
|
||||
reviewerId: 'auto-review',
|
||||
reviewNotes: 'Automatically queued for review',
|
||||
approved: false
|
||||
},
|
||||
req // Pass the request context
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Make functions available globally for testing/debugging
|
||||
;(global as any).handleDataImport = handleDataImport
|
||||
;(global as any).handleDocumentReview = handleDocumentReview
|
||||
;(global as any).scheduleReports = scheduleReports
|
||||
}
|
||||
})
|
||||
|
||||
// Example workflow configuration that would use these custom triggers:
|
||||
/*
|
||||
{
|
||||
name: "Process Data Import",
|
||||
triggers: [{
|
||||
type: "data-import",
|
||||
sourceUrl: "https://api.example.com/data",
|
||||
format: "json",
|
||||
mapping: { ... }
|
||||
}],
|
||||
steps: [
|
||||
{
|
||||
step: "http-request",
|
||||
name: "fetch-data",
|
||||
input: {
|
||||
url: "$.trigger.data.sourceUrl",
|
||||
method: "GET"
|
||||
}
|
||||
},
|
||||
{
|
||||
step: "create-document",
|
||||
name: "import-records",
|
||||
input: {
|
||||
collection: "imported-data",
|
||||
data: "$.steps.fetch-data.output.body"
|
||||
},
|
||||
dependencies: ["fetch-data"]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
{
|
||||
name: "Review Approval Workflow",
|
||||
triggers: [{
|
||||
type: "manual-review",
|
||||
reviewerId: "",
|
||||
reviewNotes: "",
|
||||
approved: false
|
||||
}],
|
||||
steps: [
|
||||
{
|
||||
step: "update-document",
|
||||
name: "update-status",
|
||||
input: {
|
||||
collection: "documents",
|
||||
id: "$.trigger.data.documentId",
|
||||
data: {
|
||||
status: "$.trigger.data.approved ? 'approved' : 'rejected'",
|
||||
reviewedBy: "$.trigger.data.reviewerId",
|
||||
reviewedAt: "$.trigger.data.reviewedAt"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
step: "send-email",
|
||||
name: "notify-author",
|
||||
input: {
|
||||
to: "author@example.com",
|
||||
subject: "Document Review Complete",
|
||||
text: "Your document has been $.trigger.data.approved ? 'approved' : 'rejected'"
|
||||
},
|
||||
dependencies: ["update-status"]
|
||||
}
|
||||
]
|
||||
}
|
||||
*/
|
||||
@@ -1,122 +0,0 @@
|
||||
/**
|
||||
* Example: Manual Trigger Workflow
|
||||
*
|
||||
 * This example shows how to create a workflow that can be triggered
 * manually from the PayloadCMS admin interface using a custom button.
 */

import type { Payload } from 'payload'

/**
 * Create a workflow with manual trigger
 */
export async function createManualTriggerWorkflow(payload: Payload) {
  const workflow = await payload.create({
    collection: 'workflows',
    data: {
      name: 'Manual Data Processing',
      description: 'A workflow that can be triggered manually from the admin UI',
      triggers: [
        {
          type: 'manual-trigger' // This enables the trigger button in the admin
        }
      ],
      steps: [
        {
          name: 'fetch-data',
          type: 'http-request-step',
          input: {
            url: 'https://api.example.com/data',
            method: 'GET'
          }
        },
        {
          name: 'process-data',
          type: 'create-document',
          input: {
            collection: 'auditLog',
            data: {
              message: 'Manual workflow executed',
              triggeredAt: '$.trigger.data.timestamp'
            }
          },
          dependencies: ['fetch-data'] // This step depends on fetch-data
        }
      ]
    }
  })

  console.log('Created workflow:', workflow.id)
  return workflow
}

/**
 * Trigger a workflow programmatically using the custom trigger
 */
export async function triggerWorkflowProgrammatically(payload: Payload) {
  // Import the trigger functions from the plugin
  const { triggerCustomWorkflow, triggerWorkflowById } = await import('@xtr-dev/payload-automation')

  // Option 1: Trigger all workflows with a specific trigger slug
  const results = await triggerCustomWorkflow(payload, {
    slug: 'manual-trigger',
    data: {
      source: 'api',
      timestamp: new Date().toISOString(),
      user: 'system'
    }
  })

  console.log('Triggered workflows:', results)

  // Option 2: Trigger a specific workflow by ID
  const workflowId = 'your-workflow-id'
  const result = await triggerWorkflowById(
    payload,
    workflowId,
    'manual-trigger',
    {
      source: 'api',
      timestamp: new Date().toISOString()
    }
  )

  console.log('Triggered workflow:', result)
}

/**
 * Example usage in your application
 */
export async function setupManualTriggerExample(payload: Payload) {
  // Create the workflow
  const workflow = await createManualTriggerWorkflow(payload)

  // The workflow is now available in the admin UI with a trigger button
  console.log('Workflow created! You can now:')
  console.log('1. Go to the admin UI and navigate to the Workflows collection')
  console.log('2. Open the workflow:', workflow.name)
  console.log('3. Click the "Trigger Workflow" button to execute it manually')

  // You can also trigger it programmatically
  await triggerWorkflowProgrammatically(payload)
}

/**
 * Notes:
 *
 * 1. The manual trigger button appears automatically in the workflow admin UI
 *    when a workflow has a trigger with type 'manual-trigger'
 *
 * 2. You can have multiple triggers on the same workflow, including manual triggers
 *
 * 3. The trigger passes data to the workflow execution context, accessible via:
 *    - $.trigger.data - The custom data passed when triggering
 *    - $.trigger.type - The trigger type ('manual-trigger')
 *    - $.trigger.triggeredAt - Timestamp of when the trigger was activated
 *
 * 4. Manual triggers are useful for:
 *    - Administrative tasks
 *    - Data migration workflows
 *    - Testing and debugging
 *    - On-demand processing
 */
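The notes above cover triggering from the admin UI and from the exported helper functions. A third route is the REST endpoint that the (now removed) admin button component further down in this diff posts to; a minimal sketch of calling it directly, where the route path and body shape are copied from that component and `triggerViaRest` itself is a hypothetical helper, not part of the plugin:

```typescript
// Sketch only: fire the manual trigger over the plugin's REST route.
// Route and body shape mirror the TriggerWorkflowButton component below.
export async function triggerViaRest(workflowId: string): Promise<string> {
  const response = await fetch('/api/workflows/trigger-custom', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      workflowId,
      triggerSlug: 'manual-trigger',
      data: { source: 'rest', timestamp: new Date().toISOString() }
    })
  })
  if (!response.ok) {
    throw new Error(`Trigger failed with status ${response.status}`)
  }
  const result = await response.json()
  return result.runId // run id reported by the trigger endpoint
}
```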
@@ -1,300 +0,0 @@
/**
 * Examples demonstrating the new trigger builder API
 * This shows the before/after comparison and various usage patterns
 */

import {
  createTrigger,
  createAdvancedTrigger,
  webhookTrigger,
  cronTrigger,
  eventTrigger,
  manualTrigger,
  apiTrigger,
  webhookParameters,
  cronParameters
} from '../src/exports/helpers.js'

/**
 * BEFORE: Manual trigger definition with lots of boilerplate
 */
const oldWayTrigger = {
  slug: 'order-webhook',
  inputs: [
    {
      name: 'webhookSecret',
      type: 'text',
      required: true,
      virtual: true,
      admin: {
        condition: (_, siblingData) => siblingData?.type === 'order-webhook',
        description: 'Secret for webhook validation'
      },
      hooks: {
        afterRead: [({ siblingData }) => siblingData?.parameters?.webhookSecret],
        beforeChange: [({ value, siblingData }) => {
          if (!siblingData.parameters) siblingData.parameters = {}
          siblingData.parameters.webhookSecret = value
          return undefined
        }]
      }
    },
    {
      name: 'orderStatuses',
      type: 'select',
      hasMany: true,
      options: ['pending', 'processing', 'completed'],
      defaultValue: ['completed'],
      virtual: true,
      admin: {
        condition: (_, siblingData) => siblingData?.type === 'order-webhook',
        description: 'Order statuses that trigger the workflow'
      },
      hooks: {
        afterRead: [({ siblingData }) => siblingData?.parameters?.orderStatuses || ['completed']],
        beforeChange: [({ value, siblingData }) => {
          if (!siblingData.parameters) siblingData.parameters = {}
          siblingData.parameters.orderStatuses = value
          return undefined
        }]
      }
    }
    // ... imagine more fields with similar boilerplate
  ]
} as const

/**
 * AFTER: Clean trigger definition using builders
 */

// 1. Simple trigger with parameters
const orderWebhook = createTrigger('order-webhook').parameters({
  webhookSecret: {
    type: 'text',
    required: true,
    admin: {
      description: 'Secret for webhook validation'
    }
  },
  orderStatuses: {
    type: 'select',
    hasMany: true,
    options: ['pending', 'processing', 'completed'],
    defaultValue: ['completed'],
    admin: {
      description: 'Order statuses that trigger the workflow'
    }
  },
  minimumAmount: {
    type: 'number',
    min: 0,
    admin: {
      description: 'Minimum order amount to trigger workflow'
    }
  }
})

// 2. Using preset webhook builder
const paymentWebhook = webhookTrigger('payment-webhook')
  .parameter('currency', {
    type: 'select',
    options: ['USD', 'EUR', 'GBP'],
    defaultValue: 'USD'
  })
  .parameter('paymentMethods', {
    type: 'select',
    hasMany: true,
    options: ['credit_card', 'paypal', 'bank_transfer']
  })
  .build()

// 3. Scheduled trigger using cron builder
const dailyReport = cronTrigger('daily-report')
  .parameter('reportFormat', {
    type: 'select',
    options: [
      { label: 'PDF Report', value: 'pdf' },
      { label: 'CSV Export', value: 'csv' },
      { label: 'JSON Data', value: 'json' }
    ],
    defaultValue: 'pdf'
  })
  .parameter('includeCharts', {
    type: 'checkbox',
    defaultValue: true,
    admin: {
      description: 'Include visual charts in the report'
    }
  })
  .build()

// 4. Event-driven trigger
const userActivity = eventTrigger('user-activity')
  .parameter('actionTypes', {
    type: 'select',
    hasMany: true,
    options: ['login', 'logout', 'profile_update', 'password_change'],
    admin: {
      description: 'User actions that should trigger this workflow'
    }
  })
  .parameter('userRoles', {
    type: 'select',
    hasMany: true,
    options: ['admin', 'editor', 'user'],
    admin: {
      description: 'Only trigger for users with these roles'
    }
  })
  .build()

// 5. Simple manual trigger (no parameters)
const manualBackup = manualTrigger('manual-backup')

// 6. API trigger with authentication
const externalApi = apiTrigger('external-api')
  .parameter('allowedOrigins', {
    type: 'textarea',
    admin: {
      description: 'Comma-separated list of allowed origins'
    },
    validate: (value) => {
      if (value && typeof value === 'string') {
        const origins = value.split(',').map(s => s.trim())
        const validOrigins = origins.every(origin => {
          try {
            new URL(origin)
            return true
          } catch {
            return false
          }
        })
        if (!validOrigins) {
          return 'All origins must be valid URLs'
        }
      }
      return true
    }
  })
  .build()

// 7. Complex trigger extending common parameters
const advancedWebhook = createAdvancedTrigger('advanced-webhook')
  .extend(webhookParameters) // Start with webhook basics
  .parameter('retryConfig', {
    type: 'group',
    fields: [
      {
        name: 'maxRetries',
        type: 'number',
        min: 0,
        max: 10,
        defaultValue: 3
      },
      {
        name: 'retryDelay',
        type: 'number',
        min: 1000,
        max: 60000,
        defaultValue: 5000,
        admin: {
          description: 'Delay between retries in milliseconds'
        }
      }
    ]
  })
  .parameter('filters', {
    type: 'array',
    fields: [
      {
        name: 'field',
        type: 'text',
        required: true
      },
      {
        name: 'operator',
        type: 'select',
        options: ['equals', 'not_equals', 'contains', 'greater_than'],
        required: true
      },
      {
        name: 'value',
        type: 'text',
        required: true
      }
    ]
  })
  .build()

// 8. Custom parameter validation
const validatedTrigger = createTrigger('validated-trigger').parameters({
  email: {
    type: 'email',
    required: true,
    validate: (value) => {
      if (value && typeof value === 'string') {
        const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
        if (!emailRegex.test(value)) {
          return 'Please enter a valid email address'
        }
        // Custom business logic validation
        if (value.endsWith('@example.com')) {
          return 'Example.com emails are not allowed'
        }
      }
      return true
    }
  },
  webhookUrl: {
    type: 'text',
    required: true,
    validate: (value) => {
      if (value && typeof value === 'string') {
        try {
          const url = new URL(value)
          if (!['http:', 'https:'].includes(url.protocol)) {
            return 'URL must use HTTP or HTTPS protocol'
          }
          if (url.hostname === 'localhost') {
            return 'Localhost URLs are not allowed in production'
          }
        } catch {
          return 'Please enter a valid URL'
        }
      }
      return true
    }
  }
})

/**
 * Export all triggers for use in plugin configuration
 */
export const exampleTriggers = [
  orderWebhook,
  paymentWebhook,
  dailyReport,
  userActivity,
  manualBackup,
  externalApi,
  advancedWebhook,
  validatedTrigger
]

/**
 * Usage in payload.config.ts:
 *
 * ```typescript
 * import { workflowsPlugin } from '@xtr-dev/payload-automation'
 * import { exampleTriggers } from './examples/trigger-builders'
 *
 * export default buildConfig({
 *   plugins: [
 *     workflowsPlugin({
 *       triggers: exampleTriggers,
 *       // ... other config
 *     })
 *   ]
 * })
 * ```
 */
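The usage comment above wires the builder-defined triggers into the plugin config. To round out the (now removed) example, a workflow document could then reference one of those slugs in its triggers array; the sketch below reuses the 'order-webhook' trigger and the same payload.create pattern as the manual-trigger example earlier, with purely illustrative parameter values:

```typescript
// Sketch: a workflow using the builder-defined 'order-webhook' trigger.
// Follows the payload.create pattern from the manual-trigger example above;
// the parameter values are example data, not required values.
import type { Payload } from 'payload'

export async function createOrderWorkflow(payload: Payload) {
  return payload.create({
    collection: 'workflows',
    data: {
      name: 'Order Processing',
      triggers: [
        {
          type: 'order-webhook',
          parameters: {
            webhookSecret: 'change-me',
            orderStatuses: ['completed'],
            minimumAmount: 50
          }
        }
      ],
      steps: [
        {
          name: 'log-order',
          type: 'create-document',
          input: {
            collection: 'auditLog',
            data: { message: 'Order webhook received' }
          }
        }
      ]
    }
  })
}
```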
package-lock.json  (generated, 4 lines changed)
@@ -1,12 +1,12 @@
{
  "name": "@xtr-dev/payload-workflows",
  "version": "0.0.30",
  "version": "0.0.40",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@xtr-dev/payload-workflows",
      "version": "0.0.30",
      "version": "0.0.40",
      "license": "MIT",
      "dependencies": {
        "jsonpath-plus": "^10.3.0",

package.json
@@ -1,6 +1,6 @@
{
  "name": "@xtr-dev/payload-automation",
  "version": "0.0.30",
  "version": "0.0.40",
  "description": "PayloadCMS Automation Plugin - Comprehensive workflow automation system with visual workflow building, execution tracking, and step types",
  "license": "MIT",
  "type": "module",
@@ -75,6 +75,7 @@
  "@payloadcms/ui": "3.45.0",
  "@playwright/test": "^1.52.0",
  "@swc/cli": "0.6.0",
  "@types/handlebars": "^4.1.0",
  "@types/nock": "^11.1.0",
  "@types/node": "^22.5.4",
  "@types/node-cron": "^3.0.11",
@@ -135,7 +136,7 @@
  "registry": "https://registry.npmjs.org/",
  "packageManager": "pnpm@10.12.4+sha512.5ea8b0deed94ed68691c9bad4c955492705c5eeb8a87ef86bc62c74a26b037b08ff9570f108b2e4dbd1dd1a9186fea925e527f141c648e85af45631074680184",
  "dependencies": {
    "jsonpath-plus": "^10.3.0",
    "handlebars": "^4.7.8",
    "node-cron": "^4.2.1",
    "pino": "^9.9.0"
  }
pnpm-lock.yaml  (generated, 86 lines changed)
@@ -8,9 +8,9 @@ importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
jsonpath-plus:
|
||||
specifier: ^10.3.0
|
||||
version: 10.3.0
|
||||
handlebars:
|
||||
specifier: ^4.7.8
|
||||
version: 4.7.8
|
||||
node-cron:
|
||||
specifier: ^4.2.1
|
||||
version: 4.2.1
|
||||
@@ -45,6 +45,9 @@ importers:
|
||||
'@swc/cli':
|
||||
specifier: 0.6.0
|
||||
version: 0.6.0(@swc/core@1.13.4)
|
||||
'@types/handlebars':
|
||||
specifier: ^4.1.0
|
||||
version: 4.1.0
|
||||
'@types/nock':
|
||||
specifier: ^11.1.0
|
||||
version: 11.1.0
|
||||
@@ -962,18 +965,6 @@ packages:
|
||||
'@jsdevtools/ono@7.1.3':
|
||||
resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==}
|
||||
|
||||
'@jsep-plugin/assignment@1.3.0':
|
||||
resolution: {integrity: sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ==}
|
||||
engines: {node: '>= 10.16.0'}
|
||||
peerDependencies:
|
||||
jsep: ^0.4.0||^1.0.0
|
||||
|
||||
'@jsep-plugin/regex@1.0.4':
|
||||
resolution: {integrity: sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg==}
|
||||
engines: {node: '>= 10.16.0'}
|
||||
peerDependencies:
|
||||
jsep: ^0.4.0||^1.0.0
|
||||
|
||||
'@lexical/clipboard@0.28.0':
|
||||
resolution: {integrity: sha512-LYqion+kAwFQJStA37JAEMxTL/m1WlZbotDfM/2WuONmlO0yWxiyRDI18oeCwhBD6LQQd9c3Ccxp9HFwUG1AVw==}
|
||||
|
||||
@@ -1600,6 +1591,10 @@ packages:
|
||||
'@types/estree@1.0.8':
|
||||
resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
|
||||
|
||||
'@types/handlebars@4.1.0':
|
||||
resolution: {integrity: sha512-gq9YweFKNNB1uFK71eRqsd4niVkXrxHugqWFQkeLRJvGjnxsLr16bYtcsG4tOFwmYi0Bax+wCkbf1reUfdl4kA==}
|
||||
deprecated: This is a stub types definition. handlebars provides its own type definitions, so you do not need this installed.
|
||||
|
||||
'@types/hast@3.0.4':
|
||||
resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==}
|
||||
|
||||
@@ -2886,6 +2881,11 @@ packages:
|
||||
resolution: {integrity: sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw==}
|
||||
engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0}
|
||||
|
||||
handlebars@4.7.8:
|
||||
resolution: {integrity: sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==}
|
||||
engines: {node: '>=0.4.7'}
|
||||
hasBin: true
|
||||
|
||||
has-bigints@1.1.0:
|
||||
resolution: {integrity: sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -3172,10 +3172,6 @@ packages:
|
||||
resolution: {integrity: sha512-iZ8Bdb84lWRuGHamRXFyML07r21pcwBrLkHEuHgEY5UbCouBwv7ECknDRKzsQIXMiqpPymqtIf8TC/shYKB5rw==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
|
||||
jsep@1.4.0:
|
||||
resolution: {integrity: sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==}
|
||||
engines: {node: '>= 10.16.0'}
|
||||
|
||||
jsesc@3.1.0:
|
||||
resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==}
|
||||
engines: {node: '>=6'}
|
||||
@@ -3204,11 +3200,6 @@ packages:
|
||||
json-stringify-safe@5.0.1:
|
||||
resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==}
|
||||
|
||||
jsonpath-plus@10.3.0:
|
||||
resolution: {integrity: sha512-8TNmfeTCk2Le33A3vRRwtuworG/L5RrgMvdjhKZxvyShO+mBu2fP50OWUjRLNtvw344DdDarFh9buFAZs5ujeA==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
hasBin: true
|
||||
|
||||
jsox@1.2.121:
|
||||
resolution: {integrity: sha512-9Ag50tKhpTwS6r5wh3MJSAvpSof0UBr39Pto8OnzFT32Z/pAbxAsKHzyvsyMEHVslELvHyO/4/jaQELHk8wDcw==}
|
||||
hasBin: true
|
||||
@@ -3534,6 +3525,9 @@ packages:
|
||||
natural-compare@1.4.0:
|
||||
resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==}
|
||||
|
||||
neo-async@2.6.2:
|
||||
resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==}
|
||||
|
||||
new-find-package-json@2.0.0:
|
||||
resolution: {integrity: sha512-lDcBsjBSMlj3LXH2v/FW3txlh2pYTjmbOXPYJD93HI5EwuLzI11tdHSIpUMmfq/IOsldj4Ps8M8flhm+pCK4Ew==}
|
||||
engines: {node: '>=12.22.0'}
|
||||
@@ -4472,6 +4466,11 @@ packages:
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
uglify-js@3.19.3:
|
||||
resolution: {integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==}
|
||||
engines: {node: '>=0.8.0'}
|
||||
hasBin: true
|
||||
|
||||
uint8array-extras@1.5.0:
|
||||
resolution: {integrity: sha512-rvKSBiC5zqCCiDZ9kAOszZcDvdAHwwIKJG33Ykj43OKcWsnmcBRL09YTU4nOeHZ8Y2a7l1MgTd08SBe9A8Qj6A==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -4663,6 +4662,9 @@ packages:
|
||||
resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
wordwrap@1.0.0:
|
||||
resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==}
|
||||
|
||||
wrap-ansi@7.0.0:
|
||||
resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
|
||||
engines: {node: '>=10'}
|
||||
@@ -5447,14 +5449,6 @@ snapshots:
|
||||
|
||||
'@jsdevtools/ono@7.1.3': {}
|
||||
|
||||
'@jsep-plugin/assignment@1.3.0(jsep@1.4.0)':
|
||||
dependencies:
|
||||
jsep: 1.4.0
|
||||
|
||||
'@jsep-plugin/regex@1.0.4(jsep@1.4.0)':
|
||||
dependencies:
|
||||
jsep: 1.4.0
|
||||
|
||||
'@lexical/clipboard@0.28.0':
|
||||
dependencies:
|
||||
'@lexical/html': 0.28.0
|
||||
@@ -6316,6 +6310,10 @@ snapshots:
|
||||
|
||||
'@types/estree@1.0.8': {}
|
||||
|
||||
'@types/handlebars@4.1.0':
|
||||
dependencies:
|
||||
handlebars: 4.7.8
|
||||
|
||||
'@types/hast@3.0.4':
|
||||
dependencies:
|
||||
'@types/unist': 3.0.3
|
||||
@@ -7966,6 +7964,15 @@ snapshots:
|
||||
|
||||
graphql@16.11.0: {}
|
||||
|
||||
handlebars@4.7.8:
|
||||
dependencies:
|
||||
minimist: 1.2.8
|
||||
neo-async: 2.6.2
|
||||
source-map: 0.6.1
|
||||
wordwrap: 1.0.0
|
||||
optionalDependencies:
|
||||
uglify-js: 3.19.3
|
||||
|
||||
has-bigints@1.1.0: {}
|
||||
|
||||
has-flag@4.0.0: {}
|
||||
@@ -8224,8 +8231,6 @@ snapshots:
|
||||
|
||||
jsdoc-type-pratt-parser@4.8.0: {}
|
||||
|
||||
jsep@1.4.0: {}
|
||||
|
||||
jsesc@3.1.0: {}
|
||||
|
||||
json-buffer@3.0.1: {}
|
||||
@@ -8252,12 +8257,6 @@ snapshots:
|
||||
|
||||
json-stringify-safe@5.0.1: {}
|
||||
|
||||
jsonpath-plus@10.3.0:
|
||||
dependencies:
|
||||
'@jsep-plugin/assignment': 1.3.0(jsep@1.4.0)
|
||||
'@jsep-plugin/regex': 1.0.4(jsep@1.4.0)
|
||||
jsep: 1.4.0
|
||||
|
||||
jsox@1.2.121: {}
|
||||
|
||||
jsx-ast-utils@3.3.5:
|
||||
@@ -8698,6 +8697,8 @@ snapshots:
|
||||
|
||||
natural-compare@1.4.0: {}
|
||||
|
||||
neo-async@2.6.2: {}
|
||||
|
||||
new-find-package-json@2.0.0:
|
||||
dependencies:
|
||||
debug: 4.4.1
|
||||
@@ -9761,6 +9762,9 @@ snapshots:
|
||||
|
||||
typescript@5.7.3: {}
|
||||
|
||||
uglify-js@3.19.3:
|
||||
optional: true
|
||||
|
||||
uint8array-extras@1.5.0: {}
|
||||
|
||||
unbox-primitive@1.1.0:
|
||||
@@ -9973,6 +9977,8 @@ snapshots:
|
||||
|
||||
word-wrap@1.2.5: {}
|
||||
|
||||
wordwrap@1.0.0: {}
|
||||
|
||||
wrap-ansi@7.0.0:
|
||||
dependencies:
|
||||
ansi-styles: 4.3.0
|
||||
|
||||
@@ -1,359 +1,121 @@
|
||||
import type {CollectionConfig, Field} from 'payload'
|
||||
import type {CollectionConfig} from 'payload'
|
||||
|
||||
import type {WorkflowsPluginConfig} from "../plugin/config-types.js"
|
||||
|
||||
export const createWorkflowCollection: <T extends string>(options: WorkflowsPluginConfig<T>) => CollectionConfig = ({
|
||||
collectionTriggers,
|
||||
steps,
|
||||
triggers
|
||||
}) => ({
|
||||
slug: 'workflows',
|
||||
access: {
|
||||
create: () => true,
|
||||
delete: () => true,
|
||||
read: () => true,
|
||||
update: () => true,
|
||||
},
|
||||
admin: {
|
||||
defaultColumns: ['name', 'updatedAt'],
|
||||
description: 'Create and manage automated workflows.',
|
||||
group: 'Automation',
|
||||
useAsTitle: 'name',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Human-readable name for the workflow',
|
||||
import {parameter} from "../fields/parameter.js"
|
||||
import {collectionTrigger, globalTrigger} from "../triggers/index.js"
|
||||
|
||||
export const createWorkflowCollection: <T extends string>(options: WorkflowsPluginConfig<T>) => CollectionConfig = (options) => {
|
||||
const steps = options.steps || []
|
||||
const triggers = (options.triggers || []).map(t => t(options)).concat(collectionTrigger(options), globalTrigger(options))
|
||||
return {
|
||||
slug: 'workflows',
|
||||
access: {
|
||||
create: () => true,
|
||||
delete: () => true,
|
||||
read: () => true,
|
||||
update: () => true,
|
||||
},
|
||||
admin: {
|
||||
defaultColumns: ['name', 'updatedAt'],
|
||||
description: 'Create and manage automated workflows.',
|
||||
group: 'Automation',
|
||||
useAsTitle: 'name',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Human-readable name for the workflow',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'description',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Optional description of what this workflow does',
|
||||
{
|
||||
name: 'description',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Optional description of what this workflow does',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'executionStatus',
|
||||
type: 'ui',
|
||||
admin: {
|
||||
components: {
|
||||
Field: '@xtr-dev/payload-automation/client#WorkflowExecutionStatus'
|
||||
},
|
||||
condition: (data) => !!data?.id // Only show for existing workflows
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'triggers',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: [
|
||||
'collection-trigger',
|
||||
'webhook-trigger',
|
||||
'global-trigger',
|
||||
'cron-trigger',
|
||||
...(triggers || []).map(t => t.slug)
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'parameters',
|
||||
type: 'json',
|
||||
admin: {
|
||||
hidden: true,
|
||||
},
|
||||
defaultValue: {}
|
||||
},
|
||||
// Virtual fields for collection trigger
|
||||
{
|
||||
name: '__builtin_collectionSlug',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'collection-trigger',
|
||||
description: 'Collection that triggers the workflow',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.collectionSlug || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.collectionSlug = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
{
|
||||
name: 'triggers',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: [
|
||||
...triggers.map(t => t.slug)
|
||||
]
|
||||
},
|
||||
options: Object.keys(collectionTriggers || {}),
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: '__builtin_operation',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'collection-trigger',
|
||||
description: 'Collection operation that triggers the workflow',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.operation || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.operation = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
options: [
|
||||
'create',
|
||||
'delete',
|
||||
'read',
|
||||
'update',
|
||||
],
|
||||
virtual: true,
|
||||
},
|
||||
// Virtual fields for webhook trigger
|
||||
{
|
||||
name: '__builtin_webhookPath',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'webhook-trigger',
|
||||
description: 'URL path for the webhook (e.g., "my-webhook"). Full URL will be /api/workflows-webhook/my-webhook',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.webhookPath || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.webhookPath = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'webhook-trigger' && !value && !siblingData?.parameters?.webhookPath) {
|
||||
return 'Webhook path is required for webhook triggers'
|
||||
}
|
||||
return true
|
||||
},
|
||||
virtual: true,
|
||||
},
|
||||
// Virtual fields for global trigger
|
||||
{
|
||||
name: '__builtin_global',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'global-trigger',
|
||||
description: 'Global that triggers the workflow',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.global || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.global = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
options: [], // Will be populated dynamically based on available globals
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: '__builtin_globalOperation',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'global-trigger',
|
||||
description: 'Global operation that triggers the workflow',
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.globalOperation || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.globalOperation = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
options: [
|
||||
'update'
|
||||
],
|
||||
virtual: true,
|
||||
},
|
||||
// Virtual fields for cron trigger
|
||||
{
|
||||
name: '__builtin_cronExpression',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'cron-trigger',
|
||||
description: 'Cron expression for scheduled execution (e.g., "0 0 * * *" for daily at midnight)',
|
||||
placeholder: '0 0 * * *'
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.cronExpression || undefined
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.cronExpression = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
const cronValue = value || siblingData?.parameters?.cronExpression
|
||||
if (siblingData?.type === 'cron-trigger' && !cronValue) {
|
||||
return 'Cron expression is required for cron triggers'
|
||||
}
|
||||
|
||||
// Validate cron expression format if provided
|
||||
if (siblingData?.type === 'cron-trigger' && cronValue) {
|
||||
// Basic format validation - should be 5 parts separated by spaces
|
||||
const cronParts = cronValue.trim().split(/\s+/)
|
||||
if (cronParts.length !== 5) {
|
||||
return 'Invalid cron expression format. Expected 5 parts: "minute hour day month weekday" (e.g., "0 9 * * 1")'
|
||||
}
|
||||
|
||||
// Additional validation could use node-cron but we avoid dynamic imports here
|
||||
// The main validation happens at runtime in the cron scheduler
|
||||
}
|
||||
|
||||
return true
|
||||
},
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: '__builtin_timezone',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'cron-trigger',
|
||||
description: 'Timezone for cron execution (e.g., "America/New_York", "Europe/London"). Defaults to UTC.',
|
||||
placeholder: 'UTC'
|
||||
},
|
||||
defaultValue: 'UTC',
|
||||
hooks: {
|
||||
afterRead: [
|
||||
({ siblingData }) => {
|
||||
return siblingData?.parameters?.timezone || 'UTC'
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (!siblingData.parameters) {siblingData.parameters = {}}
|
||||
siblingData.parameters.timezone = value || 'UTC'
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
const tzValue = value || siblingData?.parameters?.timezone
|
||||
if (siblingData?.type === 'cron-trigger' && tzValue) {
|
||||
try {
|
||||
// Test if timezone is valid by trying to create a date with it
|
||||
new Intl.DateTimeFormat('en', {timeZone: tzValue})
|
||||
return true
|
||||
} catch {
|
||||
return `Invalid timezone: ${tzValue}. Please use a valid IANA timezone identifier (e.g., "America/New_York", "Europe/London")`
|
||||
}
|
||||
}
|
||||
return true
|
||||
},
|
||||
virtual: true,
|
||||
},
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
// Virtual fields for custom triggers
|
||||
// Note: Custom trigger fields from trigger-helpers already have unique names
|
||||
// We just need to pass them through without modification
|
||||
...(triggers || []).flatMap(t => (t.inputs || []))
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'steps',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
type: 'row',
|
||||
fields: [
|
||||
{
|
||||
name: 'step',
|
||||
type: 'select',
|
||||
options: steps.map(t => t.slug)
|
||||
{
|
||||
name: 'parameters',
|
||||
type: 'json',
|
||||
admin: {
|
||||
hidden: true,
|
||||
},
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
}
|
||||
]
|
||||
},
|
||||
...(steps || []).flatMap(step => (step.inputSchema || []).map(field => ({
|
||||
...field,
|
||||
admin: {
|
||||
...(field.admin || {}),
|
||||
condition: (...args) => args[1]?.step === step.slug && (
|
||||
field.admin?.condition ?
|
||||
field.admin.condition.call(this, ...args) :
|
||||
true
|
||||
),
|
||||
defaultValue: {}
|
||||
},
|
||||
} as Field))),
|
||||
{
|
||||
name: 'dependencies',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Step names that must complete before this step can run'
|
||||
// Virtual fields for custom triggers
|
||||
...triggers.flatMap(t => (t.parameters || []).map(p => parameter(t.slug, p as any))),
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
hasMany: true,
|
||||
required: false
|
||||
},
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this step to execute (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'steps',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
defaultValue: 'Unnamed Step'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
versions: {
|
||||
drafts: {
|
||||
autosave: false,
|
||||
{
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: steps.map(t => t.slug)
|
||||
},
|
||||
{
|
||||
name: 'input',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Step input configuration. Use JSONPath expressions to reference dynamic data (e.g., {"url": "$.trigger.doc.webhookUrl", "data": "$.steps.previousStep.output.result"})'
|
||||
},
|
||||
defaultValue: {}
|
||||
},
|
||||
{
|
||||
name: 'dependencies',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Step names that must complete before this step can run'
|
||||
},
|
||||
hasMany: true,
|
||||
required: false
|
||||
},
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this step to execute (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
versions: {
|
||||
drafts: {
|
||||
autosave: false,
|
||||
},
|
||||
maxPerDoc: 10,
|
||||
},
|
||||
maxPerDoc: 10,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,10 +10,10 @@ interface ErrorDisplayProps {
|
||||
path?: string
|
||||
}
|
||||
|
||||
export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
value,
|
||||
onChange,
|
||||
readOnly = false
|
||||
export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
value,
|
||||
onChange,
|
||||
readOnly = false
|
||||
}) => {
|
||||
const [expanded, setExpanded] = useState(false)
|
||||
|
||||
@@ -32,7 +32,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Network error') || error.includes('fetch')) {
|
||||
return {
|
||||
type: 'network',
|
||||
@@ -41,7 +41,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Hook execution failed')) {
|
||||
return {
|
||||
type: 'hook',
|
||||
@@ -50,7 +50,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Executor not available')) {
|
||||
return {
|
||||
type: 'executor',
|
||||
@@ -59,7 +59,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Collection slug is required') || error.includes('Document data is required')) {
|
||||
return {
|
||||
type: 'validation',
|
||||
@@ -68,7 +68,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('status') && error.includes('4')) {
|
||||
return {
|
||||
type: 'client',
|
||||
@@ -77,7 +77,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('status') && error.includes('5')) {
|
||||
return {
|
||||
type: 'server',
|
||||
@@ -127,7 +127,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
const errorColor = getErrorColor(errorInfo.type)
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
<div style={{
|
||||
border: `2px solid ${errorColor}30`,
|
||||
borderRadius: '8px',
|
||||
backgroundColor: `${errorColor}08`,
|
||||
@@ -135,9 +135,9 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
marginTop: '8px'
|
||||
}}>
|
||||
{/* Error Header */}
|
||||
<div style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
<div style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: '12px',
|
||||
marginBottom: '12px'
|
||||
}}>
|
||||
@@ -145,15 +145,15 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
{getErrorIcon(errorInfo.type)}
|
||||
</span>
|
||||
<div>
|
||||
<h4 style={{
|
||||
margin: 0,
|
||||
<h4 style={{
|
||||
margin: 0,
|
||||
color: errorColor,
|
||||
fontSize: '16px',
|
||||
fontWeight: '600'
|
||||
}}>
|
||||
{errorInfo.title}
|
||||
</h4>
|
||||
<p style={{
|
||||
<p style={{
|
||||
margin: '4px 0 0 0',
|
||||
color: '#6B7280',
|
||||
fontSize: '14px',
|
||||
@@ -168,14 +168,14 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
<div>
|
||||
<div style={{ marginBottom: expanded ? '12px' : '0' }}>
|
||||
<Button
|
||||
buttonStyle="secondary"
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
{expanded ? 'Hide' : 'Show'} Technical Details
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
|
||||
{expanded && (
|
||||
<div style={{
|
||||
backgroundColor: '#F8F9FA',
|
||||
@@ -194,7 +194,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
</div>
|
||||
|
||||
{/* Quick Actions */}
|
||||
<div style={{
|
||||
<div style={{
|
||||
marginTop: '12px',
|
||||
padding: '12px',
|
||||
backgroundColor: `${errorColor}10`,
|
||||
@@ -253,11 +253,11 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
{/* Hidden textarea for editing if needed */}
|
||||
{!readOnly && onChange && (
|
||||
<textarea
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={{ display: 'none' }}
|
||||
value={value}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,64 +0,0 @@
'use client'

import { Button, toast } from '@payloadcms/ui'
import { useState } from 'react'

interface TriggerWorkflowButtonProps {
  workflowId: string
  workflowName: string
  triggerSlug?: string
}

export const TriggerWorkflowButton: React.FC<TriggerWorkflowButtonProps> = ({
  workflowId,
  workflowName,
  triggerSlug = 'manual-trigger'
}) => {
  const [loading, setLoading] = useState(false)

  const handleTrigger = async () => {
    setLoading(true)

    try {
      const response = await fetch('/api/workflows/trigger-custom', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          workflowId,
          triggerSlug,
          data: {
            triggeredAt: new Date().toISOString(),
            source: 'admin-button'
          }
        }),
      })

      if (!response.ok) {
        const error = await response.json()
        throw new Error(error.message || 'Failed to trigger workflow')
      }

      const result = await response.json()

      toast.success(`Workflow "${workflowName}" triggered successfully! Run ID: ${result.runId}`)
    } catch (error) {
      console.error('Error triggering workflow:', error)
      toast.error(`Failed to trigger workflow: ${error instanceof Error ? error.message : 'Unknown error'}`)
    } finally {
      setLoading(false)
    }
  }

  return (
    <Button
      onClick={handleTrigger}
      disabled={loading}
      size="small"
      buttonStyle="secondary"
    >
      {loading ? 'Triggering...' : 'Trigger Workflow'}
    </Button>
  )
}
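Before its removal, the button above was a plain client component driven entirely by its props. A minimal sketch of how such a component could be mounted from a custom admin view, where only the TriggerWorkflowButton props come from the code above and the surrounding view component is hypothetical:

```tsx
// Hypothetical mount point for the (now removed) button component.
import React from 'react'
import { TriggerWorkflowButton } from './TriggerWorkflowButton.js'

export const WorkflowActions: React.FC<{ id: string; name: string }> = ({ id, name }) => (
  <div>
    <TriggerWorkflowButton
      workflowId={id}
      workflowName={name}
      triggerSlug="manual-trigger"
    />
  </div>
)
```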
src/components/WorkflowBuilder/StepConfigurationForm.tsx  (new file, 297 lines)
@@ -0,0 +1,297 @@
|
||||
'use client'
|
||||
|
||||
import React, { useState, useCallback, useEffect } from 'react'
|
||||
import type { Node } from '@xyflow/react'
|
||||
import { Button } from '@payloadcms/ui'
|
||||
|
||||
interface StepField {
|
||||
name: string
|
||||
type: string
|
||||
label?: string
|
||||
admin?: {
|
||||
description?: string
|
||||
condition?: (data: any, siblingData: any) => boolean
|
||||
}
|
||||
options?: Array<{ label: string; value: string }>
|
||||
defaultValue?: any
|
||||
required?: boolean
|
||||
hasMany?: boolean
|
||||
fields?: StepField[] // For group fields
|
||||
}
|
||||
|
||||
interface StepType {
|
||||
slug: string
|
||||
label?: string
|
||||
inputSchema?: StepField[]
|
||||
outputSchema?: StepField[]
|
||||
}
|
||||
|
||||
interface StepConfigurationFormProps {
|
||||
selectedNode: Node | null
|
||||
availableStepTypes: StepType[]
|
||||
availableSteps: string[] // For dependency selection
|
||||
onNodeUpdate: (nodeId: string, data: Partial<Node['data']>) => void
|
||||
onClose: () => void
|
||||
}
|
||||
|
||||
export const StepConfigurationForm: React.FC<StepConfigurationFormProps> = ({
|
||||
selectedNode,
|
||||
availableStepTypes,
|
||||
availableSteps,
|
||||
onNodeUpdate,
|
||||
onClose
|
||||
}) => {
|
||||
const [formData, setFormData] = useState<Record<string, any>>(
|
||||
selectedNode?.data.configuration || {}
|
||||
)
|
||||
const [jsonText, setJsonText] = useState<string>(() =>
|
||||
JSON.stringify(selectedNode?.data.configuration || {}, null, 2)
|
||||
)
|
||||
|
||||
if (!selectedNode) return null
|
||||
|
||||
const stepType = availableStepTypes.find(type => type.slug === selectedNode.data.stepType)
|
||||
const inputSchema = stepType?.inputSchema || []
|
||||
|
||||
// Update form data when selected node changes
|
||||
useEffect(() => {
|
||||
const config = selectedNode?.data.configuration || {}
|
||||
setFormData(config)
|
||||
setJsonText(JSON.stringify(config, null, 2))
|
||||
}, [selectedNode])
|
||||
|
||||
|
||||
const handleSave = useCallback(() => {
|
||||
// Update the node with form data
|
||||
onNodeUpdate(selectedNode.id, {
|
||||
...selectedNode.data,
|
||||
configuration: formData
|
||||
})
|
||||
|
||||
onClose()
|
||||
}, [selectedNode, formData, onNodeUpdate, onClose])
|
||||
|
||||
const renderStepConfiguration = () => {
|
||||
if (!inputSchema.length) {
|
||||
return (
|
||||
<div style={{
|
||||
padding: '20px',
|
||||
textAlign: 'center',
|
||||
color: 'var(--theme-text-400)',
|
||||
fontStyle: 'italic'
|
||||
}}>
|
||||
This step type has no configuration parameters.
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{ marginBottom: '16px' }}>
|
||||
<label style={{
|
||||
display: 'block',
|
||||
marginBottom: '4px',
|
||||
fontSize: '12px',
|
||||
fontWeight: '500',
|
||||
color: 'var(--theme-text)'
|
||||
}}>
|
||||
Step Configuration
|
||||
</label>
|
||||
<div style={{ fontSize: '11px', color: 'var(--theme-text-400)', marginBottom: '8px' }}>
|
||||
Configure this step's parameters in JSON format. Use JSONPath expressions like <code>$.trigger.doc.id</code> to reference dynamic data.
|
||||
</div>
|
||||
|
||||
{/* Schema Reference */}
|
||||
<details style={{ marginBottom: '12px' }}>
|
||||
<summary style={{
|
||||
fontSize: '11px',
|
||||
color: 'var(--theme-text-400)',
|
||||
cursor: 'pointer',
|
||||
marginBottom: '8px'
|
||||
}}>
|
||||
📖 Available Fields (click to expand)
|
||||
</summary>
|
||||
<div style={{
|
||||
background: 'var(--theme-elevation-50)',
|
||||
border: '1px solid var(--theme-elevation-100)',
|
||||
borderRadius: '4px',
|
||||
padding: '12px',
|
||||
fontSize: '11px',
|
||||
fontFamily: 'monospace'
|
||||
}}>
|
||||
{inputSchema.map((field, index) => (
|
||||
<div key={field.name} style={{ marginBottom: index < inputSchema.length - 1 ? '8px' : '0' }}>
|
||||
<strong>{field.name}</strong> ({field.type})
|
||||
{field.required && <span style={{ color: 'var(--theme-error-500)' }}> *required</span>}
|
||||
{field.admin?.description && (
|
||||
<div style={{ color: 'var(--theme-text-400)', marginTop: '2px' }}>
|
||||
{field.admin.description}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</details>
|
||||
|
||||
<textarea
|
||||
value={jsonText}
|
||||
onChange={(e) => {
|
||||
const text = e.target.value
|
||||
setJsonText(text)
|
||||
try {
|
||||
const parsed = JSON.parse(text)
|
||||
setFormData(parsed)
|
||||
} catch {
|
||||
// Keep invalid JSON, user is still typing
|
||||
// Don't update formData until JSON is valid
|
||||
}
|
||||
}}
|
||||
rows={Math.min(Math.max(inputSchema.length * 2, 6), 15)}
|
||||
style={{
|
||||
width: '100%',
|
||||
padding: '12px',
|
||||
border: '1px solid var(--theme-elevation-100)',
|
||||
borderRadius: '4px',
|
||||
fontSize: '13px',
|
||||
fontFamily: 'monospace',
|
||||
lineHeight: '1.4',
|
||||
background: 'var(--theme-input-bg)',
|
||||
color: 'var(--theme-text)',
|
||||
resize: 'vertical'
|
||||
}}
|
||||
placeholder='{\n "field1": "value1",\n "field2": "$.trigger.doc.id"\n}'
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
height: '100%',
|
||||
width: '100%',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
overflow: 'hidden'
|
||||
}}>
|
||||
{/* Header */}
|
||||
<div style={{
|
||||
padding: '16px',
|
||||
borderBottom: '1px solid var(--theme-elevation-100)',
|
||||
background: 'var(--theme-elevation-50)'
|
||||
}}>
|
||||
<div style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
<h4 style={{ margin: 0, fontSize: '16px', fontWeight: '600', color: 'var(--theme-text)' }}>
|
||||
Configure Step
|
||||
</h4>
|
||||
<Button
|
||||
buttonStyle="none"
|
||||
onClick={onClose}
|
||||
size="small"
|
||||
>
|
||||
×
|
||||
</Button>
|
||||
</div>
|
||||
<div style={{ fontSize: '12px', color: 'var(--theme-text-400)', marginTop: '4px' }}>
|
||||
{stepType?.label || (selectedNode.data.stepType as string)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Form */}
|
||||
<div style={{
|
||||
flex: 1,
|
||||
overflow: 'auto',
|
||||
padding: '16px'
|
||||
}}>
|
||||
{/* Basic step info */}
|
||||
<div style={{ marginBottom: '16px' }}>
|
||||
<label style={{
|
||||
display: 'block',
|
||||
marginBottom: '4px',
|
||||
fontSize: '12px',
|
||||
fontWeight: '500'
|
||||
}}>
|
||||
Step Name *
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={(selectedNode.data.label as string) || ''}
|
||||
onChange={(e) => onNodeUpdate(selectedNode.id, {
|
||||
...selectedNode.data,
|
||||
label: e.target.value
|
||||
})}
|
||||
style={{
|
||||
width: '100%',
|
||||
padding: '8px',
|
||||
border: '1px solid var(--theme-elevation-100)',
|
||||
borderRadius: '4px',
|
||||
fontSize: '14px'
|
||||
}}
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Dependencies */}
|
||||
<div style={{ marginBottom: '16px' }}>
|
||||
<label style={{
|
||||
display: 'block',
|
||||
marginBottom: '4px',
|
||||
fontSize: '12px',
|
||||
fontWeight: '500'
|
||||
}}>
|
||||
Dependencies
|
||||
</label>
|
||||
<div style={{ fontSize: '11px', color: 'var(--theme-text-400)', marginBottom: '8px' }}>
|
||||
Steps that must complete before this step can run
|
||||
</div>
|
||||
{availableSteps
|
||||
.filter(step => step !== selectedNode.id)
|
||||
.map(stepId => (
|
||||
<label key={stepId} style={{
|
||||
display: 'block',
|
||||
fontSize: '12px',
|
||||
marginBottom: '4px'
|
||||
}}>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={((selectedNode.data.dependencies as string[]) || []).includes(stepId)}
|
||||
onChange={(e) => {
|
||||
const currentDeps = (selectedNode.data.dependencies as string[]) || []
|
||||
const newDeps = e.target.checked
|
||||
? [...currentDeps, stepId]
|
||||
: currentDeps.filter((dep: string) => dep !== stepId)
|
||||
|
||||
onNodeUpdate(selectedNode.id, {
|
||||
...selectedNode.data,
|
||||
dependencies: newDeps
|
||||
})
|
||||
}}
|
||||
style={{ marginRight: '8px' }}
|
||||
/>
|
||||
{stepId}
|
||||
</label>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Step-specific configuration */}
|
||||
{renderStepConfiguration()}
|
||||
|
||||
{/* Submit button */}
|
||||
<div style={{
|
||||
borderTop: '1px solid var(--theme-elevation-100)',
|
||||
paddingTop: '16px',
|
||||
marginTop: '16px'
|
||||
}}>
|
||||
<Button
|
||||
buttonStyle="primary"
|
||||
onClick={handleSave}
|
||||
>
|
||||
Save Configuration
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
src/components/WorkflowBuilder/WorkflowBuilder.tsx  (new file, 255 lines)
@@ -0,0 +1,255 @@
'use client'

import React, { useCallback, useMemo, useState } from 'react'
import {
  ReactFlow,
  Node,
  Edge,
  addEdge,
  Connection,
  useNodesState,
  useEdgesState,
  Controls,
  Background,
  BackgroundVariant,
  MiniMap,
  Panel
} from '@xyflow/react'
import '@xyflow/react/dist/style.css'

// Import custom node types
import { StepNode } from './nodes/StepNode.js'
import { WorkflowToolbar } from './WorkflowToolbar.js'
import { StepConfigurationForm } from './StepConfigurationForm.js'

// Define node types for React Flow
const nodeTypes = {
  stepNode: StepNode,
}

interface WorkflowData {
  id: string
  name: string
  steps?: Array<{
    name: string
    type: string
    position?: { x: number; y: number }
    visual?: { color?: string; icon?: string }
    dependencies?: string[]
  }>
  layout?: {
    viewport?: { x: number; y: number; zoom: number }
  }
}

interface StepType {
  slug: string
  label?: string
  inputSchema?: any[]
  outputSchema?: any[]
}

interface WorkflowBuilderProps {
  workflow?: WorkflowData
  availableStepTypes?: StepType[]
  onSave?: (workflow: WorkflowData) => void
  readonly?: boolean
}

export const WorkflowBuilder: React.FC<WorkflowBuilderProps> = ({
  workflow,
  availableStepTypes = [],
  onSave,
  readonly = false
}) => {
  const [selectedNode, setSelectedNode] = useState<Node | null>(null)

  // Convert workflow steps to React Flow nodes
  const initialNodes: Node[] = useMemo(() => {
    if (!workflow?.steps) return []

    return workflow.steps.map((step, index) => ({
      id: step.name || `step-${index}`,
      type: 'stepNode',
      position: step.position || { x: 100 + index * 200, y: 100 },
      data: {
        label: step.name || 'Unnamed Step',
        stepType: step.type,
        color: step.visual?.color || '#3b82f6',
        icon: step.visual?.icon,
        dependencies: step.dependencies || []
      }
    }))
  }, [workflow?.steps])

  // Convert dependencies to React Flow edges
  const initialEdges: Edge[] = useMemo(() => {
    if (!workflow?.steps) return []

    const edges: Edge[] = []

    workflow.steps.forEach((step, index) => {
      const targetId = step.name || `step-${index}`

      if (step.dependencies) {
        step.dependencies.forEach((depName) => {
          // Find the source step
          const sourceStep = workflow.steps?.find(s => s.name === depName)
          if (sourceStep) {
            const sourceId = sourceStep.name || `step-${workflow.steps?.indexOf(sourceStep)}`
            edges.push({
              id: `${sourceId}-${targetId}`,
              source: sourceId,
              target: targetId,
              type: 'smoothstep'
            })
          }
        })
      }
    })

    return edges
  }, [workflow?.steps])

  const [nodes, setNodes, onNodesChange] = useNodesState(initialNodes)
  const [edges, setEdges, onEdgesChange] = useEdgesState(initialEdges)

  // Handle new connections
  const onConnect = useCallback((params: Connection) => {
    if (readonly) return

    setEdges((eds) => addEdge({
      ...params,
      type: 'smoothstep'
    }, eds))
  }, [setEdges, readonly])

  // Handle node selection
  const onNodeClick = useCallback((_event: React.MouseEvent, node: Node) => {
    console.log('Node clicked:', node.id, node.data.label)
    setSelectedNode(node)
  }, [])

  // Handle adding new step
  const onAddStep = useCallback((stepType: string) => {
    if (readonly) return

    const newStep: Node = {
      id: `step-${Date.now()}`,
      type: 'stepNode',
      position: { x: 100, y: 100 },
      data: {
        label: 'New Step',
        stepType,
        color: '#3b82f6',
        dependencies: []
      }
    }

    setNodes((nds) => [...nds, newStep])
  }, [setNodes, readonly])

  // Handle updating a node's data
  const handleNodeUpdate = useCallback((nodeId: string, newData: Partial<Node['data']>) => {
    setNodes((nds) =>
      nds.map((node) =>
        node.id === nodeId
          ? { ...node, data: { ...node.data, ...newData } }
          : node
      )
    )
  }, [setNodes])

  // Handle saving workflow
  const handleSave = useCallback(() => {
    if (!workflow || !onSave) return

    // Convert nodes and edges back to workflow format
    const updatedSteps = nodes.map((node) => {
      // Find dependencies from edges
      const dependencies = edges
        .filter(edge => edge.target === node.id)
        .map(edge => edge.source)

      return {
        name: node.id,
        type: node.data.stepType as string,
        position: node.position,
        visual: {
          color: node.data.color as string,
          icon: node.data.icon as string
        },
        dependencies: dependencies.length > 0 ? dependencies : undefined
      }
    })

    const updatedWorkflow: WorkflowData = {
      ...workflow,
      steps: updatedSteps
    }

    onSave(updatedWorkflow)
  }, [workflow, nodes, edges, onSave])

  return (
    <div style={{
      width: '100%',
      height: '600px',
      display: 'flex',
      background: 'var(--theme-bg)',
      borderRadius: '4px',
      border: '1px solid var(--theme-elevation-100)'
    }}>
      {/* Main canvas area */}
      <div style={{
        flex: selectedNode ? '1 1 70%' : '1 1 100%',
        transition: 'flex 0.3s ease'
      }}>
        <ReactFlow
          nodes={nodes}
          edges={edges}
          onNodesChange={onNodesChange}
          onEdgesChange={onEdgesChange}
          onConnect={onConnect}
          onNodeClick={onNodeClick}
          nodeTypes={nodeTypes}
          fitView
          attributionPosition="top-right"
        >
          <Controls />
          <MiniMap />
          <Background variant={BackgroundVariant.Dots} gap={12} size={1} />

          {!readonly && (
            <Panel position="top-left">
              <WorkflowToolbar
                availableStepTypes={availableStepTypes}
                onAddStep={onAddStep}
                onSave={handleSave}
              />
            </Panel>
          )}
        </ReactFlow>
      </div>

      {/* Side panel for step configuration */}
      {selectedNode && !readonly && (
        <div style={{
          flex: '0 0 30%',
          borderLeft: '1px solid var(--theme-elevation-100)',
          background: 'var(--theme-elevation-0)',
          display: 'flex',
          flexDirection: 'column'
        }}>
          <StepConfigurationForm
            selectedNode={selectedNode}
            availableStepTypes={availableStepTypes}
            availableSteps={nodes.map(node => node.id)}
            onNodeUpdate={handleNodeUpdate}
            onClose={() => setSelectedNode(null)}
          />
        </div>
      )}
    </div>
  )
}
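A minimal usage sketch for the builder component above; the sample workflow and step-type list are made-up values, while the props match the WorkflowBuilderProps interface in the new file:

```tsx
// Hedged usage sketch for WorkflowBuilder with illustrative sample data.
import React from 'react'
import { WorkflowBuilder } from './WorkflowBuilder.js'

export const ExampleBuilderView: React.FC = () => (
  <WorkflowBuilder
    workflow={{
      id: 'demo',
      name: 'Demo Workflow',
      steps: [
        { name: 'fetch-data', type: 'http-request-step' },
        { name: 'process-data', type: 'create-document', dependencies: ['fetch-data'] }
      ]
    }}
    availableStepTypes={[
      { slug: 'http-request-step' },
      { slug: 'create-document' }
    ]}
    onSave={(updated) => console.log('updated steps', updated.steps)}
  />
)
```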
src/components/WorkflowBuilder/WorkflowToolbar.tsx (new file, 118 lines)
@@ -0,0 +1,118 @@
'use client'

import React from 'react'

interface AvailableStepType {
  slug: string
  label?: string
  inputSchema?: any[]
  outputSchema?: any[]
}

interface WorkflowToolbarProps {
  availableStepTypes: AvailableStepType[]
  onAddStep: (stepType: string) => void
  onSave: () => void
}

export const WorkflowToolbar: React.FC<WorkflowToolbarProps> = ({
  availableStepTypes,
  onAddStep,
  onSave
}) => {
  const getStepTypeLabel = (stepType: AvailableStepType) => {
    return stepType.label || stepType.slug.replace(/-/g, ' ').replace(/\b\w/g, l => l.toUpperCase())
  }

  const getStepTypeIcon = (stepType: AvailableStepType) => {
    // Simple icon mapping based on step type
    switch (stepType.slug) {
      case 'http-request-step':
        return '🌐'
      case 'create-document-step':
        return '📄'
      case 'read-document-step':
        return '👁️'
      case 'update-document-step':
        return '✏️'
      case 'delete-document-step':
        return '🗑️'
      case 'send-email-step':
        return '📧'
      default:
        return '⚡'
    }
  }

  return (
    <div style={{
      background: 'var(--theme-elevation-0)',
      padding: '12px',
      borderRadius: '4px',
      border: '1px solid var(--theme-elevation-150)',
      minWidth: '200px'
    }}>
      <h4 style={{
        margin: '0 0 12px 0',
        fontSize: '14px',
        fontWeight: '600',
        color: 'var(--theme-text)'
      }}>
        Add Step
      </h4>

      <div style={{ marginBottom: '16px' }}>
        {availableStepTypes.map((stepType) => (
          <button
            key={stepType.slug}
            onClick={() => onAddStep(stepType.slug)}
            style={{
              display: 'block',
              width: '100%',
              padding: '8px 12px',
              margin: '4px 0',
              background: 'var(--theme-elevation-50)',
              border: '1px solid var(--theme-elevation-100)',
              borderRadius: '4px',
              cursor: 'pointer',
              textAlign: 'left',
              fontSize: '12px',
              color: 'var(--theme-text)',
              transition: 'background-color 0.2s'
            }}
            onMouseEnter={(e) => {
              e.currentTarget.style.backgroundColor = 'var(--theme-elevation-100)'
            }}
            onMouseLeave={(e) => {
              e.currentTarget.style.backgroundColor = 'var(--theme-elevation-50)'
            }}
          >
            <span style={{ marginRight: '8px' }}>
              {getStepTypeIcon(stepType)}
            </span>
            {getStepTypeLabel(stepType)}
          </button>
        ))}
      </div>

      <div style={{ borderTop: '1px solid var(--theme-elevation-100)', paddingTop: '12px' }}>
        <button
          onClick={onSave}
          style={{
            width: '100%',
            padding: '8px 16px',
            background: 'var(--theme-success-500)',
            color: 'var(--theme-base-0)',
            border: 'none',
            borderRadius: '4px',
            cursor: 'pointer',
            fontSize: '12px',
            fontWeight: '500'
          }}
        >
          💾 Save Workflow
        </button>
      </div>
    </div>
  )
}
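For context, a minimal sketch of how a parent canvas component might render this toolbar. The step catalog and handler bodies below are illustrative assumptions, not part of this changeset.

// Illustrative only: wiring WorkflowToolbar into an assumed parent component.
import React from 'react'
import { WorkflowToolbar } from './WorkflowToolbar.js'

const ExampleCanvas: React.FC = () => {
  // Assumed step catalog; real slugs come from the registered step tasks.
  const availableStepTypes = [
    { slug: 'http-request-step' },
    { slug: 'send-email-step' },
  ]

  return (
    <WorkflowToolbar
      availableStepTypes={availableStepTypes}
      onAddStep={(stepType) => console.log('add step of type', stepType)}
      onSave={() => console.log('persist workflow')}
    />
  )
}

export default ExampleCanvas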
src/components/WorkflowBuilder/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
export { WorkflowBuilder } from './WorkflowBuilder.js'
export { WorkflowToolbar } from './WorkflowToolbar.js'
export { StepConfigurationForm } from './StepConfigurationForm.js'
export { StepNode } from './nodes/StepNode.js'
src/components/WorkflowBuilder/nodes/StepNode.tsx (new file, 157 lines)
@@ -0,0 +1,157 @@
'use client'

import React, { memo } from 'react'
import { Handle, Position, NodeProps } from '@xyflow/react'

interface StepNodeData {
  label: string
  stepType: string
  color?: string
  icon?: string
  dependencies?: string[]
}

export const StepNode: React.FC<NodeProps> = memo(({ data, selected }) => {
  const { label, stepType, color = '#3b82f6', icon, dependencies = [] } = data as unknown as StepNodeData

  const getStepTypeIcon = (type: string) => {
    // Return icon from data or default based on type
    if (icon) return icon

    switch (type) {
      case 'http-request-step':
        return '🌐'
      case 'create-document-step':
        return '📄'
      case 'read-document-step':
        return '👁️'
      case 'update-document-step':
        return '✏️'
      case 'delete-document-step':
        return '🗑️'
      case 'send-email-step':
        return '📧'
      default:
        return '⚡'
    }
  }

  const getStepTypeLabel = (type: string) => {
    return type.replace(/-/g, ' ').replace(/\b\w/g, l => l.toUpperCase())
  }

  return (
    <div
      style={{
        background: color,
        color: 'white',
        borderRadius: '8px',
        padding: '12px 16px',
        minWidth: '150px',
        border: selected ? '2px solid #1e40af' : '1px solid rgba(255, 255, 255, 0.2)',
        boxShadow: selected
          ? '0 8px 25px rgba(0, 0, 0, 0.15)'
          : '0 4px 15px rgba(0, 0, 0, 0.1)',
        transition: 'all 0.2s ease',
        cursor: 'pointer',
        position: 'relative'
      }}
      title="Click to configure this step"
    >
      {/* Input Handle - only show if this step has dependencies */}
      {dependencies.length > 0 && (
        <Handle
          type="target"
          position={Position.Top}
          style={{
            background: '#fff',
            border: '2px solid #3b82f6',
            width: '10px',
            height: '10px'
          }}
        />
      )}

      <div style={{
        display: 'flex',
        alignItems: 'center',
        gap: '8px',
        marginBottom: '4px'
      }}>
        <span style={{ fontSize: '16px' }}>
          {getStepTypeIcon(stepType)}
        </span>
        <div>
          <div style={{
            fontWeight: '600',
            fontSize: '14px',
            lineHeight: '1.2'
          }}>
            {label}
          </div>
        </div>
      </div>

      <div style={{
        fontSize: '11px',
        opacity: 0.9,
        fontWeight: '400'
      }}>
        {getStepTypeLabel(stepType)}
      </div>

      {/* Status indicator for dependencies */}
      {dependencies.length > 0 && (
        <div style={{
          position: 'absolute',
          top: '4px',
          right: '20px',
          background: 'rgba(255, 255, 255, 0.2)',
          borderRadius: '50%',
          width: '16px',
          height: '16px',
          display: 'flex',
          alignItems: 'center',
          justifyContent: 'center',
          fontSize: '8px',
          fontWeight: 'bold',
          pointerEvents: 'none' // Allow clicks to pass through to parent
        }}>
          {dependencies.length}
        </div>
      )}

      {/* Configuration indicator */}
      <div style={{
        position: 'absolute',
        top: '4px',
        right: '4px',
        background: 'rgba(255, 255, 255, 0.3)',
        borderRadius: '50%',
        width: '16px',
        height: '16px',
        display: 'flex',
        alignItems: 'center',
        justifyContent: 'center',
        fontSize: '10px',
        pointerEvents: 'none' // Allow clicks to pass through to parent
      }}>
        ⚙️
      </div>

      {/* Output Handle - always show for potential connections */}
      <Handle
        type="source"
        position={Position.Bottom}
        style={{
          background: '#fff',
          border: '2px solid #3b82f6',
          width: '10px',
          height: '10px'
        }}
      />
    </div>
  )
})

StepNode.displayName = 'StepNode'
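A custom node like this is normally registered with @xyflow/react through its nodeTypes map. The sketch below shows that wiring under assumed node ids and a made-up parent component; only the nodeTypes mechanism itself is standard React Flow usage.

// Illustrative only: registering StepNode as a custom React Flow node type.
'use client'

import React from 'react'
import { ReactFlow } from '@xyflow/react'
import { StepNode } from './nodes/StepNode.js'

const nodeTypes = { stepNode: StepNode } // key must match each node's `type`

const ExampleFlow: React.FC = () => {
  // Assumed nodes/edges for illustration.
  const nodes = [
    { id: 'fetch', type: 'stepNode', position: { x: 0, y: 0 }, data: { label: 'Fetch', stepType: 'http-request-step' } },
    { id: 'notify', type: 'stepNode', position: { x: 0, y: 120 }, data: { label: 'Notify', stepType: 'send-email-step', dependencies: ['fetch'] } },
  ]
  const edges = [{ id: 'fetch-notify', source: 'fetch', target: 'notify' }]

  return <ReactFlow nodes={nodes} edges={edges} nodeTypes={nodeTypes} fitView />
}

export default ExampleFlow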
@@ -1,231 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import React, { useState, useEffect } from 'react'
|
||||
import { Button } from '@payloadcms/ui'
|
||||
|
||||
interface WorkflowRun {
|
||||
id: string
|
||||
status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'
|
||||
startedAt: string
|
||||
completedAt?: string
|
||||
error?: string
|
||||
triggeredBy: string
|
||||
}
|
||||
|
||||
interface WorkflowExecutionStatusProps {
|
||||
workflowId: string | number
|
||||
}
|
||||
|
||||
export const WorkflowExecutionStatus: React.FC<WorkflowExecutionStatusProps> = ({ workflowId }) => {
|
||||
const [runs, setRuns] = useState<WorkflowRun[]>([])
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [expanded, setExpanded] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const fetchRecentRuns = async () => {
|
||||
try {
|
||||
const response = await fetch(`/api/workflow-runs?where[workflow][equals]=${workflowId}&limit=5&sort=-startedAt`)
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setRuns(data.docs || [])
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to fetch workflow runs:', error)
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
fetchRecentRuns()
|
||||
}, [workflowId])
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div style={{ padding: '16px', color: '#6B7280' }}>
|
||||
Loading execution history...
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (runs.length === 0) {
|
||||
return (
|
||||
<div style={{
|
||||
padding: '16px',
|
||||
backgroundColor: '#F9FAFB',
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '8px',
|
||||
color: '#6B7280',
|
||||
textAlign: 'center'
|
||||
}}>
|
||||
📋 No execution history yet
|
||||
<br />
|
||||
<small>This workflow hasn't been triggered yet.</small>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const getStatusIcon = (status: string) => {
|
||||
switch (status) {
|
||||
case 'pending': return '⏳'
|
||||
case 'running': return '🔄'
|
||||
case 'completed': return '✅'
|
||||
case 'failed': return '❌'
|
||||
case 'cancelled': return '⏹️'
|
||||
default: return '❓'
|
||||
}
|
||||
}
|
||||
|
||||
const getStatusColor = (status: string) => {
|
||||
switch (status) {
|
||||
case 'pending': return '#6B7280'
|
||||
case 'running': return '#3B82F6'
|
||||
case 'completed': return '#10B981'
|
||||
case 'failed': return '#EF4444'
|
||||
case 'cancelled': return '#F59E0B'
|
||||
default: return '#6B7280'
|
||||
}
|
||||
}
|
||||
|
||||
const formatDate = (dateString: string) => {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffMs = now.getTime() - date.getTime()
|
||||
|
||||
if (diffMs < 60000) { // Less than 1 minute
|
||||
return 'Just now'
|
||||
} else if (diffMs < 3600000) { // Less than 1 hour
|
||||
return `${Math.floor(diffMs / 60000)} min ago`
|
||||
} else if (diffMs < 86400000) { // Less than 1 day
|
||||
return `${Math.floor(diffMs / 3600000)} hrs ago`
|
||||
} else {
|
||||
return date.toLocaleDateString()
|
||||
}
|
||||
}
|
||||
|
||||
const getDuration = (startedAt: string, completedAt?: string) => {
|
||||
const start = new Date(startedAt)
|
||||
const end = completedAt ? new Date(completedAt) : new Date()
|
||||
const diffMs = end.getTime() - start.getTime()
|
||||
|
||||
if (diffMs < 1000) return '<1s'
|
||||
if (diffMs < 60000) return `${Math.floor(diffMs / 1000)}s`
|
||||
if (diffMs < 3600000) return `${Math.floor(diffMs / 60000)}m ${Math.floor((diffMs % 60000) / 1000)}s`
|
||||
return `${Math.floor(diffMs / 3600000)}h ${Math.floor((diffMs % 3600000) / 60000)}m`
|
||||
}
|
||||
|
||||
const recentRun = runs[0]
|
||||
const recentStatus = getStatusIcon(recentRun.status)
|
||||
const recentColor = getStatusColor(recentRun.status)
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '8px',
|
||||
backgroundColor: '#FAFAFA'
|
||||
}}>
|
||||
{/* Summary Header */}
|
||||
<div style={{
|
||||
padding: '16px',
|
||||
borderBottom: expanded ? '1px solid #E5E7EB' : 'none',
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '12px' }}>
|
||||
<span style={{ fontSize: '20px' }}>{recentStatus}</span>
|
||||
<div>
|
||||
<div style={{ fontWeight: '600', color: recentColor }}>
|
||||
Last run: {recentRun.status}
|
||||
</div>
|
||||
<div style={{ fontSize: '13px', color: '#6B7280' }}>
|
||||
{formatDate(recentRun.startedAt)} • Duration: {getDuration(recentRun.startedAt, recentRun.completedAt)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
{expanded ? 'Hide' : 'Show'} History ({runs.length})
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Detailed History */}
|
||||
{expanded && (
|
||||
<div style={{ padding: '16px' }}>
|
||||
<h4 style={{ margin: '0 0 12px 0', fontSize: '14px', fontWeight: '600' }}>
|
||||
Recent Executions
|
||||
</h4>
|
||||
|
||||
{runs.map((run, index) => (
|
||||
<div
|
||||
key={run.id}
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
padding: '8px 12px',
|
||||
marginBottom: index < runs.length - 1 ? '8px' : '0',
|
||||
backgroundColor: 'white',
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '6px'
|
||||
}}
|
||||
>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '10px' }}>
|
||||
<span style={{ fontSize: '16px' }}>
|
||||
{getStatusIcon(run.status)}
|
||||
</span>
|
||||
|
||||
<div>
|
||||
<div style={{
|
||||
fontSize: '13px',
|
||||
fontWeight: '500',
|
||||
color: getStatusColor(run.status)
|
||||
}}>
|
||||
{run.status.charAt(0).toUpperCase() + run.status.slice(1)}
|
||||
</div>
|
||||
<div style={{ fontSize: '12px', color: '#6B7280' }}>
|
||||
{formatDate(run.startedAt)} • {run.triggeredBy}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style={{
|
||||
fontSize: '12px',
|
||||
color: '#6B7280',
|
||||
textAlign: 'right'
|
||||
}}>
|
||||
<div>
|
||||
{getDuration(run.startedAt, run.completedAt)}
|
||||
</div>
|
||||
{run.error && (
|
||||
<div style={{ color: '#EF4444', marginTop: '2px' }}>
|
||||
Error
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
||||
<div style={{
|
||||
marginTop: '12px',
|
||||
textAlign: 'center'
|
||||
}}>
|
||||
<Button
|
||||
onClick={() => {
|
||||
// Navigate to workflow runs filtered by this workflow
|
||||
window.location.href = `/admin/collections/workflow-runs?where[workflow][equals]=${workflowId}`
|
||||
}}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
View All Runs →
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -5,93 +5,45 @@ import type { Payload, PayloadRequest } from 'payload'
export type PayloadWorkflow = {
  id: number
  name: string
  description?: string | null
  description?: null | string
  triggers?: Array<{
    type?: string | null
    condition?: string | null
    type?: null | string
    condition?: null | string
    parameters?: {
      collectionSlug?: string | null
      operation?: string | null
      webhookPath?: string | null
      cronExpression?: string | null
      timezone?: string | null
      global?: string | null
      globalOperation?: string | null
      collectionSlug?: null | string
      operation?: null | string
      global?: null | string
      globalOperation?: null | string
      [key: string]: unknown
    } | null
    [key: string]: unknown
  }> | null
  steps?: Array<{
    step?: string | null
    name?: string | null
    type?: null | string
    name?: null | string
    input?: unknown
    dependencies?: string[] | null
    condition?: string | null
    dependencies?: null | string[]
    condition?: null | string
    [key: string]: unknown
  }> | null
  [key: string]: unknown
}

import { JSONPath } from 'jsonpath-plus'
import Handlebars from 'handlebars'

// Helper type to extract workflow step data from the generated types
export type WorkflowStep = NonNullable<PayloadWorkflow['steps']>[0] & {
export type WorkflowStep = {
  name: string // Ensure name is always present for our execution logic
}
} & NonNullable<PayloadWorkflow['steps']>[0]

// Helper type to extract workflow trigger data from the generated types
export type WorkflowTrigger = NonNullable<PayloadWorkflow['triggers']>[0] & {
// Helper type to extract workflow trigger data from the generated types
export type WorkflowTrigger = {
  type: string // Ensure type is always present for our execution logic
}
} & NonNullable<PayloadWorkflow['triggers']>[0]

export interface ExecutionContext {
  steps: Record<string, {
    error?: string
    input: unknown
    output: unknown
    state: 'failed' | 'pending' | 'running' | 'succeeded'
    _startTime?: number
    executionInfo?: {
      completed: boolean
      success: boolean
      executedAt: string
      duration: number
      failureReason?: string
    }
    errorDetails?: {
      stepId: string
      errorType: string
      duration: number
      attempts: number
      finalError: string
      context: {
        url?: string
        method?: string
        timeout?: number
        statusCode?: number
        headers?: Record<string, string>
        [key: string]: any
      }
      timestamp: string
    }
  }>
  trigger: {
    collection?: string
    data?: unknown
    doc?: unknown
    headers?: Record<string, string>
    operation?: string
    path?: string
    previousDoc?: unknown
    req?: PayloadRequest
    triggeredAt?: string
    type: string
    user?: {
      collection?: string
      email?: string
      id?: string
    }
  }
  steps: Record<string, any>
  trigger: Record<string, any>
}

export class WorkflowExecutor {
@@ -100,6 +52,82 @@ export class WorkflowExecutor {
    private logger: Payload['logger']
  ) {}

  /**
   * Convert string values to appropriate types based on common patterns
   */
  private convertValueType(value: unknown, key: string): unknown {
    if (typeof value !== 'string') {
      return value
    }

    // Type conversion patterns based on field names and values
    const numericFields = ['timeout', 'retries', 'delay', 'port', 'limit', 'offset', 'count', 'max', 'min']
    const booleanFields = ['enabled', 'required', 'active', 'success', 'failed', 'complete']

    // Convert numeric fields
    if (numericFields.some(field => key.toLowerCase().includes(field))) {
      const numValue = Number(value)
      if (!isNaN(numValue)) {
        this.logger.debug({
          key,
          originalValue: value,
          convertedValue: numValue
        }, 'Auto-converted field to number')
        return numValue
      }
    }

    // Convert boolean fields
    if (booleanFields.some(field => key.toLowerCase().includes(field))) {
      if (value === 'true') return true
      if (value === 'false') return false
    }

    // Try to parse as number if it looks numeric
    if (/^\d+$/.test(value)) {
      const numValue = parseInt(value, 10)
      this.logger.debug({
        key,
        originalValue: value,
        convertedValue: numValue
      }, 'Auto-converted numeric string to number')
      return numValue
    }

    // Try to parse as float if it looks like a decimal
    if (/^\d+\.\d+$/.test(value)) {
      const floatValue = parseFloat(value)
      this.logger.debug({
        key,
        originalValue: value,
        convertedValue: floatValue
      }, 'Auto-converted decimal string to number')
      return floatValue
    }

    // Return as string if no conversion applies
    return value
  }

  /**
   * Classifies error types based on error messages
   */
  private classifyErrorType(errorMessage: string): string {
    if (errorMessage.includes('timeout') || errorMessage.includes('ETIMEDOUT')) {
      return 'timeout'
    }
    if (errorMessage.includes('ENOTFOUND') || errorMessage.includes('getaddrinfo')) {
      return 'dns'
    }
    if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('ECONNRESET')) {
      return 'connection'
    }
    if (errorMessage.includes('network') || errorMessage.includes('fetch')) {
      return 'network'
    }
    return 'unknown'
  }

  /**
   * Evaluate a step condition using JSONPath
   */
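Because the heuristics above key off field names, the same string can resolve to different types depending on what it is assigned to. A few illustrative input/output pairs, written as comments since convertValueType is private and is only reached through resolveStepInput:

// Illustrative expectations for the heuristic conversion (not a test from this changeset).
//
// value      key         result
// '5000'     'timeout'   5000       (name matches a numeric field pattern)
// 'true'     'enabled'   true       (name matches a boolean field pattern)
// '42'       'retries'   42         (numeric field name)
// '3.14'     'ratio'     3.14       (decimal-looking string, no name match needed)
// 'hello'    'subject'   'hello'    (no rule applies, stays a string)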
@@ -191,22 +219,14 @@
    }

    // Move taskSlug declaration outside try block so it's accessible in catch
    const taskSlug = step.step // Use the 'step' field for task type
    const taskSlug = step.type as string

    try {
      // Extract input data from step - PayloadCMS flattens inputSchema fields to step level
      const inputFields: Record<string, unknown> = {}

      // Get all fields except the core step fields
      const coreFields = ['step', 'name', 'dependencies', 'condition']
      for (const [key, value] of Object.entries(step)) {
        if (!coreFields.includes(key)) {
          inputFields[key] = value
        }
      }

      // Resolve input data using JSONPath
      const resolvedInput = this.resolveStepInput(inputFields, context)
      // Get input configuration from the step
      const inputConfig = (step.input as Record<string, unknown>) || {}

      // Resolve input data using Handlebars templates
      const resolvedInput = this.resolveStepInput(inputConfig, context, taskSlug)
      context.steps[stepName].input = resolvedInput

      if (!taskSlug) {
@@ -232,8 +252,8 @@
        id: job.id,
        req
      })

      this.logger.info({

      this.logger.info({
        jobId: job.id,
        runResult: runResults,
        hasResult: !!runResults
@@ -278,7 +298,7 @@
      if (!errorMessage && taskStatus?.output?.error) {
        errorMessage = taskStatus.output.error
      }

      // Check if task handler returned with state='failed'
      if (!errorMessage && taskStatus?.state === 'failed') {
        errorMessage = 'Task handler returned a failed state'
@@ -339,7 +359,7 @@
      const errorDetails = this.extractErrorDetailsFromJob(completedJob, context.steps[stepName], stepName)
      if (errorDetails) {
        context.steps[stepName].errorDetails = errorDetails

        this.logger.info({
          stepName,
          errorType: errorDetails.errorType,
@@ -402,6 +422,69 @@
    }
  }

  /**
   * Extracts detailed error information from job logs and input
   */
  private extractErrorDetailsFromJob(job: any, stepContext: any, stepName: string) {
    try {
      // Get error information from multiple sources
      const input = stepContext.input || {}
      const logs = job.log || []
      const latestLog = logs[logs.length - 1]

      // Extract error message from job error or log
      const errorMessage = job.error?.message || latestLog?.error?.message || 'Unknown error'

      // For timeout scenarios, check if it's a timeout based on duration and timeout setting
      let errorType = this.classifyErrorType(errorMessage)

      // Special handling for HTTP timeouts - if task failed and duration exceeds timeout, it's likely a timeout
      if (errorType === 'unknown' && input.timeout && stepContext.executionInfo?.duration) {
        const timeoutMs = parseInt(input.timeout) || 30000
        const actualDuration = stepContext.executionInfo.duration

        // If execution duration is close to or exceeds timeout, classify as timeout
        if (actualDuration >= (timeoutMs * 0.9)) { // 90% of timeout threshold
          errorType = 'timeout'
          this.logger.debug({
            timeoutMs,
            actualDuration,
            stepName
          }, 'Classified error as timeout based on duration analysis')
        }
      }

      // Calculate duration from execution info if available
      const duration = stepContext.executionInfo?.duration || 0

      // Extract attempt count from logs
      const attempts = job.totalTried || 1

      return {
        stepId: `${stepName}-${Date.now()}`,
        errorType,
        duration,
        attempts,
        finalError: errorMessage,
        context: {
          url: input.url,
          method: input.method,
          timeout: input.timeout,
          statusCode: latestLog?.output?.status,
          headers: input.headers
        },
        timestamp: new Date().toISOString()
      }
    } catch (error) {
      this.logger.warn({
        error: error instanceof Error ? error.message : 'Unknown error',
        stepName
      }, 'Failed to extract error details from job')
      return null
    }
  }

  /**
   * Resolve step execution order based on dependencies
   */
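For reference, the object returned above lands on context.steps[stepName].errorDetails. An illustrative value for a timed-out HTTP step might look like the following; every field value here is invented for the example.

// Example shape only; values are invented.
const exampleErrorDetails = {
  stepId: 'fetch-orders-1712345678901',
  errorType: 'timeout',
  duration: 30012,
  attempts: 1,
  finalError: 'Request timed out',
  context: { url: 'https://example.com/api/orders', method: 'GET', timeout: 30000 },
  timestamp: '2025-01-01T00:00:00.000Z'
}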
@@ -459,52 +542,56 @@
    return executionBatches
  }

  /**
   * Resolve step input using JSONPath expressions
   * Resolve step input using Handlebars templates with automatic type conversion
   */
  private resolveStepInput(config: Record<string, unknown>, context: ExecutionContext): Record<string, unknown> {
  private resolveStepInput(config: Record<string, unknown>, context: ExecutionContext, stepType?: string): Record<string, unknown> {
    const resolved: Record<string, unknown> = {}

    this.logger.debug({
      configKeys: Object.keys(config),
      contextSteps: Object.keys(context.steps),
      triggerType: context.trigger?.type
    }, 'Starting step input resolution')
      triggerType: context.trigger?.type,
      stepType
    }, 'Starting step input resolution with Handlebars')

    for (const [key, value] of Object.entries(config)) {
      if (typeof value === 'string' && value.startsWith('$')) {
        // This is a JSONPath expression
        this.logger.debug({
          key,
          jsonPath: value,
          availableSteps: Object.keys(context.steps),
          hasTriggerData: !!context.trigger?.data,
          hasTriggerDoc: !!context.trigger?.doc
        }, 'Resolving JSONPath expression')

        try {
          const result = JSONPath({
            json: context,
            path: value,
            wrap: false
          })

      if (typeof value === 'string') {
        // Check if the string contains Handlebars templates
        if (value.includes('{{') && value.includes('}}')) {
          this.logger.debug({
            key,
            jsonPath: value,
            result: JSON.stringify(result).substring(0, 200),
            resultType: Array.isArray(result) ? 'array' : typeof result
          }, 'JSONPath resolved successfully')

          resolved[key] = result
        } catch (error) {
          this.logger.warn({
            error: error instanceof Error ? error.message : 'Unknown error',
            key,
            path: value,
            contextSnapshot: JSON.stringify(context).substring(0, 500)
          }, 'Failed to resolve JSONPath')
          resolved[key] = value // Keep original value if resolution fails
            template: value,
            availableSteps: Object.keys(context.steps),
            hasTriggerData: !!context.trigger?.data,
            hasTriggerDoc: !!context.trigger?.doc
          }, 'Processing Handlebars template')

          try {
            const template = Handlebars.compile(value)
            const result = template(context)

            this.logger.debug({
              key,
              template: value,
              result: JSON.stringify(result).substring(0, 200),
              resultType: typeof result
            }, 'Handlebars template resolved successfully')

            resolved[key] = this.convertValueType(result, key)
          } catch (error) {
            this.logger.warn({
              error: error instanceof Error ? error.message : 'Unknown error',
              key,
              template: value,
              contextSnapshot: JSON.stringify(context).substring(0, 500)
            }, 'Failed to resolve Handlebars template')
            resolved[key] = value // Keep original value if resolution fails
          }
        } else {
          // Regular string, apply type conversion
          resolved[key] = this.convertValueType(value, key)
        }
      } else if (typeof value === 'object' && value !== null) {
        // Recursively resolve nested objects
@@ -512,8 +599,8 @@
          key,
          nestedKeys: Object.keys(value as Record<string, unknown>)
        }, 'Recursively resolving nested object')

        resolved[key] = this.resolveStepInput(value as Record<string, unknown>, context)

        resolved[key] = this.resolveStepInput(value as Record<string, unknown>, context, stepType)
      } else {
        // Keep literal values as-is
        resolved[key] = value
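A small sketch of what this resolution does to a configured step input, assuming a collection trigger document and a previous step's output in the context. The field names and values are illustrative only; the mechanism (Handlebars.compile per templated string, then convertValueType) mirrors the code above.

// Illustrative only: resolving a step's input config against an execution context.
import Handlebars from 'handlebars'

const context = {
  trigger: { type: 'collection', doc: { id: 'abc123', status: 'published' } },
  steps: { fetchUser: { output: { email: 'user@example.com' } } }
}

const inputConfig = {
  url: 'https://example.com/api/posts/{{trigger.doc.id}}',
  to: '{{steps.fetchUser.output.email}}',
  timeout: '5000' // convertValueType would turn this into the number 5000
}

// Each templated string is compiled and rendered the same way resolveStepInput does:
const url = Handlebars.compile(inputConfig.url)(context) // 'https://example.com/api/posts/abc123'
const to = Handlebars.compile(inputConfig.to)(context)   // 'user@example.com'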
@@ -533,22 +620,22 @@
   */
  private safeSerialize(obj: unknown): unknown {
    const seen = new WeakSet()

    const serialize = (value: unknown): unknown => {
      if (value === null || typeof value !== 'object') {
        return value
      }

      if (seen.has(value as object)) {

      if (seen.has(value)) {
        return '[Circular Reference]'
      }

      seen.add(value as object)

      seen.add(value)

      if (Array.isArray(value)) {
        return value.map(serialize)
      }

      const result: Record<string, unknown> = {}
      for (const [key, val] of Object.entries(value as Record<string, unknown>)) {
        try {
@@ -562,94 +649,13 @@
          result[key] = '[Non-serializable]'
        }
      }

      return result
    }

    return serialize(obj)
  }

  /**
   * Extracts detailed error information from job logs and input
   */
  private extractErrorDetailsFromJob(job: any, stepContext: any, stepName: string) {
    try {
      // Get error information from multiple sources
      const input = stepContext.input || {}
      const logs = job.log || []
      const latestLog = logs[logs.length - 1]

      // Extract error message from job error or log
      const errorMessage = job.error?.message || latestLog?.error?.message || 'Unknown error'

      // For timeout scenarios, check if it's a timeout based on duration and timeout setting
      let errorType = this.classifyErrorType(errorMessage)

      // Special handling for HTTP timeouts - if task failed and duration exceeds timeout, it's likely a timeout
      if (errorType === 'unknown' && input.timeout && stepContext.executionInfo?.duration) {
        const timeoutMs = parseInt(input.timeout) || 30000
        const actualDuration = stepContext.executionInfo.duration

        // If execution duration is close to or exceeds timeout, classify as timeout
        if (actualDuration >= (timeoutMs * 0.9)) { // 90% of timeout threshold
          errorType = 'timeout'
          this.logger.debug({
            timeoutMs,
            actualDuration,
            stepName
          }, 'Classified error as timeout based on duration analysis')
        }
      }

      // Calculate duration from execution info if available
      const duration = stepContext.executionInfo?.duration || 0

      // Extract attempt count from logs
      const attempts = job.totalTried || 1

      return {
        stepId: `${stepName}-${Date.now()}`,
        errorType,
        duration,
        attempts,
        finalError: errorMessage,
        context: {
          url: input.url,
          method: input.method,
          timeout: input.timeout,
          statusCode: latestLog?.output?.status,
          headers: input.headers
        },
        timestamp: new Date().toISOString()
      }
    } catch (error) {
      this.logger.warn({
        error: error instanceof Error ? error.message : 'Unknown error',
        stepName
      }, 'Failed to extract error details from job')
      return null
    }
  }

  /**
   * Classifies error types based on error messages
   */
  private classifyErrorType(errorMessage: string): string {
    if (errorMessage.includes('timeout') || errorMessage.includes('ETIMEDOUT')) {
      return 'timeout'
    }
    if (errorMessage.includes('ENOTFOUND') || errorMessage.includes('getaddrinfo')) {
      return 'dns'
    }
    if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('ECONNRESET')) {
      return 'connection'
    }
    if (errorMessage.includes('network') || errorMessage.includes('fetch')) {
      return 'network'
    }
    return 'unknown'
  }

  /**
   * Update workflow run with current context
   */
@@ -683,7 +689,7 @@
  }

  /**
   * Evaluate a condition using JSONPath and comparison operators
   * Evaluate a condition using Handlebars templates and comparison operators
   */
  public evaluateCondition(condition: string, context: ExecutionContext): boolean {
    this.logger.debug({
@@ -697,16 +703,16 @@
    try {
      // Check if this is a comparison expression
      const comparisonMatch = condition.match(/^(.+?)\s*(==|!=|>|<|>=|<=)\s*(.+)$/)

      if (comparisonMatch) {
        const [, leftExpr, operator, rightExpr] = comparisonMatch

        // Evaluate left side (should be JSONPath)
        const leftValue = this.resolveJSONPathValue(leftExpr.trim(), context)

        // Parse right side (could be string, number, boolean, or JSONPath)
        const rightValue = this.parseConditionValue(rightExpr.trim(), context)

        // Evaluate left side (could be Handlebars template or JSONPath)
        const leftValue = this.resolveConditionValue(leftExpr.trim(), context)

        // Evaluate right side (could be Handlebars template, JSONPath, or literal)
        const rightValue = this.resolveConditionValue(rightExpr.trim(), context)

        this.logger.debug({
          condition,
          leftExpr: leftExpr.trim(),
@@ -717,32 +723,32 @@
          leftType: typeof leftValue,
          rightType: typeof rightValue
        }, 'Evaluating comparison condition')

        // Perform comparison
        let result: boolean
        switch (operator) {
          case '==':
            result = leftValue === rightValue
            break
          case '!=':
            result = leftValue !== rightValue
            break
          case '>':
            result = Number(leftValue) > Number(rightValue)
            break
          case '<':
            result = Number(leftValue) < Number(rightValue)
            break
          case '>=':
            result = Number(leftValue) >= Number(rightValue)
            break
          case '<=':
            result = Number(leftValue) <= Number(rightValue)
            break
          case '==':
            result = leftValue === rightValue
            break
          case '>':
            result = Number(leftValue) > Number(rightValue)
            break
          case '>=':
            result = Number(leftValue) >= Number(rightValue)
            break
          default:
            throw new Error(`Unknown comparison operator: ${operator}`)
        }

        this.logger.debug({
          condition,
          result,
@@ -750,22 +756,18 @@
          rightValue,
          operator
        }, 'Comparison condition evaluation completed')

        return result
      } else {
        // Treat as simple JSONPath boolean evaluation
        const result = JSONPath({
          json: context,
          path: condition,
          wrap: false
        })
        // Treat as template or JSONPath boolean evaluation
        const result = this.resolveConditionValue(condition, context)

        this.logger.debug({
          condition,
          result,
          resultType: Array.isArray(result) ? 'array' : typeof result,
          resultLength: Array.isArray(result) ? result.length : undefined
        }, 'JSONPath boolean evaluation result')
        }, 'Boolean evaluation result')

        // Handle different result types
        let finalResult: boolean
@@ -794,46 +796,40 @@
      return false
    }
  }

  /**
   * Resolve a JSONPath value from the context
   * Resolve a condition value using Handlebars templates or JSONPath
   */
  private resolveJSONPathValue(expr: string, context: ExecutionContext): any {
    if (expr.startsWith('$')) {
      const result = JSONPath({
        json: context,
        path: expr,
        wrap: false
      })
      // Return first result if array, otherwise the result itself
      return Array.isArray(result) && result.length > 0 ? result[0] : result
    }
    return expr
  }

  /**
   * Parse a condition value (string literal, number, boolean, or JSONPath)
   */
  private parseConditionValue(expr: string, context: ExecutionContext): any {
  private resolveConditionValue(expr: string, context: ExecutionContext): any {
    // Handle string literals
    if ((expr.startsWith('"') && expr.endsWith('"')) || (expr.startsWith("'") && expr.endsWith("'"))) {
      return expr.slice(1, -1) // Remove quotes
    }

    // Handle boolean literals
    if (expr === 'true') return true
    if (expr === 'false') return false

    if (expr === 'true') {return true}
    if (expr === 'false') {return false}

    // Handle number literals
    if (/^-?\d+(\.\d+)?$/.test(expr)) {
    if (/^-?\d+(?:\.\d+)?$/.test(expr)) {
      return Number(expr)
    }

    // Handle JSONPath expressions
    if (expr.startsWith('$')) {
      return this.resolveJSONPathValue(expr, context)

    // Handle Handlebars templates
    if (expr.includes('{{') && expr.includes('}}')) {
      try {
        const template = Handlebars.compile(expr)
        return template(context)
      } catch (error) {
        this.logger.warn({
          error: error instanceof Error ? error.message : 'Unknown error',
          expr
        }, 'Failed to resolve Handlebars condition')
        return false
      }
    }

    // Return as string if nothing else matches
    return expr
  }
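A short sketch of the kinds of condition strings this accepts after the change: either a comparison whose sides may be Handlebars templates or literals, or a single template expression coerced to a boolean. The document fields used here are illustrative, and executor/context are assumed to be an existing WorkflowExecutor and ExecutionContext.

// Illustrative only: condition strings a trigger or step might use.
executor.evaluateCondition("{{trigger.doc.status}} == 'published'", context)
executor.evaluateCondition('{{trigger.doc.total}} >= 100', context)

// Bare expression form: the resolved value is coerced to a boolean.
executor.evaluateCondition('{{trigger.doc.featured}}', context)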
@@ -979,163 +975,4 @@ export class WorkflowExecutor {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find and execute workflows triggered by a collection operation
|
||||
*/
|
||||
async executeTriggeredWorkflows(
|
||||
collection: string,
|
||||
operation: 'create' | 'delete' | 'read' | 'update',
|
||||
doc: unknown,
|
||||
previousDoc: unknown,
|
||||
req: PayloadRequest
|
||||
): Promise<void> {
|
||||
console.log('🚨 EXECUTOR: executeTriggeredWorkflows called!')
|
||||
console.log('🚨 EXECUTOR: Collection =', collection)
|
||||
console.log('🚨 EXECUTOR: Operation =', operation)
|
||||
console.log('🚨 EXECUTOR: Doc ID =', (doc as any)?.id)
|
||||
console.log('🚨 EXECUTOR: Has payload?', !!this.payload)
|
||||
console.log('🚨 EXECUTOR: Has logger?', !!this.logger)
|
||||
|
||||
this.logger.info({
|
||||
collection,
|
||||
operation,
|
||||
docId: (doc as any)?.id
|
||||
}, 'executeTriggeredWorkflows called')
|
||||
|
||||
try {
|
||||
// Find workflows with matching triggers
|
||||
const workflows = await this.payload.find({
|
||||
collection: 'workflows',
|
||||
depth: 2, // Include steps and triggers
|
||||
limit: 100,
|
||||
req
|
||||
})
|
||||
|
||||
this.logger.info({
|
||||
workflowCount: workflows.docs.length
|
||||
}, 'Found workflows to check')
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
// Check if this workflow has a matching trigger
|
||||
const triggers = workflow.triggers as Array<{
|
||||
condition?: string
|
||||
type: string
|
||||
parameters?: {
|
||||
collection?: string
|
||||
collectionSlug?: string
|
||||
operation?: string
|
||||
[key: string]: any
|
||||
}
|
||||
}>
|
||||
|
||||
this.logger.debug({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
triggerCount: triggers?.length || 0,
|
||||
triggers: triggers?.map(t => ({
|
||||
type: t.type,
|
||||
collection: t.parameters?.collection,
|
||||
collectionSlug: t.parameters?.collectionSlug,
|
||||
operation: t.parameters?.operation
|
||||
}))
|
||||
}, 'Checking workflow triggers')
|
||||
|
||||
const matchingTriggers = triggers?.filter(trigger =>
|
||||
trigger.type === 'collection-trigger' &&
|
||||
(trigger.parameters?.collection === collection || trigger.parameters?.collectionSlug === collection) &&
|
||||
trigger.parameters?.operation === operation
|
||||
) || []
|
||||
|
||||
this.logger.info({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
matchingTriggerCount: matchingTriggers.length,
|
||||
targetCollection: collection,
|
||||
targetOperation: operation
|
||||
}, 'Matching triggers found')
|
||||
|
||||
for (const trigger of matchingTriggers) {
|
||||
this.logger.info({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
triggerDetails: {
|
||||
type: trigger.type,
|
||||
collection: trigger.parameters?.collection,
|
||||
collectionSlug: trigger.parameters?.collectionSlug,
|
||||
operation: trigger.parameters?.operation,
|
||||
hasCondition: !!trigger.condition
|
||||
}
|
||||
}, 'Processing matching trigger - about to execute workflow')
|
||||
|
||||
// Create execution context for condition evaluation
|
||||
const context: ExecutionContext = {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'collection',
|
||||
collection,
|
||||
doc,
|
||||
operation,
|
||||
previousDoc,
|
||||
req
|
||||
}
|
||||
}
|
||||
|
||||
// Check trigger condition if present
|
||||
if (trigger.condition) {
|
||||
this.logger.debug({
|
||||
collection,
|
||||
operation,
|
||||
condition: trigger.condition,
|
||||
docId: (doc as any)?.id,
|
||||
docFields: doc ? Object.keys(doc) : [],
|
||||
previousDocId: (previousDoc as any)?.id,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Evaluating collection trigger condition')
|
||||
|
||||
const conditionMet = this.evaluateCondition(trigger.condition, context)
|
||||
|
||||
if (!conditionMet) {
|
||||
this.logger.info({
|
||||
collection,
|
||||
condition: trigger.condition,
|
||||
operation,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
docSnapshot: JSON.stringify(doc).substring(0, 200)
|
||||
}, 'Trigger condition not met, skipping workflow')
|
||||
continue
|
||||
}
|
||||
|
||||
this.logger.info({
|
||||
collection,
|
||||
condition: trigger.condition,
|
||||
operation,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
docSnapshot: JSON.stringify(doc).substring(0, 200)
|
||||
}, 'Trigger condition met')
|
||||
}
|
||||
|
||||
this.logger.info({
|
||||
collection,
|
||||
operation,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Triggering workflow')
|
||||
|
||||
// Execute the workflow
|
||||
await this.execute(workflow as PayloadWorkflow, context, req)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error({ error: error instanceof Error ? error.message : 'Unknown error' }, 'Workflow execution failed')
|
||||
this.logger.error({
|
||||
collection,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
operation
|
||||
}, 'Failed to execute triggered workflows')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
// Client-side components that may have CSS imports or PayloadCMS UI dependencies
|
||||
// These are separated to avoid CSS import errors during Node.js type generation
|
||||
|
||||
export { TriggerWorkflowButton } from '../components/TriggerWorkflowButton.js'
|
||||
export { StatusCell } from '../components/StatusCell.js'
|
||||
export { ErrorDisplay } from '../components/ErrorDisplay.js'
|
||||
export { WorkflowExecutionStatus } from '../components/WorkflowExecutionStatus.js'
|
||||
|
||||
// Future client components can be added here:
|
||||
// export { default as WorkflowDashboard } from '../components/WorkflowDashboard/index.js'
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
/**
|
||||
* Trigger builder helpers for creating custom triggers with less boilerplate
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { createTrigger, createTriggerField, webhookTrigger } from '@xtr-dev/payload-automation/helpers'
|
||||
*
|
||||
* // Simple trigger with array of fields
|
||||
* const myTrigger = createTrigger('my-trigger', [
|
||||
* { name: 'apiKey', type: 'text', required: true },
|
||||
* { name: 'timeout', type: 'number', defaultValue: 30 }
|
||||
* ])
|
||||
*
|
||||
* // Single field with virtual storage
|
||||
* const field = createTriggerField(
|
||||
* { name: 'webhookUrl', type: 'text', required: true },
|
||||
* 'my-trigger'
|
||||
* )
|
||||
*
|
||||
* // Webhook trigger preset
|
||||
* const orderWebhook = webhookTrigger('order-webhook')
|
||||
* ```
|
||||
*/
|
||||
|
||||
// Core helpers
|
||||
export {
|
||||
createTriggerField,
|
||||
createTrigger
|
||||
} from '../utils/trigger-helpers.js'
|
||||
|
||||
// Preset builders
|
||||
export {
|
||||
webhookTrigger,
|
||||
cronTrigger,
|
||||
eventTrigger,
|
||||
manualTrigger,
|
||||
apiTrigger
|
||||
} from '../utils/trigger-presets.js'
|
||||
src/fields/WorkflowBuilderField.tsx (new file, 113 lines)
@@ -0,0 +1,113 @@
'use client'

import React, { useCallback, useEffect, useState } from 'react'
import { useField, useFormFields } from '@payloadcms/ui'
import { WorkflowBuilder } from '../components/WorkflowBuilder/index.js'

// Import the step types from the steps module
import * as stepTasks from '../steps/index.js'

// Extract available step types from imported tasks
const getAvailableStepTypes = () => {
  const stepTypes: Array<{
    slug: string
    label?: string
    inputSchema?: any[]
    outputSchema?: any[]
  }> = []

  // Get all exported step tasks
  const tasks = [
    stepTasks.HttpRequestStepTask,
    stepTasks.CreateDocumentStepTask,
    stepTasks.ReadDocumentStepTask,
    stepTasks.UpdateDocumentStepTask,
    stepTasks.DeleteDocumentStepTask,
    stepTasks.SendEmailStepTask
  ]

  tasks.forEach(task => {
    if (task && task.slug) {
      stepTypes.push({
        slug: task.slug,
        label: undefined, // Tasks don't have labels, will use slug
        inputSchema: task.inputSchema,
        outputSchema: task.outputSchema
      })
    }
  })

  return stepTypes
}

interface WorkflowBuilderFieldProps {
  name?: string
  path?: string
}

export const WorkflowBuilderField: React.FC<WorkflowBuilderFieldProps> = ({
  name,
  path
}) => {
  const availableStepTypes = getAvailableStepTypes()
  const { value: steps, setValue: setSteps } = useField<any>({ path: 'steps' })
  const { value: layout, setValue: setLayout } = useField<any>({ path: 'layout' })
  const { value: workflowName } = useField<string>({ path: 'name' })

  const [workflowData, setWorkflowData] = useState<any>({
    id: 'temp',
    name: workflowName || 'Workflow',
    steps: steps || [],
    layout: layout || {}
  })

  // Update local state when form fields change
  useEffect(() => {
    setWorkflowData({
      id: 'temp',
      name: workflowName || 'Workflow',
      steps: steps || [],
      layout: layout || {}
    })
  }, [steps, layout, workflowName])

  const handleSave = useCallback((updatedWorkflow: any) => {
    // Update the form fields
    if (updatedWorkflow.steps) {
      setSteps(updatedWorkflow.steps)
    }
    if (updatedWorkflow.layout) {
      setLayout(updatedWorkflow.layout)
    }
  }, [setSteps, setLayout])

  return (
    <div style={{
      marginTop: '20px',
      marginBottom: '20px',
      border: '1px solid var(--theme-elevation-100)',
      borderRadius: '4px',
      overflow: 'hidden'
    }}>
      <div style={{
        background: 'var(--theme-elevation-50)',
        padding: '12px 16px',
        borderBottom: '1px solid var(--theme-elevation-100)'
      }}>
        <h3 style={{ margin: 0, fontSize: '16px', fontWeight: '600', color: 'var(--theme-text)' }}>
          Visual Workflow Builder
        </h3>
        <p style={{ margin: '4px 0 0', fontSize: '12px', color: 'var(--theme-text-400)' }}>
          Drag and drop steps to build your workflow visually. Click on any step to configure its parameters.
        </p>
      </div>

      <WorkflowBuilder
        workflow={workflowData}
        availableStepTypes={availableStepTypes}
        onSave={handleSave}
        readonly={false}
      />
    </div>
  )
}
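A field component like this is typically attached to the workflows collection through a UI field whose admin component points at the export. The export path string and field name below are assumptions about how the plugin exposes it, not something confirmed by this diff.

// Illustrative only: a UI field pointing at WorkflowBuilderField (paths assumed).
import type { Field } from 'payload'

const builderField: Field = {
  name: 'builder',
  type: 'ui',
  admin: {
    components: {
      Field: '@xtr-dev/payload-automation/client#WorkflowBuilderField',
    },
  },
}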
src/fields/parameter.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
import type {Field} from "payload"


export const parameter = (slug: string, field: {name: string} & Field): Field => ({
  ...field,
  name: 'parameter' + field.name.replace(/^\w/, c => c.toUpperCase()) + Math.random().toString().replace(/\D/g, ''),
  admin: {
    ...(field.admin as unknown || {}),
    condition: (_, siblingData, __) => {
      const previous = field.admin?.condition?.call(null, _, siblingData, __)
      return (previous === undefined || previous) && (siblingData?.type === slug)
    },
  },
  hooks: {
    afterRead: [
      ({ siblingData }) => {
        const parameters = siblingData?.parameters || {}
        return parameters[field.name]
      }
    ],
    beforeChange: [
      ({ siblingData, value }) => {
        if (!siblingData.parameters) {
          siblingData.parameters = {}
        }
        siblingData.parameters[field.name] = value
        return undefined // Virtual field, don't store directly
      }
    ]
  },
  virtual: true,
} as Field)
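A sketch of how this helper might be used when declaring a trigger's configuration fields: the value is kept under the shared parameters data while the field is only shown for rows whose type matches the given slug. The trigger slug, field, and import path below are illustrative assumptions.

// Illustrative only: a virtual, type-scoped parameter field for a custom trigger.
import { parameter } from './parameter.js'

const webhookSecretField = parameter('order-webhook', {
  name: 'secret',
  type: 'text',
  required: true,
})

// At runtime the value is read from and written to siblingData.parameters.secret,
// and the field only renders when the row's `type` equals 'order-webhook'.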
src/plugin/collection-hook.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
import {WorkflowExecutor} from "../core/workflow-executor.js"

export const createCollectionTriggerHook = (collectionSlug: string, hookType: string) => {
  return async (args: any) => {
    const req = 'req' in args ? args.req :
      'args' in args ? args.args.req :
        undefined
    if (!req) {
      throw new Error('No request object found in hook arguments')
    }
    const payload = req.payload
    const {docs: workflows} = await payload.find({
      collection: 'workflows',
      depth: 2,
      limit: 100,
      where: {
        'triggers.parameters.collectionSlug': {
          equals: collectionSlug
        },
        'triggers.parameters.hook': {
          equals: hookType
        },
        'triggers.type': {
          equals: 'collection-hook'
        }
      }
    })
    const executor = new WorkflowExecutor(payload, payload.logger)
    // invoke each workflow
    for (const workflow of workflows) {
      // Create execution context
      const context = {
        steps: {},
        trigger: {
          ...args,
          type: 'collection',
          collection: collectionSlug,
        }
      }

      // Check if any trigger has a condition and evaluate it
      let shouldExecute = false
      for (const trigger of workflow.triggers || []) {
        if (trigger.type === 'collection-hook' &&
          trigger.parameters?.collectionSlug === collectionSlug &&
          trigger.parameters?.hook === hookType) {

          if (trigger.condition) {
            // Evaluate the condition
            try {
              const conditionMet = executor.evaluateCondition(trigger.condition, context)
              if (conditionMet) {
                shouldExecute = true
                break
              }
            } catch (error) {
              payload.logger.error({
                workflowId: workflow.id,
                condition: trigger.condition,
                error: error instanceof Error ? error.message : 'Unknown error'
              }, 'Failed to evaluate trigger condition')
            }
          } else {
            // No condition means always execute
            shouldExecute = true
            break
          }
        }
      }

      if (!shouldExecute) {
        payload.logger.debug({
          workflowId: workflow.id,
          collection: collectionSlug,
          hookType
        }, 'Workflow skipped due to unmet condition')
        continue
      }

      try {
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        await executor.execute(workflow as any, context, req)
        payload.logger.info({
          workflowId: workflow.id,
          collection: collectionSlug,
          hookType
        }, 'Workflow executed successfully')
      } catch (error) {
        payload.logger.error({
          workflowId: workflow.id,
          collection: collectionSlug,
          hookType,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 'Workflow execution failed')
        // Don't throw to prevent breaking the original operation
      }
    }
  }
}
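For orientation, a sketch of how a factory like this could be attached to a collection's hooks when the plugin wires up collectionTriggers. Where exactly the plugin performs this wiring is not shown in this excerpt, so the helper and collection below are assumptions.

// Illustrative only: attaching the generated hook to a collection config.
import type { CollectionConfig } from 'payload'
import { createCollectionTriggerHook } from './collection-hook.js'

const withOrderTriggers = (orders: CollectionConfig): CollectionConfig => ({
  ...orders,
  hooks: {
    ...orders.hooks,
    afterChange: [
      ...(orders.hooks?.afterChange || []),
      createCollectionTriggerHook('orders', 'afterChange'),
    ],
  },
})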
@@ -1,25 +1,21 @@
import type {Field, TaskConfig} from "payload"
import type {CollectionConfig, GlobalConfig, TaskConfig} from "payload"

export type CollectionTriggerConfigCrud = {
  create?: true
  delete?: true
  read?: true
  update?: true
}
import type {Trigger} from "../triggers/types.js"

export type CollectionTriggerConfig = CollectionTriggerConfigCrud | true
export type TriggerConfig = (config: WorkflowsPluginConfig) => Trigger

export type CustomTriggerConfig = {
  inputs?: Field[]
  slug: string,
}

export type WorkflowsPluginConfig<TSlug extends string> = {
  collectionTriggers: {
    [key in TSlug]?: CollectionTriggerConfig
export type WorkflowsPluginConfig<TSlug extends string = string, TGlobal extends string = string> = {
  collectionTriggers?: {
    [key in TSlug]?: {
      [key in keyof CollectionConfig['hooks']]?: true
    } | true
  }
  globalTriggers?: {
    [key in TGlobal]?: {
      [key in keyof GlobalConfig['hooks']]?: true
    } | true
  }
  enabled?: boolean
  steps: TaskConfig<string>[],
  triggers?: CustomTriggerConfig[]
  webhookPrefix?: string
  steps: TaskConfig<string>[]
  triggers?: TriggerConfig[]
}
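An illustrative configuration object against the widened plugin config type above. The collection and global slugs are assumptions, and the import paths are guesses at file locations; only the step task names are taken from elsewhere in this changeset.

// Illustrative only: a config matching the new WorkflowsPluginConfig shape (paths assumed).
import type { WorkflowsPluginConfig } from './config-types.js'
import { HttpRequestStepTask, SendEmailStepTask } from '../steps/index.js'

const automationConfig: WorkflowsPluginConfig<'orders' | 'posts', 'settings'> = {
  collectionTriggers: {
    orders: { afterChange: true },
    posts: true,
  },
  globalTriggers: {
    settings: { afterChange: true },
  },
  steps: [HttpRequestStepTask, SendEmailStepTask],
}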
|
||||
|
||||
@@ -1,642 +0,0 @@
|
||||
import type {Config, Payload, TaskConfig} from 'payload'
|
||||
|
||||
import cron from 'node-cron'
|
||||
|
||||
import {type PayloadWorkflow, WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {getConfigLogger} from './logger.js'
|
||||
|
||||
/**
|
||||
* Generate dynamic cron tasks for all workflows with cron triggers
|
||||
* This is called at config time to register all scheduled tasks
|
||||
*/
|
||||
export function generateCronTasks(config: Config): void {
|
||||
const logger = getConfigLogger()
|
||||
|
||||
// Note: We can't query the database at config time, so we'll need a different approach
|
||||
// We'll create a single task that handles all cron-triggered workflows
|
||||
const cronTask: TaskConfig = {
|
||||
slug: 'workflow-cron-executor',
|
||||
handler: async ({ input, req }) => {
|
||||
const { cronExpression, timezone, workflowId } = input as {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
workflowId: string
|
||||
}
|
||||
|
||||
const logger = req.payload.logger.child({ plugin: '@xtr-dev/payload-automation' })
|
||||
|
||||
try {
|
||||
// Get the workflow
|
||||
const workflow = await req.payload.findByID({
|
||||
id: workflowId,
|
||||
collection: 'workflows',
|
||||
depth: 2,
|
||||
req
|
||||
})
|
||||
|
||||
if (!workflow) {
|
||||
throw new Error(`Workflow ${workflowId} not found`)
|
||||
}
|
||||
|
||||
// Create execution context for cron trigger
|
||||
const context = {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'cron',
|
||||
req,
|
||||
triggeredAt: new Date().toISOString()
|
||||
}
|
||||
}
|
||||
|
||||
// Create executor
|
||||
const executor = new WorkflowExecutor(req.payload, logger)
|
||||
|
||||
// Find the matching cron trigger and check its condition if present
|
||||
const triggers = workflow.triggers as Array<{
|
||||
condition?: string
|
||||
parameters?: {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
[key: string]: any
|
||||
}
|
||||
type: string
|
||||
}>
|
||||
|
||||
const matchingTrigger = triggers?.find(trigger =>
|
||||
trigger.type === 'cron-trigger' &&
|
||||
trigger.parameters?.cronExpression === cronExpression
|
||||
)
|
||||
|
||||
// Check trigger condition if present
|
||||
if (matchingTrigger?.condition) {
|
||||
const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context)
|
||||
|
||||
if (!conditionMet) {
|
||||
logger.info({
|
||||
condition: matchingTrigger.condition,
|
||||
cronExpression,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Cron trigger condition not met, skipping workflow execution')
|
||||
|
||||
// Re-queue for next execution but don't run workflow
|
||||
if (cronExpression) {
|
||||
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
|
||||
}
|
||||
|
||||
return {
|
||||
output: {
|
||||
executedAt: new Date().toISOString(),
|
||||
reason: 'Condition not met',
|
||||
status: 'skipped',
|
||||
workflowId
|
||||
},
|
||||
state: 'succeeded'
|
||||
}
|
||||
}
|
||||
|
||||
logger.info({
|
||||
condition: matchingTrigger.condition,
|
||||
cronExpression,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Cron trigger condition met')
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as PayloadWorkflow, context, req)
|
||||
|
||||
// Re-queue the job for the next scheduled execution if cronExpression is provided
|
||||
if (cronExpression) {
|
||||
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
|
||||
}
|
||||
|
||||
return {
|
||||
output: {
|
||||
executedAt: new Date().toISOString(),
|
||||
status: 'completed',
|
||||
workflowId
|
||||
},
|
||||
state: 'succeeded'
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Cron job execution failed')
|
||||
|
||||
// Re-queue even on failure to ensure continuity (unless it's a validation error)
|
||||
if (cronExpression && !(error instanceof Error && error.message.includes('Invalid cron'))) {
|
||||
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
|
||||
.catch((requeueError) => {
|
||||
logger.error({
|
||||
error: requeueError instanceof Error ? requeueError.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Failed to re-queue cron job after execution failure')
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
output: {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
},
|
||||
state: 'failed'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the cron task to config if not already present
|
||||
if (!config.jobs) {
|
||||
config.jobs = { tasks: [] }
|
||||
}
|
||||
|
||||
if (!config.jobs.tasks) {
|
||||
config.jobs.tasks = []
|
||||
}
|
||||
|
||||
if (!config.jobs.tasks.find(task => task.slug === cronTask.slug)) {
|
||||
logger.debug(`Registering cron executor task: ${cronTask.slug}`)
|
||||
config.jobs.tasks.push(cronTask)
|
||||
} else {
|
||||
logger.debug(`Cron executor task ${cronTask.slug} already registered, skipping`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register cron jobs for workflows with cron triggers
|
||||
* This is called at runtime after PayloadCMS is initialized
|
||||
*/
|
||||
export async function registerCronJobs(payload: Payload, logger: Payload['logger']): Promise<void> {
|
||||
try {
|
||||
// Find all workflows with cron triggers
|
||||
const workflows = await payload.find({
|
||||
collection: 'workflows',
|
||||
depth: 0,
|
||||
limit: 1000,
|
||||
where: {
|
||||
'triggers.type': {
|
||||
equals: 'cron-trigger'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(`Found ${workflows.docs.length} workflows with cron triggers`)
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
const triggers = workflow.triggers as Array<{
|
||||
parameters?: {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
[key: string]: any
|
||||
}
|
||||
type: string
|
||||
}>
|
||||
|
||||
// Find all cron triggers for this workflow
|
||||
const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || []
|
||||
|
||||
for (const trigger of cronTriggers) {
|
||||
if (trigger.parameters?.cronExpression) {
|
||||
try {
|
||||
// Validate cron expression before queueing
|
||||
if (!validateCronExpression(trigger.parameters.cronExpression)) {
|
||||
logger.error({
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid cron expression format')
|
||||
continue
|
||||
}
|
||||
|
||||
// Validate timezone if provided
|
||||
if (trigger.parameters?.timezone) {
|
||||
try {
|
||||
// Test if timezone is valid by trying to create a date with it
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.parameters.timezone })
|
||||
} catch {
|
||||
logger.error({
|
||||
timezone: trigger.parameters.timezone,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid timezone specified')
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate next execution time
|
||||
const nextExecution = getNextCronTime(trigger.parameters.cronExpression, trigger.parameters?.timezone)
|
||||
|
||||
// Queue the job
|
||||
await payload.jobs.queue({
|
||||
input: { cronExpression: trigger.parameters.cronExpression, timezone: trigger.parameters?.timezone, workflowId: workflow.id },
|
||||
task: 'workflow-cron-executor',
|
||||
waitUntil: nextExecution
|
||||
})
|
||||
|
||||
logger.info({
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
nextExecution: nextExecution.toISOString(),
|
||||
timezone: trigger.parameters?.timezone || 'UTC',
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Queued initial cron job for workflow')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
timezone: trigger.parameters?.timezone,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Failed to queue cron job')
|
||||
}
|
||||
} else {
|
||||
logger.warn({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Cron trigger found but no cron expression specified')
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 'Failed to register cron jobs')
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Validate a cron expression
 */
export function validateCronExpression(cronExpression: string): boolean {
  return cron.validate(cronExpression)
}
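// Minimal usage sketch for the validation above plus the same Intl.DateTimeFormat
// timezone check that registerCronJobs applies; the expression and zone name here
// are illustrative values, not taken from a real workflow document.
const exampleExpression = '*/15 * * * *'
if (!validateCronExpression(exampleExpression)) {
  throw new Error(`Invalid cron expression: ${exampleExpression}`)
}
try {
  // Intl throws a RangeError for unknown IANA zone names
  new Intl.DateTimeFormat('en', { timeZone: 'Europe/Amsterdam' })
} catch {
  throw new Error('Invalid timezone')
}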
|
||||
|
||||
/**
|
||||
* Calculate the next time a cron expression should run
|
||||
*/
|
||||
function getNextCronTime(cronExpression: string, timezone?: string): Date {
|
||||
if (!validateCronExpression(cronExpression)) {
|
||||
throw new Error(`Invalid cron expression: ${cronExpression}`)
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const options: { timezone?: string } = timezone ? { timezone } : {}
|
||||
|
||||
// Create a task to find the next execution time
|
||||
const task = cron.schedule(cronExpression, () => {}, {
|
||||
...options
|
||||
})
|
||||
|
||||
// Parse cron expression parts
|
||||
const cronParts = cronExpression.trim().split(/\s+/)
|
||||
if (cronParts.length !== 5) {
|
||||
void task.destroy()
|
||||
throw new Error(`Invalid cron format: ${cronExpression}. Expected 5 parts.`)
|
||||
}
|
||||
|
||||
const [minutePart, hourPart, dayPart, monthPart, weekdayPart] = cronParts
|
||||
|
||||
// Calculate next execution with proper lookahead for any schedule frequency
|
||||
// Start from next minute and look ahead systematically
|
||||
let testTime = new Date(now.getTime() + 60 * 1000) // Start 1 minute from now
|
||||
testTime.setSeconds(0, 0) // Reset seconds and milliseconds
|
||||
|
||||
// Maximum iterations to prevent infinite loops (covers ~2 years)
|
||||
const maxIterations = 2 * 365 * 24 * 60 // 2 years worth of minutes
|
||||
let iterations = 0
|
||||
|
||||
while (iterations < maxIterations) {
|
||||
const minute = testTime.getMinutes()
|
||||
const hour = testTime.getHours()
|
||||
const dayOfMonth = testTime.getDate()
|
||||
const month = testTime.getMonth() + 1
|
||||
const dayOfWeek = testTime.getDay()
|
||||
|
||||
if (matchesCronPart(minute, minutePart) &&
|
||||
matchesCronPart(hour, hourPart) &&
|
||||
matchesCronPart(dayOfMonth, dayPart) &&
|
||||
matchesCronPart(month, monthPart) &&
|
||||
matchesCronPart(dayOfWeek, weekdayPart)) {
|
||||
void task.destroy()
|
||||
return testTime
|
||||
}
|
||||
|
||||
// Increment time intelligently based on cron pattern
|
||||
testTime = incrementTimeForCronPattern(testTime, cronParts)
|
||||
iterations++
|
||||
}
|
||||
|
||||
void task.destroy()
|
||||
throw new Error(`Could not calculate next execution time for cron expression: ${cronExpression} within reasonable timeframe`)
|
||||
}
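// Illustrative only — getNextCronTime is module-private, so a call like this would
// live inside cron-scheduler.ts. "0 9 * * 1" means 09:00 on Mondays; evaluated on a
// Friday, the scan walks forward hour by hour (minute fixed at 0) until it reaches the
// next Monday at 09:00. Note the lookahead reads local Date fields, so the result is
// expressed in the server's local time even when a timezone argument is supplied.
const nextMondayRun = getNextCronTime('0 9 * * 1', 'UTC')
console.log(nextMondayRun.toISOString())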
|
||||
|
||||
/**
|
||||
* Intelligently increment time based on cron pattern to avoid unnecessary iterations
|
||||
*/
|
||||
function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Date {
|
||||
const [minutePart, hourPart, _dayPart, _monthPart, _weekdayPart] = cronParts
|
||||
const nextTime = new Date(currentTime)
|
||||
|
||||
// If minute is specific (not wildcard), we can jump to next hour
|
||||
if (minutePart !== '*' && !minutePart.includes('/')) {
|
||||
const targetMinute = getNextValidCronValue(currentTime.getMinutes(), minutePart)
|
||||
if (targetMinute <= currentTime.getMinutes()) {
|
||||
// Move to next hour
|
||||
nextTime.setHours(nextTime.getHours() + 1, targetMinute, 0, 0)
|
||||
} else {
|
||||
nextTime.setMinutes(targetMinute, 0, 0)
|
||||
}
|
||||
return nextTime
|
||||
}
|
||||
|
||||
// If hour is specific and we're past it, jump to next day
|
||||
if (hourPart !== '*' && !hourPart.includes('/')) {
|
||||
const targetHour = getNextValidCronValue(currentTime.getHours(), hourPart)
|
||||
if (targetHour <= currentTime.getHours()) {
|
||||
// Move to next day
|
||||
nextTime.setDate(nextTime.getDate() + 1)
|
||||
nextTime.setHours(targetHour, 0, 0, 0)
|
||||
} else {
|
||||
nextTime.setHours(targetHour, 0, 0, 0)
|
||||
}
|
||||
return nextTime
|
||||
}
|
||||
|
||||
// Default: increment by 1 minute
|
||||
nextTime.setTime(nextTime.getTime() + 60 * 1000)
|
||||
return nextTime
|
||||
}
|
||||
|
||||
/**
 * Get the next valid value for a cron part
 */
function getNextValidCronValue(currentValue: number, cronPart: string): number {
  if (cronPart === '*') {return currentValue + 1}

  // Handle specific values and ranges
  const values = parseCronPart(cronPart)
  return values.find(v => v > currentValue) || values[0]
}
|
||||
|
||||
/**
 * Parse a cron part into an array of valid values
 */
function parseCronPart(cronPart: string): number[] {
  if (cronPart === '*') {return []}

  const values: number[] = []

  // Handle comma-separated values
  if (cronPart.includes(',')) {
    cronPart.split(',').forEach(part => {
      values.push(...parseCronPart(part.trim()))
    })
    return values.sort((a, b) => a - b)
  }

  // Handle ranges
  if (cronPart.includes('-')) {
    const [start, end] = cronPart.split('-').map(n => parseInt(n, 10))
    for (let i = start; i <= end; i++) {
      values.push(i)
    }
    return values
  }

  // Handle step values
  if (cronPart.includes('/')) {
    const [range, step] = cronPart.split('/')
    const stepNum = parseInt(step, 10)

    if (range === '*') {
      // For wildcards with steps, return empty - handled elsewhere
      return []
    }

    const baseValues = parseCronPart(range)
    return baseValues.filter((_, index) => index % stepNum === 0)
  }

  // Single value
  values.push(parseInt(cronPart, 10))
  return values
}
|
||||
|
||||
/**
 * Check if a value matches a cron expression part
 */
function matchesCronPart(value: number, cronPart: string): boolean {
  if (cronPart === '*') {return true}

  // Handle step values (e.g., */5)
  if (cronPart.includes('/')) {
    const [range, step] = cronPart.split('/')
    const stepNum = parseInt(step, 10)

    if (range === '*') {
      return value % stepNum === 0
    }
  }

  // Handle ranges (e.g., 1-5)
  if (cronPart.includes('-')) {
    const [start, end] = cronPart.split('-').map(n => parseInt(n, 10))
    return value >= start && value <= end
  }

  // Handle comma-separated values (e.g., 1,3,5)
  if (cronPart.includes(',')) {
    const values = cronPart.split(',').map(n => parseInt(n, 10))
    return values.includes(value)
  }

  // Handle single value
  const cronValue = parseInt(cronPart, 10)
  return value === cronValue
}
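// Behaviour sketch for the two field helpers above, in the repo's vitest style.
// These exact assertions are illustrative — both helpers are module-private, so a
// real test would need them exported or exercised indirectly via getNextCronTime.
// expect(matchesCronPart(10, '*')).toBe(true)
// expect(matchesCronPart(10, '*/5')).toBe(true)   // 10 % 5 === 0
// expect(matchesCronPart(7, '1-5')).toBe(false)
// expect(matchesCronPart(3, '1,3,5')).toBe(true)
// expect(parseCronPart('1-5')).toEqual([1, 2, 3, 4, 5])
// expect(parseCronPart('1,3,5')).toEqual([1, 3, 5])
// expect(parseCronPart('*')).toEqual([])           // wildcards are handled by matchesCronPart instead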
|
||||
|
||||
/**
 * Handle re-queueing of cron jobs after they execute
 * This ensures the job runs again at the next scheduled time
 */
export async function requeueCronJob(
  workflowId: string,
  cronExpression: string,
  timezone: string | undefined,
  payload: Payload,
  logger: Payload['logger']
): Promise<void> {
  try {
    // Queue the job to run at the next scheduled time
    await payload.jobs.queue({
      input: { cronExpression, timezone, workflowId },
      task: 'workflow-cron-executor',
      waitUntil: getNextCronTime(cronExpression, timezone)
    })

    logger.debug({
      nextRun: getNextCronTime(cronExpression, timezone),
      timezone: timezone || 'UTC',
      workflowId
    }, 'Re-queued cron job')
  } catch (error) {
    logger.error({
      error: error instanceof Error ? error.message : 'Unknown error',
      workflowId
    }, 'Failed to re-queue cron job')
  }
}
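// Illustrative sketch: seeding the first run for a single workflow. After that first
// execution, requeueCronJob keeps the chain alive from inside the task handler.
// The workflow id and expression below are made-up example values.
async function seedCronJob(payload: Payload): Promise<void> {
  const cronExpression = '0 * * * *' // top of every hour
  await payload.jobs.queue({
    input: { cronExpression, timezone: 'UTC', workflowId: 'example-workflow-id' },
    task: 'workflow-cron-executor',
    waitUntil: getNextCronTime(cronExpression, 'UTC')
  })
}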
|
||||
|
||||
/**
|
||||
* Register or update cron jobs for a specific workflow
|
||||
*/
|
||||
export async function updateWorkflowCronJobs(
|
||||
workflowId: string,
|
||||
payload: Payload,
|
||||
logger: Payload['logger']
|
||||
): Promise<void> {
|
||||
try {
|
||||
// First, cancel any existing cron jobs for this workflow
|
||||
cancelWorkflowCronJobs(workflowId, payload, logger)
|
||||
|
||||
// Get the workflow
|
||||
const workflow = await payload.findByID({
|
||||
id: workflowId,
|
||||
collection: 'workflows',
|
||||
depth: 0
|
||||
})
|
||||
|
||||
if (!workflow) {
|
||||
logger.warn({ workflowId }, 'Workflow not found for cron job update')
|
||||
return
|
||||
}
|
||||
|
||||
const triggers = workflow.triggers as Array<{
|
||||
parameters?: {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
[key: string]: any
|
||||
}
|
||||
type: string
|
||||
}>
|
||||
|
||||
// Find all cron triggers for this workflow
|
||||
const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || []
|
||||
|
||||
if (cronTriggers.length === 0) {
|
||||
logger.debug({ workflowId }, 'No cron triggers found for workflow')
|
||||
return
|
||||
}
|
||||
|
||||
let scheduledJobs = 0
|
||||
|
||||
for (const trigger of cronTriggers) {
|
||||
if (trigger.parameters?.cronExpression) {
|
||||
try {
|
||||
// Validate cron expression before queueing
|
||||
if (!validateCronExpression(trigger.parameters.cronExpression)) {
|
||||
logger.error({
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid cron expression format')
|
||||
continue
|
||||
}
|
||||
|
||||
// Validate timezone if provided
|
||||
if (trigger.parameters?.timezone) {
|
||||
try {
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.parameters.timezone })
|
||||
} catch {
|
||||
logger.error({
|
||||
timezone: trigger.parameters.timezone,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid timezone specified')
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate next execution time
|
||||
const nextExecution = getNextCronTime(trigger.parameters.cronExpression, trigger.parameters?.timezone)
|
||||
|
||||
// Queue the job
|
||||
await payload.jobs.queue({
|
||||
input: { cronExpression: trigger.parameters.cronExpression, timezone: trigger.parameters?.timezone, workflowId },
|
||||
task: 'workflow-cron-executor',
|
||||
waitUntil: nextExecution
|
||||
})
|
||||
|
||||
scheduledJobs++
|
||||
|
||||
logger.info({
|
||||
cronExpression: trigger.parameters.cronExpression,
|
||||
nextExecution: nextExecution.toISOString(),
|
||||
timezone: trigger.parameters?.timezone || 'UTC',
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Scheduled cron job for workflow')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
cronExpression: trigger.parameters?.cronExpression,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
timezone: trigger.parameters?.timezone,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Failed to schedule cron job')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (scheduledJobs > 0) {
|
||||
logger.info({ scheduledJobs, workflowId }, 'Updated cron jobs for workflow')
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Failed to update workflow cron jobs')
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Cancel all cron jobs for a specific workflow
 */
export function cancelWorkflowCronJobs(
  workflowId: string,
  payload: Payload,
  logger: Payload['logger']
): void {
  try {
    // Note: PayloadCMS job system doesn't have a built-in way to cancel specific jobs by input
    // This is a limitation we need to work around
    // For now, we log that we would cancel jobs for this workflow
    logger.debug({ workflowId }, 'Would cancel existing cron jobs for workflow (PayloadCMS limitation: cannot selectively cancel jobs)')
  } catch (error) {
    logger.error({
      error: error instanceof Error ? error.message : 'Unknown error',
      workflowId
    }, 'Failed to cancel workflow cron jobs')
  }
}
|
||||
|
||||
/**
 * Remove cron jobs for a deleted workflow
 */
export function removeWorkflowCronJobs(
  workflowId: string,
  payload: Payload,
  logger: Payload['logger']
): void {
  try {
    cancelWorkflowCronJobs(workflowId, payload, logger)
    logger.info({ workflowId }, 'Removed cron jobs for deleted workflow')
  } catch (error) {
    logger.error({
      error: error instanceof Error ? error.message : 'Unknown error',
      workflowId
    }, 'Failed to remove workflow cron jobs')
  }
}
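// Hedged sketch of a possible workaround for the cancellation limitation noted above.
// It assumes queued jobs land in Payload's default 'payload-jobs' collection, keep their
// original taskSlug and input, and that the database adapter can filter on the JSON input
// field — none of which is verified here, so treat this as a starting point only.
async function deleteQueuedCronJobs(workflowId: string, payload: Payload): Promise<void> {
  await payload.delete({
    collection: 'payload-jobs',
    where: {
      and: [
        { taskSlug: { equals: 'workflow-cron-executor' } },
        { 'input.workflowId': { equals: workflowId } },
        { completedAt: { exists: false } }
      ]
    }
  })
}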
|
||||
95
src/plugin/global-hook.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import {WorkflowExecutor} from '../core/workflow-executor.js'

export const createGlobalTriggerHook = (globalSlug: string, hookType: string) => {
  return async function payloadGlobalAutomationHook(args: any) {
    const req = 'req' in args ? args.req :
      'args' in args ? args.args.req :
        undefined
    if (!req) {
      throw new Error('No request object found in global hook arguments')
    }

    const payload = req.payload
    const logger = payload.logger

    try {
      logger.info({
        global: globalSlug,
        hookType,
        operation: hookType
      }, 'Global automation hook triggered')

      // Create executor on-demand
      const executor = new WorkflowExecutor(payload, logger)

      logger.debug('Executing triggered global workflows...')

      // Find workflows with matching global triggers
      const {docs: workflows} = await payload.find({
        collection: 'workflows',
        depth: 2,
        limit: 100,
        where: {
          'triggers.parameters.global': {
            equals: globalSlug
          },
          'triggers.parameters.operation': {
            equals: hookType
          },
          'triggers.type': {
            equals: 'global-hook'
          }
        }
      })

      // Execute each matching workflow
      for (const workflow of workflows) {
        // Create execution context
        const context = {
          steps: {},
          trigger: {
            ...args,
            type: 'global',
            global: globalSlug,
            operation: hookType,
            req
          }
        }

        try {
          await executor.execute(workflow, context, req)
          logger.info({
            workflowId: workflow.id,
            global: globalSlug,
            hookType
          }, 'Global workflow executed successfully')
        } catch (error) {
          logger.error({
            workflowId: workflow.id,
            global: globalSlug,
            hookType,
            error: error instanceof Error ? error.message : 'Unknown error'
          }, 'Global workflow execution failed')
          // Don't throw to prevent breaking the original operation
        }
      }

      logger.info({
        global: globalSlug,
        hookType
      }, 'Global workflow execution completed successfully')

    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error'

      logger.error({
        global: globalSlug,
        hookType,
        error: errorMessage,
        errorStack: error instanceof Error ? error.stack : undefined
      }, 'Global hook execution failed')

      // Don't throw to prevent breaking the original operation
    }
  }
}
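// For reference, a workflow document shaped roughly like the one below would be matched
// by the payload.find() filter in createGlobalTriggerHook. The global slug, workflow name
// and step contents are illustrative; only the trigger fields mirror the query above.
const exampleGlobalWorkflow = {
  name: 'Sync site settings',
  triggers: [
    {
      type: 'global-hook',
      parameters: {
        global: 'site-settings', // must equal the globalSlug the hook was created for
        operation: 'afterChange' // must equal the hookType (the Payload global hook name)
      }
    }
  ],
  steps: [/* workflow steps omitted */]
}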
|
||||
@@ -1,122 +1,16 @@
|
||||
import type {Config} from 'payload'
|
||||
import type {CollectionConfig, Config} from 'payload'
|
||||
|
||||
import type {CollectionTriggerConfigCrud, WorkflowsPluginConfig} from "./config-types.js"
|
||||
import type {WorkflowsPluginConfig} from "./config-types.js"
|
||||
|
||||
import {createWorkflowCollection} from '../collections/Workflow.js'
|
||||
import {WorkflowRunsCollection} from '../collections/WorkflowRuns.js'
|
||||
import {WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {generateCronTasks, registerCronJobs} from './cron-scheduler.js'
|
||||
import {initCollectionHooks} from "./init-collection-hooks.js"
|
||||
import {initGlobalHooks} from "./init-global-hooks.js"
|
||||
import {initStepTasks} from "./init-step-tasks.js"
|
||||
import {initWebhookEndpoint} from "./init-webhook.js"
|
||||
import {initWorkflowHooks} from './init-workflow-hooks.js'
|
||||
import {getConfigLogger, initializeLogger} from './logger.js'
|
||||
import {createCollectionTriggerHook} from "./collection-hook.js"
|
||||
import {createGlobalTriggerHook} from "./global-hook.js"
|
||||
|
||||
export {getLogger} from './logger.js'
|
||||
|
||||
// Improved executor registry with proper error handling and logging
|
||||
interface ExecutorRegistry {
|
||||
executor: null | WorkflowExecutor
|
||||
isInitialized: boolean
|
||||
logger: any | null
|
||||
}
|
||||
|
||||
const executorRegistry: ExecutorRegistry = {
|
||||
executor: null,
|
||||
isInitialized: false,
|
||||
logger: null
|
||||
}
|
||||
|
||||
const setWorkflowExecutor = (executor: WorkflowExecutor, logger: any) => {
|
||||
executorRegistry.executor = executor
|
||||
executorRegistry.logger = logger
|
||||
executorRegistry.isInitialized = true
|
||||
|
||||
logger.info('Workflow executor initialized and registered successfully')
|
||||
}
|
||||
|
||||
const getExecutorRegistry = (): ExecutorRegistry => {
|
||||
return executorRegistry
|
||||
}
|
||||
|
||||
// Helper function to create failed workflow runs for tracking errors
|
||||
const createFailedWorkflowRun = async (args: any, errorMessage: string, logger: any) => {
|
||||
try {
|
||||
// Only create failed workflow runs if we have enough context
|
||||
if (!args?.req?.payload || !args?.collection?.slug) {
|
||||
return
|
||||
}
|
||||
|
||||
// Find workflows that should have been triggered
|
||||
const workflows = await args.req.payload.find({
|
||||
collection: 'workflows',
|
||||
limit: 10,
|
||||
req: args.req,
|
||||
where: {
|
||||
'triggers.collectionSlug': {
|
||||
equals: args.collection.slug
|
||||
},
|
||||
'triggers.operation': {
|
||||
equals: args.operation
|
||||
},
|
||||
'triggers.type': {
|
||||
equals: 'collection-trigger'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Create failed workflow runs for each matching workflow
|
||||
for (const workflow of workflows.docs) {
|
||||
await args.req.payload.create({
|
||||
collection: 'workflow-runs',
|
||||
data: {
|
||||
completedAt: new Date().toISOString(),
|
||||
context: {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'collection',
|
||||
collection: args.collection.slug,
|
||||
doc: args.doc,
|
||||
operation: args.operation,
|
||||
previousDoc: args.previousDoc,
|
||||
triggeredAt: new Date().toISOString()
|
||||
}
|
||||
},
|
||||
error: `Hook execution failed: ${errorMessage}`,
|
||||
inputs: {},
|
||||
logs: [{
|
||||
level: 'error',
|
||||
message: `Hook execution failed: ${errorMessage}`,
|
||||
timestamp: new Date().toISOString()
|
||||
}],
|
||||
outputs: {},
|
||||
startedAt: new Date().toISOString(),
|
||||
status: 'failed',
|
||||
steps: [],
|
||||
triggeredBy: args?.req?.user?.email || 'system',
|
||||
workflow: workflow.id,
|
||||
workflowVersion: 1
|
||||
},
|
||||
req: args.req
|
||||
})
|
||||
}
|
||||
|
||||
if (workflows.docs.length > 0) {
|
||||
logger.info({
|
||||
errorMessage,
|
||||
workflowCount: workflows.docs.length
|
||||
}, 'Created failed workflow runs for hook execution error')
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
// Don't let workflow run creation failures break the original operation
|
||||
logger.warn({
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 'Failed to create failed workflow run record')
|
||||
}
|
||||
}
|
||||
|
||||
const applyCollectionsConfig = <T extends string>(pluginOptions: WorkflowsPluginConfig<T>, config: Config) => {
|
||||
// Add workflow collections
|
||||
if (!config.collections) {
|
||||
@@ -129,8 +23,16 @@ const applyCollectionsConfig = <T extends string>(pluginOptions: WorkflowsPlugin
|
||||
)
|
||||
}
|
||||
|
||||
// Removed config-phase hook registration - user collections don't exist during config phase
|
||||
type AnyHook =
|
||||
CollectionConfig['hooks'] extends infer H
|
||||
? H extends Record<string, unknown>
|
||||
? NonNullable<H[keyof H]> extends (infer U)[]
|
||||
? U
|
||||
: never
|
||||
: never
|
||||
: never;
|
||||
|
||||
type HookArgs = Parameters<AnyHook>[0]
|
||||
|
||||
export const workflowsPlugin =
|
||||
<TSlug extends string>(pluginOptions: WorkflowsPluginConfig<TSlug>) =>
|
||||
@@ -145,107 +47,130 @@ export const workflowsPlugin =
|
||||
// CRITICAL: Modify existing collection configs BEFORE PayloadCMS processes them
|
||||
// This is the ONLY time we can add hooks that will actually work
|
||||
const logger = getConfigLogger()
|
||||
logger.info('Attempting to modify collection configs before PayloadCMS initialization...')
|
||||
|
||||
if (config.collections && pluginOptions.collectionTriggers) {
|
||||
for (const [triggerSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
|
||||
if (!triggerConfig) {continue}
|
||||
for (const [collectionSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
|
||||
if (!triggerConfig) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Find the collection config that matches
|
||||
const collectionIndex = config.collections.findIndex(c => c.slug === triggerSlug)
|
||||
const collectionIndex = config.collections.findIndex(c => c.slug === collectionSlug)
|
||||
if (collectionIndex === -1) {
|
||||
logger.warn(`Collection '${triggerSlug}' not found in config.collections`)
|
||||
logger.warn(`Collection '${collectionSlug}' not found in config.collections`)
|
||||
continue
|
||||
}
|
||||
|
||||
const collection = config.collections[collectionIndex]
|
||||
logger.info(`Found collection '${triggerSlug}' - modifying its hooks...`)
|
||||
|
||||
// Initialize hooks if needed
|
||||
if (!collection.hooks) {
|
||||
collection.hooks = {}
|
||||
}
|
||||
if (!collection.hooks.afterChange) {
|
||||
collection.hooks.afterChange = []
|
||||
|
||||
// Determine which hooks to register based on config
|
||||
const hooksToRegister = triggerConfig === true
|
||||
? {
|
||||
afterChange: true,
|
||||
afterDelete: true,
|
||||
afterRead: true,
|
||||
}
|
||||
: triggerConfig
|
||||
|
||||
// Register each configured hook
|
||||
Object.entries(hooksToRegister).forEach(([hookName, enabled]) => {
|
||||
if (!enabled) {
|
||||
return
|
||||
}
|
||||
|
||||
const hookKey = hookName as keyof typeof collection.hooks
|
||||
|
||||
// Initialize the hook array if needed
|
||||
if (!collection.hooks![hookKey]) {
|
||||
collection.hooks![hookKey] = []
|
||||
}
|
||||
|
||||
// Create the automation hook for this specific collection and hook type
|
||||
const automationHook = createCollectionTriggerHook(collectionSlug, hookKey)
|
||||
|
||||
// Mark it for debugging
|
||||
Object.defineProperty(automationHook, '__isAutomationHook', {
|
||||
value: true,
|
||||
enumerable: false
|
||||
})
|
||||
Object.defineProperty(automationHook, '__hookType', {
|
||||
value: hookKey,
|
||||
enumerable: false
|
||||
})
|
||||
|
||||
// Add the hook to the collection
|
||||
;(collection.hooks![hookKey] as Array<unknown>).push(automationHook)
|
||||
|
||||
logger.debug(`Registered ${hookKey} hook for collection '${collectionSlug}'`)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Handle global triggers similarly to collection triggers
|
||||
if (config.globals && pluginOptions.globalTriggers) {
|
||||
for (const [globalSlug, triggerConfig] of Object.entries(pluginOptions.globalTriggers)) {
|
||||
if (!triggerConfig) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Create a reliable hook function with proper dependency injection
|
||||
const automationHook = Object.assign(
|
||||
async function payloadAutomationHook(args: any) {
|
||||
const registry = getExecutorRegistry()
|
||||
// Find the global config that matches
|
||||
const globalIndex = config.globals.findIndex(g => g.slug === globalSlug)
|
||||
if (globalIndex === -1) {
|
||||
logger.warn(`Global '${globalSlug}' not found in config.globals`)
|
||||
continue
|
||||
}
|
||||
|
||||
// Use proper logger if available, fallback to args.req.payload.logger
|
||||
const logger = registry.logger || args?.req?.payload?.logger || console
|
||||
const global = config.globals[globalIndex]
|
||||
|
||||
try {
|
||||
logger.info({
|
||||
collection: args?.collection?.slug,
|
||||
docId: args?.doc?.id,
|
||||
hookType: 'automation',
|
||||
operation: args?.operation
|
||||
}, 'Collection automation hook triggered')
|
||||
// Initialize hooks if needed
|
||||
if (!global.hooks) {
|
||||
global.hooks = {}
|
||||
}
|
||||
|
||||
if (!registry.isInitialized) {
|
||||
logger.warn('Workflow executor not yet initialized, skipping execution')
|
||||
return undefined
|
||||
}
|
||||
|
||||
if (!registry.executor) {
|
||||
logger.error('Workflow executor is null despite being marked as initialized')
|
||||
// Create a failed workflow run to track this issue
|
||||
await createFailedWorkflowRun(args, 'Executor not available', logger)
|
||||
return undefined
|
||||
}
|
||||
|
||||
logger.debug('Executing triggered workflows...')
|
||||
await registry.executor.executeTriggeredWorkflows(
|
||||
args.collection.slug,
|
||||
args.operation,
|
||||
args.doc,
|
||||
args.previousDoc,
|
||||
args.req
|
||||
)
|
||||
|
||||
logger.info({
|
||||
collection: args?.collection?.slug,
|
||||
docId: args?.doc?.id,
|
||||
operation: args?.operation
|
||||
}, 'Workflow execution completed successfully')
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
|
||||
logger.error({
|
||||
collection: args?.collection?.slug,
|
||||
docId: args?.doc?.id,
|
||||
error: errorMessage,
|
||||
errorStack: error instanceof Error ? error.stack : undefined,
|
||||
operation: args?.operation
|
||||
}, 'Hook execution failed')
|
||||
|
||||
// Create a failed workflow run to track this error
|
||||
try {
|
||||
await createFailedWorkflowRun(args, errorMessage, logger)
|
||||
} catch (createError) {
|
||||
logger.error({
|
||||
error: createError instanceof Error ? createError.message : 'Unknown error'
|
||||
}, 'Failed to create workflow run for hook error')
|
||||
}
|
||||
|
||||
// Don't throw to prevent breaking the original operation
|
||||
// Determine which hooks to register based on config
|
||||
const hooksToRegister = triggerConfig === true
|
||||
? {
|
||||
afterChange: true,
|
||||
afterRead: true,
|
||||
}
|
||||
: triggerConfig
|
||||
|
||||
return undefined
|
||||
},
|
||||
{
|
||||
__isAutomationHook: true,
|
||||
__version: '0.0.22'
|
||||
// Register each configured hook
|
||||
Object.entries(hooksToRegister).forEach(([hookName, enabled]) => {
|
||||
if (!enabled) {
|
||||
return
|
||||
}
|
||||
)
|
||||
|
||||
// Add the hook to the collection config
|
||||
collection.hooks.afterChange.push(automationHook)
|
||||
logger.info(`Added automation hook to '${triggerSlug}' - hook count: ${collection.hooks.afterChange.length}`)
|
||||
const hookKey = hookName as keyof typeof global.hooks
|
||||
|
||||
// Initialize the hook array if needed
|
||||
if (!global.hooks![hookKey]) {
|
||||
global.hooks![hookKey] = []
|
||||
}
|
||||
|
||||
// Create the automation hook for this specific global and hook type
|
||||
const automationHook = createGlobalTriggerHook(globalSlug, hookKey)
|
||||
|
||||
// Mark it for debugging
|
||||
Object.defineProperty(automationHook, '__isAutomationHook', {
|
||||
value: true,
|
||||
enumerable: false
|
||||
})
|
||||
Object.defineProperty(automationHook, '__hookType', {
|
||||
value: hookKey,
|
||||
enumerable: false
|
||||
})
|
||||
|
||||
// Add the hook to the global
|
||||
;(global.hooks![hookKey] as Array<unknown>).push(automationHook)
|
||||
|
||||
logger.debug(`Registered ${hookKey} hook for global '${globalSlug}'`)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -253,32 +178,18 @@ export const workflowsPlugin =
|
||||
config.jobs = {tasks: []}
|
||||
}
|
||||
|
||||
const configLogger = getConfigLogger()
|
||||
configLogger.info(`Configuring workflow plugin with ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers`)
|
||||
|
||||
// Generate cron tasks for workflows with cron triggers
|
||||
generateCronTasks(config)
|
||||
|
||||
for (const step of pluginOptions.steps) {
|
||||
if (!config.jobs?.tasks?.find(task => task.slug === step.slug)) {
|
||||
configLogger.debug(`Registering task: ${step.slug}`)
|
||||
config.jobs?.tasks?.push(step)
|
||||
} else {
|
||||
configLogger.debug(`Task ${step.slug} already registered, skipping`)
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize webhook endpoint
|
||||
initWebhookEndpoint(config, pluginOptions.webhookPrefix || 'webhook')
|
||||
|
||||
// Set up onInit to register collection hooks and initialize features
|
||||
// Set up onInit to initialize features
|
||||
const incomingOnInit = config.onInit
|
||||
config.onInit = async (payload) => {
|
||||
configLogger.info(`onInit called - collections: ${Object.keys(payload.collections).length}`)
|
||||
|
||||
// Execute any existing onInit functions first
|
||||
if (incomingOnInit) {
|
||||
configLogger.debug('Executing existing onInit function')
|
||||
await incomingOnInit(payload)
|
||||
}
|
||||
|
||||
@@ -286,33 +197,8 @@ export const workflowsPlugin =
|
||||
const logger = initializeLogger(payload)
|
||||
logger.info('Logger initialized with payload instance')
|
||||
|
||||
// Log collection trigger configuration
|
||||
logger.info(`Plugin configuration: ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers, ${pluginOptions.steps?.length || 0} steps`)
|
||||
|
||||
// Create workflow executor instance
|
||||
console.log('🚨 CREATING WORKFLOW EXECUTOR INSTANCE')
|
||||
const executor = new WorkflowExecutor(payload, logger)
|
||||
console.log('🚨 EXECUTOR CREATED:', typeof executor)
|
||||
console.log('🚨 EXECUTOR METHODS:', Object.getOwnPropertyNames(Object.getPrototypeOf(executor)))
|
||||
|
||||
// Register executor with proper dependency injection
|
||||
setWorkflowExecutor(executor, logger)
|
||||
|
||||
// Hooks are now registered during config phase - just log status
|
||||
logger.info('Hooks were registered during config phase - executor now available')
|
||||
|
||||
logger.info('Initializing global hooks...')
|
||||
initGlobalHooks(payload, logger, executor)
|
||||
|
||||
logger.info('Initializing workflow hooks...')
|
||||
initWorkflowHooks(payload, logger)
|
||||
|
||||
logger.info('Initializing step tasks...')
|
||||
initStepTasks(pluginOptions, payload, logger)
|
||||
|
||||
// Register cron jobs for workflows with cron triggers
|
||||
logger.info('Registering cron jobs...')
|
||||
await registerCronJobs(payload, logger)
|
||||
// Log trigger configuration
|
||||
logger.info(`Plugin configuration: ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers, ${Object.keys(pluginOptions.globalTriggers || {}).length} global triggers, ${pluginOptions.steps?.length || 0} steps`)
|
||||
|
||||
logger.info('Plugin initialized successfully - all hooks registered')
|
||||
}
|
||||
|
||||
@@ -1,148 +0,0 @@
|
||||
import type {Payload} from "payload"
|
||||
import type {Logger} from "pino"
|
||||
|
||||
import type { WorkflowExecutor } from "../core/workflow-executor.js"
|
||||
import type {CollectionTriggerConfigCrud, WorkflowsPluginConfig} from "./config-types.js"
|
||||
|
||||
export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPluginConfig<T>, payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) {
|
||||
|
||||
if (!pluginOptions.collectionTriggers || Object.keys(pluginOptions.collectionTriggers).length === 0) {
|
||||
logger.warn('No collection triggers configured in plugin options')
|
||||
return
|
||||
}
|
||||
|
||||
logger.info({
|
||||
configuredCollections: Object.keys(pluginOptions.collectionTriggers),
|
||||
availableCollections: Object.keys(payload.collections)
|
||||
}, 'Starting collection hook registration')
|
||||
|
||||
// Add hooks to configured collections
|
||||
for (const [collectionSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
|
||||
if (!triggerConfig) {
|
||||
logger.debug({collectionSlug}, 'Skipping collection with falsy trigger config')
|
||||
continue
|
||||
}
|
||||
|
||||
const collection = payload.collections[collectionSlug as T]
|
||||
const crud: CollectionTriggerConfigCrud = triggerConfig === true ? {
|
||||
create: true,
|
||||
delete: true,
|
||||
read: true,
|
||||
update: true,
|
||||
} : triggerConfig
|
||||
|
||||
if (!collection.config.hooks) {
|
||||
collection.config.hooks = {} as typeof collection.config.hooks
|
||||
}
|
||||
|
||||
if (crud.update || crud.create) {
|
||||
collection.config.hooks.afterChange = collection.config.hooks.afterChange || []
|
||||
collection.config.hooks.afterChange.push(async (change) => {
|
||||
const operation = change.operation as 'create' | 'update'
|
||||
|
||||
// AGGRESSIVE LOGGING - this should ALWAYS appear
|
||||
console.log('🚨 AUTOMATION PLUGIN HOOK CALLED! 🚨')
|
||||
console.log('Collection:', change.collection.slug)
|
||||
console.log('Operation:', operation)
|
||||
console.log('Doc ID:', change.doc?.id)
|
||||
console.log('Has executor?', !!executor)
|
||||
console.log('Executor type:', typeof executor)
|
||||
|
||||
logger.info({
|
||||
slug: change.collection.slug,
|
||||
operation,
|
||||
docId: change.doc?.id,
|
||||
previousDocId: change.previousDoc?.id,
|
||||
hasExecutor: !!executor,
|
||||
executorType: typeof executor
|
||||
}, 'AUTOMATION PLUGIN: Collection hook triggered')
|
||||
|
||||
try {
|
||||
console.log('🚨 About to call executeTriggeredWorkflows')
|
||||
|
||||
// Execute workflows for this trigger
|
||||
await executor.executeTriggeredWorkflows(
|
||||
change.collection.slug,
|
||||
operation,
|
||||
change.doc,
|
||||
change.previousDoc,
|
||||
change.req
|
||||
)
|
||||
|
||||
console.log('🚨 executeTriggeredWorkflows completed without error')
|
||||
|
||||
logger.info({
|
||||
slug: change.collection.slug,
|
||||
operation,
|
||||
docId: change.doc?.id
|
||||
}, 'AUTOMATION PLUGIN: executeTriggeredWorkflows completed successfully')
|
||||
} catch (error) {
|
||||
console.log('🚨 AUTOMATION PLUGIN ERROR:', error)
|
||||
|
||||
logger.error({
|
||||
slug: change.collection.slug,
|
||||
operation,
|
||||
docId: change.doc?.id,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : undefined
|
||||
}, 'AUTOMATION PLUGIN: executeTriggeredWorkflows failed')
|
||||
// Don't re-throw to avoid breaking other hooks
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (crud.read) {
|
||||
collection.config.hooks.afterRead = collection.config.hooks.afterRead || []
|
||||
collection.config.hooks.afterRead.push(async (change) => {
|
||||
logger.debug({
|
||||
slug: change.collection.slug,
|
||||
operation: 'read',
|
||||
}, 'Collection hook triggered')
|
||||
|
||||
// Execute workflows for this trigger
|
||||
await executor.executeTriggeredWorkflows(
|
||||
change.collection.slug,
|
||||
'read',
|
||||
change.doc,
|
||||
undefined,
|
||||
change.req
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
if (crud.delete) {
|
||||
collection.config.hooks.afterDelete = collection.config.hooks.afterDelete || []
|
||||
collection.config.hooks.afterDelete.push(async (change) => {
|
||||
logger.debug({
|
||||
slug: change.collection.slug,
|
||||
operation: 'delete',
|
||||
}, 'Collection hook triggered')
|
||||
|
||||
// Execute workflows for this trigger
|
||||
await executor.executeTriggeredWorkflows(
|
||||
change.collection.slug,
|
||||
'delete',
|
||||
change.doc,
|
||||
undefined,
|
||||
change.req
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
if (collection) {
|
||||
logger.info({
|
||||
collectionSlug,
|
||||
hooksRegistered: {
|
||||
afterChange: crud.update || crud.create,
|
||||
afterRead: crud.read,
|
||||
afterDelete: crud.delete
|
||||
}
|
||||
}, 'Collection hooks registered successfully')
|
||||
} else {
|
||||
logger.error({
|
||||
collectionSlug,
|
||||
availableCollections: Object.keys(payload.collections)
|
||||
}, 'Collection not found for trigger configuration - check collection slug spelling')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
import type { Payload, PayloadRequest } from "payload"
|
||||
import type { Logger } from "pino"
|
||||
|
||||
import type { WorkflowExecutor, PayloadWorkflow } from "../core/workflow-executor.js"
|
||||
|
||||
export function initGlobalHooks(payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) {
|
||||
// Get all globals from the config
|
||||
const globals = payload.config.globals || []
|
||||
|
||||
for (const globalConfig of globals) {
|
||||
const globalSlug = globalConfig.slug
|
||||
|
||||
// Add afterChange hook to global
|
||||
if (!globalConfig.hooks) {
|
||||
globalConfig.hooks = {
|
||||
afterChange: [],
|
||||
afterRead: [],
|
||||
beforeChange: [],
|
||||
beforeRead: [],
|
||||
beforeValidate: []
|
||||
}
|
||||
}
|
||||
|
||||
if (!globalConfig.hooks.afterChange) {
|
||||
globalConfig.hooks.afterChange = []
|
||||
}
|
||||
|
||||
globalConfig.hooks.afterChange.push(async (change) => {
|
||||
logger.debug({
|
||||
global: globalSlug,
|
||||
operation: 'update'
|
||||
}, 'Global hook triggered')
|
||||
|
||||
// Execute workflows for this global trigger
|
||||
await executeTriggeredGlobalWorkflows(
|
||||
globalSlug,
|
||||
'update',
|
||||
change.doc,
|
||||
change.previousDoc,
|
||||
change.req,
|
||||
payload,
|
||||
logger,
|
||||
executor
|
||||
)
|
||||
})
|
||||
|
||||
logger.info({ globalSlug }, 'Global hooks registered')
|
||||
}
|
||||
}
|
||||
|
||||
async function executeTriggeredGlobalWorkflows(
|
||||
globalSlug: string,
|
||||
operation: 'update',
|
||||
doc: Record<string, any>,
|
||||
previousDoc: Record<string, any>,
|
||||
req: PayloadRequest,
|
||||
payload: Payload,
|
||||
logger: Payload['logger'],
|
||||
executor: WorkflowExecutor
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Find workflows with matching global triggers
|
||||
const workflows = await payload.find({
|
||||
collection: 'workflows',
|
||||
depth: 2,
|
||||
limit: 100,
|
||||
req,
|
||||
where: {
|
||||
'triggers.global': {
|
||||
equals: globalSlug
|
||||
},
|
||||
'triggers.globalOperation': {
|
||||
equals: operation
|
||||
},
|
||||
'triggers.type': {
|
||||
equals: 'global-trigger'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
logger.info({
|
||||
globalSlug,
|
||||
operation,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Triggering global workflow')
|
||||
|
||||
// Create execution context
|
||||
const context = {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'global',
|
||||
doc,
|
||||
global: globalSlug,
|
||||
operation,
|
||||
previousDoc,
|
||||
req
|
||||
}
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as PayloadWorkflow, context, req)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
globalSlug,
|
||||
operation
|
||||
}, 'Failed to execute triggered global workflows')
|
||||
}
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
import type {Payload} from "payload"
|
||||
import type {Logger} from "pino"
|
||||
|
||||
import type {WorkflowsPluginConfig} from "./config-types.js"
|
||||
|
||||
export function initStepTasks<T extends string>(pluginOptions: WorkflowsPluginConfig<T>, payload: Payload, logger: Payload['logger']) {
|
||||
logger.info({ stepCount: pluginOptions.steps.length, steps: pluginOptions.steps.map(s => s.slug) }, 'Step tasks were registered during config phase')
|
||||
|
||||
// Verify that the tasks are available in the job system
|
||||
const availableTasks = payload.config.jobs?.tasks?.map(t => t.slug) || []
|
||||
const pluginTasks = pluginOptions.steps.map(s => s.slug)
|
||||
|
||||
pluginTasks.forEach(taskSlug => {
|
||||
if (availableTasks.includes(taskSlug)) {
|
||||
logger.info({ taskSlug }, 'Step task confirmed available in job system')
|
||||
} else {
|
||||
logger.error({ taskSlug }, 'Step task not found in job system - this will cause execution failures')
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,179 +0,0 @@
|
||||
import type {Config, PayloadRequest} from 'payload'
|
||||
|
||||
import {type PayloadWorkflow, WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {getConfigLogger, initializeLogger} from './logger.js'
|
||||
|
||||
export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'): void {
|
||||
const logger = getConfigLogger()
|
||||
// Ensure the prefix starts with a slash
|
||||
const normalizedPrefix = webhookPrefix.startsWith('/') ? webhookPrefix : `/${webhookPrefix}`
|
||||
logger.debug(`Adding webhook endpoint to config with prefix: ${normalizedPrefix}`)
|
||||
logger.debug('Current config.endpoints length:', config.endpoints?.length || 0)
|
||||
|
||||
// Define webhook endpoint
|
||||
const webhookEndpoint = {
|
||||
handler: async (req: PayloadRequest) => {
|
||||
const {path} = req.routeParams as { path: string }
|
||||
const webhookData = req.body || {}
|
||||
|
||||
logger.debug('Webhook endpoint handler called, path: ' + path)
|
||||
|
||||
try {
|
||||
// Find workflows with matching webhook triggers
|
||||
const workflows = await req.payload.find({
|
||||
collection: 'workflows',
|
||||
depth: 2,
|
||||
limit: 100,
|
||||
req,
|
||||
where: {
|
||||
'triggers.type': {
|
||||
equals: 'webhook-trigger'
|
||||
},
|
||||
'triggers.webhookPath': {
|
||||
equals: path
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (workflows.docs.length === 0) {
|
||||
return new Response(
|
||||
JSON.stringify({error: 'No workflows found for this webhook path'}),
|
||||
{
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
status: 404
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// Create a workflow executor for this request
|
||||
const logger = initializeLogger(req.payload)
|
||||
const executor = new WorkflowExecutor(req.payload, logger)
|
||||
|
||||
const executionPromises = workflows.docs.map(async (workflow) => {
|
||||
try {
|
||||
// Create execution context for the webhook trigger
|
||||
const context = {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'webhook',
|
||||
data: webhookData,
|
||||
headers: Object.fromEntries(req.headers?.entries() || []),
|
||||
path,
|
||||
req
|
||||
}
|
||||
}
|
||||
|
||||
// Find the matching trigger and check its condition if present
|
||||
const triggers = workflow.triggers as Array<{
|
||||
condition?: string
|
||||
type: string
|
||||
parameters?: {
|
||||
webhookPath?: string
|
||||
[key: string]: any
|
||||
}
|
||||
}>
|
||||
|
||||
const matchingTrigger = triggers?.find(trigger =>
|
||||
trigger.type === 'webhook-trigger' &&
|
||||
trigger.parameters?.webhookPath === path
|
||||
)
|
||||
|
||||
// Check trigger condition if present
|
||||
if (matchingTrigger?.condition) {
|
||||
logger.debug({
|
||||
condition: matchingTrigger.condition,
|
||||
path,
|
||||
webhookData: JSON.stringify(webhookData).substring(0, 200),
|
||||
headers: Object.keys(context.trigger.headers || {}),
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Evaluating webhook trigger condition')
|
||||
|
||||
const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context)
|
||||
|
||||
if (!conditionMet) {
|
||||
logger.info({
|
||||
condition: matchingTrigger.condition,
|
||||
path,
|
||||
webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200),
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Webhook trigger condition not met, skipping workflow')
|
||||
|
||||
return { reason: 'Condition not met', status: 'skipped', workflowId: workflow.id }
|
||||
}
|
||||
|
||||
logger.info({
|
||||
condition: matchingTrigger.condition,
|
||||
path,
|
||||
webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200),
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Webhook trigger condition met')
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as PayloadWorkflow, context, req)
|
||||
|
||||
return { status: 'triggered', workflowId: workflow.id }
|
||||
} catch (error) {
|
||||
return {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
status: 'failed',
|
||||
workflowId: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const results = await Promise.allSettled(executionPromises)
|
||||
const resultsData = results.map((result, index) => {
|
||||
const baseResult = { workflowId: workflows.docs[index].id }
|
||||
if (result.status === 'fulfilled') {
|
||||
return { ...baseResult, ...result.value }
|
||||
} else {
|
||||
return { ...baseResult, error: result.reason, status: 'failed' }
|
||||
}
|
||||
})
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
message: `Triggered ${workflows.docs.length} workflow(s)`,
|
||||
results: resultsData
|
||||
}),
|
||||
{
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
status: 200
|
||||
}
|
||||
)
|
||||
|
||||
} catch (error) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
details: error instanceof Error ? error.message : 'Unknown error',
|
||||
error: 'Failed to process webhook'
|
||||
}),
|
||||
{
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
status: 500
|
||||
}
|
||||
)
|
||||
}
|
||||
},
|
||||
method: 'post' as const,
|
||||
path: `${normalizedPrefix}/:path`
|
||||
}
|
||||
|
||||
// Check if the webhook endpoint already exists to avoid duplicates
|
||||
const existingEndpoint = config.endpoints?.find(endpoint =>
|
||||
endpoint.path === webhookEndpoint.path && endpoint.method === webhookEndpoint.method
|
||||
)
|
||||
|
||||
if (!existingEndpoint) {
|
||||
// Combine existing endpoints with the webhook endpoint
|
||||
config.endpoints = [...(config.endpoints || []), webhookEndpoint]
|
||||
logger.debug(`Webhook endpoint added at path: ${webhookEndpoint.path}`)
|
||||
logger.debug('New config.endpoints length:', config.endpoints.length)
|
||||
} else {
|
||||
logger.debug(`Webhook endpoint already exists at path: ${webhookEndpoint.path}`)
|
||||
}
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
import type {Payload} from 'payload'
|
||||
|
||||
import {updateWorkflowCronJobs, removeWorkflowCronJobs} from './cron-scheduler.js'
|
||||
|
||||
/**
|
||||
* Initialize hooks for the workflows collection itself
|
||||
* to manage cron jobs when workflows are created/updated
|
||||
*/
|
||||
export function initWorkflowHooks(payload: Payload, logger: Payload['logger']): void {
|
||||
// Add afterChange hook to workflows collection to update cron jobs
|
||||
const workflowsCollection = payload.collections.workflows
|
||||
|
||||
if (!workflowsCollection) {
|
||||
logger.warn('Workflows collection not found, cannot initialize workflow hooks')
|
||||
return
|
||||
}
|
||||
|
||||
// Add afterChange hook to register/update cron jobs
|
||||
if (!workflowsCollection.config.hooks?.afterChange) {
|
||||
if (!workflowsCollection.config.hooks) {
|
||||
// @ts-expect-error - hooks object will be populated by Payload
|
||||
workflowsCollection.config.hooks = {}
|
||||
}
|
||||
workflowsCollection.config.hooks.afterChange = []
|
||||
}
|
||||
|
||||
workflowsCollection.config.hooks.afterChange.push(async ({ doc, operation }) => {
|
||||
if (operation === 'create' || operation === 'update') {
|
||||
logger.debug({
|
||||
operation,
|
||||
workflowId: doc.id,
|
||||
workflowName: doc.name
|
||||
}, 'Workflow changed, updating cron jobs selectively')
|
||||
|
||||
// Update cron jobs for this specific workflow only
|
||||
await updateWorkflowCronJobs(doc.id, payload, logger)
|
||||
}
|
||||
})
|
||||
|
||||
// Add afterDelete hook to clean up cron jobs
|
||||
if (!workflowsCollection.config.hooks?.afterDelete) {
|
||||
workflowsCollection.config.hooks.afterDelete = []
|
||||
}
|
||||
|
||||
workflowsCollection.config.hooks.afterDelete.push(async ({ doc }) => {
|
||||
logger.debug({
|
||||
workflowId: doc.id,
|
||||
workflowName: doc.name
|
||||
}, 'Workflow deleted, removing cron jobs')
|
||||
|
||||
// Remove cron jobs for the deleted workflow
|
||||
removeWorkflowCronJobs(doc.id, payload, logger)
|
||||
})
|
||||
|
||||
logger.info('Workflow hooks initialized for cron job management')
|
||||
}
|
||||
@@ -3,25 +3,40 @@ import type { Payload } from 'payload'
|
||||
// Global logger instance - use Payload's logger type
|
||||
let pluginLogger: null | Payload['logger'] = null
|
||||
|
||||
/**
|
||||
* Get the configured log level from environment variables
|
||||
* Supports: PAYLOAD_AUTOMATION_LOG_LEVEL for unified control
|
||||
* Or separate: PAYLOAD_AUTOMATION_CONFIG_LOG_LEVEL and PAYLOAD_AUTOMATION_LOG_LEVEL
|
||||
*/
|
||||
function getConfigLogLevel(): string {
|
||||
return process.env.PAYLOAD_AUTOMATION_CONFIG_LOG_LEVEL ||
|
||||
process.env.PAYLOAD_AUTOMATION_LOG_LEVEL ||
|
||||
'warn' // Default to warn level for production
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple config-time logger for use during plugin configuration
|
||||
* Uses console with plugin prefix since Payload logger isn't available yet
|
||||
*/
|
||||
const configLogger = {
|
||||
debug: <T>(message: string, ...args: T[]) => {
|
||||
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
|
||||
console.log(`[payload-automation] ${message}`, ...args)
|
||||
const level = getConfigLogLevel()
|
||||
if (level === 'silent' || (level !== 'debug' && level !== 'trace')) {return}
|
||||
console.debug(`[payload-automation] ${message}`, ...args)
|
||||
},
|
||||
error: <T>(message: string, ...args: T[]) => {
|
||||
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
|
||||
const level = getConfigLogLevel()
|
||||
if (level === 'silent') {return}
|
||||
console.error(`[payload-automation] ${message}`, ...args)
|
||||
},
|
||||
info: <T>(message: string, ...args: T[]) => {
|
||||
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
|
||||
console.log(`[payload-automation] ${message}`, ...args)
|
||||
const level = getConfigLogLevel()
|
||||
if (level === 'silent' || level === 'error' || level === 'warn') {return}
|
||||
console.info(`[payload-automation] ${message}`, ...args)
|
||||
},
|
||||
warn: <T>(message: string, ...args: T[]) => {
|
||||
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
|
||||
const level = getConfigLogLevel()
|
||||
if (level === 'silent' || level === 'error') {return}
|
||||
console.warn(`[payload-automation] ${message}`, ...args)
|
||||
}
|
||||
}
|
||||
@@ -39,8 +54,13 @@ export function getConfigLogger() {
|
||||
*/
|
||||
export function initializeLogger(payload: Payload): Payload['logger'] {
|
||||
// Create a child logger with plugin identification
|
||||
// Use PAYLOAD_AUTOMATION_LOG_LEVEL as the primary env var
|
||||
const logLevel = process.env.PAYLOAD_AUTOMATION_LOG_LEVEL ||
|
||||
process.env.PAYLOAD_AUTOMATION_LOGGING || // Legacy support
|
||||
'warn' // Default to warn level for production
|
||||
|
||||
pluginLogger = payload.logger.child({
|
||||
level: process.env.PAYLOAD_AUTOMATION_LOGGING || 'silent',
|
||||
level: logLevel,
|
||||
plugin: '@xtr-dev/payload-automation'
|
||||
})
|
||||
return pluginLogger
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
import { describe, it, expect } from 'vitest'
|
||||
|
||||
describe('PayloadCMS Automation Plugin', () => {
|
||||
it('should export the plugin function from server export', async () => {
|
||||
const { workflowsPlugin } = await import('../exports/server.js')
|
||||
expect(workflowsPlugin).toBeDefined()
|
||||
expect(typeof workflowsPlugin).toBe('function')
|
||||
})
|
||||
|
||||
it('should have the correct package name', async () => {
|
||||
// Basic test to ensure the plugin can be imported
|
||||
expect(true).toBe(true)
|
||||
})
|
||||
})
|
||||
@@ -1,356 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest'
|
||||
import { createDocumentHandler } from '../steps/create-document-handler.js'
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
describe('CreateDocumentStepHandler', () => {
|
||||
let mockPayload: Payload
|
||||
let mockReq: any
|
||||
|
||||
beforeEach(() => {
|
||||
mockPayload = {
|
||||
create: vi.fn()
|
||||
} as any
|
||||
|
||||
mockReq = {
|
||||
payload: mockPayload,
|
||||
user: { id: 'user-123', email: 'test@example.com' }
|
||||
}
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Document creation', () => {
|
||||
it('should create document successfully', async () => {
|
||||
const createdDoc = {
|
||||
id: 'doc-123',
|
||||
title: 'Test Document',
|
||||
content: 'Test content'
|
||||
}
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: {
|
||||
title: 'Test Document',
|
||||
content: 'Test content'
|
||||
},
|
||||
stepName: 'test-create-step'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(result.output.id).toBe('doc-123')
|
||||
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
title: 'Test Document',
|
||||
content: 'Test content'
|
||||
},
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
|
||||
it('should create document with relationship fields', async () => {
|
||||
const createdDoc = {
|
||||
id: 'doc-456',
|
||||
title: 'Related Document',
|
||||
author: 'user-123',
|
||||
category: 'cat-789'
|
||||
}
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'articles',
|
||||
data: {
|
||||
title: 'Related Document',
|
||||
author: 'user-123',
|
||||
category: 'cat-789'
|
||||
},
|
||||
stepName: 'test-create-with-relations'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'articles',
|
||||
data: {
|
||||
title: 'Related Document',
|
||||
author: 'user-123',
|
||||
category: 'cat-789'
|
||||
},
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
|
||||
it('should create document with complex nested data', async () => {
|
||||
const complexData = {
|
||||
title: 'Complex Document',
|
||||
metadata: {
|
||||
tags: ['tag1', 'tag2'],
|
||||
settings: {
|
||||
featured: true,
|
||||
priority: 5
|
||||
}
|
||||
},
|
||||
blocks: [
|
||||
{ type: 'text', content: 'Text block' },
|
||||
{ type: 'image', src: 'image.jpg', alt: 'Test image' }
|
||||
]
|
||||
}
|
||||
|
||||
const createdDoc = { id: 'doc-complex', ...complexData }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'pages',
|
||||
data: complexData,
|
||||
stepName: 'test-create-complex'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'pages',
|
||||
data: complexData,
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error handling', () => {
|
||||
it('should handle PayloadCMS validation errors', async () => {
|
||||
const validationError = new Error('Validation failed')
|
||||
;(validationError as any).data = [
|
||||
{
|
||||
message: 'Title is required',
|
||||
path: 'title',
|
||||
value: undefined
|
||||
}
|
||||
]
|
||||
;(mockPayload.create as any).mockRejectedValue(validationError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: {
|
||||
content: 'Missing title'
|
||||
},
|
||||
stepName: 'test-validation-error'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Validation failed')
|
||||
})
|
||||
|
||||
it('should handle permission errors', async () => {
|
||||
const permissionError = new Error('Insufficient permissions')
|
||||
;(permissionError as any).status = 403
|
||||
;(mockPayload.create as any).mockRejectedValue(permissionError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'admin-only',
|
||||
data: {
|
||||
secret: 'confidential data'
|
||||
},
|
||||
stepName: 'test-permission-error'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Insufficient permissions')
|
||||
})
|
||||
|
||||
it('should handle database connection errors', async () => {
|
||||
const dbError = new Error('Database connection failed')
|
||||
;(mockPayload.create as any).mockRejectedValue(dbError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Test' },
|
||||
stepName: 'test-db-error'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Database connection failed')
|
||||
})
|
||||
|
||||
it('should handle unknown collection errors', async () => {
|
||||
const collectionError = new Error('Collection "unknown" not found')
|
||||
;(mockPayload.create as any).mockRejectedValue(collectionError)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'unknown-collection',
|
||||
data: { title: 'Test' },
|
||||
stepName: 'test-unknown-collection'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Collection "unknown" not found')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Input validation', () => {
|
||||
it('should validate required collection slug', async () => {
|
||||
const input = {
|
||||
data: { title: 'Test' },
|
||||
stepName: 'test-missing-collection'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Collection slug is required')
|
||||
})
|
||||
|
||||
it('should validate required data field', async () => {
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
stepName: 'test-missing-data'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Data is required')
|
||||
})
|
||||
|
||||
it('should validate data is an object', async () => {
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: 'invalid-data-type',
|
||||
stepName: 'test-invalid-data-type'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Data must be an object')
|
||||
})
|
||||
|
||||
it('should handle empty data object', async () => {
|
||||
const createdDoc = { id: 'empty-doc' }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: {},
|
||||
stepName: 'test-empty-data'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.document).toEqual(createdDoc)
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'posts',
|
||||
data: {},
|
||||
req: mockReq
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Request context', () => {
|
||||
it('should pass user context from request', async () => {
|
||||
const createdDoc = { id: 'user-doc', title: 'User Document' }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'User Document' },
|
||||
stepName: 'test-user-context'
|
||||
}
|
||||
|
||||
await createDocumentStepHandler({ input, req: mockReq })
|
||||
|
||||
const createCall = (mockPayload.create as any).mock.calls[0][0]
|
||||
expect(createCall.req).toBe(mockReq)
|
||||
expect(createCall.req.user).toEqual({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com'
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle requests without user context', async () => {
|
||||
const reqWithoutUser = {
|
||||
payload: mockPayload,
|
||||
user: null
|
||||
}
|
||||
|
||||
const createdDoc = { id: 'anonymous-doc' }
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Anonymous Document' },
|
||||
stepName: 'test-anonymous'
|
||||
}
|
||||
|
||||
const result = await createDocumentStepHandler({ input, req: reqWithoutUser })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(mockPayload.create).toHaveBeenCalledWith({
|
||||
collection: 'posts',
|
||||
data: { title: 'Anonymous Document' },
|
||||
req: reqWithoutUser
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Output structure', () => {
|
||||
it('should return correct output structure on success', async () => {
|
||||
const createdDoc = {
|
||||
id: 'output-test-doc',
|
||||
title: 'Output Test',
|
||||
createdAt: '2024-01-01T00:00:00.000Z',
|
||||
updatedAt: '2024-01-01T00:00:00.000Z'
|
||||
}
|
||||
;(mockPayload.create as any).mockResolvedValue(createdDoc)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Output Test' },
|
||||
stepName: 'test-output-structure'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result).toEqual({
|
||||
state: 'succeeded',
|
||||
output: {
|
||||
document: createdDoc,
|
||||
id: 'output-test-doc'
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it('should return correct error structure on failure', async () => {
|
||||
const error = new Error('Test error')
|
||||
;(mockPayload.create as any).mockRejectedValue(error)
|
||||
|
||||
const input = {
|
||||
collectionSlug: 'posts',
|
||||
data: { title: 'Error Test' },
|
||||
stepName: 'test-error-structure'
|
||||
}
|
||||
|
||||
const result = await createDocumentHandler({ input, req: mockReq })
|
||||
|
||||
expect(result).toEqual({
|
||||
state: 'failed',
|
||||
error: 'Test error'
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,348 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest'
|
||||
import { httpRequestStepHandler } from '../steps/http-request-handler.js'
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
// Mock fetch globally
|
||||
global.fetch = vi.fn()
|
||||
|
||||
describe('HttpRequestStepHandler', () => {
|
||||
let mockPayload: Payload
|
||||
let mockReq: any
|
||||
|
||||
beforeEach(() => {
|
||||
mockPayload = {} as Payload
|
||||
mockReq = {
|
||||
payload: mockPayload,
|
||||
user: null
|
||||
}
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('GET requests', () => {
|
||||
it('should handle successful GET request', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'application/json' }),
|
||||
text: vi.fn().mockResolvedValue('{"success": true}')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/data',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-get-step'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.status).toBe(200)
|
||||
expect(result.output.statusText).toBe('OK')
|
||||
expect(result.output.body).toBe('{"success": true}')
|
||||
expect(result.output.headers).toEqual({ 'content-type': 'application/json' })
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/data', {
|
||||
method: 'GET',
|
||||
headers: {},
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle GET request with custom headers', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('success')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/data',
|
||||
method: 'GET' as const,
|
||||
headers: {
|
||||
'Authorization': 'Bearer token123',
|
||||
'User-Agent': 'PayloadCMS-Workflow/1.0'
|
||||
},
|
||||
stepName: 'test-get-with-headers'
|
||||
}
|
||||
|
||||
await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/data', {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': 'Bearer token123',
|
||||
'User-Agent': 'PayloadCMS-Workflow/1.0'
|
||||
},
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('POST requests', () => {
|
||||
it('should handle POST request with JSON body', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 201,
|
||||
statusText: 'Created',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('{"id": "123"}')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/posts',
|
||||
method: 'POST' as const,
|
||||
body: { title: 'Test Post', content: 'Test content' },
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
stepName: 'test-post-step'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.status).toBe(201)
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/posts', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ title: 'Test Post', content: 'Test content' }),
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle POST request with string body', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('OK')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/webhook',
|
||||
method: 'POST' as const,
|
||||
body: 'plain text data',
|
||||
headers: { 'Content-Type': 'text/plain' },
|
||||
stepName: 'test-post-string'
|
||||
}
|
||||
|
||||
await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://api.example.com/webhook', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'text/plain' },
|
||||
body: 'plain text data',
|
||||
signal: expect.any(AbortSignal)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error handling', () => {
|
||||
it('should handle network errors', async () => {
|
||||
;(global.fetch as any).mockRejectedValue(new Error('Network error'))
|
||||
|
||||
const input = {
|
||||
url: 'https://invalid-url.example.com',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-network-error'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Network error')
|
||||
})
|
||||
|
||||
it('should handle HTTP error status codes', async () => {
|
||||
const mockResponse = {
|
||||
ok: false,
|
||||
status: 404,
|
||||
statusText: 'Not Found',
|
||||
headers: new Headers(),
|
||||
text: vi.fn().mockResolvedValue('Page not found')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/nonexistent',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-404-error'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('HTTP 404')
|
||||
expect(result.output.status).toBe(404)
|
||||
expect(result.output.statusText).toBe('Not Found')
|
||||
})
|
||||
|
||||
it('should handle timeout errors', async () => {
|
||||
const abortError = new Error('The operation was aborted')
|
||||
abortError.name = 'AbortError'
|
||||
;(global.fetch as any).mockRejectedValue(abortError)
|
||||
|
||||
const input = {
|
||||
url: 'https://slow-api.example.com',
|
||||
method: 'GET' as const,
|
||||
timeout: 1000,
|
||||
stepName: 'test-timeout'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('timeout')
|
||||
})
|
||||
|
||||
it('should handle invalid JSON response parsing', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'application/json' }),
|
||||
text: vi.fn().mockResolvedValue('invalid json {')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/invalid-json',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-invalid-json'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
// Should still succeed but with raw text body
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.body).toBe('invalid json {')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Request validation', () => {
|
||||
it('should validate required URL field', async () => {
|
||||
const input = {
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-missing-url'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq } as any)
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('URL is required')
|
||||
})
|
||||
|
||||
it('should validate HTTP method', async () => {
|
||||
const input = {
|
||||
url: 'https://api.example.com',
|
||||
method: 'INVALID' as any,
|
||||
stepName: 'test-invalid-method'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Invalid HTTP method')
|
||||
})
|
||||
|
||||
it('should validate URL format', async () => {
|
||||
const input = {
|
||||
url: 'not-a-valid-url',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-invalid-url'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('failed')
|
||||
expect(result.error).toContain('Invalid URL')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Response processing', () => {
|
||||
it('should parse JSON responses automatically', async () => {
|
||||
const responseData = { id: 123, name: 'Test Item' }
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'application/json' }),
|
||||
text: vi.fn().mockResolvedValue(JSON.stringify(responseData))
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/item/123',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-json-parsing'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(typeof result.output.body).toBe('string')
|
||||
// Should contain the JSON as string for safe storage
|
||||
expect(result.output.body).toBe(JSON.stringify(responseData))
|
||||
})
|
||||
|
||||
it('should handle non-JSON responses', async () => {
|
||||
const htmlContent = '<html><body>Hello World</body></html>'
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({ 'content-type': 'text/html' }),
|
||||
text: vi.fn().mockResolvedValue(htmlContent)
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://example.com/page',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-html-response'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.body).toBe(htmlContent)
|
||||
})
|
||||
|
||||
it('should capture response headers', async () => {
|
||||
const mockResponse = {
|
||||
ok: true,
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
headers: new Headers({
|
||||
'content-type': 'application/json',
|
||||
'x-rate-limit': '100',
|
||||
'x-custom-header': 'custom-value'
|
||||
}),
|
||||
text: vi.fn().mockResolvedValue('{}')
|
||||
}
|
||||
;(global.fetch as any).mockResolvedValue(mockResponse)
|
||||
|
||||
const input = {
|
||||
url: 'https://api.example.com/data',
|
||||
method: 'GET' as const,
|
||||
stepName: 'test-response-headers'
|
||||
}
|
||||
|
||||
const result = await httpRequestStepHandler({ input, req: mockReq })
|
||||
|
||||
expect(result.state).toBe('succeeded')
|
||||
expect(result.output.headers).toEqual({
|
||||
'content-type': 'application/json',
|
||||
'x-rate-limit': '100',
|
||||
'x-custom-header': 'custom-value'
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,472 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest'
|
||||
import { WorkflowExecutor } from '../core/workflow-executor.js'
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
describe('WorkflowExecutor', () => {
|
||||
let mockPayload: Payload
|
||||
let mockLogger: any
|
||||
let executor: WorkflowExecutor
|
||||
|
||||
beforeEach(() => {
|
||||
mockLogger = {
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn()
|
||||
}
|
||||
|
||||
mockPayload = {
|
||||
jobs: {
|
||||
queue: vi.fn().mockResolvedValue({ id: 'job-123' }),
|
||||
run: vi.fn().mockResolvedValue(undefined)
|
||||
},
|
||||
create: vi.fn(),
|
||||
update: vi.fn(),
|
||||
find: vi.fn()
|
||||
} as any
|
||||
|
||||
executor = new WorkflowExecutor(mockPayload, mockLogger)
|
||||
})
|
||||
|
||||
describe('resolveJSONPathValue', () => {
|
||||
it('should resolve simple JSONPath expressions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { id: 'test-id', title: 'Test Title' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveJSONPathValue('$.trigger.doc.id', context)
|
||||
expect(result).toBe('test-id')
|
||||
})
|
||||
|
||||
it('should resolve nested JSONPath expressions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: {
|
||||
id: 'test-id',
|
||||
nested: { value: 'nested-value' }
|
||||
}
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveJSONPathValue('$.trigger.doc.nested.value', context)
|
||||
expect(result).toBe('nested-value')
|
||||
})
|
||||
|
||||
it('should return original value for non-JSONPath strings', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).resolveJSONPathValue('plain-string', context)
|
||||
expect(result).toBe('plain-string')
|
||||
})
|
||||
|
||||
it('should handle missing JSONPath gracefully', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).resolveJSONPathValue('$.trigger.missing.field', context)
|
||||
expect(result).toBe('$.trigger.missing.field') // Should return original if resolution fails
|
||||
})
|
||||
})
|
||||
|
||||
describe('resolveStepInput', () => {
|
||||
it('should resolve all JSONPath expressions in step config', () => {
|
||||
const config = {
|
||||
url: '$.trigger.webhook.url',
|
||||
message: 'Static message',
|
||||
data: {
|
||||
id: '$.trigger.doc.id',
|
||||
title: '$.trigger.doc.title'
|
||||
}
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { id: 'doc-123', title: 'Doc Title' },
|
||||
webhook: { url: 'https://example.com/webhook' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveStepInput(config, context)
|
||||
|
||||
expect(result).toEqual({
|
||||
url: 'https://example.com/webhook',
|
||||
message: 'Static message',
|
||||
data: {
|
||||
id: 'doc-123',
|
||||
title: 'Doc Title'
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle arrays with JSONPath expressions', () => {
|
||||
const config = {
|
||||
items: ['$.trigger.doc.id', 'static-value', '$.trigger.doc.title']
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { id: 'doc-123', title: 'Doc Title' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).resolveStepInput(config, context)
|
||||
|
||||
expect(result).toEqual({
|
||||
items: ['doc-123', 'static-value', 'Doc Title']
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('resolveExecutionOrder', () => {
|
||||
it('should handle steps without dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request' },
|
||||
{ name: 'step2', step: 'create-document' },
|
||||
{ name: 'step3', step: 'http-request' }
|
||||
]
|
||||
|
||||
const result = (executor as any).resolveExecutionOrder(steps)
|
||||
|
||||
expect(result).toHaveLength(1) // All in one batch
|
||||
expect(result[0]).toHaveLength(3) // All steps in first batch
|
||||
})
|
||||
|
||||
it('should handle steps with dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request' },
|
||||
{ name: 'step2', step: 'create-document', dependencies: ['step1'] },
|
||||
{ name: 'step3', step: 'http-request', dependencies: ['step2'] }
|
||||
]
|
||||
|
||||
const result = (executor as any).resolveExecutionOrder(steps)
|
||||
|
||||
expect(result).toHaveLength(3) // Three batches
|
||||
expect(result[0]).toHaveLength(1) // step1 first
|
||||
expect(result[1]).toHaveLength(1) // step2 second
|
||||
expect(result[2]).toHaveLength(1) // step3 third
|
||||
})
|
||||
|
||||
it('should handle parallel execution with partial dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request' },
|
||||
{ name: 'step2', step: 'create-document' },
|
||||
{ name: 'step3', step: 'http-request', dependencies: ['step1'] },
|
||||
{ name: 'step4', step: 'create-document', dependencies: ['step1'] }
|
||||
]
|
||||
|
||||
const result = (executor as any).resolveExecutionOrder(steps)
|
||||
|
||||
expect(result).toHaveLength(2) // Two batches
|
||||
expect(result[0]).toHaveLength(2) // step1 and step2 in parallel
|
||||
expect(result[1]).toHaveLength(2) // step3 and step4 in parallel
|
||||
})
|
||||
|
||||
it('should detect circular dependencies', () => {
|
||||
const steps = [
|
||||
{ name: 'step1', step: 'http-request', dependencies: ['step2'] },
|
||||
{ name: 'step2', step: 'create-document', dependencies: ['step1'] }
|
||||
]
|
||||
|
||||
expect(() => {
|
||||
(executor as any).resolveExecutionOrder(steps)
|
||||
}).toThrow('Circular dependency detected')
|
||||
})
|
||||
})
|
||||
|
||||
describe('evaluateCondition', () => {
|
||||
it('should evaluate simple equality conditions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { status: 'published' }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).evaluateCondition('$.trigger.doc.status == "published"', context)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it('should evaluate inequality conditions', () => {
|
||||
const context = {
|
||||
trigger: {
|
||||
doc: { count: 5 }
|
||||
},
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const result = (executor as any).evaluateCondition('$.trigger.doc.count > 3', context)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it('should return false for invalid conditions', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).evaluateCondition('invalid condition syntax', context)
|
||||
expect(result).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle missing context gracefully', () => {
|
||||
const context = { trigger: {}, steps: {} }
|
||||
const result = (executor as any).evaluateCondition('$.trigger.doc.status == "published"', context)
|
||||
expect(result).toBe(false) // Missing values should fail condition
|
||||
})
|
||||
})
|
||||
|
||||
describe('safeSerialize', () => {
|
||||
it('should serialize simple objects', () => {
|
||||
const obj = { name: 'test', value: 123 }
|
||||
const result = (executor as any).safeSerialize(obj)
|
||||
expect(result).toBe('{"name":"test","value":123}')
|
||||
})
|
||||
|
||||
it('should handle circular references', () => {
|
||||
const obj: any = { name: 'test' }
|
||||
obj.self = obj // Create circular reference
|
||||
|
||||
const result = (executor as any).safeSerialize(obj)
|
||||
expect(result).toContain('"name":"test"')
|
||||
expect(result).toContain('"self":"[Circular]"')
|
||||
})
|
||||
|
||||
it('should handle undefined and null values', () => {
|
||||
const obj = {
|
||||
defined: 'value',
|
||||
undefined: undefined,
|
||||
null: null
|
||||
}
|
||||
|
||||
const result = (executor as any).safeSerialize(obj)
|
||||
const parsed = JSON.parse(result)
|
||||
expect(parsed.defined).toBe('value')
|
||||
expect(parsed.null).toBe(null)
|
||||
expect(parsed).not.toHaveProperty('undefined') // undefined props are omitted
|
||||
})
|
||||
})
|
||||
|
||||
describe('executeWorkflow', () => {
|
||||
it('should execute workflow with single step', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
steps: [
|
||||
{
|
||||
name: 'test-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: { doc: { id: 'test-doc' } },
|
||||
steps: {}
|
||||
}
|
||||
|
||||
// Mock step task
|
||||
const mockStepTask = {
|
||||
taskSlug: 'http-request-step',
|
||||
handler: vi.fn().mockResolvedValue({
|
||||
output: { status: 200, body: 'success' },
|
||||
state: 'succeeded'
|
||||
})
|
||||
}
|
||||
|
||||
// Mock the step tasks registry
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = await (executor as any).executeWorkflow(workflow, context)
|
||||
|
||||
expect(result.status).toBe('completed')
|
||||
expect(result.context.steps['test-step']).toBeDefined()
|
||||
expect(result.context.steps['test-step'].state).toBe('succeeded')
|
||||
expect(mockStepTask.handler).toHaveBeenCalledOnce()
|
||||
|
||||
// Restore original step tasks
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
|
||||
it('should handle step execution failures', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
steps: [
|
||||
{
|
||||
name: 'failing-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://invalid-url',
|
||||
method: 'GET'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: { doc: { id: 'test-doc' } },
|
||||
steps: {}
|
||||
}
|
||||
|
||||
// Mock failing step task
|
||||
const mockStepTask = {
|
||||
taskSlug: 'http-request-step',
|
||||
handler: vi.fn().mockRejectedValue(new Error('Network error'))
|
||||
}
|
||||
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = await (executor as any).executeWorkflow(workflow, context)
|
||||
|
||||
expect(result.status).toBe('failed')
|
||||
expect(result.error).toContain('Network error')
|
||||
expect(result.context.steps['failing-step']).toBeDefined()
|
||||
expect(result.context.steps['failing-step'].state).toBe('failed')
|
||||
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
|
||||
it('should execute steps with dependencies in correct order', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com/1',
|
||||
method: 'GET'
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com/2',
|
||||
method: 'GET',
|
||||
dependencies: ['step1']
|
||||
},
|
||||
{
|
||||
name: 'step3',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com/3',
|
||||
method: 'GET',
|
||||
dependencies: ['step1']
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
const context = {
|
||||
trigger: { doc: { id: 'test-doc' } },
|
||||
steps: {}
|
||||
}
|
||||
|
||||
const executionOrder: string[] = []
|
||||
const mockStepTask = {
|
||||
taskSlug: 'http-request-step',
|
||||
handler: vi.fn().mockImplementation(async ({ input }) => {
|
||||
executionOrder.push(input.stepName)
|
||||
return {
|
||||
output: { status: 200, body: 'success' },
|
||||
state: 'succeeded'
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = await (executor as any).executeWorkflow(workflow, context)
|
||||
|
||||
expect(result.status).toBe('completed')
|
||||
expect(executionOrder[0]).toBe('step1') // First step executed first
|
||||
expect(executionOrder.slice(1)).toContain('step2') // Dependent steps after
|
||||
expect(executionOrder.slice(1)).toContain('step3')
|
||||
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
})
|
||||
|
||||
describe('findStepTask', () => {
|
||||
it('should find registered step task by slug', () => {
|
||||
const mockStepTask = {
|
||||
taskSlug: 'test-step',
|
||||
handler: vi.fn()
|
||||
}
|
||||
|
||||
const originalStepTasks = (executor as any).stepTasks
|
||||
;(executor as any).stepTasks = [mockStepTask]
|
||||
|
||||
const result = (executor as any).findStepTask('test-step')
|
||||
expect(result).toBe(mockStepTask)
|
||||
|
||||
;(executor as any).stepTasks = originalStepTasks
|
||||
})
|
||||
|
||||
it('should return undefined for unknown step type', () => {
|
||||
const result = (executor as any).findStepTask('unknown-step')
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateStepConfiguration', () => {
|
||||
it('should validate step with required fields', () => {
|
||||
const step = {
|
||||
name: 'valid-step',
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
(executor as any).validateStepConfiguration(step)
|
||||
}).not.toThrow()
|
||||
})
|
||||
|
||||
it('should throw error for step without name', () => {
|
||||
const step = {
|
||||
step: 'http-request-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
(executor as any).validateStepConfiguration(step)
|
||||
}).toThrow('Step name is required')
|
||||
})
|
||||
|
||||
it('should throw error for step without type', () => {
|
||||
const step = {
|
||||
name: 'test-step',
|
||||
url: 'https://example.com',
|
||||
method: 'GET'
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
(executor as any).validateStepConfiguration(step)
|
||||
}).toThrow('Step type is required')
|
||||
})
|
||||
})
|
||||
|
||||
describe('createExecutionContext', () => {
|
||||
it('should create context with trigger data', () => {
|
||||
const triggerContext = {
|
||||
operation: 'create',
|
||||
doc: { id: 'test-id', title: 'Test Doc' },
|
||||
collection: 'posts'
|
||||
}
|
||||
|
||||
const result = (executor as any).createExecutionContext(triggerContext)
|
||||
|
||||
expect(result.trigger).toEqual(triggerContext)
|
||||
expect(result.steps).toEqual({})
|
||||
expect(result.metadata).toBeDefined()
|
||||
expect(result.metadata.startedAt).toBeDefined()
|
||||
})
|
||||
|
||||
it('should include metadata in context', () => {
|
||||
const triggerContext = { doc: { id: 'test' } }
|
||||
const result = (executor as any).createExecutionContext(triggerContext)
|
||||
|
||||
expect(result.metadata).toHaveProperty('startedAt')
|
||||
expect(result.metadata).toHaveProperty('executionId')
|
||||
expect(typeof result.metadata.executionId).toBe('string')
|
||||
})
|
||||
})
|
||||
})
|
||||
src/triggers/collection-trigger.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import type {TriggerConfig} from '../plugin/config-types.js'

export const collectionTrigger: TriggerConfig = ({collectionTriggers}) => ({
  slug: 'collection-hook',
  parameters: [
    {
      name: 'collectionSlug',
      type: 'select',
      options: Object.keys(collectionTriggers || {}),
    },
    {
      name: 'hook',
      type: 'select',
      options: [
        "afterChange",
        "afterDelete",
        "afterError",
        "afterForgotPassword",
        "afterLogin",
        "afterLogout",
        "afterMe",
        "afterOperation",
        "afterRead",
        "afterRefresh",
        "beforeChange",
        "beforeDelete",
        "beforeLogin",
        "beforeOperation",
        "beforeRead",
        "beforeValidate",
        "me",
        "refresh"
      ]
    }
  ]
})
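A minimal usage sketch of the factory above (assumption: the plugin forwards its `collectionTriggers` option, a record keyed by collection slug, as implied by the `Object.keys` call; the 'posts' and 'pages' keys are illustrative):

// Sketch, not part of the changeset:
const trigger = collectionTrigger({ collectionTriggers: { posts: true, pages: true } })
// trigger.slug === 'collection-hook'
// the 'collectionSlug' select parameter now offers 'posts' and 'pages' as options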
src/triggers/global-trigger.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import type {TriggerConfig} from '../plugin/config-types.js'

export const globalTrigger: TriggerConfig = ({globalTriggers}) => ({
  slug: 'global-hook',
  parameters: [
    {
      name: 'global',
      type: 'select',
      admin: {
        description: 'Global that triggers the workflow',
      },
      options: Object.keys(globalTriggers || {}),
    },
    {
      name: 'operation',
      type: 'select',
      admin: {
        description: 'Global hook that triggers the workflow',
      },
      options: [
        "afterChange",
        "afterRead",
        "beforeChange",
        "beforeRead",
        "beforeValidate"
      ],
    }
  ]
})
src/triggers/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export { collectionTrigger } from './collection-trigger.js'
export { globalTrigger } from './global-trigger.js'
src/triggers/types.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
import type {Field} from "payload"

export type Trigger = {
  slug: string
  parameters: Field[]
}
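For illustration, a value conforming to the Trigger type above (a sketch; the slug and the field are made up and not part of the plugin):

// Sketch, not part of the changeset:
import type { Trigger } from './types.js'

const approvalTrigger: Trigger = {
  slug: 'manual-approval',                                   // hypothetical slug
  parameters: [
    { name: 'approverEmail', type: 'text', required: true }  // a standard Payload text field
  ],
}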
@@ -1,158 +0,0 @@
|
||||
import type { Field } from 'payload'
|
||||
|
||||
import type { CustomTriggerConfig } from '../plugin/config-types.js'
|
||||
|
||||
// Types for better type safety
|
||||
interface FieldWithName {
|
||||
name: string
|
||||
[key: string]: unknown
|
||||
}
|
||||
|
||||
interface HookContext {
|
||||
siblingData: Record<string, unknown>
|
||||
value?: unknown
|
||||
}
|
||||
|
||||
interface ValidationContext {
|
||||
siblingData: Record<string, unknown>
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a virtual field for a trigger parameter that stores its value in the parameters JSON field
|
||||
*
|
||||
* @param field - Standard PayloadCMS field configuration (must be a data field with a name)
|
||||
* @param triggerSlug - The slug of the trigger this field belongs to
|
||||
* @returns Modified field with virtual storage hooks and proper naming
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const myTrigger: CustomTriggerConfig = {
|
||||
* slug: 'my-trigger',
|
||||
* inputs: [
|
||||
* createTriggerField({
|
||||
* name: 'webhookUrl',
|
||||
* type: 'text',
|
||||
* required: true,
|
||||
* admin: {
|
||||
* description: 'URL to call when triggered'
|
||||
* }
|
||||
* }, 'my-trigger')
|
||||
* ]
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function createTriggerField(field: FieldWithName, triggerSlug: string): Field {
|
||||
const originalName = field.name
|
||||
if (!originalName) {
|
||||
throw new Error('Field must have a name property')
|
||||
}
|
||||
|
||||
// Create a unique field name by prefixing with trigger slug
|
||||
const uniqueFieldName = `__trigger_${triggerSlug}_${originalName}`
|
||||
|
||||
const resultField: Record<string, unknown> = {
|
||||
...field,
|
||||
admin: {
|
||||
...(field.admin as Record<string, unknown> || {}),
|
||||
condition: (data: unknown, siblingData: Record<string, unknown>) => {
|
||||
// Only show this field when the trigger type matches
|
||||
const triggerMatches = siblingData?.type === triggerSlug
|
||||
|
||||
// If the original field had a condition, combine it with our trigger condition
|
||||
const originalCondition = (field.admin as Record<string, unknown>)?.condition
|
||||
if (originalCondition && typeof originalCondition === 'function') {
|
||||
return triggerMatches && (originalCondition as (data: unknown, siblingData: Record<string, unknown>) => boolean)(data, siblingData)
|
||||
}
|
||||
|
||||
return triggerMatches
|
||||
}
|
||||
},
|
||||
hooks: {
|
||||
...(field.hooks as Record<string, unknown[]> || {}),
|
||||
afterRead: [
|
||||
...((field.hooks as Record<string, unknown[]>)?.afterRead || []),
|
||||
({ siblingData }: HookContext) => {
|
||||
// Read the value from the parameters JSON field
|
||||
const parameters = siblingData?.parameters as Record<string, unknown>
|
||||
return parameters?.[originalName] ?? (field as Record<string, unknown>).defaultValue
|
||||
}
|
||||
],
|
||||
beforeChange: [
|
||||
...((field.hooks as Record<string, unknown[]>)?.beforeChange || []),
|
||||
({ siblingData, value }: HookContext) => {
|
||||
// Store the value in the parameters JSON field
|
||||
if (!siblingData.parameters) {
|
||||
siblingData.parameters = {}
|
||||
}
|
||||
const parameters = siblingData.parameters as Record<string, unknown>
|
||||
parameters[originalName] = value
|
||||
return undefined // Virtual field, don't store directly
|
||||
}
|
||||
]
|
||||
},
|
||||
name: uniqueFieldName,
|
||||
virtual: true,
|
||||
}
|
||||
|
||||
// Only add validate if the field supports it (data fields)
|
||||
const hasValidation = (field as Record<string, unknown>).validate || (field as Record<string, unknown>).required
|
||||
if (hasValidation) {
|
||||
resultField.validate = (value: unknown, args: ValidationContext) => {
|
||||
const parameters = args.siblingData?.parameters as Record<string, unknown>
|
||||
const paramValue = value ?? parameters?.[originalName]
|
||||
|
||||
// Check required validation
|
||||
const isRequired = (field as Record<string, unknown>).required
|
||||
if (isRequired && args.siblingData?.type === triggerSlug && !paramValue) {
|
||||
const fieldLabel = (field as Record<string, unknown>).label as string
|
||||
const adminDesc = ((field as Record<string, unknown>).admin as Record<string, unknown>)?.description as string
|
||||
const label = fieldLabel || adminDesc || originalName
|
||||
return `${label} is required for ${triggerSlug}`
|
||||
}
|
||||
|
||||
// Run original validation if present
|
||||
const originalValidate = (field as Record<string, unknown>).validate
|
||||
if (originalValidate && typeof originalValidate === 'function') {
|
||||
return (originalValidate as (value: unknown, args: ValidationContext) => boolean | string)(paramValue, args)
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return resultField as Field
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a custom trigger configuration with the provided fields
|
||||
*
|
||||
* @param slug - Unique identifier for the trigger
|
||||
* @param fields - Array of PayloadCMS fields that will be shown as trigger parameters
|
||||
* @returns Complete trigger configuration
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const webhookTrigger = createTrigger('webhook', [
|
||||
* {
|
||||
* name: 'url',
|
||||
* type: 'text',
|
||||
* required: true,
|
||||
* admin: {
|
||||
* description: 'Webhook URL'
|
||||
* }
|
||||
* },
|
||||
* {
|
||||
* name: 'method',
|
||||
* type: 'select',
|
||||
* options: ['GET', 'POST', 'PUT', 'DELETE'],
|
||||
* defaultValue: 'POST'
|
||||
* }
|
||||
* ])
|
||||
* ```
|
||||
*/
|
||||
export function createTrigger(slug: string, fields: FieldWithName[]): CustomTriggerConfig {
|
||||
return {
|
||||
slug,
|
||||
inputs: fields.map(field => createTriggerField(field, slug))
|
||||
}
|
||||
}
|
||||
@@ -1,157 +0,0 @@
|
||||
import { createTrigger } from './trigger-helpers.js'
|
||||
import type { CustomTriggerConfig } from '../plugin/config-types.js'
|
||||
|
||||
/**
|
||||
* Preset trigger builders for common patterns
|
||||
*/
|
||||
|
||||
/**
|
||||
* Create a webhook trigger with common webhook parameters pre-configured
|
||||
*/
|
||||
export function webhookTrigger(slug: string): CustomTriggerConfig {
|
||||
return createTrigger(slug, [
|
||||
{
|
||||
name: 'path',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'URL path for the webhook endpoint (e.g., "my-webhook")'
|
||||
},
|
||||
validate: (value: any) => {
|
||||
if (typeof value === 'string' && value.includes(' ')) {
|
||||
return 'Webhook path cannot contain spaces'
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'secret',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Secret key for webhook signature validation (optional but recommended)'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'headers',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Expected HTTP headers for validation (JSON object)'
|
||||
}
|
||||
}
|
||||
])
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a scheduled/cron trigger with timing parameters pre-configured
|
||||
*/
|
||||
export function cronTrigger(slug: string): CustomTriggerConfig {
|
||||
return createTrigger(slug, [
|
||||
{
|
||||
name: 'expression',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Cron expression for scheduling (e.g., "0 9 * * 1" for every Monday at 9 AM)',
|
||||
placeholder: '0 9 * * 1'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'timezone',
|
||||
type: 'text',
|
||||
defaultValue: 'UTC',
|
||||
admin: {
|
||||
description: 'Timezone for cron execution (e.g., "America/New_York", "Europe/London")',
|
||||
placeholder: 'UTC'
|
||||
},
|
||||
validate: (value: any) => {
|
||||
if (value) {
|
||||
try {
|
||||
new Intl.DateTimeFormat('en', { timeZone: value as string })
|
||||
return true
|
||||
} catch {
|
||||
return `Invalid timezone: ${value}. Please use a valid IANA timezone identifier`
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
])
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an event-driven trigger with event filtering parameters
|
||||
*/
|
||||
export function eventTrigger(slug: string): CustomTriggerConfig {
|
||||
return createTrigger(slug, [
|
||||
{
|
||||
name: 'eventTypes',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: [
|
||||
{ label: 'User Created', value: 'user.created' },
|
||||
{ label: 'User Updated', value: 'user.updated' },
|
||||
{ label: 'Document Published', value: 'document.published' },
|
||||
{ label: 'Payment Completed', value: 'payment.completed' }
|
||||
],
|
||||
admin: {
|
||||
description: 'Event types that should trigger this workflow'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'filters',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'JSON filters to apply to event data (e.g., {"status": "active"})'
|
||||
}
|
||||
}
|
||||
])
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a simple manual trigger (no parameters needed)
|
||||
*/
|
||||
export function manualTrigger(slug: string): CustomTriggerConfig {
|
||||
return {
|
||||
slug,
|
||||
inputs: []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an API trigger for external systems to call
|
||||
*/
|
||||
export function apiTrigger(slug: string): CustomTriggerConfig {
|
||||
return createTrigger(slug, [
|
||||
{
|
||||
name: 'endpoint',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'API endpoint path (e.g., "/api/triggers/my-trigger")'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'method',
|
||||
type: 'select',
|
||||
options: ['GET', 'POST', 'PUT', 'PATCH'],
|
||||
defaultValue: 'POST',
|
||||
admin: {
|
||||
description: 'HTTP method for the API endpoint'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'authentication',
|
||||
type: 'select',
|
||||
options: [
|
||||
{ label: 'None', value: 'none' },
|
||||
{ label: 'API Key', value: 'api-key' },
|
||||
{ label: 'Bearer Token', value: 'bearer' },
|
||||
{ label: 'Basic Auth', value: 'basic' }
|
||||
],
|
||||
defaultValue: 'api-key',
|
||||
admin: {
|
||||
description: 'Authentication method for the API endpoint'
|
||||
}
|
||||
}
|
||||
])
|
||||
}
|
||||
@@ -1,17 +0,0 @@
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    globals: true,
    environment: 'node',
    threads: false, // Prevent port/DB conflicts
    pool: 'forks',
    poolOptions: {
      forks: {
        singleFork: true
      }
    },
    testTimeout: 30000, // 30 second timeout for integration tests
    setupFiles: ['./dev/test-setup.ts']
  },
})