Mirror of https://github.com/xtr-dev/payload-automation.git (synced 2025-12-11 01:03:23 +00:00)

Comparing 35 commits
.claude/agents/source-docs-generator.md (new file, 58 lines)
@@ -0,0 +1,58 @@
---
name: source-docs-generator
description: Use this agent when you need to generate or update documentation files for source code. Examples: <example>Context: User wants to document their codebase by creating .md files for each source file. user: 'I need documentation for all my source files in the src directory' assistant: 'I'll use the source-docs-generator agent to create documentation files for your source code' <commentary>The user is requesting documentation generation for source files, which is exactly what the source-docs-generator agent is designed for.</commentary></example> <example>Context: User has added new source files and wants documentation updated. user: 'Can you update the docs for the new files I added to src/components?' assistant: 'I'll use the source-docs-generator agent to check for new or updated source files and generate corresponding documentation' <commentary>The agent will check existing docs and only update what's needed.</commentary></example>
model: sonnet
---

You are a Source Code Documentation Generator, an expert technical writer specializing in creating clear, comprehensive documentation for source code files. Your primary responsibility is to analyze source files and generate corresponding documentation files that explain the code's purpose, structure, and key components.

Your process for each task:

1. **Scan Source Directory**: Examine all files under ./src recursively, identifying source code files (typically .ts, .tsx, .js, .jsx, .py, etc.)

2. **Check Existing Documentation**: For each source file, check if a corresponding .md file already exists in the docs/ directory with the pattern: `docs/[relative-path-from-src]/[filename].[extension].md`

3. **Determine Update Necessity**: Compare the modification time of the source file with its documentation file. Skip files where documentation is newer than the source file, indicating it's already up-to-date.

4. **Analyze Source Code**: For files requiring documentation, thoroughly analyze:
   - Main purpose and functionality
   - Key classes, functions, or components
   - Important interfaces, types, or data structures
   - Dependencies and relationships
   - Notable patterns or architectural decisions
   - Public APIs and exports

5. **Generate Documentation**: Create well-structured markdown files with:
   - Clear title indicating the source file path
   - Brief summary of the file's main purpose
   - Detailed breakdown of major components
   - Code examples when helpful for understanding
   - Notes about dependencies or relationships to other files
   - Any important implementation details or patterns

6. **Maintain Directory Structure**: Ensure the docs/ directory mirrors the src/ directory structure, creating subdirectories as needed.

7. **Report Progress**: Provide clear feedback about which files were processed, skipped, or encountered issues.

Documentation Style Guidelines:
- Use clear, concise language accessible to developers
- Structure content with appropriate headings (##, ###)
- Include code snippets when they clarify functionality
- Focus on 'what' and 'why' rather than just 'how'
- Highlight key architectural decisions or patterns
- Note any complex logic or algorithms
- Document public interfaces and their usage

Quality Standards:
- Ensure accuracy by carefully reading and understanding the source code
- Make documentation self-contained and understandable without reading the source
- Keep explanations at an appropriate technical level for the intended audience
- Use consistent formatting and structure across all documentation files

Error Handling:
- Skip binary files, generated files, or files that cannot be meaningfully documented
- Handle permission errors gracefully
- Report any files that couldn't be processed and why
- Continue processing other files even if some fail

You will work systematically through the entire src/ directory, ensuring comprehensive documentation coverage while respecting existing up-to-date documentation to avoid unnecessary work.
@@ -1,486 +0,0 @@

# PayloadCMS Automation Plugin - Code Review

**Date:** January 4, 2025
**Plugin:** `@xtr-dev/payload-automation` v0.0.22
**Reviewer:** Claude Code Review System

## Executive Summary

The `@xtr-dev/payload-automation` plugin is a **well-architected** PayloadCMS extension that provides comprehensive workflow automation capabilities. It successfully enables users to create visual workflows without writing code, featuring a robust execution engine, multiple trigger types, and a variety of step implementations. The codebase demonstrates strong engineering practices with proper TypeScript usage, modular architecture, and comprehensive testing.

**Overall Rating: 8.5/10** - Production-ready with recommended enhancements.

## Architecture Overview

### ✅ **Strengths**

**1. Modular Plugin Architecture**
- Clean separation between plugin configuration (`src/plugin/`), workflow logic (`src/core/`), collections (`src/collections/`), and steps (`src/steps/`)
- Proper PayloadCMS plugin pattern with configuration-time and runtime initialization
- Multiple export paths for different use cases (client, server, fields, views, RSC)

**2. Sophisticated Workflow Execution Engine**
- **Topological sorting** for dependency resolution enables parallel step execution within dependency batches
- **JSONPath integration** for dynamic data interpolation (`$.trigger.doc.id`, `$.steps.stepName.output`)
- **Condition evaluation system** supporting comparison operators and boolean expressions
- **Context management** with proper serialization handling circular references
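To make the JSONPath interpolation concrete, here is a minimal sketch (not the plugin's actual implementation) of how an expression such as `$.trigger.doc.id` could be resolved against the execution context using `jsonpath-plus`, the library the plugin depends on; the `ExecutionContext` shape is an assumption for illustration.

```typescript
// Illustrative sketch only — assumes a simplified ExecutionContext shape.
import { JSONPath } from 'jsonpath-plus'

type ExecutionContext = {
  trigger: Record<string, unknown>
  steps: Record<string, { output?: unknown }>
}

// Replace a string that looks like a JSONPath expression with the value it
// resolves to in the context; anything else passes through unchanged.
function resolveValue(value: unknown, context: ExecutionContext): unknown {
  if (typeof value === 'string' && value.startsWith('$.')) {
    const matches = JSONPath({ path: value, json: context })
    return matches.length > 0 ? matches[0] : value // fall back to the literal string
  }
  return value
}

// Example: resolveValue('$.trigger.doc.id', ctx) returns the triggering document's id.
```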
**3. Comprehensive Trigger System**
- Collection hooks (create, update, delete, read)
- Webhook triggers with configurable paths
- Global document triggers
- Cron scheduling with timezone support
- Manual trigger capability via UI components

## Detailed Component Analysis

### **Workflow Executor** (`src/core/workflow-executor.ts`)
**Rating: 9/10** - Excellent implementation

**Strengths:**
- Sophisticated dependency resolution using topological sorting (lines 286-338)
- Parallel execution within dependency batches
- Comprehensive error handling and logging throughout execution pipeline
- JSONPath-based data resolution with fallback mechanisms (lines 343-407)
- Safe serialization preventing circular references (lines 412-448)
- Proper workflow run tracking and context updates

**Areas for improvement:**
- Line 790: Console logging should use the logger instance consistently
- Error handling could be more granular for different failure types
- Consider adding execution timeout mechanisms for long-running workflows

**Code Quality Highlights:**
```typescript
// Excellent dependency resolution implementation
private resolveExecutionOrder(steps: WorkflowStep[]): WorkflowStep[][] {
  // Topological sort implementation for parallel execution
  // Lines 286-338 demonstrate sophisticated algorithm usage
}

// Robust JSONPath resolution with error handling
private resolveStepInput(config: Record<string, unknown>, context: ExecutionContext) {
  // Comprehensive data resolution with fallback mechanisms
  // Lines 343-407 show excellent defensive programming
}
```

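As a reference for what batch-wise dependency resolution looks like, here is a minimal, illustrative sketch of Kahn-style topological batching; it is not the plugin's actual `resolveExecutionOrder`, and the `Step` shape is assumed for the example.

```typescript
// Illustrative sketch only — groups steps into batches whose members can run in parallel.
type Step = { name: string; dependencies?: string[] }

function resolveExecutionOrder(steps: Step[]): Step[][] {
  let remaining = [...steps]
  const completed = new Set<string>()
  const batches: Step[][] = []

  while (remaining.length > 0) {
    // A step is ready once all of its dependencies have completed.
    const ready = remaining.filter(s =>
      (s.dependencies ?? []).every(dep => completed.has(dep))
    )
    if (ready.length === 0) {
      throw new Error('Circular dependency detected in workflow steps')
    }
    for (const step of ready) completed.add(step.name)
    remaining = remaining.filter(s => !completed.has(s.name))
    batches.push(ready)
  }
  return batches
}

// Each returned batch can then be executed with Promise.all before moving to the next batch.
```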
### **Plugin Integration** (`src/plugin/index.ts`)
|
||||
**Rating: 8/10** - Very good with some complexity
|
||||
|
||||
**Strengths:**
|
||||
- Proper config-time hook registration avoiding PayloadCMS initialization timing issues (lines 66-145)
|
||||
- Global executor registry pattern for hook access
|
||||
- Comprehensive onInit lifecycle management (lines 170-213)
|
||||
- Proper plugin disabling mechanism (lines 54-57)
|
||||
|
||||
**Concerns:**
|
||||
- Complex global variable fallback mechanism (lines 26-29, 108-111) suggests architectural constraints
|
||||
- Heavy reliance on console.log for debugging in production hooks (lines 94, 114, 123)
|
||||
|
||||
**Architectural Pattern:**
|
||||
```typescript
|
||||
// Config-phase hook registration - critical for PayloadCMS timing
|
||||
const automationHook = Object.assign(
|
||||
async function payloadAutomationHook(args: any) {
|
||||
// Hook implementation with multiple executor access methods
|
||||
},
|
||||
{
|
||||
__isAutomationHook: true,
|
||||
__version: '0.0.21'
|
||||
}
|
||||
)
|
||||
```
|
||||
|
||||
### **Collections Design** (`src/collections/`)
|
||||
**Rating: 9/10** - Excellent schema design
|
||||
|
||||
**Workflow Collection** (`src/collections/Workflow.ts`):
|
||||
- Dynamic field generation based on plugin configuration
|
||||
- Conditional field visibility based on trigger/step types
|
||||
- Comprehensive validation for cron expressions (lines 119-138) and webhook paths (lines 84-90)
|
||||
- Proper integration with custom trigger and step types
|
||||
|
||||
**WorkflowRuns Collection** (`src/collections/WorkflowRuns.ts`):
|
||||
- Rich execution tracking with status management
|
||||
- Comprehensive context preservation using JSON fields
|
||||
- Proper relationship modeling to workflows
|
||||
- Detailed logging and error capture capabilities
|
||||
|
||||
**Schema Highlights:**
|
||||
```typescript
|
||||
// Dynamic field generation based on plugin configuration
|
||||
...(triggers || []).flatMap(t => (t.inputs || []).map(f => ({
|
||||
...f,
|
||||
admin: {
|
||||
...(f.admin || {}),
|
||||
condition: (...args) => args[1]?.type === t.slug && (
|
||||
f.admin?.condition ?
|
||||
f.admin.condition.call(this, ...args) :
|
||||
true
|
||||
),
|
||||
},
|
||||
} as Field)))
|
||||
```
|
||||
|
||||
## Step Implementation Analysis
|
||||
|
||||
### **Step Architecture** (`src/steps/`)
|
||||
**Rating: 8/10** - Well designed and extensible
|
||||
|
||||
**Available Steps:**
|
||||
- HTTP Request (`http-request.ts`, `http-request-handler.ts`)
|
||||
- CRUD Document operations (create, read, update, delete)
|
||||
- Email notifications (`send-email.ts`, `send-email-handler.ts`)
|
||||
|
||||
**Strengths:**
|
||||
- Consistent TaskConfig pattern across all steps
|
||||
- Proper input/output schema definitions
|
||||
- Error handling with state management
|
||||
- Dynamic field generation in workflow UI
|
||||
|
||||
**Example Implementation:**
|
||||
```typescript
|
||||
export const CreateDocumentStepTask = {
|
||||
slug: 'create-document',
|
||||
handler: createDocumentHandler,
|
||||
inputSchema: [
|
||||
{
|
||||
name: 'collectionSlug',
|
||||
type: 'text',
|
||||
required: true
|
||||
},
|
||||
// Comprehensive input schema definition
|
||||
],
|
||||
outputSchema: [
|
||||
// Well-defined output structure
|
||||
]
|
||||
} satisfies TaskConfig<'create-document'>
|
||||
```
|
||||
|
||||
**Improvement opportunities:**
|
||||
- HTTP step could benefit from more configuration options (timeout, authentication, custom headers)
|
||||
- Error messages could be more user-friendly in step handlers (currently quite technical)
|
||||
- Consider adding retry mechanisms for transient failures
|
||||
|
||||
## User Experience & Interface
|
||||
|
||||
### **Admin Interface Integration**
|
||||
**Rating: 8/10** - Good integration with room for enhancement
|
||||
|
||||
**Strengths:**
|
||||
- Workflow and WorkflowRuns collections properly grouped under "Automation"
|
||||
- Manual trigger button component (`TriggerWorkflowButton.tsx`) with proper error handling
|
||||
- Conditional field display based on trigger/step types
|
||||
- Comprehensive workflow run visualization with execution context
|
||||
|
||||
**Current UI Components:**
|
||||
```tsx
|
||||
export const TriggerWorkflowButton: React.FC<TriggerWorkflowButtonProps> = ({
|
||||
workflowId,
|
||||
workflowName,
|
||||
triggerSlug = 'manual-trigger'
|
||||
}) => {
|
||||
// Clean implementation with loading states and error handling
|
||||
// Lines 19-52 show good React patterns
|
||||
}
|
||||
```
|
||||
|
||||
**Missing UI Elements:**
|
||||
- Visual workflow builder/editor (drag-and-drop interface)
|
||||
- Step dependency visualization (graph view)
|
||||
- Real-time execution monitoring dashboard
|
||||
- Workflow debugging tools and step-by-step execution views
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### **Test Coverage**
|
||||
**Rating: 7/10** - Good foundation, needs expansion
|
||||
|
||||
**Current Testing:**
|
||||
```typescript
|
||||
// Integration test example from dev/simple-trigger.spec.ts
|
||||
describe('Workflow Trigger Test', () => {
|
||||
// Proper test setup with MongoDB Memory Server
|
||||
// Comprehensive workflow creation and execution testing
|
||||
// Lines 58-131 demonstrate good testing practices
|
||||
})
|
||||
```
|
||||
|
||||
**Strengths:**
|
||||
- Integration tests using Vitest with MongoDB Memory Server
|
||||
- Basic workflow trigger and execution testing (lines 58-131)
|
||||
- Proper test cleanup and lifecycle management (lines 14-56)
|
||||
- Realistic test scenarios with actual PayloadCMS operations
|
||||
|
||||
**Testing Gaps:**
|
||||
- No E2E tests with Playwright (configured but not implemented)
|
||||
- Limited step handler unit tests
|
||||
- No error scenario testing (malformed inputs, network failures)
|
||||
- Missing performance/load testing for complex workflows
|
||||
- No webhook trigger testing
|
||||
|
||||
### **Test Configuration**
|
||||
**Vitest Config:**
|
||||
```typescript
|
||||
export default defineConfig({
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'node',
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
**Development Config:**
|
||||
- Proper test database isolation using MongoDB Memory Server
|
||||
- Clean test environment setup in `dev/payload.config.ts`
|
||||
- Email adapter mocking for testing
|
||||
|
||||
## Code Quality Assessment
|
||||
|
||||
### **TypeScript Usage**
|
||||
**Rating: 9/10** - Excellent type safety
|
||||
|
||||
**Strengths:**
|
||||
- Comprehensive type definitions with proper generics
|
||||
- Generated PayloadCMS type integration avoiding duplication
|
||||
- Proper async/await patterns throughout
|
||||
- Type-safe task handler patterns with `TaskHandler<T>` interface
|
||||
|
||||
**Type System Highlights:**
|
||||
```typescript
|
||||
// Excellent generic type usage
|
||||
export const workflowsPlugin =
|
||||
<TSlug extends string>(pluginOptions: WorkflowsPluginConfig<TSlug>) =>
|
||||
(config: Config): Config => {
|
||||
// Type-safe plugin configuration
|
||||
}
|
||||
|
||||
// Proper task handler typing
|
||||
export const httpStepHandler: TaskHandler<'http-request-step'> = async ({input}) => {
|
||||
// Type-safe step implementation
|
||||
}
|
||||
```
|
||||
|
||||
**TypeScript Configuration:**
|
||||
- Strict mode enabled with comprehensive compiler options
|
||||
- Proper module resolution (NodeNext)
|
||||
- Isolated modules for better build performance
|
||||
- Declaration generation for proper library distribution
|
||||
|
||||
### **Error Handling**
|
||||
**Rating: 7/10** - Good with improvement potential
|
||||
|
||||
**Strengths:**
|
||||
- Try-catch blocks in critical execution paths
|
||||
- Structured error logging with contextual information
|
||||
- Graceful degradation in condition evaluation (lines 583-593 in workflow-executor.ts)
|
||||
|
||||
**Error Handling Patterns:**
|
||||
```typescript
|
||||
// Good error handling with context preservation
|
||||
catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
context.steps[stepName].state = 'failed'
|
||||
context.steps[stepName].error = errorMessage
|
||||
|
||||
this.logger.error({
|
||||
error: errorMessage,
|
||||
input: context.steps[stepName].input,
|
||||
stepName,
|
||||
taskSlug
|
||||
}, 'Step execution failed')
|
||||
|
||||
throw error // Proper re-throwing for upstream handling
|
||||
}
|
||||
```
|
||||
|
||||
**Concerns:**
|
||||
- Some error swallowing in hook execution (line 128 in plugin/index.ts)
|
||||
- Inconsistent error message formats across components
|
||||
- Limited error categorization (network vs. validation vs. system errors)
|
||||
|
||||
### **Performance Considerations**
|
||||
**Rating: 8/10** - Well optimized
|
||||
|
||||
**Strengths:**
|
||||
- Parallel step execution within dependency batches
|
||||
- Efficient topological sorting implementation (O(V+E) complexity)
|
||||
- Proper async/await usage avoiding callback hell
|
||||
- Safe serialization preventing memory issues with circular references
|
||||
|
||||
**Performance Optimizations:**
|
||||
```typescript
|
||||
// Parallel execution implementation
|
||||
const batchPromises = batch.map((step, stepIndex) =>
|
||||
this.executeStep(step, stepIndex, context, req, workflowRun.id)
|
||||
)
|
||||
await Promise.all(batchPromises) // Efficient parallel processing
|
||||
```
|
||||
|
||||
## Security Analysis
|
||||
|
||||
### **Security Posture**
|
||||
**Rating: 8/10** - Good security practices
|
||||
|
||||
**Strengths:**
|
||||
- No code injection vulnerabilities in JSONPath usage (proper JSONPath.js usage)
|
||||
- Proper request context passing maintaining user permissions
|
||||
- Secure webhook endpoint implementation with path validation
|
||||
- Appropriate access controls on collections (configurable via access functions)
|
||||
|
||||
**Security Implementations:**
|
||||
```typescript
|
||||
// Webhook path validation
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'webhook-trigger' && !value) {
|
||||
return 'Webhook path is required for webhook triggers'
|
||||
}
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
**Security Considerations:**
- JSONPath expressions in workflows could be validated more strictly (consider an allowlist approach; a sketch follows below)
- Webhook endpoints should consider rate limiting implementation
- Consider input sanitization for step parameters (especially JSON inputs)
- Audit trail for workflow modifications could be enhanced
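A minimal sketch of the allowlist idea (illustrative only; the plugin does not ship this validation): restrict user-supplied JSONPath expressions to known roots before evaluating them.

```typescript
// Illustrative sketch — assumes workflow expressions should only read from
// the trigger payload or previous step outputs.
const ALLOWED_ROOTS = ['$.trigger', '$.steps']

function isAllowedJsonPath(expression: string): boolean {
  return ALLOWED_ROOTS.some(root =>
    expression === root ||
    expression.startsWith(`${root}.`) ||
    expression.startsWith(`${root}[`)
  )
}

// isAllowedJsonPath('$.trigger.doc.id')         -> true
// isAllowedJsonPath('$..constructor.prototype') -> false
```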
|
||||
## Identified Issues & Improvements
|
||||
|
||||
### **Critical Issues**
|
||||
None identified - the codebase is production-ready.
|
||||
|
||||
### **High Priority Improvements**
|
||||
|
||||
1. **Visual Workflow Builder**
|
||||
- Implement drag-and-drop workflow designer
|
||||
- Step dependency visualization with graph layout
|
||||
- Real-time validation feedback during workflow creation
|
||||
- Template workflow library for common patterns
|
||||
|
||||
2. **Enhanced Error Handling**
|
||||
- Structured error types for different failure modes
|
||||
- User-friendly error messages in the admin interface
|
||||
- Error recovery mechanisms (retry policies, fallback steps)
|
||||
- Better error propagation from nested step execution
|
||||
|
||||
3. **Monitoring & Observability**
|
||||
- Workflow execution metrics and performance dashboards
|
||||
- Real-time execution monitoring with WebSocket updates
|
||||
- Execution history analytics and reporting
|
||||
- Alerting system for failed workflows
|
||||
|
||||
### **Medium Priority Enhancements**
|
||||
|
||||
1. **Step Library Expansion**
|
||||
- Database query steps (aggregations, complex queries)
|
||||
- File processing steps (CSV parsing, image processing)
|
||||
- Integration with popular services (Slack, Discord, Teams)
|
||||
- Conditional branching and loop steps
|
||||
- Data transformation and mapping steps
|
||||
|
||||
2. **Advanced Trigger Types**
|
||||
- File system watchers for document uploads
|
||||
- API polling triggers for external data changes
|
||||
- Event-driven triggers from external systems
|
||||
- Time-based triggers with more sophisticated scheduling
|
||||
|
||||
3. **Testing Improvements**
|
||||
- Comprehensive E2E test suite with Playwright
|
||||
- Step handler unit tests with mocking
|
||||
- Load testing for complex workflows with many parallel steps
|
||||
- Integration testing with actual external services
|
||||
|
||||
### **Low Priority Items**
|
||||
|
||||
1. **Developer Experience**
|
||||
- CLI tools for workflow management and deployment
|
||||
- Workflow import/export functionality (JSON/YAML formats)
|
||||
- Documentation generator for custom steps
|
||||
- Development mode with enhanced debugging
|
||||
|
||||
2. **Performance Optimizations**
|
||||
- Workflow execution caching for repeated executions
|
||||
- Background job queuing improvements
|
||||
- Database query optimization for large workflow sets
|
||||
- Memory usage optimization for long-running workflows
|
||||
|
||||
## Dependencies & Maintenance
|
||||
|
||||
### **Dependency Health**
|
||||
**Rating: 9/10** - Well maintained dependencies
|
||||
|
||||
**Core Dependencies:**
|
||||
- **PayloadCMS 3.45.0**: Latest version with proper peer dependency management
|
||||
- **JSONPath Plus 10.3.0**: Stable, well-maintained library for data resolution
|
||||
- **Node-cron 4.2.1**: Reliable cron implementation with timezone support
|
||||
- **Pino 9.9.0**: Enterprise-grade logging solution
|
||||
|
||||
**Development Dependencies:**
|
||||
- Modern toolchain with SWC for fast compilation
|
||||
- Comprehensive testing setup (Vitest, Playwright, MongoDB Memory Server)
|
||||
- PayloadCMS ecosystem packages for consistent development experience
|
||||
|
||||
### **Maintenance Considerations**
|
||||
- Regular PayloadCMS compatibility updates needed (major version changes)
|
||||
- Monitor JSONPath Plus for security updates
|
||||
- Node.js version requirements clearly specified (^18.20.2 || >=20.9.0)
|
||||
- PNPM package manager requirement for consistent builds
|
||||
|
||||
### **Build System**
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"build": "pnpm copyfiles && pnpm build:types && pnpm build:swc",
|
||||
"build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
|
||||
"build:types": "tsc --outDir dist --rootDir ./src"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Strengths:**
|
||||
- Fast SWC compilation for production builds
|
||||
- Separate TypeScript declaration generation
|
||||
- Asset copying for complete distribution
|
||||
- Comprehensive export configuration for different usage patterns
|
||||
|
||||
## Recommendations
|
||||
|
||||
### **Immediate Actions**
|
||||
1. **Documentation**: Create comprehensive user documentation with examples
|
||||
2. **Testing**: Implement missing E2E tests and expand unit test coverage
|
||||
3. **Error Messages**: Improve user-facing error messages throughout the system
|
||||
|
||||
### **Short Term (1-3 months)**
|
||||
1. **Visual Builder**: Begin development of drag-and-drop workflow interface
|
||||
2. **Step Library**: Add most commonly requested step types based on user feedback
|
||||
3. **Monitoring**: Implement basic execution monitoring dashboard
|
||||
|
||||
### **Long Term (3-6 months)**
|
||||
1. **Enterprise Features**: Add advanced features like workflow templates, bulk operations
|
||||
2. **Performance**: Implement caching and optimization features for high-volume usage
|
||||
3. **Integrations**: Build ecosystem of pre-built integrations with popular services
|
||||
|
||||
## Conclusion
|
||||
|
||||
The PayloadCMS Automation Plugin represents a **mature, production-ready solution** for workflow automation in PayloadCMS applications. The codebase demonstrates:
|
||||
|
||||
- **Excellent architectural decisions** with proper separation of concerns and extensible design
|
||||
- **Robust execution engine** with sophisticated dependency management and parallel processing
|
||||
- **Comprehensive trigger system** supporting diverse automation scenarios
|
||||
- **Type-safe implementation** following TypeScript best practices
|
||||
- **Production-ready code quality** with proper error handling, logging, and testing foundation
|
||||
|
||||
### **Deployment Readiness: ✅ Ready**
|
||||
|
||||
The plugin can be confidently deployed in production environments with the current feature set. The suggested improvements would enhance user experience and expand capabilities but are not blockers for production use.
|
||||
|
||||
### **Maintenance Score: 8/10**
|
||||
|
||||
The codebase is well-structured for long-term maintenance with clear patterns, comprehensive documentation in code, and good test coverage foundation. The modular architecture supports feature additions without major refactoring.
|
||||
|
||||
---
|
||||
|
||||
**Review completed on January 4, 2025**
|
||||
**Next review recommended: July 2025 (6-month cycle)**
|
||||
MIGRATION-v0.0.37.md (new file, 225 lines)
@@ -0,0 +1,225 @@
# Migration Guide: v0.0.36 to v0.0.37

## Overview

Version 0.0.37 introduces significant refactoring and cleanup changes focused on simplifying the plugin architecture and removing unused features. This version removes several deprecated components and consolidates trigger handling.

## Breaking Changes

### 1. Removed Components and Files

The following components and modules have been completely removed:

#### Components
- `TriggerWorkflowButton` - Manual workflow triggering component
- `WorkflowExecutionStatus` - Workflow execution status display component

#### Plugin Modules
- `init-global-hooks.ts` - Global hook initialization (functionality moved to main plugin)
- `init-step-tasks.ts` - Step task initialization (functionality integrated elsewhere)
- `init-webhook.ts` - Webhook initialization (functionality removed)
- `init-workflow-hooks.ts` - Workflow hook initialization (functionality moved to main plugin)

#### Triggers
- `webhook-trigger.ts` - Webhook trigger support has been removed
- `cron-trigger.ts` - Cron/scheduled trigger support has been removed
- `cron-scheduler.ts` - Cron scheduling system has been removed

#### Tests
- `webhook-triggers.spec.ts` - Webhook trigger integration tests

### 2. Cron/Scheduled Workflows Removal

Cron trigger functionality has been completely removed from the plugin. If you were using cron triggers in your workflows:

**Migration Path:**
- Use external scheduling services like GitHub Actions or Vercel Cron
- Trigger workflows via webhook endpoints from external schedulers
- Implement custom scheduling in your application using libraries like `node-cron` (see the sketch after the example below)

**Example with GitHub Actions:**
```yaml
name: Trigger Workflow
on:
  schedule:
    - cron: '0 9 * * *' # Daily at 9 AM
jobs:
  trigger:
    runs-on: ubuntu-latest
    steps:
      - name: Trigger Workflow
        run: |
          curl -X POST https://your-app.com/api/workflows/trigger/your-workflow-id
```
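And a minimal in-process alternative using `node-cron` (illustrative only — the endpoint URL is a placeholder for whatever custom trigger endpoint you expose in your application):

```typescript
// Illustrative sketch: schedule a daily POST to a custom trigger endpoint.
import { schedule } from 'node-cron'

schedule('0 9 * * *', async () => {
  // Replace the URL with the trigger endpoint you implement in your app.
  await fetch('https://your-app.com/api/trigger-workflow/your-workflow-id', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ source: 'node-cron' }),
  })
}, { timezone: 'UTC' })
```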

### 3. Webhook Trigger Removal

Webhook triggers have been removed. If you were using webhook triggers:

**Migration Path:**
- Implement custom webhook endpoints in your PayloadCMS application
- Use collection or global hooks to trigger workflows based on document changes
- Create manual trigger endpoints using the workflow executor directly

### 4. Architecture Changes

#### ExecutorRegistry Removal
The `executorRegistry` singleton pattern has been removed. WorkflowExecutor instances are now created on-demand for each execution.

**What this means:**
- No shared state between workflow executions
- Each execution is completely independent
- Better memory management and isolation

#### Hook Registration Consolidation
Hook registration logic has been consolidated into the main plugin file:
- Collection hooks are now registered directly in `plugin/index.ts`
- Global hooks are handled through the new `plugin/global-hook.ts` module
- Simplified hook management with better TypeScript typing

## Non-Breaking Changes

### 1. Trigger Module Refactoring

Triggers have been reorganized into a dedicated `triggers/` directory with improved modularity:

- `triggers/collection-trigger.ts` - Collection-based triggers
- `triggers/global-trigger.ts` - Global document triggers
- `triggers/index.ts` - Trigger exports
- `triggers/types.ts` - Trigger type definitions

### 2. Field Helper Improvements

New `triggerField` helper function standardizes virtual field creation across all trigger modules:

```typescript
// Before (manual virtual field creation)
{
  name: '__builtin_collection',
  type: 'text',
  admin: { hidden: true },
  virtual: true,
  access: { read: () => false, update: () => false }
}

// After (using helper)
triggerField('collection', {
  type: 'text',
  // helper handles virtual field setup automatically
})
```

### 3. TypeScript Improvements

- Replaced 'any' types with proper TypeScript types
- Added `CollectionAfterChangeHook` and `PayloadRequest` type usage
- Improved type safety throughout the codebase

### 4. Code Organization

#### New File Structure
```
src/
├── plugin/
│   ├── collection-hook.ts   # Collection hook logic
│   ├── global-hook.ts       # Global hook logic (new)
│   └── index.ts             # Main plugin (consolidated)
├── triggers/                # Trigger modules (new directory)
├── fields/
│   └── parameter.ts         # Moved from triggers/helpers.ts
```

#### ESLint Configuration
- Disabled `perfectionist/sort-object-types` and `perfectionist/sort-objects` rules
- Allows natural object property ordering without enforced alphabetical sorting

## Migration Steps

### 1. Update Imports

If you were importing removed components or modules, remove these imports:

```typescript
// Remove these imports - no longer available
import { TriggerWorkflowButton } from '@xtr-dev/payload-automation/client'
import { WorkflowExecutionStatus } from '@xtr-dev/payload-automation/client'
```

### 2. Update Workflow Configurations

If your workflows used cron or webhook triggers, you'll need to modify them:

**Before:**
```javascript
{
  trigger: {
    type: 'cron',
    schedule: '0 9 * * *'
  }
}
```

**After:**
```javascript
{
  trigger: {
    type: 'collection', // Use collection or global triggers instead
    collection: 'your-collection',
    operation: 'create'
  }
}
```

### 3. Replace Webhook Functionality

If you were using webhook triggers, implement custom webhook handling:

```typescript
// In your PayloadCMS config
export default buildConfig({
  endpoints: [
    {
      path: '/trigger-workflow/:workflowId',
      method: 'post',
      handler: async (req) => {
        const { workflowId } = req.params
        // Implement your workflow triggering logic here
        // Use the WorkflowExecutor directly if needed
      }
    }
  ]
})
```

### 4. Update Custom Components

If you built custom components using the removed ones as reference, update them to work with the new architecture.

## Benefits of This Release

1. **Simplified Architecture**: Consolidated plugin initialization reduces complexity
2. **Better Memory Management**: On-demand executor creation eliminates shared state issues
3. **Improved Type Safety**: Proper TypeScript typing throughout
4. **Reduced Bundle Size**: Removal of unused code reduces package size
5. **Better Maintainability**: Cleaner code organization and module structure
6. **More Reliable**: External scheduling is more robust than in-process cron jobs

## Testing Your Migration

After migrating:

1. **Test Existing Workflows**: Ensure collection and global triggers still work as expected
2. **Verify External Triggers**: Test any external webhook or scheduling implementations
3. **Check Custom Components**: Validate any custom UI components that interact with workflows
4. **Run Integration Tests**: Execute your test suite to catch any breaking changes

## Support

If you encounter issues migrating from v0.0.36 to v0.0.37:

1. Check that you're not using any of the removed components or features
2. Verify your workflow trigger types are supported (collection, global, manual)
3. Update any custom integrations that relied on removed modules
4. Consider the external scheduling alternatives for cron functionality

For additional support, please refer to the plugin documentation or open an issue in the project repository.
@@ -1,68 +0,0 @@

# Steps and Triggers Not Implementing

This document lists workflow steps and triggers that are intentionally **not** being implemented in the core plugin. These are either better suited as custom user implementations or fall outside the plugin's scope.

## Steps Not Implementing

### Workflow Orchestration
- **Stop Workflow** - Can be achieved through conditional logic
- **Run Workflow** - Adds complexity to execution tracking and circular dependency management
- **Parallel Fork/Join** - Current dependency system already enables parallel execution

### External Service Integrations
- **GraphQL Query** - Better as custom HTTP request step
- **S3/Cloud Storage** - Too provider-specific
- **Message Queue** (Kafka, RabbitMQ, SQS) - Infrastructure-specific
- **SMS** (Twilio, etc.) - Requires external accounts
- **Push Notifications** - Platform-specific implementation
- **Slack/Discord/Teams** - Better as custom HTTP webhooks
- **Calendar Integration** - Too many providers to support

### AI/ML Operations
- **AI Prompt** (OpenAI, Claude, etc.) - Requires API keys, better as custom implementation
- **Text Analysis** - Too many variations and providers
- **Image Processing** - Better handled by dedicated services

### Specialized Data Operations
- **Database Query** (Direct SQL/NoSQL) - Security concerns, bypasses Payload
- **File Operations** - Complex permission and security implications
- **Hash/Encrypt** - Security-sensitive, needs careful implementation
- **RSS/Feed Processing** - Too specific for core plugin

## Triggers Not Implementing

### Workflow Events
- **Workflow Complete/Failed** - Adds circular dependency complexity
- **Step Failed** - Complicates error handling flow

### System Events
- **File Upload** - Can use collection hooks on media collections
- **User Authentication** (Login/Logout) - Security implications
- **Server Start/Stop** - Lifecycle management complexity
- **Cache Clear** - Too implementation-specific
- **Migration/Backup Events** - Infrastructure-specific

### External Monitoring
- **Email Received** (IMAP/POP3) - Requires mail server setup
- **Git Webhooks** - Better as standard webhook triggers
- **Performance Alerts** - Requires monitoring infrastructure
- **Error Events** - Better handled by dedicated error tracking

### Advanced Time-Based
- **Recurring Patterns** (e.g., "every 2nd Tuesday") - Complex parsing and timezone handling
- **Date Range Triggers** - Can be achieved with conditional logic in workflows

## Why These Aren't Core Features

1. **Maintainability**: Each external integration requires ongoing maintenance as APIs change
2. **Security**: Many features have security implications that are better handled by users who understand their specific requirements
3. **Flexibility**: Users can implement these as custom steps/triggers tailored to their needs
4. **Scope**: The plugin focuses on being a solid workflow engine, not an everything-integration platform
5. **Dependencies**: Avoiding external service dependencies keeps the plugin lightweight

## What Users Can Do Instead

- Implement custom steps using the plugin's TaskConfig interface (see the sketch after this list)
- Use HTTP Request step for most external integrations
- Create custom triggers through Payload hooks
- Build specialized workflow packages on top of this plugin
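As a hedged sketch of the first option: a custom step can follow the same TaskConfig pattern the plugin's built-in steps use. The slug, fields, handler body, and endpoint URL below are illustrative, and whether the `'slack-notify'` generic parameter typechecks depends on your generated Payload types.

```typescript
// Hedged sketch of a custom step built on the TaskConfig pattern; names are illustrative.
import type { TaskConfig } from 'payload'

export const SlackNotifyStepTask = {
  slug: 'slack-notify',
  handler: async ({ input }) => {
    // External services are reached over plain HTTP, as recommended above.
    const res = await fetch(String(input.webhookUrl), {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ text: input.message }),
    })
    return { output: { ok: res.ok, status: res.status } }
  },
  inputSchema: [
    { name: 'webhookUrl', type: 'text', required: true },
    { name: 'message', type: 'text', required: true },
  ],
  outputSchema: [
    { name: 'ok', type: 'checkbox' },
    { name: 'status', type: 'number' },
  ],
} satisfies TaskConfig<'slack-notify'>
```

A step defined this way would be registered through the plugin's `steps` option, alongside the built-in `HttpRequestStepTask` and `CreateDocumentStepTask`.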
README.md (61 lines changed)
@@ -9,6 +9,7 @@ A comprehensive workflow automation plugin for PayloadCMS 3.x that enables visua

- 🔄 **Visual Workflow Builder** - Create complex workflows with drag-and-drop interface
- ⚡ **Parallel Execution** - Smart dependency resolution for optimal performance
- 🎯 **Multiple Triggers** - Collection hooks, webhooks, manual execution
- ⏰ **Scheduled Workflows** - Use webhook triggers with external cron services
- 📊 **Execution Tracking** - Complete history and monitoring of workflow runs
- 🔧 **Extensible Steps** - HTTP requests, document CRUD, email notifications
- 🔍 **JSONPath Integration** - Dynamic data interpolation and transformation

@@ -155,6 +156,66 @@ Use JSONPath to access workflow data:

- Node.js ^18.20.2 || >=20.9.0
- pnpm ^9 || ^10

## Environment Variables

Control plugin logging with these environment variables:

### `PAYLOAD_AUTOMATION_LOG_LEVEL`
Controls both configuration-time and runtime logging.
- **Values**: `silent`, `error`, `warn`, `info`, `debug`, `trace`
- **Default**: `warn`
- **Example**: `PAYLOAD_AUTOMATION_LOG_LEVEL=debug`

### `PAYLOAD_AUTOMATION_CONFIG_LOG_LEVEL` (optional)
Override log level specifically for configuration-time logs (plugin setup).
- **Values**: Same as above
- **Default**: Falls back to `PAYLOAD_AUTOMATION_LOG_LEVEL` or `warn`
- **Example**: `PAYLOAD_AUTOMATION_CONFIG_LOG_LEVEL=silent`

### Production Usage
For production, keep the default (`warn`) or use `error` or `silent`:
```bash
PAYLOAD_AUTOMATION_LOG_LEVEL=error npm start
```

### Development Usage
For debugging, use `debug` or `info`:
```bash
PAYLOAD_AUTOMATION_LOG_LEVEL=debug npm run dev
```

## Scheduled Workflows

For scheduled workflows, use **webhook triggers** with external cron services instead of built-in cron triggers:

### GitHub Actions (Free)
```yaml
# .github/workflows/daily-report.yml
on:
  schedule:
    - cron: '0 9 * * *' # Daily at 9 AM UTC
jobs:
  trigger-workflow:
    runs-on: ubuntu-latest
    steps:
      - run: curl -X POST https://your-app.com/api/workflows-webhook/daily-report
```

### Vercel Cron (Serverless)
```js
// api/cron/daily.js
export default async function handler(req, res) {
  await fetch('https://your-app.com/api/workflows-webhook/daily-report', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ source: 'vercel-cron' })
  });
  res.status(200).json({ success: true });
}
```

**Benefits**: Better reliability, proper process isolation, easier debugging, and leverages existing infrastructure.

## Documentation

Full documentation coming soon. For now, explore the development environment in the repository for examples and patterns.

@@ -1,5 +1,7 @@
import { StatusCell as StatusCell_6f365a93b6cb4b34ad564b391e21db6f } from '@xtr-dev/payload-automation/client'
import { ErrorDisplay as ErrorDisplay_6f365a93b6cb4b34ad564b391e21db6f } from '@xtr-dev/payload-automation/client'

export const importMap = {
  "@xtr-dev/payload-automation/client#StatusCell": StatusCell_6f365a93b6cb4b34ad564b391e21db6f,
  "@xtr-dev/payload-automation/client#ErrorDisplay": ErrorDisplay_6f365a93b6cb4b34ad564b391e21db6f
}

@@ -92,7 +92,7 @@ export interface Config {
|
||||
'payload-migrations': PayloadMigrationsSelect<false> | PayloadMigrationsSelect<true>;
|
||||
};
|
||||
db: {
|
||||
defaultIDType: number;
|
||||
defaultIDType: string;
|
||||
};
|
||||
globals: {};
|
||||
globalsSelect: {};
|
||||
@@ -136,7 +136,7 @@ export interface UserAuthOperations {
|
||||
* via the `definition` "posts".
|
||||
*/
|
||||
export interface Post {
|
||||
id: number;
|
||||
id: string;
|
||||
content?: string | null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
@@ -146,7 +146,7 @@ export interface Post {
|
||||
* via the `definition` "media".
|
||||
*/
|
||||
export interface Media {
|
||||
id: number;
|
||||
id: string;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
url?: string | null;
|
||||
@@ -164,9 +164,9 @@ export interface Media {
|
||||
* via the `definition` "auditLog".
|
||||
*/
|
||||
export interface AuditLog {
|
||||
id: number;
|
||||
post?: (number | null) | Post;
|
||||
user?: (number | null) | User;
|
||||
id: string;
|
||||
post?: (string | null) | Post;
|
||||
user?: (string | null) | User;
|
||||
message?: string | null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
@@ -176,7 +176,7 @@ export interface AuditLog {
|
||||
* via the `definition` "users".
|
||||
*/
|
||||
export interface User {
|
||||
id: number;
|
||||
id: string;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
email: string;
|
||||
@@ -202,7 +202,7 @@ export interface User {
|
||||
* via the `definition` "workflows".
|
||||
*/
|
||||
export interface Workflow {
|
||||
id: number;
|
||||
id: string;
|
||||
/**
|
||||
* Human-readable name for the workflow
|
||||
*/
|
||||
@@ -214,36 +214,45 @@ export interface Workflow {
|
||||
triggers?:
|
||||
| {
|
||||
type?: ('collection-trigger' | 'webhook-trigger' | 'global-trigger' | 'cron-trigger') | null;
|
||||
parameters?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Collection that triggers the workflow
|
||||
*/
|
||||
collectionSlug?: ('posts' | 'media') | null;
|
||||
__builtin_collectionSlug?: ('posts' | 'media') | null;
|
||||
/**
|
||||
* Collection operation that triggers the workflow
|
||||
*/
|
||||
operation?: ('create' | 'delete' | 'read' | 'update') | null;
|
||||
__builtin_operation?: ('create' | 'delete' | 'read' | 'update') | null;
|
||||
/**
|
||||
* URL path for the webhook (e.g., "my-webhook"). Full URL will be /api/workflows/webhook/my-webhook
|
||||
* URL path for the webhook (e.g., "my-webhook"). Full URL will be /api/workflows-webhook/my-webhook
|
||||
*/
|
||||
webhookPath?: string | null;
|
||||
__builtin_webhookPath?: string | null;
|
||||
/**
|
||||
* Global that triggers the workflow
|
||||
*/
|
||||
global?: string | null;
|
||||
__builtin_global?: string | null;
|
||||
/**
|
||||
* Global operation that triggers the workflow
|
||||
*/
|
||||
globalOperation?: 'update' | null;
|
||||
__builtin_globalOperation?: 'update' | null;
|
||||
/**
|
||||
* Cron expression for scheduled execution (e.g., "0 0 * * *" for daily at midnight)
|
||||
*/
|
||||
cronExpression?: string | null;
|
||||
__builtin_cronExpression?: string | null;
|
||||
/**
|
||||
* Timezone for cron execution (e.g., "America/New_York", "Europe/London"). Defaults to UTC.
|
||||
*/
|
||||
timezone?: string | null;
|
||||
__builtin_timezone?: string | null;
|
||||
/**
|
||||
* JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.doc.status == 'published'")
|
||||
* JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.trigger.doc.status == 'published'")
|
||||
*/
|
||||
condition?: string | null;
|
||||
id?: string | null;
|
||||
@@ -253,7 +262,18 @@ export interface Workflow {
|
||||
| {
|
||||
step?: ('http-request-step' | 'create-document') | null;
|
||||
name?: string | null;
|
||||
input?:
|
||||
/**
|
||||
* The URL to make the HTTP request to
|
||||
*/
|
||||
url?: string | null;
|
||||
/**
|
||||
* HTTP method to use
|
||||
*/
|
||||
method?: ('GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH') | null;
|
||||
/**
|
||||
* HTTP headers as JSON object (e.g., {"Content-Type": "application/json"})
|
||||
*/
|
||||
headers?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
@@ -262,6 +282,80 @@ export interface Workflow {
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Request body data. Use JSONPath to reference values (e.g., {"postId": "$.trigger.doc.id", "title": "$.trigger.doc.title"})
|
||||
*/
|
||||
body?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Request timeout in milliseconds (default: 30000)
|
||||
*/
|
||||
timeout?: number | null;
|
||||
authentication?: {
|
||||
/**
|
||||
* Authentication method
|
||||
*/
|
||||
type?: ('none' | 'bearer' | 'basic' | 'apikey') | null;
|
||||
/**
|
||||
* Bearer token value
|
||||
*/
|
||||
token?: string | null;
|
||||
/**
|
||||
* Basic auth username
|
||||
*/
|
||||
username?: string | null;
|
||||
/**
|
||||
* Basic auth password
|
||||
*/
|
||||
password?: string | null;
|
||||
/**
|
||||
* API key header name (e.g., "X-API-Key")
|
||||
*/
|
||||
headerName?: string | null;
|
||||
/**
|
||||
* API key value
|
||||
*/
|
||||
headerValue?: string | null;
|
||||
};
|
||||
/**
|
||||
* Number of retry attempts on failure (max: 5)
|
||||
*/
|
||||
retries?: number | null;
|
||||
/**
|
||||
* Delay between retries in milliseconds
|
||||
*/
|
||||
retryDelay?: number | null;
|
||||
/**
|
||||
* The collection slug to create a document in
|
||||
*/
|
||||
collectionSlug?: string | null;
|
||||
/**
|
||||
* The document data to create. Use JSONPath to reference trigger data (e.g., {"title": "$.trigger.doc.title", "author": "$.trigger.doc.author"})
|
||||
*/
|
||||
data?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Create as draft (if collection has drafts enabled)
|
||||
*/
|
||||
draft?: boolean | null;
|
||||
/**
|
||||
* Locale for the document (if localization is enabled)
|
||||
*/
|
||||
locale?: string | null;
|
||||
/**
|
||||
* Step names that must complete before this step can run
|
||||
*/
|
||||
@@ -282,11 +376,11 @@ export interface Workflow {
|
||||
* via the `definition` "workflow-runs".
|
||||
*/
|
||||
export interface WorkflowRun {
|
||||
id: number;
|
||||
id: string;
|
||||
/**
|
||||
* Reference to the workflow that was executed
|
||||
*/
|
||||
workflow: number | Workflow;
|
||||
workflow: string | Workflow;
|
||||
/**
|
||||
* Version of the workflow that was executed
|
||||
*/
|
||||
@@ -380,7 +474,7 @@ export interface WorkflowRun {
|
||||
* via the `definition` "payload-jobs".
|
||||
*/
|
||||
export interface PayloadJob {
|
||||
id: number;
|
||||
id: string;
|
||||
/**
|
||||
* Input data provided to the job
|
||||
*/
|
||||
@@ -472,40 +566,40 @@ export interface PayloadJob {
|
||||
* via the `definition` "payload-locked-documents".
|
||||
*/
|
||||
export interface PayloadLockedDocument {
|
||||
id: number;
|
||||
id: string;
|
||||
document?:
|
||||
| ({
|
||||
relationTo: 'posts';
|
||||
value: number | Post;
|
||||
value: string | Post;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: 'media';
|
||||
value: number | Media;
|
||||
value: string | Media;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: 'auditLog';
|
||||
value: number | AuditLog;
|
||||
value: string | AuditLog;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: 'workflows';
|
||||
value: number | Workflow;
|
||||
value: string | Workflow;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: 'workflow-runs';
|
||||
value: number | WorkflowRun;
|
||||
value: string | WorkflowRun;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: 'users';
|
||||
value: number | User;
|
||||
value: string | User;
|
||||
} | null)
|
||||
| ({
|
||||
relationTo: 'payload-jobs';
|
||||
value: number | PayloadJob;
|
||||
value: string | PayloadJob;
|
||||
} | null);
|
||||
globalSlug?: string | null;
|
||||
user: {
|
||||
relationTo: 'users';
|
||||
value: number | User;
|
||||
value: string | User;
|
||||
};
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
@@ -515,10 +609,10 @@ export interface PayloadLockedDocument {
|
||||
* via the `definition` "payload-preferences".
|
||||
*/
|
||||
export interface PayloadPreference {
|
||||
id: number;
|
||||
id: string;
|
||||
user: {
|
||||
relationTo: 'users';
|
||||
value: number | User;
|
||||
value: string | User;
|
||||
};
|
||||
key?: string | null;
|
||||
value?:
|
||||
@@ -538,7 +632,7 @@ export interface PayloadPreference {
|
||||
* via the `definition` "payload-migrations".
|
||||
*/
|
||||
export interface PayloadMigration {
|
||||
id: number;
|
||||
id: string;
|
||||
name?: string | null;
|
||||
batch?: number | null;
|
||||
updatedAt: string;
|
||||
@@ -592,13 +686,14 @@ export interface WorkflowsSelect<T extends boolean = true> {
|
||||
| T
|
||||
| {
|
||||
type?: T;
|
||||
collectionSlug?: T;
|
||||
operation?: T;
|
||||
webhookPath?: T;
|
||||
global?: T;
|
||||
globalOperation?: T;
|
||||
cronExpression?: T;
|
||||
timezone?: T;
|
||||
parameters?: T;
|
||||
__builtin_collectionSlug?: T;
|
||||
__builtin_operation?: T;
|
||||
__builtin_webhookPath?: T;
|
||||
__builtin_global?: T;
|
||||
__builtin_globalOperation?: T;
|
||||
__builtin_cronExpression?: T;
|
||||
__builtin_timezone?: T;
|
||||
condition?: T;
|
||||
id?: T;
|
||||
};
|
||||
@@ -607,7 +702,27 @@ export interface WorkflowsSelect<T extends boolean = true> {
|
||||
| {
|
||||
step?: T;
|
||||
name?: T;
|
||||
input?: T;
|
||||
url?: T;
|
||||
method?: T;
|
||||
headers?: T;
|
||||
body?: T;
|
||||
timeout?: T;
|
||||
authentication?:
|
||||
| T
|
||||
| {
|
||||
type?: T;
|
||||
token?: T;
|
||||
username?: T;
|
||||
password?: T;
|
||||
headerName?: T;
|
||||
headerValue?: T;
|
||||
};
|
||||
retries?: T;
|
||||
retryDelay?: T;
|
||||
collectionSlug?: T;
|
||||
data?: T;
|
||||
draft?: T;
|
||||
locale?: T;
|
||||
dependencies?: T;
|
||||
condition?: T;
|
||||
id?: T;
|
||||
@@ -736,10 +851,118 @@ export interface TaskWorkflowCronExecutor {
|
||||
*/
|
||||
export interface TaskHttpRequestStep {
|
||||
input: {
|
||||
url?: string | null;
|
||||
/**
|
||||
* The URL to make the HTTP request to
|
||||
*/
|
||||
url: string;
|
||||
/**
|
||||
* HTTP method to use
|
||||
*/
|
||||
method?: ('GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH') | null;
|
||||
/**
|
||||
* HTTP headers as JSON object (e.g., {"Content-Type": "application/json"})
|
||||
*/
|
||||
headers?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Request body data. Use JSONPath to reference values (e.g., {"postId": "$.trigger.doc.id", "title": "$.trigger.doc.title"})
|
||||
*/
|
||||
body?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Request timeout in milliseconds (default: 30000)
|
||||
*/
|
||||
timeout?: number | null;
|
||||
authentication?: {
|
||||
/**
|
||||
* Authentication method
|
||||
*/
|
||||
type?: ('none' | 'bearer' | 'basic' | 'apikey') | null;
|
||||
/**
|
||||
* Bearer token value
|
||||
*/
|
||||
token?: string | null;
|
||||
/**
|
||||
* Basic auth username
|
||||
*/
|
||||
username?: string | null;
|
||||
/**
|
||||
* Basic auth password
|
||||
*/
|
||||
password?: string | null;
|
||||
/**
|
||||
* API key header name (e.g., "X-API-Key")
|
||||
*/
|
||||
headerName?: string | null;
|
||||
/**
|
||||
* API key value
|
||||
*/
|
||||
headerValue?: string | null;
|
||||
};
|
||||
/**
|
||||
* Number of retry attempts on failure (max: 5)
|
||||
*/
|
||||
retries?: number | null;
|
||||
/**
|
||||
* Delay between retries in milliseconds
|
||||
*/
|
||||
retryDelay?: number | null;
|
||||
};
|
||||
output: {
|
||||
response?: string | null;
|
||||
/**
|
||||
* HTTP status code
|
||||
*/
|
||||
status?: number | null;
|
||||
/**
|
||||
* HTTP status text
|
||||
*/
|
||||
statusText?: string | null;
|
||||
/**
|
||||
* Response headers
|
||||
*/
|
||||
headers?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Response body
|
||||
*/
|
||||
body?: string | null;
|
||||
/**
|
||||
* Parsed response data (if JSON)
|
||||
*/
|
||||
data?:
|
||||
| {
|
||||
[k: string]: unknown;
|
||||
}
|
||||
| unknown[]
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null;
|
||||
/**
|
||||
* Request duration in milliseconds
|
||||
*/
|
||||
duration?: number | null;
|
||||
};
|
||||
}
|
||||
/**
|
||||
@@ -753,7 +976,7 @@ export interface TaskCreateDocument {
|
||||
*/
|
||||
collectionSlug: string;
|
||||
/**
|
||||
* The document data to create
|
||||
* The document data to create. Use JSONPath to reference trigger data (e.g., {"title": "$.trigger.doc.title", "author": "$.trigger.doc.author"})
|
||||
*/
|
||||
data:
|
||||
| {
|
||||
|
||||
@@ -24,13 +24,24 @@ if (!process.env.ROOT_DIR) {
|
||||
const buildConfigWithMemoryDB = async () => {
|
||||
// Use MongoDB adapter for testing instead of SQLite
|
||||
const { mongooseAdapter } = await import('@payloadcms/db-mongodb')
|
||||
|
||||
|
||||
return buildConfig({
|
||||
admin: {
|
||||
importMap: {
|
||||
baseDir: path.resolve(dirname, '..'),
|
||||
},
|
||||
},
|
||||
globals: [
|
||||
{
|
||||
slug: 'settings',
|
||||
fields: [
|
||||
{
|
||||
name: 'siteName',
|
||||
type: 'text'
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
collections: [
|
||||
{
|
||||
slug: 'posts',
|
||||
@@ -96,14 +107,13 @@ const buildConfigWithMemoryDB = async () => {
|
||||
posts: true,
|
||||
media: true
|
||||
},
|
||||
globalTriggers: {
|
||||
settings: true
|
||||
},
|
||||
steps: [
|
||||
HttpRequestStepTask,
|
||||
CreateDocumentStepTask
|
||||
],
|
||||
triggers: [
|
||||
|
||||
],
|
||||
webhookPrefix: '/workflows-webhook'
|
||||
}),
|
||||
],
|
||||
secret: process.env.PAYLOAD_SECRET || 'test-secret_key',
|
||||
|
||||
@@ -1,483 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { getTestPayload, cleanDatabase } from './test-setup.js'
|
||||
|
||||
describe('Webhook Trigger Testing', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanDatabase()
|
||||
})
|
||||
|
||||
it('should trigger workflow via webhook endpoint simulation', async () => {
|
||||
const payload = getTestPayload()
|
||||
|
||||
// Create a workflow with webhook trigger
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Basic Trigger',
|
||||
description: 'Tests basic webhook triggering',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-basic'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'create-webhook-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Webhook triggered successfully',
|
||||
user: '$.trigger.data.userId'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
expect(workflow).toBeDefined()
|
||||
|
||||
// Directly execute the workflow with webhook-like data
|
||||
const executor = (globalThis as any).__workflowExecutor
|
||||
if (!executor) {
|
||||
console.warn('⚠️ Workflow executor not available, skipping webhook execution')
|
||||
return
|
||||
}
|
||||
|
||||
// Simulate webhook trigger by directly executing the workflow
|
||||
const webhookData = {
|
||||
userId: 'webhook-test-user',
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
|
||||
const mockReq = {
|
||||
payload,
|
||||
user: null,
|
||||
headers: {}
|
||||
}
|
||||
|
||||
await executor.execute({
|
||||
workflow,
|
||||
trigger: {
|
||||
type: 'webhook',
|
||||
path: 'test-basic',
|
||||
data: webhookData,
|
||||
headers: {}
|
||||
},
|
||||
req: mockReq as any,
|
||||
payload
|
||||
})
|
||||
|
||||
console.log('✅ Workflow executed directly')
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 2000))
|
||||
|
||||
// Verify workflow run was created
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).not.toBe('failed')
|
||||
|
||||
// Verify audit log was created
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'Webhook triggered'
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
console.log('✅ Webhook audit log created')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook with complex data', async () => {
const payload = getTestPayload()

const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Complex Data',
|
||||
description: 'Tests webhook with complex JSON data',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-complex'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'echo-webhook-data',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
originalData: '$.trigger.data',
|
||||
headers: '$.trigger.headers',
|
||||
path: '$.trigger.path'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const complexData = {
|
||||
user: {
|
||||
id: 123,
|
||||
name: 'Test User',
|
||||
permissions: ['read', 'write']
|
||||
},
|
||||
event: {
|
||||
type: 'user_action',
|
||||
timestamp: new Date().toISOString(),
|
||||
metadata: {
|
||||
source: 'webhook-test',
|
||||
version: '1.0.0'
|
||||
}
|
||||
},
|
||||
nested: {
|
||||
deeply: {
|
||||
nested: {
|
||||
value: 'deep-test-value'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const response = await makeWebhookRequest('test-complex', complexData)
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed')
|
||||
|
||||
// Verify the complex data was properly passed through
|
||||
const stepOutput = runs.docs[0].context.steps['echo-webhook-data'].output
|
||||
expect(stepOutput.status).toBe(200)
|
||||
|
||||
const responseBody = JSON.parse(stepOutput.body)
|
||||
expect(responseBody.json.originalData.user.name).toBe('Test User')
|
||||
expect(responseBody.json.originalData.nested.deeply.nested.value).toBe('deep-test-value')
|
||||
|
||||
console.log('✅ Complex webhook data processed correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook conditions', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Conditional',
|
||||
description: 'Tests conditional webhook execution',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-conditional',
|
||||
condition: '$.data.action == "important"'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'conditional-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Webhook condition met - important action'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// First request - should NOT trigger (condition not met)
|
||||
const response1 = await makeWebhookRequest('test-conditional', {
|
||||
action: 'normal',
|
||||
data: 'test'
|
||||
})
|
||||
expect(response1.status).toBe(200)
|
||||
|
||||
// Second request - SHOULD trigger (condition met)
|
||||
const response2 = await makeWebhookRequest('test-conditional', {
|
||||
action: 'important',
|
||||
priority: 'high'
|
||||
})
|
||||
expect(response2.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Should have exactly 1 run (only for the matching condition)
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).not.toBe('failed')
|
||||
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'condition met'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(1)
|
||||
console.log('✅ Webhook conditional execution working')
|
||||
}, 30000)
|
||||
|
||||
it('should handle webhook authentication headers', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Headers',
|
||||
description: 'Tests webhook header processing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-headers'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'process-headers',
|
||||
step: 'http-request-step',
|
||||
url: 'https://httpbin.org/post',
|
||||
method: 'POST',
|
||||
body: {
|
||||
receivedHeaders: '$.trigger.headers',
|
||||
authorization: '$.trigger.headers.authorization',
|
||||
userAgent: '$.trigger.headers.user-agent'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Make webhook request with custom headers
|
||||
const webhookUrl = `${baseUrl}/api/workflows/webhook/test-headers`
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': 'Bearer test-token-123',
|
||||
'User-Agent': 'Webhook-Test-Client/1.0',
|
||||
'X-Custom-Header': 'custom-value'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
test: 'header processing'
|
||||
})
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Wait for workflow execution
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
},
|
||||
limit: 1
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(1)
|
||||
expect(runs.docs[0].status).toBe('completed')
|
||||
|
||||
// Verify headers were captured and processed
|
||||
const stepOutput = runs.docs[0].context.steps['process-headers'].output
|
||||
const responseBody = JSON.parse(stepOutput.body)
|
||||
|
||||
expect(responseBody.json.authorization).toBe('Bearer test-token-123')
|
||||
expect(responseBody.json.userAgent).toBe('Webhook-Test-Client/1.0')
|
||||
|
||||
console.log('✅ Webhook headers processed correctly')
|
||||
}, 30000)
|
||||
|
||||
it('should handle multiple concurrent webhook requests', async () => {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Concurrent',
|
||||
description: 'Tests concurrent webhook processing',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-concurrent'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'concurrent-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Concurrent webhook execution',
|
||||
requestId: '$.trigger.data.requestId'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Make multiple concurrent webhook requests
|
||||
const concurrentRequests = Array.from({ length: 5 }, (_, i) =>
|
||||
makeWebhookRequest('test-concurrent', {
|
||||
requestId: `concurrent-${i + 1}`,
|
||||
timestamp: new Date().toISOString()
|
||||
})
|
||||
)
|
||||
|
||||
const responses = await Promise.all(concurrentRequests)
|
||||
responses.forEach(response => {
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
|
||||
// Wait for all workflow executions
|
||||
await new Promise(resolve => setTimeout(resolve, 8000))
|
||||
|
||||
const runs = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(runs.totalDocs).toBe(5)
|
||||
|
||||
// Verify all runs completed successfully
|
||||
const failedRuns = runs.docs.filter(run => run.status === 'failed')
|
||||
expect(failedRuns).toHaveLength(0)
|
||||
|
||||
// Verify all audit logs were created
|
||||
const auditLogs = await payload.find({
|
||||
collection: 'auditLog',
|
||||
where: {
|
||||
message: {
|
||||
contains: 'Concurrent webhook'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(auditLogs.totalDocs).toBe(5)
|
||||
console.log('✅ Concurrent webhook requests processed successfully')
|
||||
}, 35000)
|
||||
|
||||
it('should handle non-existent webhook paths gracefully', async () => {
|
||||
// Test that workflows with non-matching webhook paths don't get triggered
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Non-existent Path',
|
||||
description: 'Should not be triggered by different path',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'specific-path'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'create-audit',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'This should not be created'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Simulate trying to trigger with wrong path - should not execute workflow
|
||||
const initialRuns = await payload.find({
|
||||
collection: 'workflow-runs',
|
||||
where: {
|
||||
workflow: {
|
||||
equals: workflow.id
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expect(initialRuns.totalDocs).toBe(0)
|
||||
console.log('✅ Non-existent webhook path handled: no workflow runs created')
|
||||
}, 10000)
|
||||
|
||||
it('should handle malformed webhook JSON', async () => {
|
||||
const webhookUrl = `${baseUrl}/api/workflows/webhook/test-malformed`
|
||||
|
||||
// First create a workflow to receive the malformed request
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Test Webhook - Malformed JSON',
|
||||
description: 'Tests malformed JSON handling',
|
||||
triggers: [
|
||||
{
|
||||
type: 'webhook-trigger',
|
||||
webhookPath: 'test-malformed'
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'malformed-test',
|
||||
step: 'create-document',
|
||||
collectionSlug: 'auditLog',
|
||||
data: {
|
||||
message: 'Processed malformed request'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// Send malformed JSON
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: '{"malformed": json, "missing": quotes}'
|
||||
})
|
||||
|
||||
// Should handle malformed JSON gracefully
|
||||
expect([400, 422]).toContain(response.status)
|
||||
console.log('✅ Malformed JSON handled:', response.status)
|
||||
}, 15000)
|
||||
})
|
||||
@@ -28,6 +28,12 @@ export default [
    rules: {
      'no-restricted-exports': 'off',
      'no-console': 'off',
      'perfectionist/sort-object-types': 'off',
      'perfectionist/sort-objects': 'off',
      'perfectionist/sort-exports': 'off',
      'perfectionist/sort-imports': 'off',
      'perfectionist/sort-switch-case': 'off',
      'perfectionist/sort-interfaces': 'off'
    },
  },
  {

@@ -1,68 +0,0 @@
# PayloadCMS Workflows Plugin Examples

This directory contains example code demonstrating how to use the PayloadCMS Workflows plugin.

## Manual Trigger Example

The `manual-trigger-example.ts` file shows how to:
- Create a workflow with a manual trigger button in the admin UI
- Trigger workflows programmatically using custom triggers
- Access trigger data in workflow steps using JSONPath

### Setting up a Manual Trigger Workflow

1. Configure the plugin with a custom trigger:
```typescript
workflowsPlugin({
  triggers: [
    {
      slug: 'manual-trigger',
      inputs: [] // No inputs needed for simple manual triggers
    }
  ],
  // ... other config
})
```

2. Create a workflow with the manual trigger:
```typescript
await payload.create({
  collection: 'workflows',
  data: {
    name: 'My Manual Workflow',
    triggers: [
      {
        type: 'manual-trigger'
      }
    ],
    steps: [
      // Your workflow steps here
    ]
  }
})
```

3. The workflow will now have a "Trigger Workflow" button in the admin UI

### Triggering Workflows Programmatically

```typescript
import { triggerCustomWorkflow } from '@xtr-dev/payload-automation'

// Trigger all workflows with 'manual-trigger'
const results = await triggerCustomWorkflow(payload, {
  slug: 'manual-trigger',
  data: {
    // Custom data to pass to the workflow
    source: 'api',
    timestamp: new Date().toISOString()
  }
})
```

### Accessing Trigger Data in Steps

Use JSONPath expressions to access trigger data in your workflow steps:
- `$.trigger.data.source` - Access custom data fields
- `$.trigger.type` - The trigger type
- `$.trigger.triggeredAt` - When the trigger was activated
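
For example, a step in a workflow document can use these expressions directly in its input fields. This is a minimal sketch: the `create-document` step and the `auditLog` collection mirror the other examples in this repository and are used here only for illustration.

```typescript
steps: [
  {
    name: 'log-trigger',
    step: 'create-document',
    collectionSlug: 'auditLog',
    data: {
      message: 'Triggered via custom trigger',
      source: '$.trigger.data.source',       // custom data passed when triggering
      triggeredAt: '$.trigger.triggeredAt'   // set when the trigger fires
    }
  }
]
```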
@@ -1,274 +0,0 @@
|
||||
import { buildConfig } from 'payload'
|
||||
import { workflowsPlugin, triggerCustomWorkflow } from '@xtr-dev/payload-automation'
|
||||
import type { Field } from 'payload'
|
||||
|
||||
// Example: Data import trigger with custom fields
|
||||
const dataImportFields: Field[] = [
|
||||
{
|
||||
name: 'sourceUrl',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'URL of the data source to import from'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'format',
|
||||
type: 'select',
|
||||
options: ['json', 'csv', 'xml'],
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'Format of the data to import'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'mapping',
|
||||
type: 'json',
|
||||
admin: {
|
||||
description: 'Field mapping configuration'
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
// Example: Manual review trigger with approval fields
|
||||
const manualReviewFields: Field[] = [
|
||||
{
|
||||
name: 'reviewerId',
|
||||
type: 'text',
|
||||
required: true,
|
||||
admin: {
|
||||
description: 'ID of the reviewer'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'reviewNotes',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Notes from the review'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'approved',
|
||||
type: 'checkbox',
|
||||
defaultValue: false,
|
||||
admin: {
|
||||
description: 'Whether the item was approved'
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
export default buildConfig({
|
||||
// ... other config
|
||||
|
||||
plugins: [
|
||||
workflowsPlugin({
|
||||
collectionTriggers: {
|
||||
posts: true, // Enable all CRUD triggers for posts
|
||||
products: { // Selective triggers for products
|
||||
create: true,
|
||||
update: true
|
||||
}
|
||||
},
|
||||
|
||||
// Define custom triggers that will appear in the workflow UI
|
||||
triggers: [
|
||||
{
|
||||
slug: 'data-import',
|
||||
inputs: dataImportFields
|
||||
},
|
||||
{
|
||||
slug: 'manual-review',
|
||||
inputs: manualReviewFields
|
||||
},
|
||||
{
|
||||
slug: 'scheduled-report',
|
||||
inputs: [
|
||||
{
|
||||
name: 'reportType',
|
||||
type: 'select',
|
||||
options: ['daily', 'weekly', 'monthly'],
|
||||
required: true
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
|
||||
steps: [
|
||||
// ... your workflow steps
|
||||
]
|
||||
})
|
||||
],
|
||||
|
||||
onInit: async (payload) => {
|
||||
// Example 1: Trigger workflow from external data source
|
||||
// This could be called from a webhook, scheduled job, or any other event
|
||||
const handleDataImport = async (sourceUrl: string, format: string) => {
|
||||
const results = await triggerCustomWorkflow(payload, {
|
||||
slug: 'data-import',
|
||||
data: {
|
||||
sourceUrl,
|
||||
format,
|
||||
mapping: {
|
||||
title: 'name',
|
||||
description: 'summary'
|
||||
},
|
||||
importedAt: new Date().toISOString()
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Data import workflows triggered:', results)
|
||||
}
|
||||
|
||||
// Example 2: Trigger workflow after custom business logic
|
||||
const handleDocumentReview = async (documentId: string, reviewerId: string, approved: boolean) => {
|
||||
// Perform your custom review logic here
|
||||
const reviewData = {
|
||||
documentId,
|
||||
reviewerId,
|
||||
reviewNotes: approved ? 'Document meets all requirements' : 'Needs revision',
|
||||
approved,
|
||||
reviewedAt: new Date().toISOString()
|
||||
}
|
||||
|
||||
// Trigger workflows that listen for manual review
|
||||
const results = await triggerCustomWorkflow(payload, {
|
||||
slug: 'manual-review',
|
||||
data: reviewData,
|
||||
user: {
|
||||
id: reviewerId,
|
||||
email: 'reviewer@example.com'
|
||||
}
|
||||
})
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// Example 3: Integrate with external services
|
||||
// You could set up listeners for external events
|
||||
if (process.env.ENABLE_EXTERNAL_SYNC) {
|
||||
// Listen to external service events (example with a hypothetical event emitter)
|
||||
// externalService.on('data-ready', async (event) => {
|
||||
// await triggerCustomWorkflow(payload, {
|
||||
// slug: 'data-import',
|
||||
// data: event.data
|
||||
// })
|
||||
// })
|
||||
}
|
||||
|
||||
// Example 4: Create scheduled reports using node-cron or similar
|
||||
// This shows how you might trigger a custom workflow on a schedule
|
||||
// without using the built-in cron trigger
|
||||
const scheduleReports = async () => {
|
||||
// This could be called by a cron job or scheduled task
|
||||
await triggerCustomWorkflow(payload, {
|
||||
slug: 'scheduled-report',
|
||||
data: {
|
||||
reportType: 'daily',
|
||||
generatedAt: new Date().toISOString(),
|
||||
metrics: {
|
||||
totalUsers: 1000,
|
||||
activeUsers: 750,
|
||||
newSignups: 25
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Example 5: Hook into collection operations for complex logic
|
||||
const postsCollection = payload.collections.posts
|
||||
if (postsCollection) {
|
||||
postsCollection.config.hooks = postsCollection.config.hooks || {}
|
||||
postsCollection.config.hooks.afterChange = postsCollection.config.hooks.afterChange || []
|
||||
|
||||
postsCollection.config.hooks.afterChange.push(async ({ doc, operation, req }) => {
|
||||
// Custom logic to determine if we should trigger a workflow
|
||||
if (operation === 'create' && doc.status === 'published') {
|
||||
// Trigger a custom workflow for newly published posts
|
||||
await triggerCustomWorkflow(payload, {
|
||||
slug: 'manual-review',
|
||||
data: {
|
||||
documentId: doc.id,
|
||||
documentType: 'post',
|
||||
reviewerId: 'auto-review',
|
||||
reviewNotes: 'Automatically queued for review',
|
||||
approved: false
|
||||
},
|
||||
req // Pass the request context
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Make functions available globally for testing/debugging
|
||||
;(global as any).handleDataImport = handleDataImport
|
||||
;(global as any).handleDocumentReview = handleDocumentReview
|
||||
;(global as any).scheduleReports = scheduleReports
|
||||
}
|
||||
})
|
||||
|
||||
// Example workflow configuration that would use these custom triggers:
|
||||
/*
|
||||
{
|
||||
name: "Process Data Import",
|
||||
triggers: [{
|
||||
type: "data-import",
|
||||
sourceUrl: "https://api.example.com/data",
|
||||
format: "json",
|
||||
mapping: { ... }
|
||||
}],
|
||||
steps: [
|
||||
{
|
||||
step: "http-request",
|
||||
name: "fetch-data",
|
||||
input: {
|
||||
url: "$.trigger.data.sourceUrl",
|
||||
method: "GET"
|
||||
}
|
||||
},
|
||||
{
|
||||
step: "create-document",
|
||||
name: "import-records",
|
||||
input: {
|
||||
collection: "imported-data",
|
||||
data: "$.steps.fetch-data.output.body"
|
||||
},
|
||||
dependencies: ["fetch-data"]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
{
|
||||
name: "Review Approval Workflow",
|
||||
triggers: [{
|
||||
type: "manual-review",
|
||||
reviewerId: "",
|
||||
reviewNotes: "",
|
||||
approved: false
|
||||
}],
|
||||
steps: [
|
||||
{
|
||||
step: "update-document",
|
||||
name: "update-status",
|
||||
input: {
|
||||
collection: "documents",
|
||||
id: "$.trigger.data.documentId",
|
||||
data: {
|
||||
status: "$.trigger.data.approved ? 'approved' : 'rejected'",
|
||||
reviewedBy: "$.trigger.data.reviewerId",
|
||||
reviewedAt: "$.trigger.data.reviewedAt"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
step: "send-email",
|
||||
name: "notify-author",
|
||||
input: {
|
||||
to: "author@example.com",
|
||||
subject: "Document Review Complete",
|
||||
text: "Your document has been $.trigger.data.approved ? 'approved' : 'rejected'"
|
||||
},
|
||||
dependencies: ["update-status"]
|
||||
}
|
||||
]
|
||||
}
|
||||
*/
|
||||
@@ -1,122 +0,0 @@
|
||||
/**
|
||||
* Example: Manual Trigger Workflow
|
||||
*
|
||||
* This example shows how to create a workflow that can be triggered
|
||||
* manually from the PayloadCMS admin interface using a custom button.
|
||||
*/
|
||||
|
||||
import type { Payload } from 'payload'
|
||||
|
||||
/**
|
||||
* Create a workflow with manual trigger
|
||||
*/
|
||||
export async function createManualTriggerWorkflow(payload: Payload) {
|
||||
const workflow = await payload.create({
|
||||
collection: 'workflows',
|
||||
data: {
|
||||
name: 'Manual Data Processing',
|
||||
description: 'A workflow that can be triggered manually from the admin UI',
|
||||
triggers: [
|
||||
{
|
||||
type: 'manual-trigger' // This enables the trigger button in the admin
|
||||
}
|
||||
],
|
||||
steps: [
|
||||
{
|
||||
name: 'fetch-data',
|
||||
type: 'http-request-step',
|
||||
input: {
|
||||
url: 'https://api.example.com/data',
|
||||
method: 'GET'
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'process-data',
|
||||
type: 'create-document',
|
||||
input: {
|
||||
collection: 'auditLog',
|
||||
data: {
|
||||
message: 'Manual workflow executed',
|
||||
triggeredAt: '$.trigger.data.timestamp'
|
||||
}
|
||||
},
|
||||
dependencies: ['fetch-data'] // This step depends on fetch-data
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Created workflow:', workflow.id)
|
||||
return workflow
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger a workflow programmatically using the custom trigger
|
||||
*/
|
||||
export async function triggerWorkflowProgrammatically(payload: Payload) {
|
||||
// Import the trigger functions from the plugin
|
||||
const { triggerCustomWorkflow, triggerWorkflowById } = await import('@xtr-dev/payload-automation')
|
||||
|
||||
// Option 1: Trigger all workflows with a specific trigger slug
|
||||
const results = await triggerCustomWorkflow(payload, {
|
||||
slug: 'manual-trigger',
|
||||
data: {
|
||||
source: 'api',
|
||||
timestamp: new Date().toISOString(),
|
||||
user: 'system'
|
||||
}
|
||||
})
|
||||
|
||||
console.log('Triggered workflows:', results)
|
||||
|
||||
// Option 2: Trigger a specific workflow by ID
|
||||
const workflowId = 'your-workflow-id'
|
||||
const result = await triggerWorkflowById(
|
||||
payload,
|
||||
workflowId,
|
||||
'manual-trigger',
|
||||
{
|
||||
source: 'api',
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
)
|
||||
|
||||
console.log('Triggered workflow:', result)
|
||||
}
|
||||
|
||||
/**
|
||||
* Example usage in your application
|
||||
*/
|
||||
export async function setupManualTriggerExample(payload: Payload) {
|
||||
// Create the workflow
|
||||
const workflow = await createManualTriggerWorkflow(payload)
|
||||
|
||||
// The workflow is now available in the admin UI with a trigger button
|
||||
console.log('Workflow created! You can now:')
|
||||
console.log('1. Go to the admin UI and navigate to the Workflows collection')
|
||||
console.log('2. Open the workflow:', workflow.name)
|
||||
console.log('3. Click the "Trigger Workflow" button to execute it manually')
|
||||
|
||||
// You can also trigger it programmatically
|
||||
await triggerWorkflowProgrammatically(payload)
|
||||
}
|
||||
|
||||
/**
 * Notes:
 *
 * 1. The manual trigger button appears automatically in the workflow admin UI
 *    when a workflow has a trigger with type 'manual-trigger'
 *
 * 2. You can have multiple triggers on the same workflow, including manual triggers
 *
 * 3. The trigger passes data to the workflow execution context, accessible via:
 *    - $.trigger.data - The custom data passed when triggering
 *    - $.trigger.type - The trigger type ('manual-trigger')
 *    - $.trigger.triggeredAt - Timestamp of when the trigger was activated
 *
 * 4. Manual triggers are useful for:
 *    - Administrative tasks
 *    - Data migration workflows
 *    - Testing and debugging
 *    - On-demand processing
 */
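
// A minimal sketch (not part of the original example) showing note 2 in practice:
// the same workflow combines a 'manual-trigger' with a 'webhook-trigger', and a step
// reads the trigger context via JSONPath. The 'auditLog' collection, its field names,
// and the 'create-document' step slug follow the shapes used elsewhere in this
// repository; treat them as assumptions rather than a definitive API.
export async function createMultiTriggerWorkflow(payload: Payload) {
  return payload.create({
    collection: 'workflows',
    data: {
      name: 'Multi-Trigger Example',
      triggers: [
        { type: 'manual-trigger' },                               // admin UI button
        { type: 'webhook-trigger', webhookPath: 'multi-example' } // POST webhook endpoint
      ],
      steps: [
        {
          name: 'log-trigger',
          step: 'create-document',
          collectionSlug: 'auditLog',
          data: {
            message: 'Workflow executed',
            // JSONPath is resolved against the execution context at run time
            user: '$.trigger.data.userId'
          }
        }
      ]
    }
  })
}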
package-lock.json (generated, 4 lines changed)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@xtr-dev/payload-workflows",
|
||||
"version": "0.0.23",
|
||||
"version": "0.0.38",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@xtr-dev/payload-workflows",
|
||||
"version": "0.0.23",
|
||||
"version": "0.0.38",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"jsonpath-plus": "^10.3.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@xtr-dev/payload-automation",
|
||||
"version": "0.0.23",
|
||||
"version": "0.0.38",
|
||||
"description": "PayloadCMS Automation Plugin - Comprehensive workflow automation system with visual workflow building, execution tracking, and step types",
|
||||
"license": "MIT",
|
||||
"type": "module",
|
||||
@@ -34,6 +34,11 @@
|
||||
"import": "./dist/exports/server.js",
|
||||
"types": "./dist/exports/server.d.ts",
|
||||
"default": "./dist/exports/server.js"
|
||||
},
|
||||
"./helpers": {
|
||||
"import": "./dist/exports/helpers.js",
|
||||
"types": "./dist/exports/helpers.d.ts",
|
||||
"default": "./dist/exports/helpers.js"
|
||||
}
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
|
||||
@@ -1,247 +1,123 @@
|
||||
import type {CollectionConfig, Field} from 'payload'
|
||||
import type {CollectionConfig} from 'payload'
|
||||
|
||||
import type {WorkflowsPluginConfig} from "../plugin/config-types.js"
|
||||
|
||||
export const createWorkflowCollection: <T extends string>(options: WorkflowsPluginConfig<T>) => CollectionConfig = ({
|
||||
collectionTriggers,
|
||||
steps,
|
||||
triggers
|
||||
}) => ({
|
||||
slug: 'workflows',
|
||||
access: {
|
||||
create: () => true,
|
||||
delete: () => true,
|
||||
read: () => true,
|
||||
update: () => true,
|
||||
},
|
||||
admin: {
|
||||
defaultColumns: ['name', 'updatedAt'],
|
||||
description: 'Create and manage automated workflows.',
|
||||
group: 'Automation',
|
||||
useAsTitle: 'name',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Human-readable name for the workflow',
|
||||
import {parameter} from "../fields/parameter.js"
|
||||
import {collectionTrigger, globalTrigger} from "../triggers/index.js"
|
||||
|
||||
export const createWorkflowCollection: <T extends string>(options: WorkflowsPluginConfig<T>) => CollectionConfig = (options) => {
|
||||
const steps = options.steps || []
|
||||
const triggers = (options.triggers || []).map(t => t(options)).concat(collectionTrigger(options), globalTrigger(options))
|
||||
return {
|
||||
slug: 'workflows',
|
||||
access: {
|
||||
create: () => true,
|
||||
delete: () => true,
|
||||
read: () => true,
|
||||
update: () => true,
|
||||
},
|
||||
admin: {
|
||||
defaultColumns: ['name', 'updatedAt'],
|
||||
description: 'Create and manage automated workflows.',
|
||||
group: 'Automation',
|
||||
useAsTitle: 'name',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Human-readable name for the workflow',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'description',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Optional description of what this workflow does',
|
||||
{
|
||||
name: 'description',
|
||||
type: 'textarea',
|
||||
admin: {
|
||||
description: 'Optional description of what this workflow does',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'executionStatus',
|
||||
type: 'ui',
|
||||
admin: {
|
||||
components: {
|
||||
Field: '@/components/WorkflowExecutionStatus'
|
||||
},
|
||||
condition: (data) => !!data?.id // Only show for existing workflows
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'triggers',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: [
|
||||
'collection-trigger',
|
||||
'webhook-trigger',
|
||||
'global-trigger',
|
||||
'cron-trigger',
|
||||
...(triggers || []).map(t => t.slug)
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'collectionSlug',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'collection-trigger',
|
||||
description: 'Collection that triggers the workflow',
|
||||
{
|
||||
name: 'triggers',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: [
|
||||
...triggers.map(t => t.slug)
|
||||
]
|
||||
},
|
||||
options: Object.keys(collectionTriggers || {})
|
||||
},
|
||||
{
|
||||
name: 'operation',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'collection-trigger',
|
||||
description: 'Collection operation that triggers the workflow',
|
||||
},
|
||||
options: [
|
||||
'create',
|
||||
'delete',
|
||||
'read',
|
||||
'update',
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'webhookPath',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'webhook-trigger',
|
||||
description: 'URL path for the webhook (e.g., "my-webhook"). Full URL will be /api/workflows-webhook/my-webhook',
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'webhook-trigger' && !value) {
|
||||
return 'Webhook path is required for webhook triggers'
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'global',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'global-trigger',
|
||||
description: 'Global that triggers the workflow',
|
||||
},
|
||||
options: [] // Will be populated dynamically based on available globals
|
||||
},
|
||||
{
|
||||
name: 'globalOperation',
|
||||
type: 'select',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'global-trigger',
|
||||
description: 'Global operation that triggers the workflow',
|
||||
},
|
||||
options: [
|
||||
'update'
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'cronExpression',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'cron-trigger',
|
||||
description: 'Cron expression for scheduled execution (e.g., "0 0 * * *" for daily at midnight)',
|
||||
placeholder: '0 0 * * *'
|
||||
},
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'cron-trigger' && !value) {
|
||||
return 'Cron expression is required for cron triggers'
|
||||
}
|
||||
|
||||
// Validate cron expression format if provided
|
||||
if (siblingData?.type === 'cron-trigger' && value) {
|
||||
// Basic format validation - should be 5 parts separated by spaces
|
||||
const cronParts = value.trim().split(/\s+/)
|
||||
if (cronParts.length !== 5) {
|
||||
return 'Invalid cron expression format. Expected 5 parts: "minute hour day month weekday" (e.g., "0 9 * * 1")'
|
||||
}
|
||||
|
||||
// Additional validation could use node-cron but we avoid dynamic imports here
|
||||
// The main validation happens at runtime in the cron scheduler
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'timezone',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'cron-trigger',
|
||||
description: 'Timezone for cron execution (e.g., "America/New_York", "Europe/London"). Defaults to UTC.',
|
||||
placeholder: 'UTC'
|
||||
},
|
||||
defaultValue: 'UTC',
|
||||
validate: (value: any, {siblingData}: any) => {
|
||||
if (siblingData?.type === 'cron-trigger' && value) {
|
||||
try {
|
||||
// Test if timezone is valid by trying to create a date with it
|
||||
new Intl.DateTimeFormat('en', {timeZone: value})
|
||||
return true
|
||||
} catch {
|
||||
return `Invalid timezone: ${value}. Please use a valid IANA timezone identifier (e.g., "America/New_York", "Europe/London")`
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
...(triggers || []).flatMap(t => (t.inputs || []).map(f => ({
|
||||
...f,
|
||||
admin: {
|
||||
...(f.admin || {}),
|
||||
condition: (...args) => args[1]?.type === t.slug && (
|
||||
f.admin?.condition ?
|
||||
f.admin.condition.call(this, ...args) :
|
||||
true
|
||||
),
|
||||
},
|
||||
} as Field)))
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'steps',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
type: 'row',
|
||||
fields: [
|
||||
{
|
||||
name: 'step',
|
||||
type: 'select',
|
||||
options: steps.map(t => t.slug)
|
||||
{
|
||||
name: 'parameters',
|
||||
type: 'json',
|
||||
admin: {
|
||||
hidden: true,
|
||||
},
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
}
|
||||
]
|
||||
},
|
||||
...(steps || []).flatMap(step => (step.inputSchema || []).map(field => ({
|
||||
...field,
|
||||
admin: {
|
||||
...(field.admin || {}),
|
||||
condition: (...args) => args[1]?.step === step.slug && (
|
||||
field.admin?.condition ?
|
||||
field.admin.condition.call(this, ...args) :
|
||||
true
|
||||
),
|
||||
defaultValue: {}
|
||||
},
|
||||
} as Field))),
|
||||
{
|
||||
name: 'dependencies',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Step names that must complete before this step can run'
|
||||
// Virtual fields for custom triggers
|
||||
...triggers.flatMap(t => (t.parameters || []).map(p => parameter(t.slug, p as any))),
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this trigger to execute the workflow (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
hasMany: true,
|
||||
required: false
|
||||
},
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this step to execute (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'steps',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
defaultValue: 'Unnamed Step'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
versions: {
|
||||
drafts: {
|
||||
autosave: false,
|
||||
{
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: steps.map(t => t.slug)
|
||||
},
|
||||
{
|
||||
name: 'parameters',
|
||||
type: 'json',
|
||||
admin: {
|
||||
hidden: true,
|
||||
},
|
||||
defaultValue: {}
|
||||
},
|
||||
// Virtual fields for custom triggers
|
||||
...steps.flatMap(step => (step.inputSchema || []).map(s => parameter(step.slug, s as any))),
|
||||
{
|
||||
name: 'dependencies',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'Step names that must complete before this step can run'
|
||||
},
|
||||
hasMany: true,
|
||||
required: false
|
||||
},
|
||||
{
|
||||
name: 'condition',
|
||||
type: 'text',
|
||||
admin: {
|
||||
description: 'JSONPath expression that must evaluate to true for this step to execute (e.g., "$.trigger.doc.status == \'published\'")'
|
||||
},
|
||||
required: false
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
versions: {
|
||||
drafts: {
|
||||
autosave: false,
|
||||
},
|
||||
maxPerDoc: 10,
|
||||
},
|
||||
maxPerDoc: 10,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ export const WorkflowRunsCollection: CollectionConfig = {
|
||||
admin: {
|
||||
description: 'Current execution status',
|
||||
components: {
|
||||
Cell: '@/components/StatusCell'
|
||||
Cell: '@xtr-dev/payload-automation/client#StatusCell'
|
||||
}
|
||||
},
|
||||
defaultValue: 'pending',
|
||||
@@ -141,7 +141,7 @@ export const WorkflowRunsCollection: CollectionConfig = {
|
||||
description: 'Error message if workflow execution failed',
|
||||
condition: (_, siblingData) => siblingData?.status === 'failed',
|
||||
components: {
|
||||
Field: '@/components/ErrorDisplay'
|
||||
Field: '@xtr-dev/payload-automation/client#ErrorDisplay'
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
@@ -10,10 +10,10 @@ interface ErrorDisplayProps {
|
||||
path?: string
|
||||
}
|
||||
|
||||
export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
value,
|
||||
onChange,
|
||||
readOnly = false
|
||||
export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
value,
|
||||
onChange,
|
||||
readOnly = false
|
||||
}) => {
|
||||
const [expanded, setExpanded] = useState(false)
|
||||
|
||||
@@ -32,7 +32,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Network error') || error.includes('fetch')) {
|
||||
return {
|
||||
type: 'network',
|
||||
@@ -41,7 +41,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Hook execution failed')) {
|
||||
return {
|
||||
type: 'hook',
|
||||
@@ -50,7 +50,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Executor not available')) {
|
||||
return {
|
||||
type: 'executor',
|
||||
@@ -59,7 +59,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('Collection slug is required') || error.includes('Document data is required')) {
|
||||
return {
|
||||
type: 'validation',
|
||||
@@ -68,7 +68,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('status') && error.includes('4')) {
|
||||
return {
|
||||
type: 'client',
|
||||
@@ -77,7 +77,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
technical: error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (error.includes('status') && error.includes('5')) {
|
||||
return {
|
||||
type: 'server',
|
||||
@@ -127,7 +127,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
const errorColor = getErrorColor(errorInfo.type)
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
<div style={{
|
||||
border: `2px solid ${errorColor}30`,
|
||||
borderRadius: '8px',
|
||||
backgroundColor: `${errorColor}08`,
|
||||
@@ -135,9 +135,9 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
marginTop: '8px'
|
||||
}}>
|
||||
{/* Error Header */}
|
||||
<div style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
<div style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: '12px',
|
||||
marginBottom: '12px'
|
||||
}}>
|
||||
@@ -145,15 +145,15 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
{getErrorIcon(errorInfo.type)}
|
||||
</span>
|
||||
<div>
|
||||
<h4 style={{
|
||||
margin: 0,
|
||||
<h4 style={{
|
||||
margin: 0,
|
||||
color: errorColor,
|
||||
fontSize: '16px',
|
||||
fontWeight: '600'
|
||||
}}>
|
||||
{errorInfo.title}
|
||||
</h4>
|
||||
<p style={{
|
||||
<p style={{
|
||||
margin: '4px 0 0 0',
|
||||
color: '#6B7280',
|
||||
fontSize: '14px',
|
||||
@@ -166,15 +166,16 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
|
||||
{/* Technical Details Toggle */}
|
||||
<div>
|
||||
<Button
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
style={{ marginBottom: expanded ? '12px' : '0' }}
|
||||
>
|
||||
{expanded ? 'Hide' : 'Show'} Technical Details
|
||||
</Button>
|
||||
|
||||
<div style={{ marginBottom: expanded ? '12px' : '0' }}>
|
||||
<Button
|
||||
buttonStyle="secondary"
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
size="small"
|
||||
>
|
||||
{expanded ? 'Hide' : 'Show'} Technical Details
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{expanded && (
|
||||
<div style={{
|
||||
backgroundColor: '#F8F9FA',
|
||||
@@ -193,7 +194,7 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
</div>
|
||||
|
||||
{/* Quick Actions */}
|
||||
<div style={{
|
||||
<div style={{
|
||||
marginTop: '12px',
|
||||
padding: '12px',
|
||||
backgroundColor: `${errorColor}10`,
|
||||
@@ -252,11 +253,11 @@ export const ErrorDisplay: React.FC<ErrorDisplayProps> = ({
|
||||
{/* Hidden textarea for editing if needed */}
|
||||
{!readOnly && onChange && (
|
||||
<textarea
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={{ display: 'none' }}
|
||||
value={value}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { Button, toast } from '@payloadcms/ui'
|
||||
import { useState } from 'react'
|
||||
|
||||
interface TriggerWorkflowButtonProps {
|
||||
workflowId: string
|
||||
workflowName: string
|
||||
triggerSlug?: string
|
||||
}
|
||||
|
||||
export const TriggerWorkflowButton: React.FC<TriggerWorkflowButtonProps> = ({
|
||||
workflowId,
|
||||
workflowName,
|
||||
triggerSlug = 'manual-trigger'
|
||||
}) => {
|
||||
const [loading, setLoading] = useState(false)
|
||||
|
||||
const handleTrigger = async () => {
|
||||
setLoading(true)
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/workflows/trigger-custom', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
workflowId,
|
||||
triggerSlug,
|
||||
data: {
|
||||
triggeredAt: new Date().toISOString(),
|
||||
source: 'admin-button'
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json()
|
||||
throw new Error(error.message || 'Failed to trigger workflow')
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
|
||||
toast.success(`Workflow "${workflowName}" triggered successfully! Run ID: ${result.runId}`)
|
||||
} catch (error) {
|
||||
console.error('Error triggering workflow:', error)
|
||||
toast.error(`Failed to trigger workflow: ${error instanceof Error ? error.message : 'Unknown error'}`)
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Button
|
||||
onClick={handleTrigger}
|
||||
disabled={loading}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
{loading ? 'Triggering...' : 'Trigger Workflow'}
|
||||
</Button>
|
||||
)
|
||||
}
|
||||
@@ -1,231 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import React, { useState, useEffect } from 'react'
|
||||
import { Button } from '@payloadcms/ui'
|
||||
|
||||
interface WorkflowRun {
|
||||
id: string
|
||||
status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'
|
||||
startedAt: string
|
||||
completedAt?: string
|
||||
error?: string
|
||||
triggeredBy: string
|
||||
}
|
||||
|
||||
interface WorkflowExecutionStatusProps {
|
||||
workflowId: string | number
|
||||
}
|
||||
|
||||
export const WorkflowExecutionStatus: React.FC<WorkflowExecutionStatusProps> = ({ workflowId }) => {
|
||||
const [runs, setRuns] = useState<WorkflowRun[]>([])
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [expanded, setExpanded] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const fetchRecentRuns = async () => {
|
||||
try {
|
||||
const response = await fetch(`/api/workflow-runs?where[workflow][equals]=${workflowId}&limit=5&sort=-startedAt`)
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setRuns(data.docs || [])
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to fetch workflow runs:', error)
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
fetchRecentRuns()
|
||||
}, [workflowId])
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div style={{ padding: '16px', color: '#6B7280' }}>
|
||||
Loading execution history...
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (runs.length === 0) {
|
||||
return (
|
||||
<div style={{
|
||||
padding: '16px',
|
||||
backgroundColor: '#F9FAFB',
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '8px',
|
||||
color: '#6B7280',
|
||||
textAlign: 'center'
|
||||
}}>
|
||||
📋 No execution history yet
|
||||
<br />
|
||||
<small>This workflow hasn't been triggered yet.</small>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const getStatusIcon = (status: string) => {
|
||||
switch (status) {
|
||||
case 'pending': return '⏳'
|
||||
case 'running': return '🔄'
|
||||
case 'completed': return '✅'
|
||||
case 'failed': return '❌'
|
||||
case 'cancelled': return '⏹️'
|
||||
default: return '❓'
|
||||
}
|
||||
}
|
||||
|
||||
const getStatusColor = (status: string) => {
|
||||
switch (status) {
|
||||
case 'pending': return '#6B7280'
|
||||
case 'running': return '#3B82F6'
|
||||
case 'completed': return '#10B981'
|
||||
case 'failed': return '#EF4444'
|
||||
case 'cancelled': return '#F59E0B'
|
||||
default: return '#6B7280'
|
||||
}
|
||||
}
|
||||
|
||||
const formatDate = (dateString: string) => {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffMs = now.getTime() - date.getTime()
|
||||
|
||||
if (diffMs < 60000) { // Less than 1 minute
|
||||
return 'Just now'
|
||||
} else if (diffMs < 3600000) { // Less than 1 hour
|
||||
return `${Math.floor(diffMs / 60000)} min ago`
|
||||
} else if (diffMs < 86400000) { // Less than 1 day
|
||||
return `${Math.floor(diffMs / 3600000)} hrs ago`
|
||||
} else {
|
||||
return date.toLocaleDateString()
|
||||
}
|
||||
}
|
||||
|
||||
const getDuration = (startedAt: string, completedAt?: string) => {
|
||||
const start = new Date(startedAt)
|
||||
const end = completedAt ? new Date(completedAt) : new Date()
|
||||
const diffMs = end.getTime() - start.getTime()
|
||||
|
||||
if (diffMs < 1000) return '<1s'
|
||||
if (diffMs < 60000) return `${Math.floor(diffMs / 1000)}s`
|
||||
if (diffMs < 3600000) return `${Math.floor(diffMs / 60000)}m ${Math.floor((diffMs % 60000) / 1000)}s`
|
||||
return `${Math.floor(diffMs / 3600000)}h ${Math.floor((diffMs % 3600000) / 60000)}m`
|
||||
}
|
||||
|
||||
const recentRun = runs[0]
|
||||
const recentStatus = getStatusIcon(recentRun.status)
|
||||
const recentColor = getStatusColor(recentRun.status)
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '8px',
|
||||
backgroundColor: '#FAFAFA'
|
||||
}}>
|
||||
{/* Summary Header */}
|
||||
<div style={{
|
||||
padding: '16px',
|
||||
borderBottom: expanded ? '1px solid #E5E7EB' : 'none',
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '12px' }}>
|
||||
<span style={{ fontSize: '20px' }}>{recentStatus}</span>
|
||||
<div>
|
||||
<div style={{ fontWeight: '600', color: recentColor }}>
|
||||
Last run: {recentRun.status}
|
||||
</div>
|
||||
<div style={{ fontSize: '13px', color: '#6B7280' }}>
|
||||
{formatDate(recentRun.startedAt)} • Duration: {getDuration(recentRun.startedAt, recentRun.completedAt)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
{expanded ? 'Hide' : 'Show'} History ({runs.length})
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Detailed History */}
|
||||
{expanded && (
|
||||
<div style={{ padding: '16px' }}>
|
||||
<h4 style={{ margin: '0 0 12px 0', fontSize: '14px', fontWeight: '600' }}>
|
||||
Recent Executions
|
||||
</h4>
|
||||
|
||||
{runs.map((run, index) => (
|
||||
<div
|
||||
key={run.id}
|
||||
style={{
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
padding: '8px 12px',
|
||||
marginBottom: index < runs.length - 1 ? '8px' : '0',
|
||||
backgroundColor: 'white',
|
||||
border: '1px solid #E5E7EB',
|
||||
borderRadius: '6px'
|
||||
}}
|
||||
>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '10px' }}>
|
||||
<span style={{ fontSize: '16px' }}>
|
||||
{getStatusIcon(run.status)}
|
||||
</span>
|
||||
|
||||
<div>
|
||||
<div style={{
|
||||
fontSize: '13px',
|
||||
fontWeight: '500',
|
||||
color: getStatusColor(run.status)
|
||||
}}>
|
||||
{run.status.charAt(0).toUpperCase() + run.status.slice(1)}
|
||||
</div>
|
||||
<div style={{ fontSize: '12px', color: '#6B7280' }}>
|
||||
{formatDate(run.startedAt)} • {run.triggeredBy}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style={{
|
||||
fontSize: '12px',
|
||||
color: '#6B7280',
|
||||
textAlign: 'right'
|
||||
}}>
|
||||
<div>
|
||||
{getDuration(run.startedAt, run.completedAt)}
|
||||
</div>
|
||||
{run.error && (
|
||||
<div style={{ color: '#EF4444', marginTop: '2px' }}>
|
||||
Error
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
||||
<div style={{
|
||||
marginTop: '12px',
|
||||
textAlign: 'center'
|
||||
}}>
|
||||
<Button
|
||||
onClick={() => {
|
||||
// Navigate to workflow runs filtered by this workflow
|
||||
window.location.href = `/admin/collections/workflow-runs?where[workflow][equals]=${workflowId}`
|
||||
}}
|
||||
size="small"
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
View All Runs →
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -5,20 +5,25 @@ import type { Payload, PayloadRequest } from 'payload'
|
||||
export type PayloadWorkflow = {
|
||||
id: number
|
||||
name: string
|
||||
description?: string | null
|
||||
description?: null | string
|
||||
triggers?: Array<{
|
||||
type?: string | null
|
||||
collectionSlug?: string | null
|
||||
operation?: string | null
|
||||
condition?: string | null
|
||||
type?: null | string
|
||||
condition?: null | string
|
||||
parameters?: {
|
||||
collectionSlug?: null | string
|
||||
operation?: null | string
|
||||
global?: null | string
|
||||
globalOperation?: null | string
|
||||
[key: string]: unknown
|
||||
} | null
|
||||
[key: string]: unknown
|
||||
}> | null
|
||||
steps?: Array<{
|
||||
step?: string | null
|
||||
name?: string | null
|
||||
step?: null | string
|
||||
name?: null | string
|
||||
input?: unknown
|
||||
dependencies?: string[] | null
|
||||
condition?: string | null
|
||||
dependencies?: null | string[]
|
||||
condition?: null | string
|
||||
[key: string]: unknown
|
||||
}> | null
|
||||
[key: string]: unknown
|
||||
@@ -27,39 +32,18 @@ export type PayloadWorkflow = {
|
||||
import { JSONPath } from 'jsonpath-plus'
|
||||
|
||||
// Helper type to extract workflow step data from the generated types
|
||||
export type WorkflowStep = NonNullable<PayloadWorkflow['steps']>[0] & {
|
||||
export type WorkflowStep = {
|
||||
name: string // Ensure name is always present for our execution logic
|
||||
}
|
||||
} & NonNullable<PayloadWorkflow['steps']>[0]
|
||||
|
||||
// Helper type to extract workflow trigger data from the generated types
|
||||
export type WorkflowTrigger = NonNullable<PayloadWorkflow['triggers']>[0] & {
|
||||
// Helper type to extract workflow trigger data from the generated types
|
||||
export type WorkflowTrigger = {
|
||||
type: string // Ensure type is always present for our execution logic
|
||||
}
|
||||
} & NonNullable<PayloadWorkflow['triggers']>[0]
|
||||
|
||||
export interface ExecutionContext {
|
||||
steps: Record<string, {
|
||||
error?: string
|
||||
input: unknown
|
||||
output: unknown
|
||||
state: 'failed' | 'pending' | 'running' | 'succeeded'
|
||||
}>
|
||||
trigger: {
|
||||
collection?: string
|
||||
data?: unknown
|
||||
doc?: unknown
|
||||
headers?: Record<string, string>
|
||||
operation?: string
|
||||
path?: string
|
||||
previousDoc?: unknown
|
||||
req?: PayloadRequest
|
||||
triggeredAt?: string
|
||||
type: string
|
||||
user?: {
|
||||
collection?: string
|
||||
email?: string
|
||||
id?: string
|
||||
}
|
||||
}
|
||||
steps: Record<string, any>
|
||||
trigger: Record<string, any>
|
||||
}
|
||||
|
||||
export class WorkflowExecutor {
|
||||
@@ -68,6 +52,25 @@ export class WorkflowExecutor {
|
||||
private logger: Payload['logger']
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Classifies error types based on error messages
|
||||
*/
|
||||
private classifyErrorType(errorMessage: string): string {
|
||||
if (errorMessage.includes('timeout') || errorMessage.includes('ETIMEDOUT')) {
|
||||
return 'timeout'
|
||||
}
|
||||
if (errorMessage.includes('ENOTFOUND') || errorMessage.includes('getaddrinfo')) {
|
||||
return 'dns'
|
||||
}
|
||||
if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('ECONNRESET')) {
|
||||
return 'connection'
|
||||
}
|
||||
if (errorMessage.includes('network') || errorMessage.includes('fetch')) {
|
||||
return 'network'
|
||||
}
|
||||
return 'unknown'
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate a step condition using JSONPath
|
||||
*/
|
||||
@@ -159,20 +162,32 @@ export class WorkflowExecutor {
|
||||
}
|
||||
|
||||
// Move taskSlug declaration outside try block so it's accessible in catch
|
||||
const taskSlug = step.step // Use the 'step' field for task type
|
||||
const taskSlug = step.type as string
|
||||
|
||||
try {
|
||||
// Extract input data from step - PayloadCMS flattens inputSchema fields to step level
|
||||
const inputFields: Record<string, unknown> = {}
|
||||
|
||||
|
||||
// Get all fields except the core step fields
|
||||
const coreFields = ['step', 'name', 'dependencies', 'condition']
|
||||
const coreFields = ['step', 'name', 'dependencies', 'condition', 'type', 'id', 'parameters']
|
||||
for (const [key, value] of Object.entries(step)) {
|
||||
if (!coreFields.includes(key)) {
|
||||
inputFields[key] = value
|
||||
// Handle flattened parameters (remove 'parameter' prefix)
|
||||
if (key.startsWith('parameter')) {
|
||||
const cleanKey = key.replace('parameter', '')
|
||||
const properKey = cleanKey.charAt(0).toLowerCase() + cleanKey.slice(1)
|
||||
inputFields[properKey] = value
|
||||
} else {
|
||||
inputFields[key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Also extract from nested parameters object if it exists
|
||||
if (step.parameters && typeof step.parameters === 'object') {
|
||||
Object.assign(inputFields, step.parameters)
|
||||
}
|
||||
|
||||
// Resolve input data using JSONPath
|
||||
const resolvedInput = this.resolveStepInput(inputFields, context)
|
||||
context.steps[stepName].input = resolvedInput
|
||||
@@ -200,8 +215,8 @@ export class WorkflowExecutor {
|
||||
id: job.id,
|
||||
req
|
||||
})
|
||||
|
||||
this.logger.info({
|
||||
|
||||
this.logger.info({
|
||||
jobId: job.id,
|
||||
runResult: runResults,
|
||||
hasResult: !!runResults
|
||||
@@ -246,7 +261,7 @@ export class WorkflowExecutor {
|
||||
if (!errorMessage && taskStatus?.output?.error) {
|
||||
errorMessage = taskStatus.output.error
|
||||
}
|
||||
|
||||
|
||||
// Check if task handler returned with state='failed'
|
||||
if (!errorMessage && taskStatus?.state === 'failed') {
|
||||
errorMessage = 'Task handler returned a failed state'
|
||||
@@ -307,7 +322,7 @@ export class WorkflowExecutor {
|
||||
const errorDetails = this.extractErrorDetailsFromJob(completedJob, context.steps[stepName], stepName)
|
||||
if (errorDetails) {
|
||||
context.steps[stepName].errorDetails = errorDetails
|
||||
|
||||
|
||||
this.logger.info({
|
||||
stepName,
|
||||
errorType: errorDetails.errorType,
|
||||
@@ -370,6 +385,95 @@ export class WorkflowExecutor {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts detailed error information from job logs and input
|
||||
*/
|
||||
private extractErrorDetailsFromJob(job: any, stepContext: any, stepName: string) {
|
||||
try {
|
||||
// Get error information from multiple sources
|
||||
const input = stepContext.input || {}
|
||||
const logs = job.log || []
|
||||
const latestLog = logs[logs.length - 1]
|
||||
|
||||
// Extract error message from job error or log
|
||||
const errorMessage = job.error?.message || latestLog?.error?.message || 'Unknown error'
|
||||
|
||||
// For timeout scenarios, check if it's a timeout based on duration and timeout setting
|
||||
let errorType = this.classifyErrorType(errorMessage)
|
||||
|
||||
// Special handling for HTTP timeouts - if task failed and duration exceeds timeout, it's likely a timeout
|
||||
if (errorType === 'unknown' && input.timeout && stepContext.executionInfo?.duration) {
|
||||
const timeoutMs = parseInt(input.timeout) || 30000
|
||||
const actualDuration = stepContext.executionInfo.duration
|
||||
|
||||
// If execution duration is close to or exceeds timeout, classify as timeout
|
||||
if (actualDuration >= (timeoutMs * 0.9)) { // 90% of timeout threshold
|
||||
errorType = 'timeout'
|
||||
this.logger.debug({
|
||||
timeoutMs,
|
||||
actualDuration,
|
||||
stepName
|
||||
}, 'Classified error as timeout based on duration analysis')
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate duration from execution info if available
|
||||
const duration = stepContext.executionInfo?.duration || 0
|
||||
|
||||
// Extract attempt count from logs
|
||||
const attempts = job.totalTried || 1
|
||||
|
||||
return {
|
||||
stepId: `${stepName}-${Date.now()}`,
|
||||
errorType,
|
||||
duration,
|
||||
attempts,
|
||||
finalError: errorMessage,
|
||||
context: {
|
||||
url: input.url,
|
||||
method: input.method,
|
||||
timeout: input.timeout,
|
||||
statusCode: latestLog?.output?.status,
|
||||
headers: input.headers
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stepName
|
||||
}, 'Failed to extract error details from job')
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a condition value (string literal, number, boolean, or JSONPath)
|
||||
*/
|
||||
private parseConditionValue(expr: string, context: ExecutionContext): any {
|
||||
// Handle string literals
|
||||
if ((expr.startsWith('"') && expr.endsWith('"')) || (expr.startsWith("'") && expr.endsWith("'"))) {
|
||||
return expr.slice(1, -1) // Remove quotes
|
||||
}
|
||||
|
||||
// Handle boolean literals
|
||||
if (expr === 'true') {return true}
|
||||
if (expr === 'false') {return false}
|
||||
|
||||
// Handle number literals
|
||||
if (/^-?\d+(?:\.\d+)?$/.test(expr)) {
|
||||
return Number(expr)
|
||||
}
|
||||
|
||||
// Handle JSONPath expressions
|
||||
if (expr.startsWith('$')) {
|
||||
return this.resolveJSONPathValue(expr, context)
|
||||
}
|
||||
|
||||
// Return as string if nothing else matches
|
||||
return expr
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve step execution order based on dependencies
|
||||
*/
|
||||
@@ -427,6 +531,22 @@ export class WorkflowExecutor {
|
||||
return executionBatches
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a JSONPath value from the context
|
||||
*/
|
||||
private resolveJSONPathValue(expr: string, context: ExecutionContext): any {
|
||||
if (expr.startsWith('$')) {
|
||||
const result = JSONPath({
|
||||
json: context,
|
||||
path: expr,
|
||||
wrap: false
|
||||
})
|
||||
// Return first result if array, otherwise the result itself
|
||||
return Array.isArray(result) && result.length > 0 ? result[0] : result
|
||||
}
|
||||
return expr
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve step input using JSONPath expressions
|
||||
*/
|
||||
@@ -456,14 +576,14 @@ export class WorkflowExecutor {
|
||||
path: value,
|
||||
wrap: false
|
||||
})
|
||||
|
||||
|
||||
this.logger.debug({
|
||||
key,
|
||||
jsonPath: value,
|
||||
result: JSON.stringify(result).substring(0, 200),
|
||||
resultType: Array.isArray(result) ? 'array' : typeof result
|
||||
}, 'JSONPath resolved successfully')
|
||||
|
||||
|
||||
resolved[key] = result
|
||||
} catch (error) {
|
||||
this.logger.warn({
|
||||
@@ -480,7 +600,7 @@ export class WorkflowExecutor {
|
||||
key,
|
||||
nestedKeys: Object.keys(value as Record<string, unknown>)
|
||||
}, 'Recursively resolving nested object')
|
||||
|
||||
|
||||
resolved[key] = this.resolveStepInput(value as Record<string, unknown>, context)
|
||||
} else {
|
||||
// Keep literal values as-is
|
||||
@@ -501,22 +621,22 @@ export class WorkflowExecutor {
|
||||
*/
|
||||
private safeSerialize(obj: unknown): unknown {
|
||||
const seen = new WeakSet()
|
||||
|
||||
|
||||
const serialize = (value: unknown): unknown => {
|
||||
if (value === null || typeof value !== 'object') {
|
||||
return value
|
||||
}
|
||||
|
||||
if (seen.has(value as object)) {
|
||||
|
||||
if (seen.has(value)) {
|
||||
return '[Circular Reference]'
|
||||
}
|
||||
|
||||
seen.add(value as object)
|
||||
|
||||
|
||||
seen.add(value)
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(serialize)
|
||||
}
|
||||
|
||||
|
||||
const result: Record<string, unknown> = {}
|
||||
for (const [key, val] of Object.entries(value as Record<string, unknown>)) {
|
||||
try {
|
||||
@@ -530,94 +650,13 @@ export class WorkflowExecutor {
|
||||
result[key] = '[Non-serializable]'
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
|
||||
return serialize(obj)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts detailed error information from job logs and input
|
||||
*/
|
||||
private extractErrorDetailsFromJob(job: any, stepContext: any, stepName: string) {
|
||||
try {
|
||||
// Get error information from multiple sources
|
||||
const input = stepContext.input || {}
|
||||
const logs = job.log || []
|
||||
const latestLog = logs[logs.length - 1]
|
||||
|
||||
// Extract error message from job error or log
|
||||
const errorMessage = job.error?.message || latestLog?.error?.message || 'Unknown error'
|
||||
|
||||
// For timeout scenarios, check if it's a timeout based on duration and timeout setting
|
||||
let errorType = this.classifyErrorType(errorMessage)
|
||||
|
||||
// Special handling for HTTP timeouts - if task failed and duration exceeds timeout, it's likely a timeout
|
||||
if (errorType === 'unknown' && input.timeout && stepContext.executionInfo?.duration) {
|
||||
const timeoutMs = parseInt(input.timeout) || 30000
|
||||
const actualDuration = stepContext.executionInfo.duration
|
||||
|
||||
// If execution duration is close to or exceeds timeout, classify as timeout
|
||||
if (actualDuration >= (timeoutMs * 0.9)) { // 90% of timeout threshold
|
||||
errorType = 'timeout'
|
||||
this.logger.debug({
|
||||
timeoutMs,
|
||||
actualDuration,
|
||||
stepName
|
||||
}, 'Classified error as timeout based on duration analysis')
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate duration from execution info if available
|
||||
const duration = stepContext.executionInfo?.duration || 0
|
||||
|
||||
// Extract attempt count from logs
|
||||
const attempts = job.totalTried || 1
|
||||
|
||||
return {
|
||||
stepId: `${stepName}-${Date.now()}`,
|
||||
errorType,
|
||||
duration,
|
||||
attempts,
|
||||
finalError: errorMessage,
|
||||
context: {
|
||||
url: input.url,
|
||||
method: input.method,
|
||||
timeout: input.timeout,
|
||||
statusCode: latestLog?.output?.status,
|
||||
headers: input.headers
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stepName
|
||||
}, 'Failed to extract error details from job')
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Classifies error types based on error messages
|
||||
*/
|
||||
private classifyErrorType(errorMessage: string): string {
|
||||
if (errorMessage.includes('timeout') || errorMessage.includes('ETIMEDOUT')) {
|
||||
return 'timeout'
|
||||
}
|
||||
if (errorMessage.includes('ENOTFOUND') || errorMessage.includes('getaddrinfo')) {
|
||||
return 'dns'
|
||||
}
|
||||
if (errorMessage.includes('ECONNREFUSED') || errorMessage.includes('ECONNRESET')) {
|
||||
return 'connection'
|
||||
}
|
||||
if (errorMessage.includes('network') || errorMessage.includes('fetch')) {
|
||||
return 'network'
|
||||
}
|
||||
return 'unknown'
|
||||
}
|
||||
|
||||
/**
|
||||
* Update workflow run with current context
|
||||
*/
|
||||
@@ -665,16 +704,16 @@ export class WorkflowExecutor {
|
||||
try {
|
||||
// Check if this is a comparison expression
|
||||
const comparisonMatch = condition.match(/^(.+?)\s*(==|!=|>|<|>=|<=)\s*(.+)$/)
|
||||
|
||||
|
||||
if (comparisonMatch) {
|
||||
const [, leftExpr, operator, rightExpr] = comparisonMatch
|
||||
|
||||
|
||||
// Evaluate left side (should be JSONPath)
|
||||
const leftValue = this.resolveJSONPathValue(leftExpr.trim(), context)
|
||||
|
||||
|
||||
// Parse right side (could be string, number, boolean, or JSONPath)
|
||||
const rightValue = this.parseConditionValue(rightExpr.trim(), context)
|
||||
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
leftExpr: leftExpr.trim(),
|
||||
@@ -685,32 +724,32 @@ export class WorkflowExecutor {
|
||||
leftType: typeof leftValue,
|
||||
rightType: typeof rightValue
|
||||
}, 'Evaluating comparison condition')
|
||||
|
||||
|
||||
// Perform comparison
|
||||
let result: boolean
|
||||
switch (operator) {
|
||||
case '==':
|
||||
result = leftValue === rightValue
|
||||
break
|
||||
case '!=':
|
||||
result = leftValue !== rightValue
|
||||
break
|
||||
case '>':
|
||||
result = Number(leftValue) > Number(rightValue)
|
||||
break
|
||||
case '<':
|
||||
result = Number(leftValue) < Number(rightValue)
|
||||
break
|
||||
case '>=':
|
||||
result = Number(leftValue) >= Number(rightValue)
|
||||
break
|
||||
case '<=':
|
||||
result = Number(leftValue) <= Number(rightValue)
|
||||
break
|
||||
case '==':
|
||||
result = leftValue === rightValue
|
||||
break
|
||||
case '>':
|
||||
result = Number(leftValue) > Number(rightValue)
|
||||
break
|
||||
case '>=':
|
||||
result = Number(leftValue) >= Number(rightValue)
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unknown comparison operator: ${operator}`)
|
||||
}
|
||||
|
||||
|
||||
this.logger.debug({
|
||||
condition,
|
||||
result,
|
||||
@@ -718,7 +757,7 @@ export class WorkflowExecutor {
|
||||
rightValue,
|
||||
operator
|
||||
}, 'Comparison condition evaluation completed')
|
||||
|
||||
|
||||
return result
|
||||
} else {
|
||||
// Treat as simple JSONPath boolean evaluation
|
||||
@@ -762,49 +801,6 @@ export class WorkflowExecutor {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a JSONPath value from the context
|
||||
*/
|
||||
private resolveJSONPathValue(expr: string, context: ExecutionContext): any {
|
||||
if (expr.startsWith('$')) {
|
||||
const result = JSONPath({
|
||||
json: context,
|
||||
path: expr,
|
||||
wrap: false
|
||||
})
|
||||
// Return first result if array, otherwise the result itself
|
||||
return Array.isArray(result) && result.length > 0 ? result[0] : result
|
||||
}
|
||||
return expr
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a condition value (string literal, number, boolean, or JSONPath)
|
||||
*/
|
||||
private parseConditionValue(expr: string, context: ExecutionContext): any {
|
||||
// Handle string literals
|
||||
if ((expr.startsWith('"') && expr.endsWith('"')) || (expr.startsWith("'") && expr.endsWith("'"))) {
|
||||
return expr.slice(1, -1) // Remove quotes
|
||||
}
|
||||
|
||||
// Handle boolean literals
|
||||
if (expr === 'true') return true
|
||||
if (expr === 'false') return false
|
||||
|
||||
// Handle number literals
|
||||
if (/^-?\d+(\.\d+)?$/.test(expr)) {
|
||||
return Number(expr)
|
||||
}
|
||||
|
||||
// Handle JSONPath expressions
|
||||
if (expr.startsWith('$')) {
|
||||
return this.resolveJSONPathValue(expr, context)
|
||||
}
|
||||
|
||||
// Return as string if nothing else matches
|
||||
return expr
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a workflow with the given context
|
||||
@@ -947,160 +943,4 @@ export class WorkflowExecutor {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find and execute workflows triggered by a collection operation
|
||||
*/
|
||||
async executeTriggeredWorkflows(
|
||||
collection: string,
|
||||
operation: 'create' | 'delete' | 'read' | 'update',
|
||||
doc: unknown,
|
||||
previousDoc: unknown,
|
||||
req: PayloadRequest
|
||||
): Promise<void> {
|
||||
console.log('🚨 EXECUTOR: executeTriggeredWorkflows called!')
|
||||
console.log('🚨 EXECUTOR: Collection =', collection)
|
||||
console.log('🚨 EXECUTOR: Operation =', operation)
|
||||
console.log('🚨 EXECUTOR: Doc ID =', (doc as any)?.id)
|
||||
console.log('🚨 EXECUTOR: Has payload?', !!this.payload)
|
||||
console.log('🚨 EXECUTOR: Has logger?', !!this.logger)
|
||||
|
||||
this.logger.info({
|
||||
collection,
|
||||
operation,
|
||||
docId: (doc as any)?.id
|
||||
}, 'executeTriggeredWorkflows called')
|
||||
|
||||
try {
|
||||
// Find workflows with matching triggers
|
||||
const workflows = await this.payload.find({
|
||||
collection: 'workflows',
|
||||
depth: 2, // Include steps and triggers
|
||||
limit: 100,
|
||||
req
|
||||
})
|
||||
|
||||
this.logger.info({
|
||||
workflowCount: workflows.docs.length
|
||||
}, 'Found workflows to check')
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
// Check if this workflow has a matching trigger
|
||||
const triggers = workflow.triggers as Array<{
|
||||
collection?: string
|
||||
collectionSlug?: string
|
||||
condition?: string
|
||||
operation: string
|
||||
type: string
|
||||
}>
|
||||
|
||||
this.logger.debug({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
triggerCount: triggers?.length || 0,
|
||||
triggers: triggers?.map(t => ({
|
||||
type: t.type,
|
||||
collection: t.collection,
|
||||
collectionSlug: t.collectionSlug,
|
||||
operation: t.operation
|
||||
}))
|
||||
}, 'Checking workflow triggers')
|
||||
|
||||
const matchingTriggers = triggers?.filter(trigger =>
|
||||
trigger.type === 'collection-trigger' &&
|
||||
(trigger.collection === collection || trigger.collectionSlug === collection) &&
|
||||
trigger.operation === operation
|
||||
) || []
|
||||
|
||||
this.logger.info({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
matchingTriggerCount: matchingTriggers.length,
|
||||
targetCollection: collection,
|
||||
targetOperation: operation
|
||||
}, 'Matching triggers found')
|
||||
|
||||
for (const trigger of matchingTriggers) {
|
||||
this.logger.info({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
triggerDetails: {
|
||||
type: trigger.type,
|
||||
collection: trigger.collection,
|
||||
collectionSlug: trigger.collectionSlug,
|
||||
operation: trigger.operation,
|
||||
hasCondition: !!trigger.condition
|
||||
}
|
||||
}, 'Processing matching trigger - about to execute workflow')
|
||||
|
||||
// Create execution context for condition evaluation
|
||||
const context: ExecutionContext = {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'collection',
|
||||
collection,
|
||||
doc,
|
||||
operation,
|
||||
previousDoc,
|
||||
req
|
||||
}
|
||||
}
|
||||
|
||||
// Check trigger condition if present
|
||||
if (trigger.condition) {
|
||||
this.logger.debug({
|
||||
collection,
|
||||
operation,
|
||||
condition: trigger.condition,
|
||||
docId: (doc as any)?.id,
|
||||
docFields: doc ? Object.keys(doc) : [],
|
||||
previousDocId: (previousDoc as any)?.id,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Evaluating collection trigger condition')
|
||||
|
||||
const conditionMet = this.evaluateCondition(trigger.condition, context)
|
||||
|
||||
if (!conditionMet) {
|
||||
this.logger.info({
|
||||
collection,
|
||||
condition: trigger.condition,
|
||||
operation,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
docSnapshot: JSON.stringify(doc).substring(0, 200)
|
||||
}, 'Trigger condition not met, skipping workflow')
|
||||
continue
|
||||
}
|
||||
|
||||
this.logger.info({
|
||||
collection,
|
||||
condition: trigger.condition,
|
||||
operation,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
docSnapshot: JSON.stringify(doc).substring(0, 200)
|
||||
}, 'Trigger condition met')
|
||||
}
|
||||
|
||||
this.logger.info({
|
||||
collection,
|
||||
operation,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Triggering workflow')
|
||||
|
||||
// Execute the workflow
|
||||
await this.execute(workflow as PayloadWorkflow, context, req)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error({ error: error instanceof Error ? error.message : 'Unknown error' }, 'Workflow execution failed')
|
||||
this.logger.error({
|
||||
collection,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
operation
|
||||
}, 'Failed to execute triggered workflows')
|
||||
}
|
||||
}
|
||||
}
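
Taken together, evaluateCondition, parseConditionValue and resolveJSONPathValue let a trigger or step condition written as a "left operator right" string be resolved against the live execution context via jsonpath-plus. A condensed sketch of that flow — the condition string, document and context values here are illustrative, not taken from this repository:

import {JSONPath} from 'jsonpath-plus'

// Minimal stand-in for ExecutionContext with only the trigger populated
const context = {
  steps: {},
  trigger: {type: 'collection', operation: 'update', doc: {id: '42', status: 'published'}},
}

// Resolve a JSONPath expression to a single raw value (wrap: false skips the array wrapper)
const resolve = (expr: string): unknown =>
  expr.startsWith('$') ? JSONPath({json: context, path: expr, wrap: false}) : expr

// Split "left operator right" the same way evaluateCondition does; this sketch only handles '=='
const condition = `$.trigger.doc.status == "published"`
const match = condition.match(/^(.+?)\s*(==|!=|>|<|>=|<=)\s*(.+)$/)
if (match) {
  const [, left, , right] = match
  const leftValue = resolve(left.trim())
  const rightValue = right.trim().replace(/^["']|["']$/g, '') // strip quotes from the string literal
  console.log(leftValue === rightValue) // true -> the workflow would be triggered
}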
|
||||
|
||||
@@ -1,10 +1,8 @@
// Client-side components that may have CSS imports or PayloadCMS UI dependencies
// These are separated to avoid CSS import errors during Node.js type generation

export { TriggerWorkflowButton } from '../components/TriggerWorkflowButton.js'
export { StatusCell } from '../components/StatusCell.js'
export { ErrorDisplay } from '../components/ErrorDisplay.js'
export { WorkflowExecutionStatus } from '../components/WorkflowExecutionStatus.js'

// Future client components can be added here:
// export { default as WorkflowDashboard } from '../components/WorkflowDashboard/index.js'
32
src/fields/parameter.ts
Normal file
@@ -0,0 +1,32 @@
import type {Field} from "payload"


export const parameter = (slug: string, field: {name: string} & Field): Field => ({
  ...field,
  name: 'parameter' + field.name.replace(/^\w/, c => c.toUpperCase()) + Math.random().toString().replace(/\D/g, ''),
  admin: {
    ...(field.admin as unknown || {}),
    condition: (_, siblingData, __) => {
      const previous = field.admin?.condition?.call(null, _, siblingData, __)
      return (previous === undefined || previous) && (siblingData?.type === slug)
    },
  },
  hooks: {
    afterRead: [
      ({ siblingData }) => {
        const parameters = siblingData?.parameters || {}
        return parameters[field.name]
      }
    ],
    beforeChange: [
      ({ siblingData, value }) => {
        if (!siblingData.parameters) {
          siblingData.parameters = {}
        }
        siblingData.parameters[field.name] = value
        return undefined // Virtual field, don't store directly
      }
    ]
  },
  virtual: true,
} as Field)
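
The parameter() helper above wraps a Payload field so that it only renders when the sibling data's type matches the given slug, and so that its value is read from and written to a shared parameters object instead of being persisted as its own column (the field is virtual). A minimal usage sketch — the 'http-request-step' slug and the url field are illustrative, not taken from this diff:

import type {Field} from "payload"
import {parameter} from "./fields/parameter.js"

// Rendered only while siblingData.type === 'http-request-step';
// persisted as parameters.url via the beforeChange/afterRead hooks above
const urlField: Field = parameter('http-request-step', {
  name: 'url',
  type: 'text',
  required: true,
})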
16
src/index.ts
@@ -1,20 +1,20 @@
|
||||
// Main export contains only types and client-safe utilities
|
||||
// Server-side functions are exported via '@xtr-dev/payload-automation/server'
|
||||
|
||||
// Pure types only - completely safe for client bundling
|
||||
export type {
|
||||
CustomTriggerOptions,
|
||||
TriggerResult,
|
||||
ExecutionContext,
|
||||
WorkflowsPluginConfig
|
||||
} from './types/index.js'
|
||||
|
||||
export type {
|
||||
PayloadWorkflow as Workflow,
|
||||
WorkflowStep,
|
||||
WorkflowTrigger
|
||||
} from './core/workflow-executor.js'
|
||||
|
||||
// Pure types only - completely safe for client bundling
|
||||
export type {
|
||||
CustomTriggerOptions,
|
||||
ExecutionContext,
|
||||
TriggerResult,
|
||||
WorkflowsPluginConfig
|
||||
} from './types/index.js'
|
||||
|
||||
// Server-side functions are NOT re-exported here to avoid bundling issues
|
||||
// Import server-side functions from the /server export instead
|
||||
|
||||
|
||||
60
src/plugin/collection-hook.ts
Normal file
@@ -0,0 +1,60 @@
import {WorkflowExecutor} from "../core/workflow-executor.js"

export const createCollectionTriggerHook = (collectionSlug: string, hookType: string) => {
  return async (args: any) => {
    const req = 'req' in args ? args.req :
      'args' in args ? args.args.req :
        undefined
    if (!req) {
      throw new Error('No request object found in hook arguments')
    }
    const payload = req.payload
    const {docs: workflows} = await payload.find({
      collection: 'workflows',
      depth: 2,
      limit: 100,
      where: {
        'triggers.parameters.collectionSlug': {
          equals: collectionSlug
        },
        'triggers.parameters.hook': {
          equals: hookType
        },
        'triggers.type': {
          equals: 'collection-hook'
        }
      }
    })
    const executor = new WorkflowExecutor(payload, payload.logger)
    // invoke each workflow
    for (const workflow of workflows) {
      // Create execution context
      const context = {
        steps: {},
        trigger: {
          ...args,
          type: 'collection',
          collection: collectionSlug,
        }
      }

      try {
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        await executor.execute(workflow as any, context, req)
        payload.logger.info({
          workflowId: workflow.id,
          collection: collectionSlug,
          hookType
        }, 'Workflow executed successfully')
      } catch (error) {
        payload.logger.error({
          workflowId: workflow.id,
          collection: collectionSlug,
          hookType,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 'Workflow execution failed')
        // Don't throw to prevent breaking the original operation
      }
    }
  }
}
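
createCollectionTriggerHook is what the plugin pushes into a collection's hook arrays at config time, so every matching 'collection-hook' workflow runs after the original operation without being able to break it. Wiring it up by hand would look roughly like this — the posts collection is illustrative, not part of this diff:

import type {CollectionConfig} from "payload"
import {createCollectionTriggerHook} from "./plugin/collection-hook.js"

// Run matching workflows after every change to the posts collection
const Posts: CollectionConfig = {
  slug: 'posts',
  fields: [],
  hooks: {
    afterChange: [createCollectionTriggerHook('posts', 'afterChange')],
  },
}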
@@ -1,25 +1,21 @@
|
||||
import type {Field, TaskConfig} from "payload"
|
||||
import type {CollectionConfig, GlobalConfig, TaskConfig} from "payload"
|
||||
|
||||
export type CollectionTriggerConfigCrud = {
|
||||
create?: true
|
||||
delete?: true
|
||||
read?: true
|
||||
update?: true
|
||||
}
|
||||
import type {Trigger} from "../triggers/types.js"
|
||||
|
||||
export type CollectionTriggerConfig = CollectionTriggerConfigCrud | true
|
||||
export type TriggerConfig = (config: WorkflowsPluginConfig) => Trigger
|
||||
|
||||
export type CustomTriggerConfig = {
|
||||
inputs?: Field[]
|
||||
slug: string,
|
||||
}
|
||||
|
||||
export type WorkflowsPluginConfig<TSlug extends string> = {
|
||||
collectionTriggers: {
|
||||
[key in TSlug]?: CollectionTriggerConfig
|
||||
export type WorkflowsPluginConfig<TSlug extends string = string, TGlobal extends string = string> = {
|
||||
collectionTriggers?: {
|
||||
[key in TSlug]?: {
|
||||
[key in keyof CollectionConfig['hooks']]?: true
|
||||
} | true
|
||||
}
|
||||
globalTriggers?: {
|
||||
[key in TGlobal]?: {
|
||||
[key in keyof GlobalConfig['hooks']]?: true
|
||||
} | true
|
||||
}
|
||||
enabled?: boolean
|
||||
steps: TaskConfig<string>[],
|
||||
triggers?: CustomTriggerConfig[]
|
||||
webhookPrefix?: string
|
||||
steps: TaskConfig<string>[]
|
||||
triggers?: TriggerConfig[]
|
||||
}
|
||||
|
||||
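
Under the reworked WorkflowsPluginConfig, collection and global triggers are keyed first by slug and then by hook name, with true as a shorthand for enabling every hook on that slug. A hedged example of a plugin configuration — the slugs and the empty steps array are illustrative:

import type {WorkflowsPluginConfig} from "@xtr-dev/payload-automation"

const automationConfig: WorkflowsPluginConfig<'posts' | 'orders', 'site-settings'> = {
  collectionTriggers: {
    posts: {afterChange: true, afterDelete: true}, // only these hooks fire workflows
    orders: true,                                  // every collection hook fires workflows
  },
  globalTriggers: {
    'site-settings': {afterChange: true},
  },
  steps: [], // TaskConfig<string>[] describing the step task handlers
}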
@@ -1,633 +0,0 @@
|
||||
import type {Config, Payload, TaskConfig} from 'payload'
|
||||
|
||||
import cron from 'node-cron'
|
||||
|
||||
import {type PayloadWorkflow, WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {getConfigLogger} from './logger.js'
|
||||
|
||||
/**
|
||||
* Generate dynamic cron tasks for all workflows with cron triggers
|
||||
* This is called at config time to register all scheduled tasks
|
||||
*/
|
||||
export function generateCronTasks(config: Config): void {
|
||||
const logger = getConfigLogger()
|
||||
|
||||
// Note: We can't query the database at config time, so we'll need a different approach
|
||||
// We'll create a single task that handles all cron-triggered workflows
|
||||
const cronTask: TaskConfig = {
|
||||
slug: 'workflow-cron-executor',
|
||||
handler: async ({ input, req }) => {
|
||||
const { cronExpression, timezone, workflowId } = input as {
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
workflowId: string
|
||||
}
|
||||
|
||||
const logger = req.payload.logger.child({ plugin: '@xtr-dev/payload-automation' })
|
||||
|
||||
try {
|
||||
// Get the workflow
|
||||
const workflow = await req.payload.findByID({
|
||||
id: workflowId,
|
||||
collection: 'workflows',
|
||||
depth: 2,
|
||||
req
|
||||
})
|
||||
|
||||
if (!workflow) {
|
||||
throw new Error(`Workflow ${workflowId} not found`)
|
||||
}
|
||||
|
||||
// Create execution context for cron trigger
|
||||
const context = {
|
||||
steps: {},
|
||||
trigger: {
|
||||
type: 'cron',
|
||||
req,
|
||||
triggeredAt: new Date().toISOString()
|
||||
}
|
||||
}
|
||||
|
||||
// Create executor
|
||||
const executor = new WorkflowExecutor(req.payload, logger)
|
||||
|
||||
// Find the matching cron trigger and check its condition if present
|
||||
const triggers = workflow.triggers as Array<{
|
||||
condition?: string
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
type: string
|
||||
}>
|
||||
|
||||
const matchingTrigger = triggers?.find(trigger =>
|
||||
trigger.type === 'cron-trigger' &&
|
||||
trigger.cronExpression === cronExpression
|
||||
)
|
||||
|
||||
// Check trigger condition if present
|
||||
if (matchingTrigger?.condition) {
|
||||
const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context)
|
||||
|
||||
if (!conditionMet) {
|
||||
logger.info({
|
||||
condition: matchingTrigger.condition,
|
||||
cronExpression,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Cron trigger condition not met, skipping workflow execution')
|
||||
|
||||
// Re-queue for next execution but don't run workflow
|
||||
if (cronExpression) {
|
||||
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
|
||||
}
|
||||
|
||||
return {
|
||||
output: {
|
||||
executedAt: new Date().toISOString(),
|
||||
reason: 'Condition not met',
|
||||
status: 'skipped',
|
||||
workflowId
|
||||
},
|
||||
state: 'succeeded'
|
||||
}
|
||||
}
|
||||
|
||||
logger.info({
|
||||
condition: matchingTrigger.condition,
|
||||
cronExpression,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Cron trigger condition met')
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
await executor.execute(workflow as PayloadWorkflow, context, req)
|
||||
|
||||
// Re-queue the job for the next scheduled execution if cronExpression is provided
|
||||
if (cronExpression) {
|
||||
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
|
||||
}
|
||||
|
||||
return {
|
||||
output: {
|
||||
executedAt: new Date().toISOString(),
|
||||
status: 'completed',
|
||||
workflowId
|
||||
},
|
||||
state: 'succeeded'
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Cron job execution failed')
|
||||
|
||||
// Re-queue even on failure to ensure continuity (unless it's a validation error)
|
||||
if (cronExpression && !(error instanceof Error && error.message.includes('Invalid cron'))) {
|
||||
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
|
||||
.catch((requeueError) => {
|
||||
logger.error({
|
||||
error: requeueError instanceof Error ? requeueError.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Failed to re-queue cron job after execution failure')
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
output: {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
},
|
||||
state: 'failed'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the cron task to config if not already present
|
||||
if (!config.jobs) {
|
||||
config.jobs = { tasks: [] }
|
||||
}
|
||||
|
||||
if (!config.jobs.tasks) {
|
||||
config.jobs.tasks = []
|
||||
}
|
||||
|
||||
if (!config.jobs.tasks.find(task => task.slug === cronTask.slug)) {
|
||||
logger.debug(`Registering cron executor task: ${cronTask.slug}`)
|
||||
config.jobs.tasks.push(cronTask)
|
||||
} else {
|
||||
logger.debug(`Cron executor task ${cronTask.slug} already registered, skipping`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register cron jobs for workflows with cron triggers
|
||||
* This is called at runtime after PayloadCMS is initialized
|
||||
*/
|
||||
export async function registerCronJobs(payload: Payload, logger: Payload['logger']): Promise<void> {
|
||||
try {
|
||||
// Find all workflows with cron triggers
|
||||
const workflows = await payload.find({
|
||||
collection: 'workflows',
|
||||
depth: 0,
|
||||
limit: 1000,
|
||||
where: {
|
||||
'triggers.type': {
|
||||
equals: 'cron-trigger'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(`Found ${workflows.docs.length} workflows with cron triggers`)
|
||||
|
||||
for (const workflow of workflows.docs) {
|
||||
const triggers = workflow.triggers as Array<{
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
type: string
|
||||
}>
|
||||
|
||||
// Find all cron triggers for this workflow
|
||||
const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || []
|
||||
|
||||
for (const trigger of cronTriggers) {
|
||||
if (trigger.cronExpression) {
|
||||
try {
|
||||
// Validate cron expression before queueing
|
||||
if (!validateCronExpression(trigger.cronExpression)) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid cron expression format')
|
||||
continue
|
||||
}
|
||||
|
||||
// Validate timezone if provided
|
||||
if (trigger.timezone) {
|
||||
try {
|
||||
// Test if timezone is valid by trying to create a date with it
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.timezone })
|
||||
} catch {
|
||||
logger.error({
|
||||
timezone: trigger.timezone,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid timezone specified')
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate next execution time
|
||||
const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone)
|
||||
|
||||
// Queue the job
|
||||
await payload.jobs.queue({
|
||||
input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId: workflow.id },
|
||||
task: 'workflow-cron-executor',
|
||||
waitUntil: nextExecution
|
||||
})
|
||||
|
||||
logger.info({
|
||||
cronExpression: trigger.cronExpression,
|
||||
nextExecution: nextExecution.toISOString(),
|
||||
timezone: trigger.timezone || 'UTC',
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Queued initial cron job for workflow')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
timezone: trigger.timezone,
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Failed to queue cron job')
|
||||
}
|
||||
} else {
|
||||
logger.warn({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name
|
||||
}, 'Cron trigger found but no cron expression specified')
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 'Failed to register cron jobs')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a cron expression
|
||||
*/
|
||||
export function validateCronExpression(cronExpression: string): boolean {
|
||||
return cron.validate(cronExpression)
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the next time a cron expression should run
|
||||
*/
|
||||
function getNextCronTime(cronExpression: string, timezone?: string): Date {
|
||||
if (!validateCronExpression(cronExpression)) {
|
||||
throw new Error(`Invalid cron expression: ${cronExpression}`)
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const options: { timezone?: string } = timezone ? { timezone } : {}
|
||||
|
||||
// Create a task to find the next execution time
|
||||
const task = cron.schedule(cronExpression, () => {}, {
|
||||
...options
|
||||
})
|
||||
|
||||
// Parse cron expression parts
|
||||
const cronParts = cronExpression.trim().split(/\s+/)
|
||||
if (cronParts.length !== 5) {
|
||||
void task.destroy()
|
||||
throw new Error(`Invalid cron format: ${cronExpression}. Expected 5 parts.`)
|
||||
}
|
||||
|
||||
const [minutePart, hourPart, dayPart, monthPart, weekdayPart] = cronParts
|
||||
|
||||
// Calculate next execution with proper lookahead for any schedule frequency
|
||||
// Start from next minute and look ahead systematically
|
||||
let testTime = new Date(now.getTime() + 60 * 1000) // Start 1 minute from now
|
||||
testTime.setSeconds(0, 0) // Reset seconds and milliseconds
|
||||
|
||||
// Maximum iterations to prevent infinite loops (covers ~2 years)
|
||||
const maxIterations = 2 * 365 * 24 * 60 // 2 years worth of minutes
|
||||
let iterations = 0
|
||||
|
||||
while (iterations < maxIterations) {
|
||||
const minute = testTime.getMinutes()
|
||||
const hour = testTime.getHours()
|
||||
const dayOfMonth = testTime.getDate()
|
||||
const month = testTime.getMonth() + 1
|
||||
const dayOfWeek = testTime.getDay()
|
||||
|
||||
if (matchesCronPart(minute, minutePart) &&
|
||||
matchesCronPart(hour, hourPart) &&
|
||||
matchesCronPart(dayOfMonth, dayPart) &&
|
||||
matchesCronPart(month, monthPart) &&
|
||||
matchesCronPart(dayOfWeek, weekdayPart)) {
|
||||
void task.destroy()
|
||||
return testTime
|
||||
}
|
||||
|
||||
// Increment time intelligently based on cron pattern
|
||||
testTime = incrementTimeForCronPattern(testTime, cronParts)
|
||||
iterations++
|
||||
}
|
||||
|
||||
void task.destroy()
|
||||
throw new Error(`Could not calculate next execution time for cron expression: ${cronExpression} within reasonable timeframe`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Intelligently increment time based on cron pattern to avoid unnecessary iterations
|
||||
*/
|
||||
function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Date {
|
||||
const [minutePart, hourPart, _dayPart, _monthPart, _weekdayPart] = cronParts
|
||||
const nextTime = new Date(currentTime)
|
||||
|
||||
// If minute is specific (not wildcard), we can jump to next hour
|
||||
if (minutePart !== '*' && !minutePart.includes('/')) {
|
||||
const targetMinute = getNextValidCronValue(currentTime.getMinutes(), minutePart)
|
||||
if (targetMinute <= currentTime.getMinutes()) {
|
||||
// Move to next hour
|
||||
nextTime.setHours(nextTime.getHours() + 1, targetMinute, 0, 0)
|
||||
} else {
|
||||
nextTime.setMinutes(targetMinute, 0, 0)
|
||||
}
|
||||
return nextTime
|
||||
}
|
||||
|
||||
// If hour is specific and we're past it, jump to next day
|
||||
if (hourPart !== '*' && !hourPart.includes('/')) {
|
||||
const targetHour = getNextValidCronValue(currentTime.getHours(), hourPart)
|
||||
if (targetHour <= currentTime.getHours()) {
|
||||
// Move to next day
|
||||
nextTime.setDate(nextTime.getDate() + 1)
|
||||
nextTime.setHours(targetHour, 0, 0, 0)
|
||||
} else {
|
||||
nextTime.setHours(targetHour, 0, 0, 0)
|
||||
}
|
||||
return nextTime
|
||||
}
|
||||
|
||||
// Default: increment by 1 minute
|
||||
nextTime.setTime(nextTime.getTime() + 60 * 1000)
|
||||
return nextTime
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the next valid value for a cron part
|
||||
*/
|
||||
function getNextValidCronValue(currentValue: number, cronPart: string): number {
|
||||
if (cronPart === '*') {return currentValue + 1}
|
||||
|
||||
// Handle specific values and ranges
|
||||
const values = parseCronPart(cronPart)
|
||||
return values.find(v => v > currentValue) || values[0]
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a cron part into an array of valid values
|
||||
*/
|
||||
function parseCronPart(cronPart: string): number[] {
|
||||
if (cronPart === '*') {return []}
|
||||
|
||||
const values: number[] = []
|
||||
|
||||
// Handle comma-separated values
|
||||
if (cronPart.includes(',')) {
|
||||
cronPart.split(',').forEach(part => {
|
||||
values.push(...parseCronPart(part.trim()))
|
||||
})
|
||||
return values.sort((a, b) => a - b)
|
||||
}
|
||||
|
||||
// Handle ranges
|
||||
if (cronPart.includes('-')) {
|
||||
const [start, end] = cronPart.split('-').map(n => parseInt(n, 10))
|
||||
for (let i = start; i <= end; i++) {
|
||||
values.push(i)
|
||||
}
|
||||
return values
|
||||
}
|
||||
|
||||
// Handle step values
|
||||
if (cronPart.includes('/')) {
|
||||
const [range, step] = cronPart.split('/')
|
||||
const stepNum = parseInt(step, 10)
|
||||
|
||||
if (range === '*') {
|
||||
// For wildcards with steps, return empty - handled elsewhere
|
||||
return []
|
||||
}
|
||||
|
||||
const baseValues = parseCronPart(range)
|
||||
return baseValues.filter((_, index) => index % stepNum === 0)
|
||||
}
|
||||
|
||||
// Single value
|
||||
values.push(parseInt(cronPart, 10))
|
||||
return values
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a value matches a cron expression part
|
||||
*/
|
||||
function matchesCronPart(value: number, cronPart: string): boolean {
|
||||
if (cronPart === '*') {return true}
|
||||
|
||||
// Handle step values (e.g., */5)
|
||||
if (cronPart.includes('/')) {
|
||||
const [range, step] = cronPart.split('/')
|
||||
const stepNum = parseInt(step, 10)
|
||||
|
||||
if (range === '*') {
|
||||
return value % stepNum === 0
|
||||
}
|
||||
}
|
||||
|
||||
// Handle ranges (e.g., 1-5)
|
||||
if (cronPart.includes('-')) {
|
||||
const [start, end] = cronPart.split('-').map(n => parseInt(n, 10))
|
||||
return value >= start && value <= end
|
||||
}
|
||||
|
||||
// Handle comma-separated values (e.g., 1,3,5)
|
||||
if (cronPart.includes(',')) {
|
||||
const values = cronPart.split(',').map(n => parseInt(n, 10))
|
||||
return values.includes(value)
|
||||
}
|
||||
|
||||
// Handle single value
|
||||
const cronValue = parseInt(cronPart, 10)
|
||||
return value === cronValue
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle re-queueing of cron jobs after they execute
|
||||
* This ensures the job runs again at the next scheduled time
|
||||
*/
|
||||
export async function requeueCronJob(
|
||||
workflowId: string,
|
||||
cronExpression: string,
|
||||
timezone: string | undefined,
|
||||
payload: Payload,
|
||||
logger: Payload['logger']
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Queue the job to run at the next scheduled time
|
||||
await payload.jobs.queue({
|
||||
input: { cronExpression, timezone, workflowId },
|
||||
task: 'workflow-cron-executor',
|
||||
waitUntil: getNextCronTime(cronExpression, timezone)
|
||||
})
|
||||
|
||||
logger.debug({
|
||||
nextRun: getNextCronTime(cronExpression, timezone),
|
||||
timezone: timezone || 'UTC',
|
||||
workflowId
|
||||
}, 'Re-queued cron job')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Failed to re-queue cron job')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register or update cron jobs for a specific workflow
|
||||
*/
|
||||
export async function updateWorkflowCronJobs(
|
||||
workflowId: string,
|
||||
payload: Payload,
|
||||
logger: Payload['logger']
|
||||
): Promise<void> {
|
||||
try {
|
||||
// First, cancel any existing cron jobs for this workflow
|
||||
cancelWorkflowCronJobs(workflowId, payload, logger)
|
||||
|
||||
// Get the workflow
|
||||
const workflow = await payload.findByID({
|
||||
id: workflowId,
|
||||
collection: 'workflows',
|
||||
depth: 0
|
||||
})
|
||||
|
||||
if (!workflow) {
|
||||
logger.warn({ workflowId }, 'Workflow not found for cron job update')
|
||||
return
|
||||
}
|
||||
|
||||
const triggers = workflow.triggers as Array<{
|
||||
cronExpression?: string
|
||||
timezone?: string
|
||||
type: string
|
||||
}>
|
||||
|
||||
// Find all cron triggers for this workflow
|
||||
const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || []
|
||||
|
||||
if (cronTriggers.length === 0) {
|
||||
logger.debug({ workflowId }, 'No cron triggers found for workflow')
|
||||
return
|
||||
}
|
||||
|
||||
let scheduledJobs = 0
|
||||
|
||||
for (const trigger of cronTriggers) {
|
||||
if (trigger.cronExpression) {
|
||||
try {
|
||||
// Validate cron expression before queueing
|
||||
if (!validateCronExpression(trigger.cronExpression)) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid cron expression format')
|
||||
continue
|
||||
}
|
||||
|
||||
// Validate timezone if provided
|
||||
if (trigger.timezone) {
|
||||
try {
|
||||
new Intl.DateTimeFormat('en', { timeZone: trigger.timezone })
|
||||
} catch {
|
||||
logger.error({
|
||||
timezone: trigger.timezone,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Invalid timezone specified')
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate next execution time
|
||||
const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone)
|
||||
|
||||
// Queue the job
|
||||
await payload.jobs.queue({
|
||||
input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId },
|
||||
task: 'workflow-cron-executor',
|
||||
waitUntil: nextExecution
|
||||
})
|
||||
|
||||
scheduledJobs++
|
||||
|
||||
logger.info({
|
||||
cronExpression: trigger.cronExpression,
|
||||
nextExecution: nextExecution.toISOString(),
|
||||
timezone: trigger.timezone || 'UTC',
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Scheduled cron job for workflow')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
cronExpression: trigger.cronExpression,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
timezone: trigger.timezone,
|
||||
workflowId,
|
||||
workflowName: workflow.name
|
||||
}, 'Failed to schedule cron job')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (scheduledJobs > 0) {
|
||||
logger.info({ scheduledJobs, workflowId }, 'Updated cron jobs for workflow')
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Failed to update workflow cron jobs')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel all cron jobs for a specific workflow
|
||||
*/
|
||||
export function cancelWorkflowCronJobs(
|
||||
workflowId: string,
|
||||
payload: Payload,
|
||||
logger: Payload['logger']
|
||||
): void {
|
||||
try {
|
||||
// Note: PayloadCMS job system doesn't have a built-in way to cancel specific jobs by input
|
||||
// This is a limitation we need to work around
|
||||
// For now, we log that we would cancel jobs for this workflow
|
||||
logger.debug({ workflowId }, 'Would cancel existing cron jobs for workflow (PayloadCMS limitation: cannot selectively cancel jobs)')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Failed to cancel workflow cron jobs')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove cron jobs for a deleted workflow
|
||||
*/
|
||||
export function removeWorkflowCronJobs(
|
||||
workflowId: string,
|
||||
payload: Payload,
|
||||
logger: Payload['logger']
|
||||
): void {
|
||||
try {
|
||||
cancelWorkflowCronJobs(workflowId, payload, logger)
|
||||
logger.info({ workflowId }, 'Removed cron jobs for deleted workflow')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
workflowId
|
||||
}, 'Failed to remove workflow cron jobs')
|
||||
}
|
||||
}
|
||||
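
The scheduler removed here computed the next minute matching a five-field cron expression and queued the 'workflow-cron-executor' task with waitUntil set to that time, re-queueing itself after every run. A standalone sketch of the single-field matching it relied on — condensed for illustration, not the deleted implementation verbatim:

// Does one cron field match a value? Supports *, */n, a-b, a,b,c and plain numbers.
const matchesCronPart = (value: number, part: string): boolean => {
  if (part === '*') return true
  if (part.includes('/')) {
    const [range, step] = part.split('/')
    if (range === '*') return value % parseInt(step, 10) === 0
  }
  if (part.includes('-')) {
    const [start, end] = part.split('-').map(n => parseInt(n, 10))
    return value >= start && value <= end
  }
  if (part.includes(',')) {
    return part.split(',').map(n => parseInt(n, 10)).includes(value)
  }
  return value === parseInt(part, 10)
}

console.log(matchesCronPart(15, '*/5')) // true  - every fifth minute
console.log(matchesCronPart(7, '1-5'))  // false - outside the range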
95
src/plugin/global-hook.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import {WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
|
||||
export const createGlobalTriggerHook = (globalSlug: string, hookType: string) => {
|
||||
return async function payloadGlobalAutomationHook(args: any) {
|
||||
const req = 'req' in args ? args.req :
|
||||
'args' in args ? args.args.req :
|
||||
undefined
|
||||
if (!req) {
|
||||
throw new Error('No request object found in global hook arguments')
|
||||
}
|
||||
|
||||
const payload = req.payload
|
||||
const logger = payload.logger
|
||||
|
||||
try {
|
||||
logger.info({
|
||||
global: globalSlug,
|
||||
hookType,
|
||||
operation: hookType
|
||||
}, 'Global automation hook triggered')
|
||||
|
||||
// Create executor on-demand
|
||||
const executor = new WorkflowExecutor(payload, logger)
|
||||
|
||||
logger.debug('Executing triggered global workflows...')
|
||||
|
||||
// Find workflows with matching global triggers
|
||||
const {docs: workflows} = await payload.find({
|
||||
collection: 'workflows',
|
||||
depth: 2,
|
||||
limit: 100,
|
||||
where: {
|
||||
'triggers.parameters.global': {
|
||||
equals: globalSlug
|
||||
},
|
||||
'triggers.parameters.operation': {
|
||||
equals: hookType
|
||||
},
|
||||
'triggers.type': {
|
||||
equals: 'global-hook'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Execute each matching workflow
|
||||
for (const workflow of workflows) {
|
||||
// Create execution context
|
||||
const context = {
|
||||
steps: {},
|
||||
trigger: {
|
||||
...args,
|
||||
type: 'global',
|
||||
global: globalSlug,
|
||||
operation: hookType,
|
||||
req
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await executor.execute(workflow, context, req)
|
||||
logger.info({
|
||||
workflowId: workflow.id,
|
||||
global: globalSlug,
|
||||
hookType
|
||||
}, 'Global workflow executed successfully')
|
||||
} catch (error) {
|
||||
logger.error({
|
||||
workflowId: workflow.id,
|
||||
global: globalSlug,
|
||||
hookType,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 'Global workflow execution failed')
|
||||
// Don't throw to prevent breaking the original operation
|
||||
}
|
||||
}
|
||||
|
||||
logger.info({
|
||||
global: globalSlug,
|
||||
hookType
|
||||
}, 'Global workflow execution completed successfully')
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
|
||||
logger.error({
|
||||
global: globalSlug,
|
||||
hookType,
|
||||
error: errorMessage,
|
||||
errorStack: error instanceof Error ? error.stack : undefined
|
||||
}, 'Global hook execution failed')
|
||||
|
||||
// Don't throw to prevent breaking the original operation
|
||||
}
|
||||
}
|
||||
}
|
||||
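
The global variant mirrors the collection hook: the plugin pushes it into a global's hook arrays so that matching 'global-hook' workflows run after the global changes, with failures logged rather than breaking the original operation. Attaching it by hand would look roughly like this — the site-settings global is illustrative:

import type {GlobalConfig} from "payload"
import {createGlobalTriggerHook} from "./plugin/global-hook.js"

// Run matching workflows after the site-settings global is updated
const SiteSettings: GlobalConfig = {
  slug: 'site-settings',
  fields: [],
  hooks: {
    afterChange: [createGlobalTriggerHook('site-settings', 'afterChange')],
  },
}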
@@ -1,122 +1,16 @@
|
||||
import type {Config} from 'payload'
|
||||
import type {CollectionConfig, Config} from 'payload'
|
||||
|
||||
import type {WorkflowsPluginConfig, CollectionTriggerConfigCrud} from "./config-types.js"
|
||||
import type {WorkflowsPluginConfig} from "./config-types.js"
|
||||
|
||||
import {createWorkflowCollection} from '../collections/Workflow.js'
|
||||
import {WorkflowRunsCollection} from '../collections/WorkflowRuns.js'
|
||||
import {WorkflowExecutor} from '../core/workflow-executor.js'
|
||||
import {generateCronTasks, registerCronJobs} from './cron-scheduler.js'
|
||||
import {initCollectionHooks} from "./init-collection-hooks.js"
|
||||
import {initGlobalHooks} from "./init-global-hooks.js"
|
||||
import {initStepTasks} from "./init-step-tasks.js"
|
||||
import {initWebhookEndpoint} from "./init-webhook.js"
|
||||
import {initWorkflowHooks} from './init-workflow-hooks.js'
|
||||
import {getConfigLogger, initializeLogger} from './logger.js'
|
||||
import {createCollectionTriggerHook} from "./collection-hook.js"
|
||||
import {createGlobalTriggerHook} from "./global-hook.js"
|
||||
|
||||
export {getLogger} from './logger.js'
|
||||
|
||||
// Improved executor registry with proper error handling and logging
|
||||
interface ExecutorRegistry {
|
||||
executor: WorkflowExecutor | null
|
||||
logger: any | null
|
||||
isInitialized: boolean
|
||||
}
|
||||
|
||||
const executorRegistry: ExecutorRegistry = {
|
||||
executor: null,
|
||||
logger: null,
|
||||
isInitialized: false
|
||||
}
|
||||
|
||||
const setWorkflowExecutor = (executor: WorkflowExecutor, logger: any) => {
|
||||
executorRegistry.executor = executor
|
||||
executorRegistry.logger = logger
|
||||
executorRegistry.isInitialized = true
|
||||
|
||||
logger.info('Workflow executor initialized and registered successfully')
|
||||
}
|
||||
|
||||
const getExecutorRegistry = (): ExecutorRegistry => {
|
||||
return executorRegistry
|
||||
}
|
||||
|
||||
// Helper function to create failed workflow runs for tracking errors
|
||||
const createFailedWorkflowRun = async (args: any, errorMessage: string, logger: any) => {
|
||||
try {
|
||||
// Only create failed workflow runs if we have enough context
|
||||
if (!args?.req?.payload || !args?.collection?.slug) {
|
||||
return
|
||||
}
|
||||
|
||||
// Find workflows that should have been triggered
|
||||
const workflows = await args.req.payload.find({
|
||||
collection: 'workflows',
|
||||
where: {
|
||||
'triggers.type': {
|
||||
equals: 'collection-trigger'
|
||||
},
|
||||
'triggers.collectionSlug': {
|
||||
equals: args.collection.slug
|
||||
},
|
||||
'triggers.operation': {
|
||||
equals: args.operation
|
||||
}
|
||||
},
|
||||
limit: 10,
|
||||
req: args.req
|
||||
})
|
||||
|
||||
// Create failed workflow runs for each matching workflow
|
||||
for (const workflow of workflows.docs) {
|
||||
await args.req.payload.create({
|
||||
collection: 'workflow-runs',
|
||||
data: {
|
||||
workflow: workflow.id,
|
||||
workflowVersion: 1,
|
||||
status: 'failed',
|
||||
startedAt: new Date().toISOString(),
|
||||
completedAt: new Date().toISOString(),
|
||||
error: `Hook execution failed: ${errorMessage}`,
|
||||
triggeredBy: args?.req?.user?.email || 'system',
|
||||
context: {
|
||||
trigger: {
|
||||
type: 'collection',
|
||||
collection: args.collection.slug,
|
||||
operation: args.operation,
|
||||
doc: args.doc,
|
||||
previousDoc: args.previousDoc,
|
||||
triggeredAt: new Date().toISOString()
|
||||
},
|
||||
steps: {}
|
||||
},
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
steps: [],
|
||||
logs: [{
|
||||
level: 'error',
|
||||
message: `Hook execution failed: ${errorMessage}`,
|
||||
timestamp: new Date().toISOString()
|
||||
}]
|
||||
},
|
||||
req: args.req
|
||||
})
|
||||
}
|
||||
|
||||
if (workflows.docs.length > 0) {
|
||||
logger.info({
|
||||
workflowCount: workflows.docs.length,
|
||||
errorMessage
|
||||
}, 'Created failed workflow runs for hook execution error')
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
// Don't let workflow run creation failures break the original operation
|
||||
logger.warn({
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
}, 'Failed to create failed workflow run record')
|
||||
}
|
||||
}
|
||||
|
||||
const applyCollectionsConfig = <T extends string>(pluginOptions: WorkflowsPluginConfig<T>, config: Config) => {
|
||||
// Add workflow collections
|
||||
if (!config.collections) {
|
||||
@@ -129,8 +23,16 @@ const applyCollectionsConfig = <T extends string>(pluginOptions: WorkflowsPlugin
|
||||
)
|
||||
}
|
||||
|
||||
// Removed config-phase hook registration - user collections don't exist during config phase
|
||||
type AnyHook =
|
||||
CollectionConfig['hooks'] extends infer H
|
||||
? H extends Record<string, unknown>
|
||||
? NonNullable<H[keyof H]> extends (infer U)[]
|
||||
? U
|
||||
: never
|
||||
: never
|
||||
: never;
|
||||
|
||||
type HookArgs = Parameters<AnyHook>[0]
|
||||
|
||||
export const workflowsPlugin =
|
||||
<TSlug extends string>(pluginOptions: WorkflowsPluginConfig<TSlug>) =>
|
||||
@@ -141,111 +43,134 @@ export const workflowsPlugin =
}

applyCollectionsConfig<TSlug>(pluginOptions, config)


// CRITICAL: Modify existing collection configs BEFORE PayloadCMS processes them
// This is the ONLY time we can add hooks that will actually work
const logger = getConfigLogger()
logger.info('Attempting to modify collection configs before PayloadCMS initialization...')


if (config.collections && pluginOptions.collectionTriggers) {
for (const [triggerSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
if (!triggerConfig) continue

// Find the collection config that matches
const collectionIndex = config.collections.findIndex(c => c.slug === triggerSlug)
if (collectionIndex === -1) {
logger.warn(`Collection '${triggerSlug}' not found in config.collections`)
for (const [collectionSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
if (!triggerConfig) {
continue
}


// Find the collection config that matches
const collectionIndex = config.collections.findIndex(c => c.slug === collectionSlug)
if (collectionIndex === -1) {
logger.warn(`Collection '${collectionSlug}' not found in config.collections`)
continue
}

const collection = config.collections[collectionIndex]
logger.info(`Found collection '${triggerSlug}' - modifying its hooks...`)


// Initialize hooks if needed
if (!collection.hooks) {
collection.hooks = {}
}
if (!collection.hooks.afterChange) {
collection.hooks.afterChange = []
}

// Create a reliable hook function with proper dependency injection
const automationHook = Object.assign(
async function payloadAutomationHook(args: any) {
const registry = getExecutorRegistry()

// Use proper logger if available, fallback to args.req.payload.logger
const logger = registry.logger || args?.req?.payload?.logger || console

try {
logger.info({
collection: args?.collection?.slug,
operation: args?.operation,
docId: args?.doc?.id,
hookType: 'automation'
}, 'Collection automation hook triggered')

if (!registry.isInitialized) {
logger.warn('Workflow executor not yet initialized, skipping execution')
return undefined
}

if (!registry.executor) {
logger.error('Workflow executor is null despite being marked as initialized')
// Create a failed workflow run to track this issue
await createFailedWorkflowRun(args, 'Executor not available', logger)
return undefined
}

logger.debug('Executing triggered workflows...')
await registry.executor.executeTriggeredWorkflows(
args.collection.slug,
args.operation,
args.doc,
args.previousDoc,
args.req
)

logger.info({
collection: args?.collection?.slug,
operation: args?.operation,
docId: args?.doc?.id
}, 'Workflow execution completed successfully')

} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'

logger.error({
error: errorMessage,
errorStack: error instanceof Error ? error.stack : undefined,
collection: args?.collection?.slug,
operation: args?.operation,
docId: args?.doc?.id
}, 'Hook execution failed')

// Create a failed workflow run to track this error
try {
await createFailedWorkflowRun(args, errorMessage, logger)
} catch (createError) {
logger.error({
error: createError instanceof Error ? createError.message : 'Unknown error'
}, 'Failed to create workflow run for hook error')
}

// Don't throw to prevent breaking the original operation

// Determine which hooks to register based on config
const hooksToRegister = triggerConfig === true
? {
afterChange: true,
afterDelete: true,
afterRead: true,
}

return undefined
},
{
__isAutomationHook: true,
__version: '0.0.22'
: triggerConfig

// Register each configured hook
Object.entries(hooksToRegister).forEach(([hookName, enabled]) => {
if (!enabled) {
return
}
)

// Add the hook to the collection config
collection.hooks.afterChange.push(automationHook)
logger.info(`Added automation hook to '${triggerSlug}' - hook count: ${collection.hooks.afterChange.length}`)

const hookKey = hookName as keyof typeof collection.hooks

// Initialize the hook array if needed
if (!collection.hooks![hookKey]) {
collection.hooks![hookKey] = []
}

// Create the automation hook for this specific collection and hook type
const automationHook = createCollectionTriggerHook(collectionSlug, hookKey)

// Mark it for debugging
Object.defineProperty(automationHook, '__isAutomationHook', {
value: true,
enumerable: false
})
Object.defineProperty(automationHook, '__hookType', {
value: hookKey,
enumerable: false
})

// Add the hook to the collection
;(collection.hooks![hookKey] as Array<unknown>).push(automationHook)

logger.debug(`Registered ${hookKey} hook for collection '${collectionSlug}'`)
})
}
}

// Handle global triggers similarly to collection triggers
if (config.globals && pluginOptions.globalTriggers) {
for (const [globalSlug, triggerConfig] of Object.entries(pluginOptions.globalTriggers)) {
if (!triggerConfig) {
continue
}

// Find the global config that matches
const globalIndex = config.globals.findIndex(g => g.slug === globalSlug)
if (globalIndex === -1) {
logger.warn(`Global '${globalSlug}' not found in config.globals`)
continue
}

const global = config.globals[globalIndex]

// Initialize hooks if needed
if (!global.hooks) {
global.hooks = {}
}

// Determine which hooks to register based on config
const hooksToRegister = triggerConfig === true
? {
afterChange: true,
afterRead: true,
}
: triggerConfig

// Register each configured hook
Object.entries(hooksToRegister).forEach(([hookName, enabled]) => {
if (!enabled) {
return
}

const hookKey = hookName as keyof typeof global.hooks

// Initialize the hook array if needed
if (!global.hooks![hookKey]) {
global.hooks![hookKey] = []
}

// Create the automation hook for this specific global and hook type
const automationHook = createGlobalTriggerHook(globalSlug, hookKey)

// Mark it for debugging
Object.defineProperty(automationHook, '__isAutomationHook', {
value: true,
enumerable: false
})
Object.defineProperty(automationHook, '__hookType', {
value: hookKey,
enumerable: false
})

// Add the hook to the global
;(global.hooks![hookKey] as Array<unknown>).push(automationHook)

logger.debug(`Registered ${hookKey} hook for global '${globalSlug}'`)
})
}
}

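The registration loops above expand `triggerConfig === true` into a full set of hook flags, so the plugin options can either enable everything for a slug or pick individual hooks. A hedged sketch of the options shape those loops consume (the `posts`, `orders`, and `site-settings` slugs are placeholders, not taken from this diff):

```ts
// Sketch only: trigger options as read by the config-phase loops above.
// `true` enables the default hook set; an object enables hooks selectively.
const triggerOptions = {
  collectionTriggers: {
    posts: true,                            // expands to afterChange + afterDelete + afterRead
    orders: { afterChange: true },          // only afterChange is registered
  },
  globalTriggers: {
    'site-settings': true,                  // expands to afterChange + afterRead for globals
  },
}
```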
@@ -253,32 +178,18 @@ export const workflowsPlugin =
config.jobs = {tasks: []}
}

const configLogger = getConfigLogger()
configLogger.info(`Configuring workflow plugin with ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers`)

// Generate cron tasks for workflows with cron triggers
generateCronTasks(config)

for (const step of pluginOptions.steps) {
if (!config.jobs?.tasks?.find(task => task.slug === step.slug)) {
configLogger.debug(`Registering task: ${step.slug}`)
config.jobs?.tasks?.push(step)
} else {
configLogger.debug(`Task ${step.slug} already registered, skipping`)
}
}

// Initialize webhook endpoint
initWebhookEndpoint(config, pluginOptions.webhookPrefix || 'webhook')

// Set up onInit to register collection hooks and initialize features
// Set up onInit to initialize features
const incomingOnInit = config.onInit
config.onInit = async (payload) => {
configLogger.info(`onInit called - collections: ${Object.keys(payload.collections).length}`)

// Execute any existing onInit functions first
if (incomingOnInit) {
configLogger.debug('Executing existing onInit function')
await incomingOnInit(payload)
}

@@ -286,33 +197,8 @@ export const workflowsPlugin =
const logger = initializeLogger(payload)
logger.info('Logger initialized with payload instance')

// Log collection trigger configuration
logger.info(`Plugin configuration: ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers, ${pluginOptions.steps?.length || 0} steps`)

// Create workflow executor instance
console.log('🚨 CREATING WORKFLOW EXECUTOR INSTANCE')
const executor = new WorkflowExecutor(payload, logger)
console.log('🚨 EXECUTOR CREATED:', typeof executor)
console.log('🚨 EXECUTOR METHODS:', Object.getOwnPropertyNames(Object.getPrototypeOf(executor)))

// Register executor with proper dependency injection
setWorkflowExecutor(executor, logger)

// Hooks are now registered during config phase - just log status
logger.info('Hooks were registered during config phase - executor now available')

logger.info('Initializing global hooks...')
initGlobalHooks(payload, logger, executor)

logger.info('Initializing workflow hooks...')
initWorkflowHooks(payload, logger)

logger.info('Initializing step tasks...')
initStepTasks(pluginOptions, payload, logger)

// Register cron jobs for workflows with cron triggers
logger.info('Registering cron jobs...')
await registerCronJobs(payload, logger)
// Log trigger configuration
logger.info(`Plugin configuration: ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers, ${Object.keys(pluginOptions.globalTriggers || {}).length} global triggers, ${pluginOptions.steps?.length || 0} steps`)

logger.info('Plugin initialized successfully - all hooks registered')
}

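Taken together, the config-phase code above wires trigger hooks, step tasks, and the webhook endpoint before Payload initializes, while `onInit` only creates the executor and cron jobs. A hedged sketch of how the plugin might be registered (option names come from this diff; the collection slug is a placeholder, and the root export of `workflowsPlugin` from the package is an assumption):

```ts
// payload.config.ts — sketch, not a verified setup
import { buildConfig } from 'payload'
import { workflowsPlugin } from '@xtr-dev/payload-automation'

export default buildConfig({
  // ...collections, db adapter, etc.
  plugins: [
    workflowsPlugin({
      collectionTriggers: { posts: true },   // 'posts' is a placeholder slug
      globalTriggers: {},
      steps: [],                             // task configs; each is pushed into config.jobs.tasks by slug
      webhookPrefix: 'webhook',              // endpoint registered at `${webhookPrefix}/:path`
    }),
  ],
})
```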
@@ -1,148 +0,0 @@
import type {Payload} from "payload"
import type {Logger} from "pino"

import type { WorkflowExecutor } from "../core/workflow-executor.js"
import type {CollectionTriggerConfigCrud, WorkflowsPluginConfig} from "./config-types.js"

export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPluginConfig<T>, payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) {

if (!pluginOptions.collectionTriggers || Object.keys(pluginOptions.collectionTriggers).length === 0) {
logger.warn('No collection triggers configured in plugin options')
return
}

logger.info({
configuredCollections: Object.keys(pluginOptions.collectionTriggers),
availableCollections: Object.keys(payload.collections)
}, 'Starting collection hook registration')

// Add hooks to configured collections
for (const [collectionSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
if (!triggerConfig) {
logger.debug({collectionSlug}, 'Skipping collection with falsy trigger config')
continue
}

const collection = payload.collections[collectionSlug as T]
const crud: CollectionTriggerConfigCrud = triggerConfig === true ? {
create: true,
delete: true,
read: true,
update: true,
} : triggerConfig

if (!collection.config.hooks) {
collection.config.hooks = {} as typeof collection.config.hooks
}

if (crud.update || crud.create) {
collection.config.hooks.afterChange = collection.config.hooks.afterChange || []
collection.config.hooks.afterChange.push(async (change) => {
const operation = change.operation as 'create' | 'update'

// AGGRESSIVE LOGGING - this should ALWAYS appear
console.log('🚨 AUTOMATION PLUGIN HOOK CALLED! 🚨')
console.log('Collection:', change.collection.slug)
console.log('Operation:', operation)
console.log('Doc ID:', change.doc?.id)
console.log('Has executor?', !!executor)
console.log('Executor type:', typeof executor)

logger.info({
slug: change.collection.slug,
operation,
docId: change.doc?.id,
previousDocId: change.previousDoc?.id,
hasExecutor: !!executor,
executorType: typeof executor
}, 'AUTOMATION PLUGIN: Collection hook triggered')

try {
console.log('🚨 About to call executeTriggeredWorkflows')

// Execute workflows for this trigger
await executor.executeTriggeredWorkflows(
change.collection.slug,
operation,
change.doc,
change.previousDoc,
change.req
)

console.log('🚨 executeTriggeredWorkflows completed without error')

logger.info({
slug: change.collection.slug,
operation,
docId: change.doc?.id
}, 'AUTOMATION PLUGIN: executeTriggeredWorkflows completed successfully')
} catch (error) {
console.log('🚨 AUTOMATION PLUGIN ERROR:', error)

logger.error({
slug: change.collection.slug,
operation,
docId: change.doc?.id,
error: error instanceof Error ? error.message : 'Unknown error',
stack: error instanceof Error ? error.stack : undefined
}, 'AUTOMATION PLUGIN: executeTriggeredWorkflows failed')
// Don't re-throw to avoid breaking other hooks
}
})
}

if (crud.read) {
collection.config.hooks.afterRead = collection.config.hooks.afterRead || []
collection.config.hooks.afterRead.push(async (change) => {
logger.debug({
slug: change.collection.slug,
operation: 'read',
}, 'Collection hook triggered')

// Execute workflows for this trigger
await executor.executeTriggeredWorkflows(
change.collection.slug,
'read',
change.doc,
undefined,
change.req
)
})
}

if (crud.delete) {
collection.config.hooks.afterDelete = collection.config.hooks.afterDelete || []
collection.config.hooks.afterDelete.push(async (change) => {
logger.debug({
slug: change.collection.slug,
operation: 'delete',
}, 'Collection hook triggered')

// Execute workflows for this trigger
await executor.executeTriggeredWorkflows(
change.collection.slug,
'delete',
change.doc,
undefined,
change.req
)
})
}

if (collection) {
logger.info({
collectionSlug,
hooksRegistered: {
afterChange: crud.update || crud.create,
afterRead: crud.read,
afterDelete: crud.delete
}
}, 'Collection hooks registered successfully')
} else {
logger.error({
collectionSlug,
availableCollections: Object.keys(payload.collections)
}, 'Collection not found for trigger configuration - check collection slug spelling')
}
}
}

@@ -1,112 +0,0 @@
import type { Payload, PayloadRequest } from "payload"
import type { Logger } from "pino"

import type { WorkflowExecutor, PayloadWorkflow } from "../core/workflow-executor.js"

export function initGlobalHooks(payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) {
// Get all globals from the config
const globals = payload.config.globals || []

for (const globalConfig of globals) {
const globalSlug = globalConfig.slug

// Add afterChange hook to global
if (!globalConfig.hooks) {
globalConfig.hooks = {
afterChange: [],
afterRead: [],
beforeChange: [],
beforeRead: [],
beforeValidate: []
}
}

if (!globalConfig.hooks.afterChange) {
globalConfig.hooks.afterChange = []
}

globalConfig.hooks.afterChange.push(async (change) => {
logger.debug({
global: globalSlug,
operation: 'update'
}, 'Global hook triggered')

// Execute workflows for this global trigger
await executeTriggeredGlobalWorkflows(
globalSlug,
'update',
change.doc,
change.previousDoc,
change.req,
payload,
logger,
executor
)
})

logger.info({ globalSlug }, 'Global hooks registered')
}
}

async function executeTriggeredGlobalWorkflows(
globalSlug: string,
operation: 'update',
doc: Record<string, any>,
previousDoc: Record<string, any>,
req: PayloadRequest,
payload: Payload,
logger: Payload['logger'],
executor: WorkflowExecutor
): Promise<void> {
try {
// Find workflows with matching global triggers
const workflows = await payload.find({
collection: 'workflows',
depth: 2,
limit: 100,
req,
where: {
'triggers.global': {
equals: globalSlug
},
'triggers.globalOperation': {
equals: operation
},
'triggers.type': {
equals: 'global-trigger'
}
}
})

for (const workflow of workflows.docs) {
logger.info({
globalSlug,
operation,
workflowId: workflow.id,
workflowName: workflow.name
}, 'Triggering global workflow')

// Create execution context
const context = {
steps: {},
trigger: {
type: 'global',
doc,
global: globalSlug,
operation,
previousDoc,
req
}
}

// Execute the workflow
await executor.execute(workflow as PayloadWorkflow, context, req)
}
} catch (error) {
logger.error({
error: error instanceof Error ? error.message : 'Unknown error',
globalSlug,
operation
}, 'Failed to execute triggered global workflows')
}
}

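The `payload.find()` query above matches workflow documents whose `triggers` array contains a `global-trigger` entry for the changed global. A sketch of a document that query would match (the workflow name and global slug are placeholders):

```ts
// Sketch of a matching 'workflows' document; only the trigger fields used in the
// where-clause above are shown.
const workflow = {
  name: 'Notify on settings change',      // placeholder
  triggers: [
    {
      type: 'global-trigger',
      global: 'site-settings',            // placeholder global slug
      globalOperation: 'update',
    },
  ],
  // ...steps omitted
}
```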
@@ -1,20 +0,0 @@
import type {Payload} from "payload"
import type {Logger} from "pino"

import type {WorkflowsPluginConfig} from "./config-types.js"

export function initStepTasks<T extends string>(pluginOptions: WorkflowsPluginConfig<T>, payload: Payload, logger: Payload['logger']) {
logger.info({ stepCount: pluginOptions.steps.length, steps: pluginOptions.steps.map(s => s.slug) }, 'Step tasks were registered during config phase')

// Verify that the tasks are available in the job system
const availableTasks = payload.config.jobs?.tasks?.map(t => t.slug) || []
const pluginTasks = pluginOptions.steps.map(s => s.slug)

pluginTasks.forEach(taskSlug => {
if (availableTasks.includes(taskSlug)) {
logger.info({ taskSlug }, 'Step task confirmed available in job system')
} else {
logger.error({ taskSlug }, 'Step task not found in job system - this will cause execution failures')
}
})
}

@@ -1,176 +0,0 @@
import type {Config, PayloadRequest} from 'payload'

import {type PayloadWorkflow, WorkflowExecutor} from '../core/workflow-executor.js'
import {getConfigLogger, initializeLogger} from './logger.js'

export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'): void {
const logger = getConfigLogger()
// Ensure the prefix starts with a slash
const normalizedPrefix = webhookPrefix.startsWith('/') ? webhookPrefix : `/${webhookPrefix}`
logger.debug(`Adding webhook endpoint to config with prefix: ${normalizedPrefix}`)
logger.debug('Current config.endpoints length:', config.endpoints?.length || 0)

// Define webhook endpoint
const webhookEndpoint = {
handler: async (req: PayloadRequest) => {
const {path} = req.routeParams as { path: string }
const webhookData = req.body || {}

logger.debug('Webhook endpoint handler called, path: ' + path)

try {
// Find workflows with matching webhook triggers
const workflows = await req.payload.find({
collection: 'workflows',
depth: 2,
limit: 100,
req,
where: {
'triggers.type': {
equals: 'webhook-trigger'
},
'triggers.webhookPath': {
equals: path
}
}
})

if (workflows.docs.length === 0) {
return new Response(
JSON.stringify({error: 'No workflows found for this webhook path'}),
{
headers: {'Content-Type': 'application/json'},
status: 404
}
)
}

// Create a workflow executor for this request
const logger = initializeLogger(req.payload)
const executor = new WorkflowExecutor(req.payload, logger)

const executionPromises = workflows.docs.map(async (workflow) => {
try {
// Create execution context for the webhook trigger
const context = {
steps: {},
trigger: {
type: 'webhook',
data: webhookData,
headers: Object.fromEntries(req.headers?.entries() || []),
path,
req
}
}

// Find the matching trigger and check its condition if present
const triggers = workflow.triggers as Array<{
condition?: string
type: string
webhookPath?: string
}>

const matchingTrigger = triggers?.find(trigger =>
trigger.type === 'webhook-trigger' &&
trigger.webhookPath === path
)

// Check trigger condition if present
if (matchingTrigger?.condition) {
logger.debug({
condition: matchingTrigger.condition,
path,
webhookData: JSON.stringify(webhookData).substring(0, 200),
headers: Object.keys(context.trigger.headers || {}),
workflowId: workflow.id,
workflowName: workflow.name
}, 'Evaluating webhook trigger condition')

const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context)

if (!conditionMet) {
logger.info({
condition: matchingTrigger.condition,
path,
webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200),
workflowId: workflow.id,
workflowName: workflow.name
}, 'Webhook trigger condition not met, skipping workflow')

return { reason: 'Condition not met', status: 'skipped', workflowId: workflow.id }
}

logger.info({
condition: matchingTrigger.condition,
path,
webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200),
workflowId: workflow.id,
workflowName: workflow.name
}, 'Webhook trigger condition met')
}

// Execute the workflow
await executor.execute(workflow as PayloadWorkflow, context, req)

return { status: 'triggered', workflowId: workflow.id }
} catch (error) {
return {
error: error instanceof Error ? error.message : 'Unknown error',
status: 'failed',
workflowId: workflow.id
}
}
})

const results = await Promise.allSettled(executionPromises)
const resultsData = results.map((result, index) => {
const baseResult = { workflowId: workflows.docs[index].id }
if (result.status === 'fulfilled') {
return { ...baseResult, ...result.value }
} else {
return { ...baseResult, error: result.reason, status: 'failed' }
}
})

return new Response(
JSON.stringify({
message: `Triggered ${workflows.docs.length} workflow(s)`,
results: resultsData
}),
{
headers: { 'Content-Type': 'application/json' },
status: 200
}
)

} catch (error) {
return new Response(
JSON.stringify({
details: error instanceof Error ? error.message : 'Unknown error',
error: 'Failed to process webhook'
}),
{
headers: { 'Content-Type': 'application/json' },
status: 500
}
)
}
},
method: 'post' as const,
path: `${normalizedPrefix}/:path`
}

// Check if the webhook endpoint already exists to avoid duplicates
const existingEndpoint = config.endpoints?.find(endpoint =>
endpoint.path === webhookEndpoint.path && endpoint.method === webhookEndpoint.method
)

if (!existingEndpoint) {
// Combine existing endpoints with the webhook endpoint
config.endpoints = [...(config.endpoints || []), webhookEndpoint]
logger.debug(`Webhook endpoint added at path: ${webhookEndpoint.path}`)
logger.debug('New config.endpoints length:', config.endpoints.length)
} else {
logger.debug(`Webhook endpoint already exists at path: ${webhookEndpoint.path}`)
}
}

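For context, the handler shown here registers a POST endpoint at `${webhookPrefix}/:path` and matches workflows by their `webhook-trigger` entries, exposing the request body to steps as `$.trigger.data`. A hedged sketch of invoking it (the `/api` mount point and the `order-created` path are assumptions, not taken from this diff):

```ts
// Sketch only: assumes a workflow exists with a trigger of
// { type: 'webhook-trigger', webhookPath: 'order-created' }.
await fetch('http://localhost:3000/api/webhook/order-created', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ orderId: 'abc-123' }), // reachable from steps as $.trigger.data.orderId
})
```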
@@ -1,56 +0,0 @@
import type {Payload} from 'payload'

import {updateWorkflowCronJobs, removeWorkflowCronJobs} from './cron-scheduler.js'

/**
* Initialize hooks for the workflows collection itself
* to manage cron jobs when workflows are created/updated
*/
export function initWorkflowHooks(payload: Payload, logger: Payload['logger']): void {
// Add afterChange hook to workflows collection to update cron jobs
const workflowsCollection = payload.collections.workflows

if (!workflowsCollection) {
logger.warn('Workflows collection not found, cannot initialize workflow hooks')
return
}

// Add afterChange hook to register/update cron jobs
if (!workflowsCollection.config.hooks?.afterChange) {
if (!workflowsCollection.config.hooks) {
// @ts-expect-error - hooks object will be populated by Payload
workflowsCollection.config.hooks = {}
}
workflowsCollection.config.hooks.afterChange = []
}

workflowsCollection.config.hooks.afterChange.push(async ({ doc, operation }) => {
if (operation === 'create' || operation === 'update') {
logger.debug({
operation,
workflowId: doc.id,
workflowName: doc.name
}, 'Workflow changed, updating cron jobs selectively')

// Update cron jobs for this specific workflow only
await updateWorkflowCronJobs(doc.id, payload, logger)
}
})

// Add afterDelete hook to clean up cron jobs
if (!workflowsCollection.config.hooks?.afterDelete) {
workflowsCollection.config.hooks.afterDelete = []
}

workflowsCollection.config.hooks.afterDelete.push(async ({ doc }) => {
logger.debug({
workflowId: doc.id,
workflowName: doc.name
}, 'Workflow deleted, removing cron jobs')

// Remove cron jobs for the deleted workflow
removeWorkflowCronJobs(doc.id, payload, logger)
})

logger.info('Workflow hooks initialized for cron job management')
}

@@ -3,25 +3,40 @@ import type { Payload } from 'payload'
// Global logger instance - use Payload's logger type
let pluginLogger: null | Payload['logger'] = null

/**
* Get the configured log level from environment variables
* Supports: PAYLOAD_AUTOMATION_LOG_LEVEL for unified control
* Or separate: PAYLOAD_AUTOMATION_CONFIG_LOG_LEVEL and PAYLOAD_AUTOMATION_LOG_LEVEL
*/
function getConfigLogLevel(): string {
return process.env.PAYLOAD_AUTOMATION_CONFIG_LOG_LEVEL ||
process.env.PAYLOAD_AUTOMATION_LOG_LEVEL ||
'warn' // Default to warn level for production
}

/**
* Simple config-time logger for use during plugin configuration
* Uses console with plugin prefix since Payload logger isn't available yet
*/
const configLogger = {
debug: <T>(message: string, ...args: T[]) => {
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
console.log(`[payload-automation] ${message}`, ...args)
const level = getConfigLogLevel()
if (level === 'silent' || (level !== 'debug' && level !== 'trace')) {return}
console.debug(`[payload-automation] ${message}`, ...args)
},
error: <T>(message: string, ...args: T[]) => {
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
const level = getConfigLogLevel()
if (level === 'silent') {return}
console.error(`[payload-automation] ${message}`, ...args)
},
info: <T>(message: string, ...args: T[]) => {
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
console.log(`[payload-automation] ${message}`, ...args)
const level = getConfigLogLevel()
if (level === 'silent' || level === 'error' || level === 'warn') {return}
console.info(`[payload-automation] ${message}`, ...args)
},
warn: <T>(message: string, ...args: T[]) => {
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
const level = getConfigLogLevel()
if (level === 'silent' || level === 'error') {return}
console.warn(`[payload-automation] ${message}`, ...args)
}
}

@@ -39,8 +54,13 @@ export function getConfigLogger() {
*/
export function initializeLogger(payload: Payload): Payload['logger'] {
// Create a child logger with plugin identification
// Use PAYLOAD_AUTOMATION_LOG_LEVEL as the primary env var
const logLevel = process.env.PAYLOAD_AUTOMATION_LOG_LEVEL ||
process.env.PAYLOAD_AUTOMATION_LOGGING || // Legacy support
'warn' // Default to warn level for production

pluginLogger = payload.logger.child({
level: process.env.PAYLOAD_AUTOMATION_LOGGING || 'silent',
level: logLevel,
plugin: '@xtr-dev/payload-automation'
})
return pluginLogger

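The fallback chains above give the following precedence; a quick sketch (the values are examples, the variable names come from this diff):

```ts
// Config-phase logging: CONFIG_LOG_LEVEL wins, then the shared LOG_LEVEL, then 'warn'.
process.env.PAYLOAD_AUTOMATION_CONFIG_LOG_LEVEL = 'silent'

// Runtime logging: LOG_LEVEL wins, then the legacy PAYLOAD_AUTOMATION_LOGGING, then 'warn'.
process.env.PAYLOAD_AUTOMATION_LOG_LEVEL = 'debug'
```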
@@ -19,6 +19,8 @@ interface HttpRequestInput {
}

export const httpStepHandler: TaskHandler<'http-request-step'> = async ({input, req}) => {
const startTime = Date.now() // Move startTime to outer scope

try {
if (!input || !input.url) {
return {
@@ -36,7 +38,6 @@ export const httpStepHandler: TaskHandler<'http-request-step'> = async ({input,
}

const typedInput = input as HttpRequestInput
const startTime = Date.now()

// Validate URL
try {
@@ -260,7 +261,7 @@ export const httpStepHandler: TaskHandler<'http-request-step'> = async ({input,
req?.payload?.logger?.error({
error: error.message,
stack: error.stack,
input: typedInput?.url || 'unknown'
input: (input as any)?.url || 'unknown'
}, 'Unexpected error in HTTP request handler')

return {
@@ -270,7 +271,7 @@ export const httpStepHandler: TaskHandler<'http-request-step'> = async ({input,
headers: {},
body: '',
data: null,
duration: Date.now() - (startTime || Date.now()),
duration: Date.now() - startTime,
error: `HTTP request handler error: ${error.message}`
},
state: 'failed'

@@ -72,7 +72,7 @@ describe('WorkflowExecutor', () => {
describe('resolveStepInput', () => {
it('should resolve all JSONPath expressions in step config', () => {
const config = {
url: '$.trigger.webhook.url',
url: '$.trigger.data.url',
message: 'Static message',
data: {
id: '$.trigger.doc.id',
@@ -83,7 +83,7 @@ describe('WorkflowExecutor', () => {
const context = {
trigger: {
doc: { id: 'doc-123', title: 'Doc Title' },
webhook: { url: 'https://example.com/webhook' }
data: { url: 'https://example.com/webhook' }
},
steps: {}
}

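The renamed context key reflects that webhook payloads now sit under `trigger.data` rather than `trigger.webhook`. Assuming `resolveStepInput` substitutes each `$.`-prefixed path from the context (as the test title suggests), the resolved step input would look roughly like this:

```ts
// Sketch of the expected resolution result, using the test's config and context above.
const resolved = {
  url: 'https://example.com/webhook',  // from $.trigger.data.url
  message: 'Static message',           // static values pass through unchanged
  data: { id: 'doc-123' },             // from $.trigger.doc.id
}
```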
36 src/triggers/collection-trigger.ts Normal file
@@ -0,0 +1,36 @@
import type {TriggerConfig} from '../plugin/config-types.js'

export const collectionTrigger: TriggerConfig = ({collectionTriggers}) => ({
slug: 'collection-hook',
parameters: [
{
name: 'collectionSlug',
type: 'select',
options: Object.keys(collectionTriggers || {}),
},
{
name: 'hook',
type: 'select',
options: [
"afterChange",
"afterDelete",
"afterError",
"afterForgotPassword",
"afterLogin",
"afterLogout",
"afterMe",
"afterOperation",
"afterRead",
"afterRefresh",
"beforeChange",
"beforeDelete",
"beforeLogin",
"beforeOperation",
"beforeRead",
"beforeValidate",
"me",
"refresh"
]
}
]
})

29 src/triggers/global-trigger.ts Normal file
@@ -0,0 +1,29 @@
import type {TriggerConfig} from '../plugin/config-types.js'

export const globalTrigger: TriggerConfig = ({globalTriggers}) => ({
slug: 'global-hook',
parameters: [
{
name: 'global',
type: 'select',
admin: {
description: 'Global that triggers the workflow',
},
options: Object.keys(globalTriggers || {}),
},
{
name: 'operation',
type: 'select',
admin: {
description: 'Global hook that triggers the workflow',
},
options: [
"afterChange",
"afterRead",
"beforeChange",
"beforeRead",
"beforeValidate"
],
}
]
})

2 src/triggers/index.ts Normal file
@@ -0,0 +1,2 @@
export { collectionTrigger } from './collection-trigger.js'
export { globalTrigger } from './global-trigger.js'
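These factories take the plugin options and return a trigger definition whose select options mirror the configured slugs. A hedged usage sketch (the slugs are placeholders, and the cast is only there because a real `WorkflowsPluginConfig` has more required fields than this sketch provides):

```ts
import { collectionTrigger, globalTrigger } from './triggers/index.js'

// Partial plugin options, just enough to populate the select fields.
const opts = { collectionTriggers: { posts: true }, globalTriggers: { 'site-settings': true } }

const colTrigger = collectionTrigger(opts as any)  // parameters[0].options → ['posts']
const glbTrigger = globalTrigger(opts as any)      // parameters[0].options → ['site-settings']
```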
6 src/triggers/types.ts Normal file
@@ -0,0 +1,6 @@
import type {Field} from "payload"

export type Trigger = {
slug: string
parameters: Field[]
}
@@ -31,4 +31,8 @@
"./src/**/*.tsx",
"./dev/next-env.d.ts",
],
"exclude": [
"./src/test",
"./test-results"
]
}