Rename 'collection' field to 'collectionSlug' to avoid PayloadCMS reserved field conflicts

- Updated Workflow collection trigger field from 'collection' to 'collectionSlug'
- Updated all document operation steps (create, read, update, delete) to use 'collectionSlug'
- Updated corresponding handlers to destructure 'collectionSlug' instead of 'collection'
- Removed debug console.log statements from the logger's configLogger methods
- Fixed collection hook debug logs to use 'slug' instead of reserved 'collection' field

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
2025-08-31 17:14:35 +02:00
parent 2d84f535f4
commit 592536f61a
18 changed files with 297 additions and 368 deletions

2
.env.local Normal file
View File

@@ -0,0 +1,2 @@
PAYLOAD_AUTOMATION_LOGGING=debug
PAYLOAD_AUTOMATION_CONFIG_LOGGING=true

View File

@@ -1,6 +1,6 @@
{ {
"name": "@xtr-dev/payload-automation", "name": "@xtr-dev/payload-automation",
"version": "0.0.1", "version": "0.0.7",
"description": "PayloadCMS Automation Plugin - Comprehensive workflow automation system with visual workflow building, execution tracking, and step types", "description": "PayloadCMS Automation Plugin - Comprehensive workflow automation system with visual workflow building, execution tracking, and step types",
"license": "MIT", "license": "MIT",
"type": "module", "type": "module",
@@ -84,7 +84,7 @@
"react": "19.1.0", "react": "19.1.0",
"react-dom": "19.1.0", "react-dom": "19.1.0",
"rimraf": "3.0.2", "rimraf": "3.0.2",
"sharp": "0.34.2", "sharp": "0.34.3",
"typescript": "5.7.3", "typescript": "5.7.3",
"vitest": "^3.1.2" "vitest": "^3.1.2"
}, },

258
pnpm-lock.yaml generated
View File

@@ -29,7 +29,7 @@ importers:
version: 3.45.0(@types/pg@8.10.2)(payload@3.45.0(graphql@16.11.0)(typescript@5.7.3))(pg@8.16.3) version: 3.45.0(@types/pg@8.10.2)(payload@3.45.0(graphql@16.11.0)(typescript@5.7.3))(pg@8.16.3)
'@payloadcms/eslint-config': '@payloadcms/eslint-config':
specifier: 3.9.0 specifier: 3.9.0
version: 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1) version: 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)
'@payloadcms/next': '@payloadcms/next':
specifier: 3.45.0 specifier: 3.45.0
version: 3.45.0(@types/react@19.1.8)(graphql@16.11.0)(monaco-editor@0.52.2)(next@15.4.4(@playwright/test@1.55.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(sass@1.77.4))(payload@3.45.0(graphql@16.11.0)(typescript@5.7.3))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(typescript@5.7.3) version: 3.45.0(@types/react@19.1.8)(graphql@16.11.0)(monaco-editor@0.52.2)(next@15.4.4(@playwright/test@1.55.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(sass@1.77.4))(payload@3.45.0(graphql@16.11.0)(typescript@5.7.3))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(typescript@5.7.3)
@@ -88,8 +88,8 @@ importers:
specifier: 3.0.2 specifier: 3.0.2
version: 3.0.2 version: 3.0.2
sharp: sharp:
specifier: 0.34.2 specifier: 0.34.3
version: 0.34.2 version: 0.34.3
typescript: typescript:
specifier: 5.7.3 specifier: 5.7.3
version: 5.7.3 version: 5.7.3
@@ -812,138 +812,69 @@ packages:
resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==}
engines: {node: '>=18.18'} engines: {node: '>=18.18'}
'@img/sharp-darwin-arm64@0.34.2':
resolution: {integrity: sha512-OfXHZPppddivUJnqyKoi5YVeHRkkNE2zUFT2gbpKxp/JZCFYEYubnMg+gOp6lWfasPrTS+KPosKqdI+ELYVDtg==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [darwin]
'@img/sharp-darwin-arm64@0.34.3': '@img/sharp-darwin-arm64@0.34.3':
resolution: {integrity: sha512-ryFMfvxxpQRsgZJqBd4wsttYQbCxsJksrv9Lw/v798JcQ8+w84mBWuXwl+TT0WJ/WrYOLaYpwQXi3sA9nTIaIg==} resolution: {integrity: sha512-ryFMfvxxpQRsgZJqBd4wsttYQbCxsJksrv9Lw/v798JcQ8+w84mBWuXwl+TT0WJ/WrYOLaYpwQXi3sA9nTIaIg==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64] cpu: [arm64]
os: [darwin] os: [darwin]
'@img/sharp-darwin-x64@0.34.2':
resolution: {integrity: sha512-dYvWqmjU9VxqXmjEtjmvHnGqF8GrVjM2Epj9rJ6BUIXvk8slvNDJbhGFvIoXzkDhrJC2jUxNLz/GUjjvSzfw+g==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [darwin]
'@img/sharp-darwin-x64@0.34.3': '@img/sharp-darwin-x64@0.34.3':
resolution: {integrity: sha512-yHpJYynROAj12TA6qil58hmPmAwxKKC7reUqtGLzsOHfP7/rniNGTL8tjWX6L3CTV4+5P4ypcS7Pp+7OB+8ihA==} resolution: {integrity: sha512-yHpJYynROAj12TA6qil58hmPmAwxKKC7reUqtGLzsOHfP7/rniNGTL8tjWX6L3CTV4+5P4ypcS7Pp+7OB+8ihA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64] cpu: [x64]
os: [darwin] os: [darwin]
'@img/sharp-libvips-darwin-arm64@1.1.0':
resolution: {integrity: sha512-HZ/JUmPwrJSoM4DIQPv/BfNh9yrOA8tlBbqbLz4JZ5uew2+o22Ik+tHQJcih7QJuSa0zo5coHTfD5J8inqj9DA==}
cpu: [arm64]
os: [darwin]
'@img/sharp-libvips-darwin-arm64@1.2.0': '@img/sharp-libvips-darwin-arm64@1.2.0':
resolution: {integrity: sha512-sBZmpwmxqwlqG9ueWFXtockhsxefaV6O84BMOrhtg/YqbTaRdqDE7hxraVE3y6gVM4eExmfzW4a8el9ArLeEiQ==} resolution: {integrity: sha512-sBZmpwmxqwlqG9ueWFXtockhsxefaV6O84BMOrhtg/YqbTaRdqDE7hxraVE3y6gVM4eExmfzW4a8el9ArLeEiQ==}
cpu: [arm64] cpu: [arm64]
os: [darwin] os: [darwin]
'@img/sharp-libvips-darwin-x64@1.1.0':
resolution: {integrity: sha512-Xzc2ToEmHN+hfvsl9wja0RlnXEgpKNmftriQp6XzY/RaSfwD9th+MSh0WQKzUreLKKINb3afirxW7A0fz2YWuQ==}
cpu: [x64]
os: [darwin]
'@img/sharp-libvips-darwin-x64@1.2.0': '@img/sharp-libvips-darwin-x64@1.2.0':
resolution: {integrity: sha512-M64XVuL94OgiNHa5/m2YvEQI5q2cl9d/wk0qFTDVXcYzi43lxuiFTftMR1tOnFQovVXNZJ5TURSDK2pNe9Yzqg==} resolution: {integrity: sha512-M64XVuL94OgiNHa5/m2YvEQI5q2cl9d/wk0qFTDVXcYzi43lxuiFTftMR1tOnFQovVXNZJ5TURSDK2pNe9Yzqg==}
cpu: [x64] cpu: [x64]
os: [darwin] os: [darwin]
'@img/sharp-libvips-linux-arm64@1.1.0':
resolution: {integrity: sha512-IVfGJa7gjChDET1dK9SekxFFdflarnUB8PwW8aGwEoF3oAsSDuNUTYS+SKDOyOJxQyDC1aPFMuRYLoDInyV9Ew==}
cpu: [arm64]
os: [linux]
'@img/sharp-libvips-linux-arm64@1.2.0': '@img/sharp-libvips-linux-arm64@1.2.0':
resolution: {integrity: sha512-RXwd0CgG+uPRX5YYrkzKyalt2OJYRiJQ8ED/fi1tq9WQW2jsQIn0tqrlR5l5dr/rjqq6AHAxURhj2DVjyQWSOA==} resolution: {integrity: sha512-RXwd0CgG+uPRX5YYrkzKyalt2OJYRiJQ8ED/fi1tq9WQW2jsQIn0tqrlR5l5dr/rjqq6AHAxURhj2DVjyQWSOA==}
cpu: [arm64] cpu: [arm64]
os: [linux] os: [linux]
'@img/sharp-libvips-linux-arm@1.1.0':
resolution: {integrity: sha512-s8BAd0lwUIvYCJyRdFqvsj+BJIpDBSxs6ivrOPm/R7piTs5UIwY5OjXrP2bqXC9/moGsyRa37eYWYCOGVXxVrA==}
cpu: [arm]
os: [linux]
'@img/sharp-libvips-linux-arm@1.2.0': '@img/sharp-libvips-linux-arm@1.2.0':
resolution: {integrity: sha512-mWd2uWvDtL/nvIzThLq3fr2nnGfyr/XMXlq8ZJ9WMR6PXijHlC3ksp0IpuhK6bougvQrchUAfzRLnbsen0Cqvw==} resolution: {integrity: sha512-mWd2uWvDtL/nvIzThLq3fr2nnGfyr/XMXlq8ZJ9WMR6PXijHlC3ksp0IpuhK6bougvQrchUAfzRLnbsen0Cqvw==}
cpu: [arm] cpu: [arm]
os: [linux] os: [linux]
'@img/sharp-libvips-linux-ppc64@1.1.0':
resolution: {integrity: sha512-tiXxFZFbhnkWE2LA8oQj7KYR+bWBkiV2nilRldT7bqoEZ4HiDOcePr9wVDAZPi/Id5fT1oY9iGnDq20cwUz8lQ==}
cpu: [ppc64]
os: [linux]
'@img/sharp-libvips-linux-ppc64@1.2.0': '@img/sharp-libvips-linux-ppc64@1.2.0':
resolution: {integrity: sha512-Xod/7KaDDHkYu2phxxfeEPXfVXFKx70EAFZ0qyUdOjCcxbjqyJOEUpDe6RIyaunGxT34Anf9ue/wuWOqBW2WcQ==} resolution: {integrity: sha512-Xod/7KaDDHkYu2phxxfeEPXfVXFKx70EAFZ0qyUdOjCcxbjqyJOEUpDe6RIyaunGxT34Anf9ue/wuWOqBW2WcQ==}
cpu: [ppc64] cpu: [ppc64]
os: [linux] os: [linux]
'@img/sharp-libvips-linux-s390x@1.1.0':
resolution: {integrity: sha512-xukSwvhguw7COyzvmjydRb3x/09+21HykyapcZchiCUkTThEQEOMtBj9UhkaBRLuBrgLFzQ2wbxdeCCJW/jgJA==}
cpu: [s390x]
os: [linux]
'@img/sharp-libvips-linux-s390x@1.2.0': '@img/sharp-libvips-linux-s390x@1.2.0':
resolution: {integrity: sha512-eMKfzDxLGT8mnmPJTNMcjfO33fLiTDsrMlUVcp6b96ETbnJmd4uvZxVJSKPQfS+odwfVaGifhsB07J1LynFehw==} resolution: {integrity: sha512-eMKfzDxLGT8mnmPJTNMcjfO33fLiTDsrMlUVcp6b96ETbnJmd4uvZxVJSKPQfS+odwfVaGifhsB07J1LynFehw==}
cpu: [s390x] cpu: [s390x]
os: [linux] os: [linux]
'@img/sharp-libvips-linux-x64@1.1.0':
resolution: {integrity: sha512-yRj2+reB8iMg9W5sULM3S74jVS7zqSzHG3Ol/twnAAkAhnGQnpjj6e4ayUz7V+FpKypwgs82xbRdYtchTTUB+Q==}
cpu: [x64]
os: [linux]
'@img/sharp-libvips-linux-x64@1.2.0': '@img/sharp-libvips-linux-x64@1.2.0':
resolution: {integrity: sha512-ZW3FPWIc7K1sH9E3nxIGB3y3dZkpJlMnkk7z5tu1nSkBoCgw2nSRTFHI5pB/3CQaJM0pdzMF3paf9ckKMSE9Tg==} resolution: {integrity: sha512-ZW3FPWIc7K1sH9E3nxIGB3y3dZkpJlMnkk7z5tu1nSkBoCgw2nSRTFHI5pB/3CQaJM0pdzMF3paf9ckKMSE9Tg==}
cpu: [x64] cpu: [x64]
os: [linux] os: [linux]
'@img/sharp-libvips-linuxmusl-arm64@1.1.0':
resolution: {integrity: sha512-jYZdG+whg0MDK+q2COKbYidaqW/WTz0cc1E+tMAusiDygrM4ypmSCjOJPmFTvHHJ8j/6cAGyeDWZOsK06tP33w==}
cpu: [arm64]
os: [linux]
'@img/sharp-libvips-linuxmusl-arm64@1.2.0': '@img/sharp-libvips-linuxmusl-arm64@1.2.0':
resolution: {integrity: sha512-UG+LqQJbf5VJ8NWJ5Z3tdIe/HXjuIdo4JeVNADXBFuG7z9zjoegpzzGIyV5zQKi4zaJjnAd2+g2nna8TZvuW9Q==} resolution: {integrity: sha512-UG+LqQJbf5VJ8NWJ5Z3tdIe/HXjuIdo4JeVNADXBFuG7z9zjoegpzzGIyV5zQKi4zaJjnAd2+g2nna8TZvuW9Q==}
cpu: [arm64] cpu: [arm64]
os: [linux] os: [linux]
'@img/sharp-libvips-linuxmusl-x64@1.1.0':
resolution: {integrity: sha512-wK7SBdwrAiycjXdkPnGCPLjYb9lD4l6Ze2gSdAGVZrEL05AOUJESWU2lhlC+Ffn5/G+VKuSm6zzbQSzFX/P65A==}
cpu: [x64]
os: [linux]
'@img/sharp-libvips-linuxmusl-x64@1.2.0': '@img/sharp-libvips-linuxmusl-x64@1.2.0':
resolution: {integrity: sha512-SRYOLR7CXPgNze8akZwjoGBoN1ThNZoqpOgfnOxmWsklTGVfJiGJoC/Lod7aNMGA1jSsKWM1+HRX43OP6p9+6Q==} resolution: {integrity: sha512-SRYOLR7CXPgNze8akZwjoGBoN1ThNZoqpOgfnOxmWsklTGVfJiGJoC/Lod7aNMGA1jSsKWM1+HRX43OP6p9+6Q==}
cpu: [x64] cpu: [x64]
os: [linux] os: [linux]
'@img/sharp-linux-arm64@0.34.2':
resolution: {integrity: sha512-D8n8wgWmPDakc83LORcfJepdOSN6MvWNzzz2ux0MnIbOqdieRZwVYY32zxVx+IFUT8er5KPcyU3XXsn+GzG/0Q==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [linux]
'@img/sharp-linux-arm64@0.34.3': '@img/sharp-linux-arm64@0.34.3':
resolution: {integrity: sha512-QdrKe3EvQrqwkDrtuTIjI0bu6YEJHTgEeqdzI3uWJOH6G1O8Nl1iEeVYRGdj1h5I21CqxSvQp1Yv7xeU3ZewbA==} resolution: {integrity: sha512-QdrKe3EvQrqwkDrtuTIjI0bu6YEJHTgEeqdzI3uWJOH6G1O8Nl1iEeVYRGdj1h5I21CqxSvQp1Yv7xeU3ZewbA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64] cpu: [arm64]
os: [linux] os: [linux]
'@img/sharp-linux-arm@0.34.2':
resolution: {integrity: sha512-0DZzkvuEOqQUP9mo2kjjKNok5AmnOr1jB2XYjkaoNRwpAYMDzRmAqUIa1nRi58S2WswqSfPOWLNOr0FDT3H5RQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm]
os: [linux]
'@img/sharp-linux-arm@0.34.3': '@img/sharp-linux-arm@0.34.3':
resolution: {integrity: sha512-oBK9l+h6KBN0i3dC8rYntLiVfW8D8wH+NPNT3O/WBHeW0OQWCjfWksLUaPidsrDKpJgXp3G3/hkmhptAW0I3+A==} resolution: {integrity: sha512-oBK9l+h6KBN0i3dC8rYntLiVfW8D8wH+NPNT3O/WBHeW0OQWCjfWksLUaPidsrDKpJgXp3G3/hkmhptAW0I3+A==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
@@ -956,94 +887,47 @@ packages:
cpu: [ppc64] cpu: [ppc64]
os: [linux] os: [linux]
'@img/sharp-linux-s390x@0.34.2':
resolution: {integrity: sha512-EGZ1xwhBI7dNISwxjChqBGELCWMGDvmxZXKjQRuqMrakhO8QoMgqCrdjnAqJq/CScxfRn+Bb7suXBElKQpPDiw==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [s390x]
os: [linux]
'@img/sharp-linux-s390x@0.34.3': '@img/sharp-linux-s390x@0.34.3':
resolution: {integrity: sha512-3gahT+A6c4cdc2edhsLHmIOXMb17ltffJlxR0aC2VPZfwKoTGZec6u5GrFgdR7ciJSsHT27BD3TIuGcuRT0KmQ==} resolution: {integrity: sha512-3gahT+A6c4cdc2edhsLHmIOXMb17ltffJlxR0aC2VPZfwKoTGZec6u5GrFgdR7ciJSsHT27BD3TIuGcuRT0KmQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [s390x] cpu: [s390x]
os: [linux] os: [linux]
'@img/sharp-linux-x64@0.34.2':
resolution: {integrity: sha512-sD7J+h5nFLMMmOXYH4DD9UtSNBD05tWSSdWAcEyzqW8Cn5UxXvsHAxmxSesYUsTOBmUnjtxghKDl15EvfqLFbQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [linux]
'@img/sharp-linux-x64@0.34.3': '@img/sharp-linux-x64@0.34.3':
resolution: {integrity: sha512-8kYso8d806ypnSq3/Ly0QEw90V5ZoHh10yH0HnrzOCr6DKAPI6QVHvwleqMkVQ0m+fc7EH8ah0BB0QPuWY6zJQ==} resolution: {integrity: sha512-8kYso8d806ypnSq3/Ly0QEw90V5ZoHh10yH0HnrzOCr6DKAPI6QVHvwleqMkVQ0m+fc7EH8ah0BB0QPuWY6zJQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64] cpu: [x64]
os: [linux] os: [linux]
'@img/sharp-linuxmusl-arm64@0.34.2':
resolution: {integrity: sha512-NEE2vQ6wcxYav1/A22OOxoSOGiKnNmDzCYFOZ949xFmrWZOVII1Bp3NqVVpvj+3UeHMFyN5eP/V5hzViQ5CZNA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [linux]
'@img/sharp-linuxmusl-arm64@0.34.3': '@img/sharp-linuxmusl-arm64@0.34.3':
resolution: {integrity: sha512-vAjbHDlr4izEiXM1OTggpCcPg9tn4YriK5vAjowJsHwdBIdx0fYRsURkxLG2RLm9gyBq66gwtWI8Gx0/ov+JKQ==} resolution: {integrity: sha512-vAjbHDlr4izEiXM1OTggpCcPg9tn4YriK5vAjowJsHwdBIdx0fYRsURkxLG2RLm9gyBq66gwtWI8Gx0/ov+JKQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64] cpu: [arm64]
os: [linux] os: [linux]
'@img/sharp-linuxmusl-x64@0.34.2':
resolution: {integrity: sha512-DOYMrDm5E6/8bm/yQLCWyuDJwUnlevR8xtF8bs+gjZ7cyUNYXiSf/E8Kp0Ss5xasIaXSHzb888V1BE4i1hFhAA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [linux]
'@img/sharp-linuxmusl-x64@0.34.3': '@img/sharp-linuxmusl-x64@0.34.3':
resolution: {integrity: sha512-gCWUn9547K5bwvOn9l5XGAEjVTTRji4aPTqLzGXHvIr6bIDZKNTA34seMPgM0WmSf+RYBH411VavCejp3PkOeQ==} resolution: {integrity: sha512-gCWUn9547K5bwvOn9l5XGAEjVTTRji4aPTqLzGXHvIr6bIDZKNTA34seMPgM0WmSf+RYBH411VavCejp3PkOeQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64] cpu: [x64]
os: [linux] os: [linux]
'@img/sharp-wasm32@0.34.2':
resolution: {integrity: sha512-/VI4mdlJ9zkaq53MbIG6rZY+QRN3MLbR6usYlgITEzi4Rpx5S6LFKsycOQjkOGmqTNmkIdLjEvooFKwww6OpdQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [wasm32]
'@img/sharp-wasm32@0.34.3': '@img/sharp-wasm32@0.34.3':
resolution: {integrity: sha512-+CyRcpagHMGteySaWos8IbnXcHgfDn7pO2fiC2slJxvNq9gDipYBN42/RagzctVRKgxATmfqOSulgZv5e1RdMg==} resolution: {integrity: sha512-+CyRcpagHMGteySaWos8IbnXcHgfDn7pO2fiC2slJxvNq9gDipYBN42/RagzctVRKgxATmfqOSulgZv5e1RdMg==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [wasm32] cpu: [wasm32]
'@img/sharp-win32-arm64@0.34.2':
resolution: {integrity: sha512-cfP/r9FdS63VA5k0xiqaNaEoGxBg9k7uE+RQGzuK9fHt7jib4zAVVseR9LsE4gJcNWgT6APKMNnCcnyOtmSEUQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [win32]
'@img/sharp-win32-arm64@0.34.3': '@img/sharp-win32-arm64@0.34.3':
resolution: {integrity: sha512-MjnHPnbqMXNC2UgeLJtX4XqoVHHlZNd+nPt1kRPmj63wURegwBhZlApELdtxM2OIZDRv/DFtLcNhVbd1z8GYXQ==} resolution: {integrity: sha512-MjnHPnbqMXNC2UgeLJtX4XqoVHHlZNd+nPt1kRPmj63wURegwBhZlApELdtxM2OIZDRv/DFtLcNhVbd1z8GYXQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64] cpu: [arm64]
os: [win32] os: [win32]
'@img/sharp-win32-ia32@0.34.2':
resolution: {integrity: sha512-QLjGGvAbj0X/FXl8n1WbtQ6iVBpWU7JO94u/P2M4a8CFYsvQi4GW2mRy/JqkRx0qpBzaOdKJKw8uc930EX2AHw==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [ia32]
os: [win32]
'@img/sharp-win32-ia32@0.34.3': '@img/sharp-win32-ia32@0.34.3':
resolution: {integrity: sha512-xuCdhH44WxuXgOM714hn4amodJMZl3OEvf0GVTm0BEyMeA2to+8HEdRPShH0SLYptJY1uBw+SCFP9WVQi1Q/cw==} resolution: {integrity: sha512-xuCdhH44WxuXgOM714hn4amodJMZl3OEvf0GVTm0BEyMeA2to+8HEdRPShH0SLYptJY1uBw+SCFP9WVQi1Q/cw==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [ia32] cpu: [ia32]
os: [win32] os: [win32]
'@img/sharp-win32-x64@0.34.2':
resolution: {integrity: sha512-aUdT6zEYtDKCaxkofmmJDJYGCf0+pJg3eU9/oBuqvEeoB9dKI6ZLc/1iLJCTuJQDO4ptntAlkUmHgGjyuobZbw==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [win32]
'@img/sharp-win32-x64@0.34.3': '@img/sharp-win32-x64@0.34.3':
resolution: {integrity: sha512-OWwz05d++TxzLEv4VnsTz5CmZ6mI6S05sfQGEMrNrQcOEERbX46332IvE7pO/EUiw7jUrrS40z/M7kPyjfl04g==} resolution: {integrity: sha512-OWwz05d++TxzLEv4VnsTz5CmZ6mI6S05sfQGEMrNrQcOEERbX46332IvE7pO/EUiw7jUrrS40z/M7kPyjfl04g==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
@@ -4190,10 +4074,6 @@ packages:
resolution: {integrity: sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==} resolution: {integrity: sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==}
engines: {node: '>= 0.4'} engines: {node: '>= 0.4'}
sharp@0.34.2:
resolution: {integrity: sha512-lszvBmB9QURERtyKT2bNmsgxXK0ShJrL/fvqlonCo7e6xBF8nT8xU6pW+PMIbLsz0RxQk3rgH9kd8UmvOzlMJg==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
sharp@0.34.3: sharp@0.34.3:
resolution: {integrity: sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg==} resolution: {integrity: sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
@@ -5412,95 +5292,48 @@ snapshots:
'@humanwhocodes/retry@0.4.3': {} '@humanwhocodes/retry@0.4.3': {}
'@img/sharp-darwin-arm64@0.34.2':
optionalDependencies:
'@img/sharp-libvips-darwin-arm64': 1.1.0
optional: true
'@img/sharp-darwin-arm64@0.34.3': '@img/sharp-darwin-arm64@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-darwin-arm64': 1.2.0 '@img/sharp-libvips-darwin-arm64': 1.2.0
optional: true optional: true
'@img/sharp-darwin-x64@0.34.2':
optionalDependencies:
'@img/sharp-libvips-darwin-x64': 1.1.0
optional: true
'@img/sharp-darwin-x64@0.34.3': '@img/sharp-darwin-x64@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-darwin-x64': 1.2.0 '@img/sharp-libvips-darwin-x64': 1.2.0
optional: true optional: true
'@img/sharp-libvips-darwin-arm64@1.1.0':
optional: true
'@img/sharp-libvips-darwin-arm64@1.2.0': '@img/sharp-libvips-darwin-arm64@1.2.0':
optional: true optional: true
'@img/sharp-libvips-darwin-x64@1.1.0':
optional: true
'@img/sharp-libvips-darwin-x64@1.2.0': '@img/sharp-libvips-darwin-x64@1.2.0':
optional: true optional: true
'@img/sharp-libvips-linux-arm64@1.1.0':
optional: true
'@img/sharp-libvips-linux-arm64@1.2.0': '@img/sharp-libvips-linux-arm64@1.2.0':
optional: true optional: true
'@img/sharp-libvips-linux-arm@1.1.0':
optional: true
'@img/sharp-libvips-linux-arm@1.2.0': '@img/sharp-libvips-linux-arm@1.2.0':
optional: true optional: true
'@img/sharp-libvips-linux-ppc64@1.1.0':
optional: true
'@img/sharp-libvips-linux-ppc64@1.2.0': '@img/sharp-libvips-linux-ppc64@1.2.0':
optional: true optional: true
'@img/sharp-libvips-linux-s390x@1.1.0':
optional: true
'@img/sharp-libvips-linux-s390x@1.2.0': '@img/sharp-libvips-linux-s390x@1.2.0':
optional: true optional: true
'@img/sharp-libvips-linux-x64@1.1.0':
optional: true
'@img/sharp-libvips-linux-x64@1.2.0': '@img/sharp-libvips-linux-x64@1.2.0':
optional: true optional: true
'@img/sharp-libvips-linuxmusl-arm64@1.1.0':
optional: true
'@img/sharp-libvips-linuxmusl-arm64@1.2.0': '@img/sharp-libvips-linuxmusl-arm64@1.2.0':
optional: true optional: true
'@img/sharp-libvips-linuxmusl-x64@1.1.0':
optional: true
'@img/sharp-libvips-linuxmusl-x64@1.2.0': '@img/sharp-libvips-linuxmusl-x64@1.2.0':
optional: true optional: true
'@img/sharp-linux-arm64@0.34.2':
optionalDependencies:
'@img/sharp-libvips-linux-arm64': 1.1.0
optional: true
'@img/sharp-linux-arm64@0.34.3': '@img/sharp-linux-arm64@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-linux-arm64': 1.2.0 '@img/sharp-libvips-linux-arm64': 1.2.0
optional: true optional: true
'@img/sharp-linux-arm@0.34.2':
optionalDependencies:
'@img/sharp-libvips-linux-arm': 1.1.0
optional: true
'@img/sharp-linux-arm@0.34.3': '@img/sharp-linux-arm@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-linux-arm': 1.2.0 '@img/sharp-libvips-linux-arm': 1.2.0
@@ -5511,71 +5344,37 @@ snapshots:
'@img/sharp-libvips-linux-ppc64': 1.2.0 '@img/sharp-libvips-linux-ppc64': 1.2.0
optional: true optional: true
'@img/sharp-linux-s390x@0.34.2':
optionalDependencies:
'@img/sharp-libvips-linux-s390x': 1.1.0
optional: true
'@img/sharp-linux-s390x@0.34.3': '@img/sharp-linux-s390x@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-linux-s390x': 1.2.0 '@img/sharp-libvips-linux-s390x': 1.2.0
optional: true optional: true
'@img/sharp-linux-x64@0.34.2':
optionalDependencies:
'@img/sharp-libvips-linux-x64': 1.1.0
optional: true
'@img/sharp-linux-x64@0.34.3': '@img/sharp-linux-x64@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-linux-x64': 1.2.0 '@img/sharp-libvips-linux-x64': 1.2.0
optional: true optional: true
'@img/sharp-linuxmusl-arm64@0.34.2':
optionalDependencies:
'@img/sharp-libvips-linuxmusl-arm64': 1.1.0
optional: true
'@img/sharp-linuxmusl-arm64@0.34.3': '@img/sharp-linuxmusl-arm64@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-linuxmusl-arm64': 1.2.0 '@img/sharp-libvips-linuxmusl-arm64': 1.2.0
optional: true optional: true
'@img/sharp-linuxmusl-x64@0.34.2':
optionalDependencies:
'@img/sharp-libvips-linuxmusl-x64': 1.1.0
optional: true
'@img/sharp-linuxmusl-x64@0.34.3': '@img/sharp-linuxmusl-x64@0.34.3':
optionalDependencies: optionalDependencies:
'@img/sharp-libvips-linuxmusl-x64': 1.2.0 '@img/sharp-libvips-linuxmusl-x64': 1.2.0
optional: true optional: true
'@img/sharp-wasm32@0.34.2':
dependencies:
'@emnapi/runtime': 1.4.5
optional: true
'@img/sharp-wasm32@0.34.3': '@img/sharp-wasm32@0.34.3':
dependencies: dependencies:
'@emnapi/runtime': 1.4.5 '@emnapi/runtime': 1.4.5
optional: true optional: true
'@img/sharp-win32-arm64@0.34.2':
optional: true
'@img/sharp-win32-arm64@0.34.3': '@img/sharp-win32-arm64@0.34.3':
optional: true optional: true
'@img/sharp-win32-ia32@0.34.2':
optional: true
'@img/sharp-win32-ia32@0.34.3': '@img/sharp-win32-ia32@0.34.3':
optional: true optional: true
'@img/sharp-win32-x64@0.34.2':
optional: true
'@img/sharp-win32-x64@0.34.3': '@img/sharp-win32-x64@0.34.3':
optional: true optional: true
@@ -6081,18 +5880,18 @@ snapshots:
- sql.js - sql.js
- sqlite3 - sqlite3
'@payloadcms/eslint-config@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)': '@payloadcms/eslint-config@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)':
dependencies: dependencies:
'@eslint-react/eslint-plugin': 1.16.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@eslint-react/eslint-plugin': 1.16.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
'@eslint/js': 9.14.0 '@eslint/js': 9.14.0
'@payloadcms/eslint-plugin': 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1) '@payloadcms/eslint-plugin': 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)
'@types/eslint': 9.6.1 '@types/eslint': 9.6.1
'@types/eslint__js': 8.42.3 '@types/eslint__js': 8.42.3
'@typescript-eslint/parser': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@typescript-eslint/parser': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
eslint: 9.14.0(jiti@2.5.1) eslint: 9.14.0(jiti@2.5.1)
eslint-config-prettier: 9.1.0(eslint@9.14.0(jiti@2.5.1)) eslint-config-prettier: 9.1.0(eslint@9.14.0(jiti@2.5.1))
eslint-plugin-import-x: 4.4.2(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-import-x: 4.4.2(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
eslint-plugin-jest-dom: 5.4.0(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-jest-dom: 5.4.0(eslint@9.14.0(jiti@2.5.1))
eslint-plugin-jsx-a11y: 6.10.2(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.14.0(jiti@2.5.1))
eslint-plugin-perfectionist: 3.9.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-perfectionist: 3.9.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
@@ -6112,7 +5911,7 @@ snapshots:
- svelte-eslint-parser - svelte-eslint-parser
- vue-eslint-parser - vue-eslint-parser
'@payloadcms/eslint-plugin@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)': '@payloadcms/eslint-plugin@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)':
dependencies: dependencies:
'@eslint-react/eslint-plugin': 1.16.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@eslint-react/eslint-plugin': 1.16.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
'@eslint/js': 9.14.0 '@eslint/js': 9.14.0
@@ -6122,7 +5921,7 @@ snapshots:
eslint: 9.14.0(jiti@2.5.1) eslint: 9.14.0(jiti@2.5.1)
eslint-config-prettier: 9.1.0(eslint@9.14.0(jiti@2.5.1)) eslint-config-prettier: 9.1.0(eslint@9.14.0(jiti@2.5.1))
eslint-plugin-import-x: 4.4.2(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-import-x: 4.4.2(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
eslint-plugin-jest-dom: 5.4.0(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-jest-dom: 5.4.0(eslint@9.14.0(jiti@2.5.1))
eslint-plugin-jsx-a11y: 6.10.2(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.14.0(jiti@2.5.1))
eslint-plugin-perfectionist: 3.9.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-perfectionist: 3.9.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
@@ -6504,7 +6303,7 @@ snapshots:
dependencies: dependencies:
'@types/node': 22.17.2 '@types/node': 22.17.2
'@typescript-eslint/eslint-plugin@8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)': '@typescript-eslint/eslint-plugin@8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)':
dependencies: dependencies:
'@eslint-community/regexpp': 4.12.1 '@eslint-community/regexpp': 4.12.1
'@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) '@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)
@@ -6522,7 +6321,7 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
'@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)': '@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)':
dependencies: dependencies:
'@eslint-community/regexpp': 4.12.1 '@eslint-community/regexpp': 4.12.1
'@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) '@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)
@@ -7539,12 +7338,12 @@ snapshots:
eslint: 9.14.0(jiti@2.5.1) eslint: 9.14.0(jiti@2.5.1)
requireindex: 1.2.0 requireindex: 1.2.0
eslint-plugin-jest@28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2): eslint-plugin-jest@28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2):
dependencies: dependencies:
'@typescript-eslint/utils': 8.40.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@typescript-eslint/utils': 8.40.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
eslint: 9.14.0(jiti@2.5.1) eslint: 9.14.0(jiti@2.5.1)
optionalDependencies: optionalDependencies:
'@typescript-eslint/eslint-plugin': 8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) '@typescript-eslint/eslint-plugin': 8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
- typescript - typescript
@@ -9481,34 +9280,6 @@ snapshots:
es-errors: 1.3.0 es-errors: 1.3.0
es-object-atoms: 1.1.1 es-object-atoms: 1.1.1
sharp@0.34.2:
dependencies:
color: 4.2.3
detect-libc: 2.0.4
semver: 7.7.2
optionalDependencies:
'@img/sharp-darwin-arm64': 0.34.2
'@img/sharp-darwin-x64': 0.34.2
'@img/sharp-libvips-darwin-arm64': 1.1.0
'@img/sharp-libvips-darwin-x64': 1.1.0
'@img/sharp-libvips-linux-arm': 1.1.0
'@img/sharp-libvips-linux-arm64': 1.1.0
'@img/sharp-libvips-linux-ppc64': 1.1.0
'@img/sharp-libvips-linux-s390x': 1.1.0
'@img/sharp-libvips-linux-x64': 1.1.0
'@img/sharp-libvips-linuxmusl-arm64': 1.1.0
'@img/sharp-libvips-linuxmusl-x64': 1.1.0
'@img/sharp-linux-arm': 0.34.2
'@img/sharp-linux-arm64': 0.34.2
'@img/sharp-linux-s390x': 0.34.2
'@img/sharp-linux-x64': 0.34.2
'@img/sharp-linuxmusl-arm64': 0.34.2
'@img/sharp-linuxmusl-x64': 0.34.2
'@img/sharp-wasm32': 0.34.2
'@img/sharp-win32-arm64': 0.34.2
'@img/sharp-win32-ia32': 0.34.2
'@img/sharp-win32-x64': 0.34.2
sharp@0.34.3: sharp@0.34.3:
dependencies: dependencies:
color: 4.2.3 color: 4.2.3
@@ -9537,7 +9308,6 @@ snapshots:
'@img/sharp-win32-arm64': 0.34.3 '@img/sharp-win32-arm64': 0.34.3
'@img/sharp-win32-ia32': 0.34.3 '@img/sharp-win32-ia32': 0.34.3
'@img/sharp-win32-x64': 0.34.3 '@img/sharp-win32-x64': 0.34.3
optional: true
shebang-command@2.0.0: shebang-command@2.0.0:
dependencies: dependencies:
@@ -9888,8 +9658,8 @@ snapshots:
typescript-eslint@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2): typescript-eslint@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2):
dependencies: dependencies:
'@typescript-eslint/eslint-plugin': 8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@typescript-eslint/eslint-plugin': 8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
'@typescript-eslint/parser': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)
'@typescript-eslint/utils': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@typescript-eslint/utils': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)
optionalDependencies: optionalDependencies:
typescript: 5.7.2 typescript: 5.7.2

View File

@@ -52,7 +52,7 @@ export const createWorkflowCollection: <T extends string>(options: WorkflowsPlug
] ]
}, },
{ {
name: 'collection', name: 'collectionSlug',
type: 'select', type: 'select',
admin: { admin: {
condition: (_, siblingData) => siblingData?.type === 'collection-trigger', condition: (_, siblingData) => siblingData?.type === 'collection-trigger',

View File

@@ -87,12 +87,29 @@ export class WorkflowExecutor {
// Check step condition if present // Check step condition if present
if (step.condition) { if (step.condition) {
this.logger.debug({
condition: step.condition,
stepName,
availableSteps: Object.keys(context.steps),
completedSteps: Object.entries(context.steps)
.filter(([_, s]) => s.state === 'succeeded')
.map(([name]) => name),
triggerType: context.trigger?.type
}, 'Evaluating step condition')
const conditionMet = this.evaluateStepCondition(step.condition, context) const conditionMet = this.evaluateStepCondition(step.condition, context)
if (!conditionMet) { if (!conditionMet) {
this.logger.info({ this.logger.info({
condition: step.condition, condition: step.condition,
stepName stepName,
contextSnapshot: JSON.stringify({
stepOutputs: Object.entries(context.steps).reduce((acc, [name, step]) => {
acc[name] = { state: step.state, hasOutput: !!step.output }
return acc
}, {} as Record<string, any>),
triggerData: context.trigger?.data ? 'present' : 'absent'
})
}, 'Step condition not met, skipping') }, 'Step condition not met, skipping')
// Mark step as completed but skipped // Mark step as completed but skipped
@@ -113,7 +130,14 @@ export class WorkflowExecutor {
this.logger.info({ this.logger.info({
condition: step.condition, condition: step.condition,
stepName stepName,
contextSnapshot: JSON.stringify({
stepOutputs: Object.entries(context.steps).reduce((acc, [name, step]) => {
acc[name] = { state: step.state, hasOutput: !!step.output }
return acc
}, {} as Record<string, any>),
triggerData: context.trigger?.data ? 'present' : 'absent'
})
}, 'Step condition met, proceeding with execution') }, 'Step condition met, proceeding with execution')
} }
@@ -311,26 +335,54 @@ export class WorkflowExecutor {
private resolveStepInput(config: Record<string, unknown>, context: ExecutionContext): Record<string, unknown> { private resolveStepInput(config: Record<string, unknown>, context: ExecutionContext): Record<string, unknown> {
const resolved: Record<string, unknown> = {} const resolved: Record<string, unknown> = {}
this.logger.debug({
configKeys: Object.keys(config),
contextSteps: Object.keys(context.steps),
triggerType: context.trigger?.type
}, 'Starting step input resolution')
for (const [key, value] of Object.entries(config)) { for (const [key, value] of Object.entries(config)) {
if (typeof value === 'string' && value.startsWith('$')) { if (typeof value === 'string' && value.startsWith('$')) {
// This is a JSONPath expression // This is a JSONPath expression
this.logger.debug({
key,
jsonPath: value,
availableSteps: Object.keys(context.steps),
hasTriggerData: !!context.trigger?.data,
hasTriggerDoc: !!context.trigger?.doc
}, 'Resolving JSONPath expression')
try { try {
const result = JSONPath({ const result = JSONPath({
json: context, json: context,
path: value, path: value,
wrap: false wrap: false
}) })
this.logger.debug({
key,
jsonPath: value,
result: JSON.stringify(result).substring(0, 200),
resultType: Array.isArray(result) ? 'array' : typeof result
}, 'JSONPath resolved successfully')
resolved[key] = result resolved[key] = result
} catch (error) { } catch (error) {
this.logger.warn({ this.logger.warn({
error: error instanceof Error ? error.message : 'Unknown error', error: error instanceof Error ? error.message : 'Unknown error',
key, key,
path: value path: value,
contextSnapshot: JSON.stringify(context).substring(0, 500)
}, 'Failed to resolve JSONPath') }, 'Failed to resolve JSONPath')
resolved[key] = value // Keep original value if resolution fails resolved[key] = value // Keep original value if resolution fails
} }
} else if (typeof value === 'object' && value !== null) { } else if (typeof value === 'object' && value !== null) {
// Recursively resolve nested objects // Recursively resolve nested objects
this.logger.debug({
key,
nestedKeys: Object.keys(value as Record<string, unknown>)
}, 'Recursively resolving nested object')
resolved[key] = this.resolveStepInput(value as Record<string, unknown>, context) resolved[key] = this.resolveStepInput(value as Record<string, unknown>, context)
} else { } else {
// Keep literal values as-is // Keep literal values as-is
@@ -338,6 +390,11 @@ export class WorkflowExecutor {
} }
} }
this.logger.debug({
resolvedKeys: Object.keys(resolved),
originalKeys: Object.keys(config)
}, 'Step input resolution completed')
return resolved return resolved
} }
@@ -377,6 +434,14 @@ export class WorkflowExecutor {
* Evaluate a condition using JSONPath * Evaluate a condition using JSONPath
*/ */
public evaluateCondition(condition: string, context: ExecutionContext): boolean { public evaluateCondition(condition: string, context: ExecutionContext): boolean {
this.logger.debug({
condition,
contextKeys: Object.keys(context),
triggerType: context.trigger?.type,
triggerData: context.trigger?.data,
triggerDoc: context.trigger?.doc ? 'present' : 'absent'
}, 'Starting condition evaluation')
try { try {
const result = JSONPath({ const result = JSONPath({
json: context, json: context,
@@ -384,16 +449,33 @@ export class WorkflowExecutor {
wrap: false wrap: false
}) })
this.logger.debug({
condition,
result,
resultType: Array.isArray(result) ? 'array' : typeof result,
resultLength: Array.isArray(result) ? result.length : undefined
}, 'JSONPath evaluation result')
// Handle different result types // Handle different result types
let finalResult: boolean
if (Array.isArray(result)) { if (Array.isArray(result)) {
return result.length > 0 && Boolean(result[0]) finalResult = result.length > 0 && Boolean(result[0])
} else {
finalResult = Boolean(result)
} }
return Boolean(result) this.logger.debug({
condition,
finalResult,
originalResult: result
}, 'Condition evaluation completed')
return finalResult
} catch (error) { } catch (error) {
this.logger.warn({ this.logger.warn({
condition, condition,
error: error instanceof Error ? error.message : 'Unknown error' error: error instanceof Error ? error.message : 'Unknown error',
errorStack: error instanceof Error ? error.stack : undefined
}, 'Failed to evaluate condition') }, 'Failed to evaluate condition')
// If condition evaluation fails, assume false // If condition evaluation fails, assume false
@@ -564,6 +646,17 @@ export class WorkflowExecutor {
// Check trigger condition if present // Check trigger condition if present
if (trigger.condition) { if (trigger.condition) {
this.logger.debug({
collection,
operation,
condition: trigger.condition,
docId: (doc as any)?.id,
docFields: doc ? Object.keys(doc) : [],
previousDocId: (previousDoc as any)?.id,
workflowId: workflow.id,
workflowName: workflow.name
}, 'Evaluating collection trigger condition')
const conditionMet = this.evaluateCondition(trigger.condition, context) const conditionMet = this.evaluateCondition(trigger.condition, context)
if (!conditionMet) { if (!conditionMet) {
@@ -572,7 +665,8 @@ export class WorkflowExecutor {
condition: trigger.condition, condition: trigger.condition,
operation, operation,
workflowId: workflow.id, workflowId: workflow.id,
workflowName: workflow.name workflowName: workflow.name,
docSnapshot: JSON.stringify(doc).substring(0, 200)
}, 'Trigger condition not met, skipping workflow') }, 'Trigger condition not met, skipping workflow')
continue continue
} }
@@ -582,7 +676,8 @@ export class WorkflowExecutor {
condition: trigger.condition, condition: trigger.condition,
operation, operation,
workflowId: workflow.id, workflowId: workflow.id,
workflowName: workflow.name workflowName: workflow.name,
docSnapshot: JSON.stringify(doc).substring(0, 200)
}, 'Trigger condition met') }, 'Trigger condition met')
} }

View File

@@ -1,4 +1,5 @@
import type {Config, Payload, TaskConfig} from 'payload' import type {Config, Payload, TaskConfig} from 'payload'
import * as cron from 'node-cron' import * as cron from 'node-cron'
import {type Workflow, WorkflowExecutor} from '../core/workflow-executor.js' import {type Workflow, WorkflowExecutor} from '../core/workflow-executor.js'
@@ -10,20 +11,20 @@ import {getConfigLogger} from './logger.js'
*/ */
export function generateCronTasks(config: Config): void { export function generateCronTasks(config: Config): void {
const logger = getConfigLogger() const logger = getConfigLogger()
// Note: We can't query the database at config time, so we'll need a different approach // Note: We can't query the database at config time, so we'll need a different approach
// We'll create a single task that handles all cron-triggered workflows // We'll create a single task that handles all cron-triggered workflows
const cronTask: TaskConfig = { const cronTask: TaskConfig = {
slug: 'workflow-cron-executor', slug: 'workflow-cron-executor',
handler: async ({ input, req }) => { handler: async ({ input, req }) => {
const { cronExpression, timezone, workflowId } = input as { const { cronExpression, timezone, workflowId } = input as {
cronExpression?: string cronExpression?: string
timezone?: string timezone?: string
workflowId: string workflowId: string
} }
const logger = req.payload.logger.child({ plugin: '@xtr-dev/payload-automation' }) const logger = req.payload.logger.child({ plugin: '@xtr-dev/payload-automation' })
try { try {
// Get the workflow // Get the workflow
const workflow = await req.payload.findByID({ const workflow = await req.payload.findByID({
@@ -32,11 +33,11 @@ export function generateCronTasks(config: Config): void {
depth: 2, depth: 2,
req req
}) })
if (!workflow) { if (!workflow) {
throw new Error(`Workflow ${workflowId} not found`) throw new Error(`Workflow ${workflowId} not found`)
} }
// Create execution context for cron trigger // Create execution context for cron trigger
const context = { const context = {
steps: {}, steps: {},
@@ -46,10 +47,10 @@ export function generateCronTasks(config: Config): void {
triggeredAt: new Date().toISOString() triggeredAt: new Date().toISOString()
} }
} }
// Create executor // Create executor
const executor = new WorkflowExecutor(req.payload, logger) const executor = new WorkflowExecutor(req.payload, logger)
// Find the matching cron trigger and check its condition if present // Find the matching cron trigger and check its condition if present
const triggers = workflow.triggers as Array<{ const triggers = workflow.triggers as Array<{
condition?: string condition?: string
@@ -66,7 +67,7 @@ export function generateCronTasks(config: Config): void {
// Check trigger condition if present // Check trigger condition if present
if (matchingTrigger?.condition) { if (matchingTrigger?.condition) {
const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context) const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context)
if (!conditionMet) { if (!conditionMet) {
logger.info({ logger.info({
condition: matchingTrigger.condition, condition: matchingTrigger.condition,
@@ -74,23 +75,23 @@ export function generateCronTasks(config: Config): void {
workflowId, workflowId,
workflowName: workflow.name workflowName: workflow.name
}, 'Cron trigger condition not met, skipping workflow execution') }, 'Cron trigger condition not met, skipping workflow execution')
// Re-queue for next execution but don't run workflow // Re-queue for next execution but don't run workflow
if (cronExpression) { if (cronExpression) {
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger) void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
} }
return { return {
output: { output: {
executedAt: new Date().toISOString(), executedAt: new Date().toISOString(),
status: 'skipped',
reason: 'Condition not met', reason: 'Condition not met',
status: 'skipped',
workflowId workflowId
}, },
state: 'succeeded' state: 'succeeded'
} }
} }
logger.info({ logger.info({
condition: matchingTrigger.condition, condition: matchingTrigger.condition,
cronExpression, cronExpression,
@@ -98,15 +99,15 @@ export function generateCronTasks(config: Config): void {
workflowName: workflow.name workflowName: workflow.name
}, 'Cron trigger condition met') }, 'Cron trigger condition met')
} }
// Execute the workflow // Execute the workflow
await executor.execute(workflow as Workflow, context, req) await executor.execute(workflow as Workflow, context, req)
// Re-queue the job for the next scheduled execution if cronExpression is provided // Re-queue the job for the next scheduled execution if cronExpression is provided
if (cronExpression) { if (cronExpression) {
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger) void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
} }
return { return {
output: { output: {
executedAt: new Date().toISOString(), executedAt: new Date().toISOString(),
@@ -120,7 +121,7 @@ export function generateCronTasks(config: Config): void {
error: error instanceof Error ? error.message : 'Unknown error', error: error instanceof Error ? error.message : 'Unknown error',
workflowId workflowId
}, 'Cron job execution failed') }, 'Cron job execution failed')
// Re-queue even on failure to ensure continuity (unless it's a validation error) // Re-queue even on failure to ensure continuity (unless it's a validation error)
if (cronExpression && !(error instanceof Error && error.message.includes('Invalid cron'))) { if (cronExpression && !(error instanceof Error && error.message.includes('Invalid cron'))) {
void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger) void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger)
@@ -131,7 +132,7 @@ export function generateCronTasks(config: Config): void {
}, 'Failed to re-queue cron job after execution failure') }, 'Failed to re-queue cron job after execution failure')
}) })
} }
return { return {
output: { output: {
error: error instanceof Error ? error.message : 'Unknown error', error: error instanceof Error ? error.message : 'Unknown error',
@@ -142,16 +143,16 @@ export function generateCronTasks(config: Config): void {
} }
} }
} }
// Add the cron task to config if not already present // Add the cron task to config if not already present
if (!config.jobs) { if (!config.jobs) {
config.jobs = { tasks: [] } config.jobs = { tasks: [] }
} }
if (!config.jobs.tasks) { if (!config.jobs.tasks) {
config.jobs.tasks = [] config.jobs.tasks = []
} }
if (!config.jobs.tasks.find(task => task.slug === cronTask.slug)) { if (!config.jobs.tasks.find(task => task.slug === cronTask.slug)) {
logger.debug(`Registering cron executor task: ${cronTask.slug}`) logger.debug(`Registering cron executor task: ${cronTask.slug}`)
config.jobs.tasks.push(cronTask) config.jobs.tasks.push(cronTask)
@@ -177,19 +178,19 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger
} }
} }
}) })
logger.info(`Found ${workflows.docs.length} workflows with cron triggers`) logger.info(`Found ${workflows.docs.length} workflows with cron triggers`)
for (const workflow of workflows.docs) { for (const workflow of workflows.docs) {
const triggers = workflow.triggers as Array<{ const triggers = workflow.triggers as Array<{
cronExpression?: string cronExpression?: string
timezone?: string timezone?: string
type: string type: string
}> }>
// Find all cron triggers for this workflow // Find all cron triggers for this workflow
const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || [] const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || []
for (const trigger of cronTriggers) { for (const trigger of cronTriggers) {
if (trigger.cronExpression) { if (trigger.cronExpression) {
try { try {
@@ -202,7 +203,7 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger
}, 'Invalid cron expression format') }, 'Invalid cron expression format')
continue continue
} }
// Validate timezone if provided // Validate timezone if provided
if (trigger.timezone) { if (trigger.timezone) {
try { try {
@@ -217,17 +218,17 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger
continue continue
} }
} }
// Calculate next execution time // Calculate next execution time
const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone) const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone)
// Queue the job // Queue the job
await payload.jobs.queue({ await payload.jobs.queue({
input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId: workflow.id }, input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId: workflow.id },
task: 'workflow-cron-executor', task: 'workflow-cron-executor',
waitUntil: nextExecution waitUntil: nextExecution
}) })
logger.info({ logger.info({
cronExpression: trigger.cronExpression, cronExpression: trigger.cronExpression,
nextExecution: nextExecution.toISOString(), nextExecution: nextExecution.toISOString(),
@@ -276,37 +277,37 @@ function getNextCronTime(cronExpression: string, timezone?: string): Date {
const now = new Date() const now = new Date()
const options: { timezone?: string } = timezone ? { timezone } : {} const options: { timezone?: string } = timezone ? { timezone } : {}
// Create a task to find the next execution time // Create a task to find the next execution time
const task = cron.schedule(cronExpression, () => {}, { const task = cron.schedule(cronExpression, () => {}, {
...options ...options
}) })
// Parse cron expression parts // Parse cron expression parts
const cronParts = cronExpression.trim().split(/\s+/) const cronParts = cronExpression.trim().split(/\s+/)
if (cronParts.length !== 5) { if (cronParts.length !== 5) {
void task.destroy() void task.destroy()
throw new Error(`Invalid cron format: ${cronExpression}. Expected 5 parts.`) throw new Error(`Invalid cron format: ${cronExpression}. Expected 5 parts.`)
} }
const [minutePart, hourPart, dayPart, monthPart, weekdayPart] = cronParts const [minutePart, hourPart, dayPart, monthPart, weekdayPart] = cronParts
// Calculate next execution with proper lookahead for any schedule frequency // Calculate next execution with proper lookahead for any schedule frequency
// Start from next minute and look ahead systematically // Start from next minute and look ahead systematically
let testTime = new Date(now.getTime() + 60 * 1000) // Start 1 minute from now let testTime = new Date(now.getTime() + 60 * 1000) // Start 1 minute from now
testTime.setSeconds(0, 0) // Reset seconds and milliseconds testTime.setSeconds(0, 0) // Reset seconds and milliseconds
// Maximum iterations to prevent infinite loops (covers ~2 years) // Maximum iterations to prevent infinite loops (covers ~2 years)
const maxIterations = 2 * 365 * 24 * 60 // 2 years worth of minutes const maxIterations = 2 * 365 * 24 * 60 // 2 years worth of minutes
let iterations = 0 let iterations = 0
while (iterations < maxIterations) { while (iterations < maxIterations) {
const minute = testTime.getMinutes() const minute = testTime.getMinutes()
const hour = testTime.getHours() const hour = testTime.getHours()
const dayOfMonth = testTime.getDate() const dayOfMonth = testTime.getDate()
const month = testTime.getMonth() + 1 const month = testTime.getMonth() + 1
const dayOfWeek = testTime.getDay() const dayOfWeek = testTime.getDay()
if (matchesCronPart(minute, minutePart) && if (matchesCronPart(minute, minutePart) &&
matchesCronPart(hour, hourPart) && matchesCronPart(hour, hourPart) &&
matchesCronPart(dayOfMonth, dayPart) && matchesCronPart(dayOfMonth, dayPart) &&
@@ -315,12 +316,12 @@ function getNextCronTime(cronExpression: string, timezone?: string): Date {
void task.destroy() void task.destroy()
return testTime return testTime
} }
// Increment time intelligently based on cron pattern // Increment time intelligently based on cron pattern
testTime = incrementTimeForCronPattern(testTime, cronParts) testTime = incrementTimeForCronPattern(testTime, cronParts)
iterations++ iterations++
} }
void task.destroy() void task.destroy()
throw new Error(`Could not calculate next execution time for cron expression: ${cronExpression} within reasonable timeframe`) throw new Error(`Could not calculate next execution time for cron expression: ${cronExpression} within reasonable timeframe`)
} }
@@ -331,7 +332,7 @@ function getNextCronTime(cronExpression: string, timezone?: string): Date {
function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Date { function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Date {
const [minutePart, hourPart, _dayPart, _monthPart, _weekdayPart] = cronParts const [minutePart, hourPart, _dayPart, _monthPart, _weekdayPart] = cronParts
const nextTime = new Date(currentTime) const nextTime = new Date(currentTime)
// If minute is specific (not wildcard), we can jump to next hour // If minute is specific (not wildcard), we can jump to next hour
if (minutePart !== '*' && !minutePart.includes('/')) { if (minutePart !== '*' && !minutePart.includes('/')) {
const targetMinute = getNextValidCronValue(currentTime.getMinutes(), minutePart) const targetMinute = getNextValidCronValue(currentTime.getMinutes(), minutePart)
@@ -343,7 +344,7 @@ function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Da
} }
return nextTime return nextTime
} }
// If hour is specific and we're past it, jump to next day // If hour is specific and we're past it, jump to next day
if (hourPart !== '*' && !hourPart.includes('/')) { if (hourPart !== '*' && !hourPart.includes('/')) {
const targetHour = getNextValidCronValue(currentTime.getHours(), hourPart) const targetHour = getNextValidCronValue(currentTime.getHours(), hourPart)
@@ -356,7 +357,7 @@ function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Da
} }
return nextTime return nextTime
} }
// Default: increment by 1 minute // Default: increment by 1 minute
nextTime.setTime(nextTime.getTime() + 60 * 1000) nextTime.setTime(nextTime.getTime() + 60 * 1000)
return nextTime return nextTime
@@ -367,7 +368,7 @@ function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Da
*/ */
function getNextValidCronValue(currentValue: number, cronPart: string): number { function getNextValidCronValue(currentValue: number, cronPart: string): number {
if (cronPart === '*') {return currentValue + 1} if (cronPart === '*') {return currentValue + 1}
// Handle specific values and ranges // Handle specific values and ranges
const values = parseCronPart(cronPart) const values = parseCronPart(cronPart)
return values.find(v => v > currentValue) || values[0] return values.find(v => v > currentValue) || values[0]
@@ -378,9 +379,9 @@ function getNextValidCronValue(currentValue: number, cronPart: string): number {
*/ */
function parseCronPart(cronPart: string): number[] { function parseCronPart(cronPart: string): number[] {
if (cronPart === '*') {return []} if (cronPart === '*') {return []}
const values: number[] = [] const values: number[] = []
// Handle comma-separated values // Handle comma-separated values
if (cronPart.includes(',')) { if (cronPart.includes(',')) {
cronPart.split(',').forEach(part => { cronPart.split(',').forEach(part => {
@@ -388,7 +389,7 @@ function parseCronPart(cronPart: string): number[] {
}) })
return values.sort((a, b) => a - b) return values.sort((a, b) => a - b)
} }
// Handle ranges // Handle ranges
if (cronPart.includes('-')) { if (cronPart.includes('-')) {
const [start, end] = cronPart.split('-').map(n => parseInt(n, 10)) const [start, end] = cronPart.split('-').map(n => parseInt(n, 10))
@@ -397,21 +398,21 @@ function parseCronPart(cronPart: string): number[] {
} }
return values return values
} }
// Handle step values // Handle step values
if (cronPart.includes('/')) { if (cronPart.includes('/')) {
const [range, step] = cronPart.split('/') const [range, step] = cronPart.split('/')
const stepNum = parseInt(step, 10) const stepNum = parseInt(step, 10)
if (range === '*') { if (range === '*') {
// For wildcards with steps, return empty - handled elsewhere // For wildcards with steps, return empty - handled elsewhere
return [] return []
} }
const baseValues = parseCronPart(range) const baseValues = parseCronPart(range)
return baseValues.filter((_, index) => index % stepNum === 0) return baseValues.filter((_, index) => index % stepNum === 0)
} }
// Single value // Single value
values.push(parseInt(cronPart, 10)) values.push(parseInt(cronPart, 10))
return values return values
@@ -422,29 +423,29 @@ function parseCronPart(cronPart: string): number[] {
*/ */
function matchesCronPart(value: number, cronPart: string): boolean { function matchesCronPart(value: number, cronPart: string): boolean {
if (cronPart === '*') {return true} if (cronPart === '*') {return true}
// Handle step values (e.g., */5) // Handle step values (e.g., */5)
if (cronPart.includes('/')) { if (cronPart.includes('/')) {
const [range, step] = cronPart.split('/') const [range, step] = cronPart.split('/')
const stepNum = parseInt(step, 10) const stepNum = parseInt(step, 10)
if (range === '*') { if (range === '*') {
return value % stepNum === 0 return value % stepNum === 0
} }
} }
// Handle ranges (e.g., 1-5) // Handle ranges (e.g., 1-5)
if (cronPart.includes('-')) { if (cronPart.includes('-')) {
const [start, end] = cronPart.split('-').map(n => parseInt(n, 10)) const [start, end] = cronPart.split('-').map(n => parseInt(n, 10))
return value >= start && value <= end return value >= start && value <= end
} }
// Handle comma-separated values (e.g., 1,3,5) // Handle comma-separated values (e.g., 1,3,5)
if (cronPart.includes(',')) { if (cronPart.includes(',')) {
const values = cronPart.split(',').map(n => parseInt(n, 10)) const values = cronPart.split(',').map(n => parseInt(n, 10))
return values.includes(value) return values.includes(value)
} }
// Handle single value // Handle single value
const cronValue = parseInt(cronPart, 10) const cronValue = parseInt(cronPart, 10)
return value === cronValue return value === cronValue
@@ -468,7 +469,7 @@ export async function requeueCronJob(
task: 'workflow-cron-executor', task: 'workflow-cron-executor',
waitUntil: getNextCronTime(cronExpression, timezone) waitUntil: getNextCronTime(cronExpression, timezone)
}) })
logger.debug({ logger.debug({
nextRun: getNextCronTime(cronExpression, timezone), nextRun: getNextCronTime(cronExpression, timezone),
timezone: timezone || 'UTC', timezone: timezone || 'UTC',
@@ -487,41 +488,41 @@ export async function requeueCronJob(
*/ */
export async function updateWorkflowCronJobs( export async function updateWorkflowCronJobs(
workflowId: string, workflowId: string,
payload: Payload, payload: Payload,
logger: Payload['logger'] logger: Payload['logger']
): Promise<void> { ): Promise<void> {
try { try {
// First, cancel any existing cron jobs for this workflow // First, cancel any existing cron jobs for this workflow
cancelWorkflowCronJobs(workflowId, payload, logger) cancelWorkflowCronJobs(workflowId, payload, logger)
// Get the workflow // Get the workflow
const workflow = await payload.findByID({ const workflow = await payload.findByID({
id: workflowId, id: workflowId,
collection: 'workflows', collection: 'workflows',
depth: 0 depth: 0
}) })
if (!workflow) { if (!workflow) {
logger.warn({ workflowId }, 'Workflow not found for cron job update') logger.warn({ workflowId }, 'Workflow not found for cron job update')
return return
} }
const triggers = workflow.triggers as Array<{ const triggers = workflow.triggers as Array<{
cronExpression?: string cronExpression?: string
timezone?: string timezone?: string
type: string type: string
}> }>
// Find all cron triggers for this workflow // Find all cron triggers for this workflow
const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || [] const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || []
if (cronTriggers.length === 0) { if (cronTriggers.length === 0) {
logger.debug({ workflowId }, 'No cron triggers found for workflow') logger.debug({ workflowId }, 'No cron triggers found for workflow')
return return
} }
let scheduledJobs = 0 let scheduledJobs = 0
for (const trigger of cronTriggers) { for (const trigger of cronTriggers) {
if (trigger.cronExpression) { if (trigger.cronExpression) {
try { try {
@@ -534,7 +535,7 @@ export async function updateWorkflowCronJobs(
}, 'Invalid cron expression format') }, 'Invalid cron expression format')
continue continue
} }
// Validate timezone if provided // Validate timezone if provided
if (trigger.timezone) { if (trigger.timezone) {
try { try {
@@ -548,19 +549,19 @@ export async function updateWorkflowCronJobs(
continue continue
} }
} }
// Calculate next execution time // Calculate next execution time
const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone) const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone)
// Queue the job // Queue the job
await payload.jobs.queue({ await payload.jobs.queue({
input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId }, input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId },
task: 'workflow-cron-executor', task: 'workflow-cron-executor',
waitUntil: nextExecution waitUntil: nextExecution
}) })
scheduledJobs++ scheduledJobs++
logger.info({ logger.info({
cronExpression: trigger.cronExpression, cronExpression: trigger.cronExpression,
nextExecution: nextExecution.toISOString(), nextExecution: nextExecution.toISOString(),
@@ -579,7 +580,7 @@ export async function updateWorkflowCronJobs(
} }
} }
} }
if (scheduledJobs > 0) { if (scheduledJobs > 0) {
logger.info({ scheduledJobs, workflowId }, 'Updated cron jobs for workflow') logger.info({ scheduledJobs, workflowId }, 'Updated cron jobs for workflow')
} }

View File

@@ -27,6 +27,9 @@ const applyCollectionsConfig = <T extends string>(pluginOptions: WorkflowsPlugin
) )
} }
// Track if hooks have been initialized to prevent double registration
let hooksInitialized = false
export const workflowsPlugin = export const workflowsPlugin =
<TSlug extends string>(pluginOptions: WorkflowsPluginConfig<TSlug>) => <TSlug extends string>(pluginOptions: WorkflowsPluginConfig<TSlug>) =>
(config: Config): Config => { (config: Config): Config => {
@@ -42,6 +45,7 @@ export const workflowsPlugin =
} }
const configLogger = getConfigLogger() const configLogger = getConfigLogger()
configLogger.info(`Configuring workflow plugin with ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers`)
// Generate cron tasks for workflows with cron triggers // Generate cron tasks for workflows with cron triggers
generateCronTasks(config) generateCronTasks(config)
@@ -61,27 +65,49 @@ export const workflowsPlugin =
// Set up onInit to register collection hooks and initialize features // Set up onInit to register collection hooks and initialize features
const incomingOnInit = config.onInit const incomingOnInit = config.onInit
config.onInit = async (payload) => { config.onInit = async (payload) => {
configLogger.info(`onInit called - hooks already initialized: ${hooksInitialized}, collections: ${Object.keys(payload.collections).length}`)
// Prevent double initialization in dev mode
if (hooksInitialized) {
configLogger.warn('Hooks already initialized, skipping to prevent duplicate registration')
return
}
// Execute any existing onInit functions first // Execute any existing onInit functions first
if (incomingOnInit) { if (incomingOnInit) {
configLogger.debug('Executing existing onInit function')
await incomingOnInit(payload) await incomingOnInit(payload)
} }
// Initialize the logger with the payload instance // Initialize the logger with the payload instance
const logger = initializeLogger(payload) const logger = initializeLogger(payload)
logger.info('Logger initialized with payload instance')
// Log collection trigger configuration
logger.info(`Plugin configuration: ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers, ${pluginOptions.steps?.length || 0} steps`)
// Create workflow executor instance // Create workflow executor instance
const executor = new WorkflowExecutor(payload, logger) const executor = new WorkflowExecutor(payload, logger)
// Initialize hooks // Initialize hooks
logger.info('Initializing collection hooks...')
initCollectionHooks(pluginOptions, payload, logger, executor) initCollectionHooks(pluginOptions, payload, logger, executor)
logger.info('Initializing global hooks...')
initGlobalHooks(payload, logger, executor) initGlobalHooks(payload, logger, executor)
logger.info('Initializing workflow hooks...')
initWorkflowHooks(payload, logger) initWorkflowHooks(payload, logger)
logger.info('Initializing step tasks...')
initStepTasks(pluginOptions, payload, logger) initStepTasks(pluginOptions, payload, logger)
// Register cron jobs for workflows with cron triggers // Register cron jobs for workflows with cron triggers
logger.info('Registering cron jobs...')
await registerCronJobs(payload, logger) await registerCronJobs(payload, logger)
logger.info('Plugin initialized successfully') logger.info('Plugin initialized successfully - all hooks registered')
hooksInitialized = true
} }
return config return config

View File

@@ -5,10 +5,21 @@ import type { WorkflowExecutor } from "../core/workflow-executor.js"
import type {CollectionTriggerConfigCrud, WorkflowsPluginConfig} from "./config-types.js" import type {CollectionTriggerConfigCrud, WorkflowsPluginConfig} from "./config-types.js"
export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPluginConfig<T>, payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) { export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPluginConfig<T>, payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) {
if (!pluginOptions.collectionTriggers || Object.keys(pluginOptions.collectionTriggers).length === 0) {
logger.warn('No collection triggers configured in plugin options')
return
}
logger.info({
configuredCollections: Object.keys(pluginOptions.collectionTriggers),
availableCollections: Object.keys(payload.collections)
}, 'Starting collection hook registration')
// Add hooks to configured collections // Add hooks to configured collections
for (const [collectionSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) { for (const [collectionSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) {
if (!triggerConfig) { if (!triggerConfig) {
logger.debug({collectionSlug}, 'Skipping collection with falsy trigger config')
continue continue
} }
@@ -29,7 +40,7 @@ export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPl
collection.config.hooks.afterChange.push(async (change) => { collection.config.hooks.afterChange.push(async (change) => {
const operation = change.operation as 'create' | 'update' const operation = change.operation as 'create' | 'update'
logger.debug({ logger.debug({
collection: change.collection.slug, slug: change.collection.slug,
operation, operation,
}, 'Collection hook triggered') }, 'Collection hook triggered')
@@ -48,7 +59,7 @@ export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPl
collection.config.hooks.afterRead = collection.config.hooks.afterRead || [] collection.config.hooks.afterRead = collection.config.hooks.afterRead || []
collection.config.hooks.afterRead.push(async (change) => { collection.config.hooks.afterRead.push(async (change) => {
logger.debug({ logger.debug({
collection: change.collection.slug, slug: change.collection.slug,
operation: 'read', operation: 'read',
}, 'Collection hook triggered') }, 'Collection hook triggered')
@@ -67,7 +78,7 @@ export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPl
collection.config.hooks.afterDelete = collection.config.hooks.afterDelete || [] collection.config.hooks.afterDelete = collection.config.hooks.afterDelete || []
collection.config.hooks.afterDelete.push(async (change) => { collection.config.hooks.afterDelete.push(async (change) => {
logger.debug({ logger.debug({
collection: change.collection.slug, slug: change.collection.slug,
operation: 'delete', operation: 'delete',
}, 'Collection hook triggered') }, 'Collection hook triggered')
@@ -83,9 +94,19 @@ export function initCollectionHooks<T extends string>(pluginOptions: WorkflowsPl
} }
if (collection) { if (collection) {
logger.info({collectionSlug}, 'Collection hooks registered') logger.info({
collectionSlug,
hooksRegistered: {
afterChange: crud.update || crud.create,
afterRead: crud.read,
afterDelete: crud.delete
}
}, 'Collection hooks registered successfully')
} else { } else {
logger.warn({collectionSlug}, 'Collection not found for trigger configuration') logger.error({
collectionSlug,
availableCollections: Object.keys(payload.collections)
}, 'Collection not found for trigger configuration - check collection slug spelling')
} }
} }
} }

View File

@@ -45,7 +45,7 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'):
) )
} }
// Create workflow executor for this request // Create a workflow executor for this request
const logger = initializeLogger(req.payload) const logger = initializeLogger(req.payload)
const executor = new WorkflowExecutor(req.payload, logger) const executor = new WorkflowExecutor(req.payload, logger)
@@ -77,22 +77,33 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'):
// Check trigger condition if present // Check trigger condition if present
if (matchingTrigger?.condition) { if (matchingTrigger?.condition) {
logger.debug({
condition: matchingTrigger.condition,
path,
webhookData: JSON.stringify(webhookData).substring(0, 200),
headers: Object.keys(context.trigger.headers || {}),
workflowId: workflow.id,
workflowName: workflow.name
}, 'Evaluating webhook trigger condition')
const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context) const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context)
if (!conditionMet) { if (!conditionMet) {
logger.info({ logger.info({
condition: matchingTrigger.condition, condition: matchingTrigger.condition,
path, path,
webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200),
workflowId: workflow.id, workflowId: workflow.id,
workflowName: workflow.name workflowName: workflow.name
}, 'Webhook trigger condition not met, skipping workflow') }, 'Webhook trigger condition not met, skipping workflow')
return { status: 'skipped', workflowId: workflow.id, reason: 'Condition not met' } return { reason: 'Condition not met', status: 'skipped', workflowId: workflow.id }
} }
logger.info({ logger.info({
condition: matchingTrigger.condition, condition: matchingTrigger.condition,
path, path,
webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200),
workflowId: workflow.id, workflowId: workflow.id,
workflowName: workflow.name workflowName: workflow.name
}, 'Webhook trigger condition met') }, 'Webhook trigger condition met')
@@ -149,11 +160,11 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'):
path: `${normalizedPrefix}/:path` path: `${normalizedPrefix}/:path`
} }
// Check if webhook endpoint already exists to avoid duplicates // Check if the webhook endpoint already exists to avoid duplicates
const existingEndpoint = config.endpoints?.find(endpoint => const existingEndpoint = config.endpoints?.find(endpoint =>
endpoint.path === webhookEndpoint.path && endpoint.method === webhookEndpoint.method endpoint.path === webhookEndpoint.path && endpoint.method === webhookEndpoint.method
) )
if (!existingEndpoint) { if (!existingEndpoint) {
// Combine existing endpoints with the webhook endpoint // Combine existing endpoints with the webhook endpoint
config.endpoints = [...(config.endpoints || []), webhookEndpoint] config.endpoints = [...(config.endpoints || []), webhookEndpoint]

View File

@@ -1,25 +1,27 @@
import type { Payload } from 'payload' import type { Payload } from 'payload'
// Global logger instance - use Payload's logger type // Global logger instance - use Payload's logger type
let pluginLogger: Payload['logger'] | null = null let pluginLogger: null | Payload['logger'] = null
/** /**
* Simple config-time logger for use during plugin configuration * Simple config-time logger for use during plugin configuration
* Uses console with plugin prefix since Payload logger isn't available yet * Uses console with plugin prefix since Payload logger isn't available yet
*/ */
const configLogger = { const configLogger = {
debug: (message: string, ...args: any[]) => { debug: <T>(message: string, ...args: T[]) => {
if (process.env.NODE_ENV === 'development') { if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
console.log(`[payload-automation] ${message}`, ...args)
}
},
error: (message: string, ...args: any[]) => {
console.error(`[payload-automation] ${message}`, ...args)
},
info: (message: string, ...args: any[]) => {
console.log(`[payload-automation] ${message}`, ...args) console.log(`[payload-automation] ${message}`, ...args)
}, },
warn: (message: string, ...args: any[]) => { error: <T>(message: string, ...args: T[]) => {
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
console.error(`[payload-automation] ${message}`, ...args)
},
info: <T>(message: string, ...args: T[]) => {
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
console.log(`[payload-automation] ${message}`, ...args)
},
warn: <T>(message: string, ...args: T[]) => {
if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return}
console.warn(`[payload-automation] ${message}`, ...args) console.warn(`[payload-automation] ${message}`, ...args)
} }
} }
@@ -38,6 +40,7 @@ export function getConfigLogger() {
export function initializeLogger(payload: Payload): Payload['logger'] { export function initializeLogger(payload: Payload): Payload['logger'] {
// Create a child logger with plugin identification // Create a child logger with plugin identification
pluginLogger = payload.logger.child({ pluginLogger = payload.logger.child({
level: process.env.PAYLOAD_AUTOMATION_LOGGING || 'silent',
plugin: '@xtr-dev/payload-automation' plugin: '@xtr-dev/payload-automation'
}) })
return pluginLogger return pluginLogger

View File

@@ -5,9 +5,9 @@ export const createDocumentHandler: TaskHandler<'create-document'> = async ({ in
throw new Error('No input provided') throw new Error('No input provided')
} }
const { collection, data, draft, locale } = input const { collectionSlug, data, draft, locale } = input
if (!collection || typeof collection !== 'string') { if (!collectionSlug || typeof collectionSlug !== 'string') {
throw new Error('Collection slug is required') throw new Error('Collection slug is required')
} }
@@ -19,7 +19,7 @@ export const createDocumentHandler: TaskHandler<'create-document'> = async ({ in
const parsedData = typeof data === 'string' ? JSON.parse(data) : data const parsedData = typeof data === 'string' ? JSON.parse(data) : data
const result = await req.payload.create({ const result = await req.payload.create({
collection, collection: collectionSlug,
data: parsedData, data: parsedData,
draft: draft || false, draft: draft || false,
locale: locale || undefined, locale: locale || undefined,

View File

@@ -7,7 +7,7 @@ export const CreateDocumentStepTask = {
handler: createDocumentHandler, handler: createDocumentHandler,
inputSchema: [ inputSchema: [
{ {
name: 'collection', name: 'collectionSlug',
type: 'text', type: 'text',
admin: { admin: {
description: 'The collection slug to create a document in' description: 'The collection slug to create a document in'

View File

@@ -5,9 +5,9 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in
throw new Error('No input provided') throw new Error('No input provided')
} }
const { id, collection, where } = input const { id, collectionSlug, where } = input
if (!collection || typeof collection !== 'string') { if (!collectionSlug || typeof collectionSlug !== 'string') {
throw new Error('Collection slug is required') throw new Error('Collection slug is required')
} }
@@ -16,7 +16,7 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in
if (id) { if (id) {
const result = await req.payload.delete({ const result = await req.payload.delete({
id: id.toString(), id: id.toString(),
collection, collection: collectionSlug,
req req
}) })
@@ -38,7 +38,7 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in
// First find the documents to delete // First find the documents to delete
const toDelete = await req.payload.find({ const toDelete = await req.payload.find({
collection, collection: collectionSlug,
limit: 1000, // Set a reasonable limit limit: 1000, // Set a reasonable limit
req, req,
where: parsedWhere where: parsedWhere
@@ -49,7 +49,7 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in
for (const doc of toDelete.docs) { for (const doc of toDelete.docs) {
const result = await req.payload.delete({ const result = await req.payload.delete({
id: doc.id, id: doc.id,
collection, collection: collectionSlug,
req req
}) })
deleted.push(result) deleted.push(result)

View File

@@ -7,7 +7,7 @@ export const DeleteDocumentStepTask = {
handler: deleteDocumentHandler, handler: deleteDocumentHandler,
inputSchema: [ inputSchema: [
{ {
name: 'collection', name: 'collectionSlug',
type: 'text', type: 'text',
admin: { admin: {
description: 'The collection slug to delete from' description: 'The collection slug to delete from'

View File

@@ -5,9 +5,9 @@ export const readDocumentHandler: TaskHandler<'read-document'> = async ({ input,
throw new Error('No input provided') throw new Error('No input provided')
} }
const { id, collection, depth, limit, locale, sort, where } = input const { id, collectionSlug, depth, limit, locale, sort, where } = input
if (!collection || typeof collection !== 'string') { if (!collectionSlug || typeof collectionSlug !== 'string') {
throw new Error('Collection slug is required') throw new Error('Collection slug is required')
} }
@@ -16,7 +16,7 @@ export const readDocumentHandler: TaskHandler<'read-document'> = async ({ input,
if (id) { if (id) {
const result = await req.payload.findByID({ const result = await req.payload.findByID({
id: id.toString(), id: id.toString(),
collection, collection: collectionSlug,
depth: typeof depth === 'number' ? depth : undefined, depth: typeof depth === 'number' ? depth : undefined,
locale: locale || undefined, locale: locale || undefined,
req req
@@ -35,7 +35,7 @@ export const readDocumentHandler: TaskHandler<'read-document'> = async ({ input,
const parsedWhere = where ? (typeof where === 'string' ? JSON.parse(where) : where) : {} const parsedWhere = where ? (typeof where === 'string' ? JSON.parse(where) : where) : {}
const result = await req.payload.find({ const result = await req.payload.find({
collection, collection: collectionSlug,
depth: typeof depth === 'number' ? depth : undefined, depth: typeof depth === 'number' ? depth : undefined,
limit: typeof limit === 'number' ? limit : 10, limit: typeof limit === 'number' ? limit : 10,
locale: locale || undefined, locale: locale || undefined,

View File

@@ -7,7 +7,7 @@ export const ReadDocumentStepTask = {
handler: readDocumentHandler, handler: readDocumentHandler,
inputSchema: [ inputSchema: [
{ {
name: 'collection', name: 'collectionSlug',
type: 'text', type: 'text',
admin: { admin: {
description: 'The collection slug to read from' description: 'The collection slug to read from'

View File

@@ -5,9 +5,9 @@ export const updateDocumentHandler: TaskHandler<'update-document'> = async ({ in
throw new Error('No input provided') throw new Error('No input provided')
} }
const { id, collection, data, draft, locale } = input const { id, collectionSlug, data, draft, locale } = input
if (!collection || typeof collection !== 'string') { if (!collectionSlug || typeof collectionSlug !== 'string') {
throw new Error('Collection slug is required') throw new Error('Collection slug is required')
} }
@@ -24,7 +24,7 @@ export const updateDocumentHandler: TaskHandler<'update-document'> = async ({ in
const result = await req.payload.update({ const result = await req.payload.update({
id: id.toString(), id: id.toString(),
collection, collection: collectionSlug,
data: parsedData, data: parsedData,
draft: draft || false, draft: draft || false,
locale: locale || undefined, locale: locale || undefined,

View File

@@ -7,7 +7,7 @@ export const UpdateDocumentStepTask = {
handler: updateDocumentHandler, handler: updateDocumentHandler,
inputSchema: [ inputSchema: [
{ {
name: 'collection', name: 'collectionSlug',
type: 'text', type: 'text',
admin: { admin: {
description: 'The collection slug to update a document in' description: 'The collection slug to update a document in'