diff --git a/.env.local b/.env.local new file mode 100644 index 0000000..49e7b5f --- /dev/null +++ b/.env.local @@ -0,0 +1,2 @@ +PAYLOAD_AUTOMATION_LOGGING=debug +PAYLOAD_AUTOMATION_CONFIG_LOGGING=true diff --git a/package.json b/package.json index fc700a4..72e1d25 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@xtr-dev/payload-automation", - "version": "0.0.1", + "version": "0.0.7", "description": "PayloadCMS Automation Plugin - Comprehensive workflow automation system with visual workflow building, execution tracking, and step types", "license": "MIT", "type": "module", @@ -84,7 +84,7 @@ "react": "19.1.0", "react-dom": "19.1.0", "rimraf": "3.0.2", - "sharp": "0.34.2", + "sharp": "0.34.3", "typescript": "5.7.3", "vitest": "^3.1.2" }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c91f92c..2c0d391 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -29,7 +29,7 @@ importers: version: 3.45.0(@types/pg@8.10.2)(payload@3.45.0(graphql@16.11.0)(typescript@5.7.3))(pg@8.16.3) '@payloadcms/eslint-config': specifier: 3.9.0 - version: 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1) + version: 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1) '@payloadcms/next': specifier: 3.45.0 version: 3.45.0(@types/react@19.1.8)(graphql@16.11.0)(monaco-editor@0.52.2)(next@15.4.4(@playwright/test@1.55.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(sass@1.77.4))(payload@3.45.0(graphql@16.11.0)(typescript@5.7.3))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(typescript@5.7.3) @@ -88,8 +88,8 @@ importers: specifier: 3.0.2 version: 3.0.2 sharp: - specifier: 0.34.2 - version: 0.34.2 + specifier: 0.34.3 + version: 0.34.3 typescript: specifier: 5.7.3 version: 5.7.3 @@ -812,138 +812,69 @@ packages: resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} engines: {node: '>=18.18'} - '@img/sharp-darwin-arm64@0.34.2': - resolution: {integrity: sha512-OfXHZPppddivUJnqyKoi5YVeHRkkNE2zUFT2gbpKxp/JZCFYEYubnMg+gOp6lWfasPrTS+KPosKqdI+ELYVDtg==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [arm64] - os: [darwin] - '@img/sharp-darwin-arm64@0.34.3': resolution: {integrity: sha512-ryFMfvxxpQRsgZJqBd4wsttYQbCxsJksrv9Lw/v798JcQ8+w84mBWuXwl+TT0WJ/WrYOLaYpwQXi3sA9nTIaIg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [darwin] - '@img/sharp-darwin-x64@0.34.2': - resolution: {integrity: sha512-dYvWqmjU9VxqXmjEtjmvHnGqF8GrVjM2Epj9rJ6BUIXvk8slvNDJbhGFvIoXzkDhrJC2jUxNLz/GUjjvSzfw+g==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [x64] - os: [darwin] - '@img/sharp-darwin-x64@0.34.3': resolution: {integrity: sha512-yHpJYynROAj12TA6qil58hmPmAwxKKC7reUqtGLzsOHfP7/rniNGTL8tjWX6L3CTV4+5P4ypcS7Pp+7OB+8ihA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [darwin] - '@img/sharp-libvips-darwin-arm64@1.1.0': - resolution: {integrity: sha512-HZ/JUmPwrJSoM4DIQPv/BfNh9yrOA8tlBbqbLz4JZ5uew2+o22Ik+tHQJcih7QJuSa0zo5coHTfD5J8inqj9DA==} - cpu: [arm64] - os: [darwin] - '@img/sharp-libvips-darwin-arm64@1.2.0': resolution: {integrity: sha512-sBZmpwmxqwlqG9ueWFXtockhsxefaV6O84BMOrhtg/YqbTaRdqDE7hxraVE3y6gVM4eExmfzW4a8el9ArLeEiQ==} cpu: [arm64] os: [darwin] - '@img/sharp-libvips-darwin-x64@1.1.0': - resolution: {integrity: 
sha512-Xzc2ToEmHN+hfvsl9wja0RlnXEgpKNmftriQp6XzY/RaSfwD9th+MSh0WQKzUreLKKINb3afirxW7A0fz2YWuQ==} - cpu: [x64] - os: [darwin] - '@img/sharp-libvips-darwin-x64@1.2.0': resolution: {integrity: sha512-M64XVuL94OgiNHa5/m2YvEQI5q2cl9d/wk0qFTDVXcYzi43lxuiFTftMR1tOnFQovVXNZJ5TURSDK2pNe9Yzqg==} cpu: [x64] os: [darwin] - '@img/sharp-libvips-linux-arm64@1.1.0': - resolution: {integrity: sha512-IVfGJa7gjChDET1dK9SekxFFdflarnUB8PwW8aGwEoF3oAsSDuNUTYS+SKDOyOJxQyDC1aPFMuRYLoDInyV9Ew==} - cpu: [arm64] - os: [linux] - '@img/sharp-libvips-linux-arm64@1.2.0': resolution: {integrity: sha512-RXwd0CgG+uPRX5YYrkzKyalt2OJYRiJQ8ED/fi1tq9WQW2jsQIn0tqrlR5l5dr/rjqq6AHAxURhj2DVjyQWSOA==} cpu: [arm64] os: [linux] - '@img/sharp-libvips-linux-arm@1.1.0': - resolution: {integrity: sha512-s8BAd0lwUIvYCJyRdFqvsj+BJIpDBSxs6ivrOPm/R7piTs5UIwY5OjXrP2bqXC9/moGsyRa37eYWYCOGVXxVrA==} - cpu: [arm] - os: [linux] - '@img/sharp-libvips-linux-arm@1.2.0': resolution: {integrity: sha512-mWd2uWvDtL/nvIzThLq3fr2nnGfyr/XMXlq8ZJ9WMR6PXijHlC3ksp0IpuhK6bougvQrchUAfzRLnbsen0Cqvw==} cpu: [arm] os: [linux] - '@img/sharp-libvips-linux-ppc64@1.1.0': - resolution: {integrity: sha512-tiXxFZFbhnkWE2LA8oQj7KYR+bWBkiV2nilRldT7bqoEZ4HiDOcePr9wVDAZPi/Id5fT1oY9iGnDq20cwUz8lQ==} - cpu: [ppc64] - os: [linux] - '@img/sharp-libvips-linux-ppc64@1.2.0': resolution: {integrity: sha512-Xod/7KaDDHkYu2phxxfeEPXfVXFKx70EAFZ0qyUdOjCcxbjqyJOEUpDe6RIyaunGxT34Anf9ue/wuWOqBW2WcQ==} cpu: [ppc64] os: [linux] - '@img/sharp-libvips-linux-s390x@1.1.0': - resolution: {integrity: sha512-xukSwvhguw7COyzvmjydRb3x/09+21HykyapcZchiCUkTThEQEOMtBj9UhkaBRLuBrgLFzQ2wbxdeCCJW/jgJA==} - cpu: [s390x] - os: [linux] - '@img/sharp-libvips-linux-s390x@1.2.0': resolution: {integrity: sha512-eMKfzDxLGT8mnmPJTNMcjfO33fLiTDsrMlUVcp6b96ETbnJmd4uvZxVJSKPQfS+odwfVaGifhsB07J1LynFehw==} cpu: [s390x] os: [linux] - '@img/sharp-libvips-linux-x64@1.1.0': - resolution: {integrity: sha512-yRj2+reB8iMg9W5sULM3S74jVS7zqSzHG3Ol/twnAAkAhnGQnpjj6e4ayUz7V+FpKypwgs82xbRdYtchTTUB+Q==} - cpu: [x64] - os: [linux] - '@img/sharp-libvips-linux-x64@1.2.0': resolution: {integrity: sha512-ZW3FPWIc7K1sH9E3nxIGB3y3dZkpJlMnkk7z5tu1nSkBoCgw2nSRTFHI5pB/3CQaJM0pdzMF3paf9ckKMSE9Tg==} cpu: [x64] os: [linux] - '@img/sharp-libvips-linuxmusl-arm64@1.1.0': - resolution: {integrity: sha512-jYZdG+whg0MDK+q2COKbYidaqW/WTz0cc1E+tMAusiDygrM4ypmSCjOJPmFTvHHJ8j/6cAGyeDWZOsK06tP33w==} - cpu: [arm64] - os: [linux] - '@img/sharp-libvips-linuxmusl-arm64@1.2.0': resolution: {integrity: sha512-UG+LqQJbf5VJ8NWJ5Z3tdIe/HXjuIdo4JeVNADXBFuG7z9zjoegpzzGIyV5zQKi4zaJjnAd2+g2nna8TZvuW9Q==} cpu: [arm64] os: [linux] - '@img/sharp-libvips-linuxmusl-x64@1.1.0': - resolution: {integrity: sha512-wK7SBdwrAiycjXdkPnGCPLjYb9lD4l6Ze2gSdAGVZrEL05AOUJESWU2lhlC+Ffn5/G+VKuSm6zzbQSzFX/P65A==} - cpu: [x64] - os: [linux] - '@img/sharp-libvips-linuxmusl-x64@1.2.0': resolution: {integrity: sha512-SRYOLR7CXPgNze8akZwjoGBoN1ThNZoqpOgfnOxmWsklTGVfJiGJoC/Lod7aNMGA1jSsKWM1+HRX43OP6p9+6Q==} cpu: [x64] os: [linux] - '@img/sharp-linux-arm64@0.34.2': - resolution: {integrity: sha512-D8n8wgWmPDakc83LORcfJepdOSN6MvWNzzz2ux0MnIbOqdieRZwVYY32zxVx+IFUT8er5KPcyU3XXsn+GzG/0Q==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [arm64] - os: [linux] - '@img/sharp-linux-arm64@0.34.3': resolution: {integrity: sha512-QdrKe3EvQrqwkDrtuTIjI0bu6YEJHTgEeqdzI3uWJOH6G1O8Nl1iEeVYRGdj1h5I21CqxSvQp1Yv7xeU3ZewbA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] - '@img/sharp-linux-arm@0.34.2': - resolution: {integrity: 
sha512-0DZzkvuEOqQUP9mo2kjjKNok5AmnOr1jB2XYjkaoNRwpAYMDzRmAqUIa1nRi58S2WswqSfPOWLNOr0FDT3H5RQ==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [arm] - os: [linux] - '@img/sharp-linux-arm@0.34.3': resolution: {integrity: sha512-oBK9l+h6KBN0i3dC8rYntLiVfW8D8wH+NPNT3O/WBHeW0OQWCjfWksLUaPidsrDKpJgXp3G3/hkmhptAW0I3+A==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} @@ -956,94 +887,47 @@ packages: cpu: [ppc64] os: [linux] - '@img/sharp-linux-s390x@0.34.2': - resolution: {integrity: sha512-EGZ1xwhBI7dNISwxjChqBGELCWMGDvmxZXKjQRuqMrakhO8QoMgqCrdjnAqJq/CScxfRn+Bb7suXBElKQpPDiw==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [s390x] - os: [linux] - '@img/sharp-linux-s390x@0.34.3': resolution: {integrity: sha512-3gahT+A6c4cdc2edhsLHmIOXMb17ltffJlxR0aC2VPZfwKoTGZec6u5GrFgdR7ciJSsHT27BD3TIuGcuRT0KmQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [s390x] os: [linux] - '@img/sharp-linux-x64@0.34.2': - resolution: {integrity: sha512-sD7J+h5nFLMMmOXYH4DD9UtSNBD05tWSSdWAcEyzqW8Cn5UxXvsHAxmxSesYUsTOBmUnjtxghKDl15EvfqLFbQ==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [x64] - os: [linux] - '@img/sharp-linux-x64@0.34.3': resolution: {integrity: sha512-8kYso8d806ypnSq3/Ly0QEw90V5ZoHh10yH0HnrzOCr6DKAPI6QVHvwleqMkVQ0m+fc7EH8ah0BB0QPuWY6zJQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] - '@img/sharp-linuxmusl-arm64@0.34.2': - resolution: {integrity: sha512-NEE2vQ6wcxYav1/A22OOxoSOGiKnNmDzCYFOZ949xFmrWZOVII1Bp3NqVVpvj+3UeHMFyN5eP/V5hzViQ5CZNA==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [arm64] - os: [linux] - '@img/sharp-linuxmusl-arm64@0.34.3': resolution: {integrity: sha512-vAjbHDlr4izEiXM1OTggpCcPg9tn4YriK5vAjowJsHwdBIdx0fYRsURkxLG2RLm9gyBq66gwtWI8Gx0/ov+JKQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] - '@img/sharp-linuxmusl-x64@0.34.2': - resolution: {integrity: sha512-DOYMrDm5E6/8bm/yQLCWyuDJwUnlevR8xtF8bs+gjZ7cyUNYXiSf/E8Kp0Ss5xasIaXSHzb888V1BE4i1hFhAA==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [x64] - os: [linux] - '@img/sharp-linuxmusl-x64@0.34.3': resolution: {integrity: sha512-gCWUn9547K5bwvOn9l5XGAEjVTTRji4aPTqLzGXHvIr6bIDZKNTA34seMPgM0WmSf+RYBH411VavCejp3PkOeQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] - '@img/sharp-wasm32@0.34.2': - resolution: {integrity: sha512-/VI4mdlJ9zkaq53MbIG6rZY+QRN3MLbR6usYlgITEzi4Rpx5S6LFKsycOQjkOGmqTNmkIdLjEvooFKwww6OpdQ==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [wasm32] - '@img/sharp-wasm32@0.34.3': resolution: {integrity: sha512-+CyRcpagHMGteySaWos8IbnXcHgfDn7pO2fiC2slJxvNq9gDipYBN42/RagzctVRKgxATmfqOSulgZv5e1RdMg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [wasm32] - '@img/sharp-win32-arm64@0.34.2': - resolution: {integrity: sha512-cfP/r9FdS63VA5k0xiqaNaEoGxBg9k7uE+RQGzuK9fHt7jib4zAVVseR9LsE4gJcNWgT6APKMNnCcnyOtmSEUQ==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [arm64] - os: [win32] - '@img/sharp-win32-arm64@0.34.3': resolution: {integrity: sha512-MjnHPnbqMXNC2UgeLJtX4XqoVHHlZNd+nPt1kRPmj63wURegwBhZlApELdtxM2OIZDRv/DFtLcNhVbd1z8GYXQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [win32] - '@img/sharp-win32-ia32@0.34.2': - resolution: {integrity: sha512-QLjGGvAbj0X/FXl8n1WbtQ6iVBpWU7JO94u/P2M4a8CFYsvQi4GW2mRy/JqkRx0qpBzaOdKJKw8uc930EX2AHw==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [ia32] - os: [win32] - '@img/sharp-win32-ia32@0.34.3': resolution: {integrity: 
sha512-xuCdhH44WxuXgOM714hn4amodJMZl3OEvf0GVTm0BEyMeA2to+8HEdRPShH0SLYptJY1uBw+SCFP9WVQi1Q/cw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [ia32] os: [win32] - '@img/sharp-win32-x64@0.34.2': - resolution: {integrity: sha512-aUdT6zEYtDKCaxkofmmJDJYGCf0+pJg3eU9/oBuqvEeoB9dKI6ZLc/1iLJCTuJQDO4ptntAlkUmHgGjyuobZbw==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - cpu: [x64] - os: [win32] - '@img/sharp-win32-x64@0.34.3': resolution: {integrity: sha512-OWwz05d++TxzLEv4VnsTz5CmZ6mI6S05sfQGEMrNrQcOEERbX46332IvE7pO/EUiw7jUrrS40z/M7kPyjfl04g==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} @@ -4190,10 +4074,6 @@ packages: resolution: {integrity: sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==} engines: {node: '>= 0.4'} - sharp@0.34.2: - resolution: {integrity: sha512-lszvBmB9QURERtyKT2bNmsgxXK0ShJrL/fvqlonCo7e6xBF8nT8xU6pW+PMIbLsz0RxQk3rgH9kd8UmvOzlMJg==} - engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - sharp@0.34.3: resolution: {integrity: sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} @@ -5412,95 +5292,48 @@ snapshots: '@humanwhocodes/retry@0.4.3': {} - '@img/sharp-darwin-arm64@0.34.2': - optionalDependencies: - '@img/sharp-libvips-darwin-arm64': 1.1.0 - optional: true - '@img/sharp-darwin-arm64@0.34.3': optionalDependencies: '@img/sharp-libvips-darwin-arm64': 1.2.0 optional: true - '@img/sharp-darwin-x64@0.34.2': - optionalDependencies: - '@img/sharp-libvips-darwin-x64': 1.1.0 - optional: true - '@img/sharp-darwin-x64@0.34.3': optionalDependencies: '@img/sharp-libvips-darwin-x64': 1.2.0 optional: true - '@img/sharp-libvips-darwin-arm64@1.1.0': - optional: true - '@img/sharp-libvips-darwin-arm64@1.2.0': optional: true - '@img/sharp-libvips-darwin-x64@1.1.0': - optional: true - '@img/sharp-libvips-darwin-x64@1.2.0': optional: true - '@img/sharp-libvips-linux-arm64@1.1.0': - optional: true - '@img/sharp-libvips-linux-arm64@1.2.0': optional: true - '@img/sharp-libvips-linux-arm@1.1.0': - optional: true - '@img/sharp-libvips-linux-arm@1.2.0': optional: true - '@img/sharp-libvips-linux-ppc64@1.1.0': - optional: true - '@img/sharp-libvips-linux-ppc64@1.2.0': optional: true - '@img/sharp-libvips-linux-s390x@1.1.0': - optional: true - '@img/sharp-libvips-linux-s390x@1.2.0': optional: true - '@img/sharp-libvips-linux-x64@1.1.0': - optional: true - '@img/sharp-libvips-linux-x64@1.2.0': optional: true - '@img/sharp-libvips-linuxmusl-arm64@1.1.0': - optional: true - '@img/sharp-libvips-linuxmusl-arm64@1.2.0': optional: true - '@img/sharp-libvips-linuxmusl-x64@1.1.0': - optional: true - '@img/sharp-libvips-linuxmusl-x64@1.2.0': optional: true - '@img/sharp-linux-arm64@0.34.2': - optionalDependencies: - '@img/sharp-libvips-linux-arm64': 1.1.0 - optional: true - '@img/sharp-linux-arm64@0.34.3': optionalDependencies: '@img/sharp-libvips-linux-arm64': 1.2.0 optional: true - '@img/sharp-linux-arm@0.34.2': - optionalDependencies: - '@img/sharp-libvips-linux-arm': 1.1.0 - optional: true - '@img/sharp-linux-arm@0.34.3': optionalDependencies: '@img/sharp-libvips-linux-arm': 1.2.0 @@ -5511,71 +5344,37 @@ snapshots: '@img/sharp-libvips-linux-ppc64': 1.2.0 optional: true - '@img/sharp-linux-s390x@0.34.2': - optionalDependencies: - '@img/sharp-libvips-linux-s390x': 1.1.0 - optional: true - '@img/sharp-linux-s390x@0.34.3': optionalDependencies: '@img/sharp-libvips-linux-s390x': 1.2.0 optional: true - '@img/sharp-linux-x64@0.34.2': - optionalDependencies: - 
'@img/sharp-libvips-linux-x64': 1.1.0 - optional: true - '@img/sharp-linux-x64@0.34.3': optionalDependencies: '@img/sharp-libvips-linux-x64': 1.2.0 optional: true - '@img/sharp-linuxmusl-arm64@0.34.2': - optionalDependencies: - '@img/sharp-libvips-linuxmusl-arm64': 1.1.0 - optional: true - '@img/sharp-linuxmusl-arm64@0.34.3': optionalDependencies: '@img/sharp-libvips-linuxmusl-arm64': 1.2.0 optional: true - '@img/sharp-linuxmusl-x64@0.34.2': - optionalDependencies: - '@img/sharp-libvips-linuxmusl-x64': 1.1.0 - optional: true - '@img/sharp-linuxmusl-x64@0.34.3': optionalDependencies: '@img/sharp-libvips-linuxmusl-x64': 1.2.0 optional: true - '@img/sharp-wasm32@0.34.2': - dependencies: - '@emnapi/runtime': 1.4.5 - optional: true - '@img/sharp-wasm32@0.34.3': dependencies: '@emnapi/runtime': 1.4.5 optional: true - '@img/sharp-win32-arm64@0.34.2': - optional: true - '@img/sharp-win32-arm64@0.34.3': optional: true - '@img/sharp-win32-ia32@0.34.2': - optional: true - '@img/sharp-win32-ia32@0.34.3': optional: true - '@img/sharp-win32-x64@0.34.2': - optional: true - '@img/sharp-win32-x64@0.34.3': optional: true @@ -6081,18 +5880,18 @@ snapshots: - sql.js - sqlite3 - '@payloadcms/eslint-config@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)': + '@payloadcms/eslint-config@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)': dependencies: '@eslint-react/eslint-plugin': 1.16.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@eslint/js': 9.14.0 - '@payloadcms/eslint-plugin': 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1) + '@payloadcms/eslint-plugin': 3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1) '@types/eslint': 9.6.1 '@types/eslint__js': 8.42.3 '@typescript-eslint/parser': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint: 9.14.0(jiti@2.5.1) eslint-config-prettier: 9.1.0(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-import-x: 4.4.2(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) - eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) + eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-jest-dom: 5.4.0(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-perfectionist: 3.9.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) @@ -6112,7 +5911,7 @@ snapshots: - svelte-eslint-parser - vue-eslint-parser - '@payloadcms/eslint-plugin@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)': + 
'@payloadcms/eslint-plugin@3.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(jiti@2.5.1)': dependencies: '@eslint-react/eslint-plugin': 1.16.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) '@eslint/js': 9.14.0 @@ -6122,7 +5921,7 @@ snapshots: eslint: 9.14.0(jiti@2.5.1) eslint-config-prettier: 9.1.0(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-import-x: 4.4.2(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) - eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) + eslint-plugin-jest: 28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint-plugin-jest-dom: 5.4.0(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.14.0(jiti@2.5.1)) eslint-plugin-perfectionist: 3.9.1(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) @@ -6504,7 +6303,7 @@ snapshots: dependencies: '@types/node': 22.17.2 - '@typescript-eslint/eslint-plugin@8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)': + '@typescript-eslint/eslint-plugin@8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2)': dependencies: '@eslint-community/regexpp': 4.12.1 '@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) @@ -6522,7 +6321,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)': + '@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3)': dependencies: '@eslint-community/regexpp': 4.12.1 '@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) @@ -7539,12 +7338,12 @@ snapshots: eslint: 9.14.0(jiti@2.5.1) requireindex: 1.2.0 - eslint-plugin-jest@28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2): + eslint-plugin-jest@28.9.0(@typescript-eslint/eslint-plugin@8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2): dependencies: '@typescript-eslint/utils': 8.40.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) eslint: 9.14.0(jiti@2.5.1) optionalDependencies: - '@typescript-eslint/eslint-plugin': 8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) + '@typescript-eslint/eslint-plugin': 8.40.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) transitivePeerDependencies: - supports-color - typescript @@ -9481,34 +9280,6 @@ snapshots: es-errors: 1.3.0 es-object-atoms: 1.1.1 - sharp@0.34.2: - dependencies: - color: 4.2.3 - detect-libc: 2.0.4 - semver: 7.7.2 - 
optionalDependencies: - '@img/sharp-darwin-arm64': 0.34.2 - '@img/sharp-darwin-x64': 0.34.2 - '@img/sharp-libvips-darwin-arm64': 1.1.0 - '@img/sharp-libvips-darwin-x64': 1.1.0 - '@img/sharp-libvips-linux-arm': 1.1.0 - '@img/sharp-libvips-linux-arm64': 1.1.0 - '@img/sharp-libvips-linux-ppc64': 1.1.0 - '@img/sharp-libvips-linux-s390x': 1.1.0 - '@img/sharp-libvips-linux-x64': 1.1.0 - '@img/sharp-libvips-linuxmusl-arm64': 1.1.0 - '@img/sharp-libvips-linuxmusl-x64': 1.1.0 - '@img/sharp-linux-arm': 0.34.2 - '@img/sharp-linux-arm64': 0.34.2 - '@img/sharp-linux-s390x': 0.34.2 - '@img/sharp-linux-x64': 0.34.2 - '@img/sharp-linuxmusl-arm64': 0.34.2 - '@img/sharp-linuxmusl-x64': 0.34.2 - '@img/sharp-wasm32': 0.34.2 - '@img/sharp-win32-arm64': 0.34.2 - '@img/sharp-win32-ia32': 0.34.2 - '@img/sharp-win32-x64': 0.34.2 - sharp@0.34.3: dependencies: color: 4.2.3 @@ -9537,7 +9308,6 @@ snapshots: '@img/sharp-win32-arm64': 0.34.3 '@img/sharp-win32-ia32': 0.34.3 '@img/sharp-win32-x64': 0.34.3 - optional: true shebang-command@2.0.0: dependencies: @@ -9888,8 +9658,8 @@ snapshots: typescript-eslint@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2): dependencies: - '@typescript-eslint/eslint-plugin': 8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) - '@typescript-eslint/parser': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) + '@typescript-eslint/eslint-plugin': 8.14.0(@typescript-eslint/parser@8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2))(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) + '@typescript-eslint/parser': 8.14.0(eslint@9.34.0(jiti@2.5.1))(typescript@5.7.3) '@typescript-eslint/utils': 8.14.0(eslint@9.14.0(jiti@2.5.1))(typescript@5.7.2) optionalDependencies: typescript: 5.7.2 diff --git a/src/collections/Workflow.ts b/src/collections/Workflow.ts index 4f5c4f0..225843a 100644 --- a/src/collections/Workflow.ts +++ b/src/collections/Workflow.ts @@ -52,7 +52,7 @@ export const createWorkflowCollection: (options: WorkflowsPlug ] }, { - name: 'collection', + name: 'collectionSlug', type: 'select', admin: { condition: (_, siblingData) => siblingData?.type === 'collection-trigger', diff --git a/src/core/workflow-executor.ts b/src/core/workflow-executor.ts index 4d2f9cc..787af81 100644 --- a/src/core/workflow-executor.ts +++ b/src/core/workflow-executor.ts @@ -87,12 +87,29 @@ export class WorkflowExecutor { // Check step condition if present if (step.condition) { + this.logger.debug({ + condition: step.condition, + stepName, + availableSteps: Object.keys(context.steps), + completedSteps: Object.entries(context.steps) + .filter(([_, s]) => s.state === 'succeeded') + .map(([name]) => name), + triggerType: context.trigger?.type + }, 'Evaluating step condition') + const conditionMet = this.evaluateStepCondition(step.condition, context) if (!conditionMet) { this.logger.info({ condition: step.condition, - stepName + stepName, + contextSnapshot: JSON.stringify({ + stepOutputs: Object.entries(context.steps).reduce((acc, [name, step]) => { + acc[name] = { state: step.state, hasOutput: !!step.output } + return acc + }, {} as Record), + triggerData: context.trigger?.data ? 
'present' : 'absent' + }) }, 'Step condition not met, skipping') // Mark step as completed but skipped @@ -113,7 +130,14 @@ export class WorkflowExecutor { this.logger.info({ condition: step.condition, - stepName + stepName, + contextSnapshot: JSON.stringify({ + stepOutputs: Object.entries(context.steps).reduce((acc, [name, step]) => { + acc[name] = { state: step.state, hasOutput: !!step.output } + return acc + }, {} as Record), + triggerData: context.trigger?.data ? 'present' : 'absent' + }) }, 'Step condition met, proceeding with execution') } @@ -311,26 +335,54 @@ export class WorkflowExecutor { private resolveStepInput(config: Record, context: ExecutionContext): Record { const resolved: Record = {} + this.logger.debug({ + configKeys: Object.keys(config), + contextSteps: Object.keys(context.steps), + triggerType: context.trigger?.type + }, 'Starting step input resolution') + for (const [key, value] of Object.entries(config)) { if (typeof value === 'string' && value.startsWith('$')) { // This is a JSONPath expression + this.logger.debug({ + key, + jsonPath: value, + availableSteps: Object.keys(context.steps), + hasTriggerData: !!context.trigger?.data, + hasTriggerDoc: !!context.trigger?.doc + }, 'Resolving JSONPath expression') + try { const result = JSONPath({ json: context, path: value, wrap: false }) + + this.logger.debug({ + key, + jsonPath: value, + result: JSON.stringify(result).substring(0, 200), + resultType: Array.isArray(result) ? 'array' : typeof result + }, 'JSONPath resolved successfully') + resolved[key] = result } catch (error) { this.logger.warn({ error: error instanceof Error ? error.message : 'Unknown error', key, - path: value + path: value, + contextSnapshot: JSON.stringify(context).substring(0, 500) }, 'Failed to resolve JSONPath') resolved[key] = value // Keep original value if resolution fails } } else if (typeof value === 'object' && value !== null) { // Recursively resolve nested objects + this.logger.debug({ + key, + nestedKeys: Object.keys(value as Record) + }, 'Recursively resolving nested object') + resolved[key] = this.resolveStepInput(value as Record, context) } else { // Keep literal values as-is @@ -338,6 +390,11 @@ export class WorkflowExecutor { } } + this.logger.debug({ + resolvedKeys: Object.keys(resolved), + originalKeys: Object.keys(config) + }, 'Step input resolution completed') + return resolved } @@ -377,6 +434,14 @@ export class WorkflowExecutor { * Evaluate a condition using JSONPath */ public evaluateCondition(condition: string, context: ExecutionContext): boolean { + this.logger.debug({ + condition, + contextKeys: Object.keys(context), + triggerType: context.trigger?.type, + triggerData: context.trigger?.data, + triggerDoc: context.trigger?.doc ? 'present' : 'absent' + }, 'Starting condition evaluation') + try { const result = JSONPath({ json: context, @@ -384,16 +449,33 @@ export class WorkflowExecutor { wrap: false }) + this.logger.debug({ + condition, + result, + resultType: Array.isArray(result) ? 'array' : typeof result, + resultLength: Array.isArray(result) ? 
result.length : undefined + }, 'JSONPath evaluation result') + // Handle different result types + let finalResult: boolean if (Array.isArray(result)) { - return result.length > 0 && Boolean(result[0]) + finalResult = result.length > 0 && Boolean(result[0]) + } else { + finalResult = Boolean(result) } - return Boolean(result) + this.logger.debug({ + condition, + finalResult, + originalResult: result + }, 'Condition evaluation completed') + + return finalResult } catch (error) { this.logger.warn({ condition, - error: error instanceof Error ? error.message : 'Unknown error' + error: error instanceof Error ? error.message : 'Unknown error', + errorStack: error instanceof Error ? error.stack : undefined }, 'Failed to evaluate condition') // If condition evaluation fails, assume false @@ -564,6 +646,17 @@ export class WorkflowExecutor { // Check trigger condition if present if (trigger.condition) { + this.logger.debug({ + collection, + operation, + condition: trigger.condition, + docId: (doc as any)?.id, + docFields: doc ? Object.keys(doc) : [], + previousDocId: (previousDoc as any)?.id, + workflowId: workflow.id, + workflowName: workflow.name + }, 'Evaluating collection trigger condition') + const conditionMet = this.evaluateCondition(trigger.condition, context) if (!conditionMet) { @@ -572,7 +665,8 @@ export class WorkflowExecutor { condition: trigger.condition, operation, workflowId: workflow.id, - workflowName: workflow.name + workflowName: workflow.name, + docSnapshot: JSON.stringify(doc).substring(0, 200) }, 'Trigger condition not met, skipping workflow') continue } @@ -582,7 +676,8 @@ export class WorkflowExecutor { condition: trigger.condition, operation, workflowId: workflow.id, - workflowName: workflow.name + workflowName: workflow.name, + docSnapshot: JSON.stringify(doc).substring(0, 200) }, 'Trigger condition met') } diff --git a/src/plugin/cron-scheduler.ts b/src/plugin/cron-scheduler.ts index 4cc4a28..e99e3e1 100644 --- a/src/plugin/cron-scheduler.ts +++ b/src/plugin/cron-scheduler.ts @@ -1,4 +1,5 @@ import type {Config, Payload, TaskConfig} from 'payload' + import * as cron from 'node-cron' import {type Workflow, WorkflowExecutor} from '../core/workflow-executor.js' @@ -10,20 +11,20 @@ import {getConfigLogger} from './logger.js' */ export function generateCronTasks(config: Config): void { const logger = getConfigLogger() - + // Note: We can't query the database at config time, so we'll need a different approach // We'll create a single task that handles all cron-triggered workflows const cronTask: TaskConfig = { slug: 'workflow-cron-executor', handler: async ({ input, req }) => { - const { cronExpression, timezone, workflowId } = input as { + const { cronExpression, timezone, workflowId } = input as { cronExpression?: string timezone?: string workflowId: string } - + const logger = req.payload.logger.child({ plugin: '@xtr-dev/payload-automation' }) - + try { // Get the workflow const workflow = await req.payload.findByID({ @@ -32,11 +33,11 @@ export function generateCronTasks(config: Config): void { depth: 2, req }) - + if (!workflow) { throw new Error(`Workflow ${workflowId} not found`) } - + // Create execution context for cron trigger const context = { steps: {}, @@ -46,10 +47,10 @@ export function generateCronTasks(config: Config): void { triggeredAt: new Date().toISOString() } } - + // Create executor const executor = new WorkflowExecutor(req.payload, logger) - + // Find the matching cron trigger and check its condition if present const triggers = workflow.triggers as Array<{ 
condition?: string @@ -66,7 +67,7 @@ export function generateCronTasks(config: Config): void { // Check trigger condition if present if (matchingTrigger?.condition) { const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context) - + if (!conditionMet) { logger.info({ condition: matchingTrigger.condition, @@ -74,23 +75,23 @@ export function generateCronTasks(config: Config): void { workflowId, workflowName: workflow.name }, 'Cron trigger condition not met, skipping workflow execution') - + // Re-queue for next execution but don't run workflow if (cronExpression) { void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger) } - + return { output: { executedAt: new Date().toISOString(), - status: 'skipped', reason: 'Condition not met', + status: 'skipped', workflowId }, state: 'succeeded' } } - + logger.info({ condition: matchingTrigger.condition, cronExpression, @@ -98,15 +99,15 @@ export function generateCronTasks(config: Config): void { workflowName: workflow.name }, 'Cron trigger condition met') } - + // Execute the workflow await executor.execute(workflow as Workflow, context, req) - + // Re-queue the job for the next scheduled execution if cronExpression is provided if (cronExpression) { void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger) } - + return { output: { executedAt: new Date().toISOString(), @@ -120,7 +121,7 @@ export function generateCronTasks(config: Config): void { error: error instanceof Error ? error.message : 'Unknown error', workflowId }, 'Cron job execution failed') - + // Re-queue even on failure to ensure continuity (unless it's a validation error) if (cronExpression && !(error instanceof Error && error.message.includes('Invalid cron'))) { void requeueCronJob(workflowId, cronExpression, timezone, req.payload, logger) @@ -131,7 +132,7 @@ export function generateCronTasks(config: Config): void { }, 'Failed to re-queue cron job after execution failure') }) } - + return { output: { error: error instanceof Error ? 
error.message : 'Unknown error', @@ -142,16 +143,16 @@ export function generateCronTasks(config: Config): void { } } } - + // Add the cron task to config if not already present if (!config.jobs) { config.jobs = { tasks: [] } } - + if (!config.jobs.tasks) { config.jobs.tasks = [] } - + if (!config.jobs.tasks.find(task => task.slug === cronTask.slug)) { logger.debug(`Registering cron executor task: ${cronTask.slug}`) config.jobs.tasks.push(cronTask) @@ -177,19 +178,19 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger } } }) - + logger.info(`Found ${workflows.docs.length} workflows with cron triggers`) - + for (const workflow of workflows.docs) { const triggers = workflow.triggers as Array<{ cronExpression?: string timezone?: string type: string }> - + // Find all cron triggers for this workflow const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || [] - + for (const trigger of cronTriggers) { if (trigger.cronExpression) { try { @@ -202,7 +203,7 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger }, 'Invalid cron expression format') continue } - + // Validate timezone if provided if (trigger.timezone) { try { @@ -217,17 +218,17 @@ export async function registerCronJobs(payload: Payload, logger: Payload['logger continue } } - + // Calculate next execution time const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone) - + // Queue the job await payload.jobs.queue({ input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId: workflow.id }, task: 'workflow-cron-executor', waitUntil: nextExecution }) - + logger.info({ cronExpression: trigger.cronExpression, nextExecution: nextExecution.toISOString(), @@ -276,37 +277,37 @@ function getNextCronTime(cronExpression: string, timezone?: string): Date { const now = new Date() const options: { timezone?: string } = timezone ? { timezone } : {} - + // Create a task to find the next execution time const task = cron.schedule(cronExpression, () => {}, { ...options }) - + // Parse cron expression parts const cronParts = cronExpression.trim().split(/\s+/) if (cronParts.length !== 5) { void task.destroy() throw new Error(`Invalid cron format: ${cronExpression}. 
Expected 5 parts.`) } - + const [minutePart, hourPart, dayPart, monthPart, weekdayPart] = cronParts - + // Calculate next execution with proper lookahead for any schedule frequency // Start from next minute and look ahead systematically let testTime = new Date(now.getTime() + 60 * 1000) // Start 1 minute from now testTime.setSeconds(0, 0) // Reset seconds and milliseconds - + // Maximum iterations to prevent infinite loops (covers ~2 years) const maxIterations = 2 * 365 * 24 * 60 // 2 years worth of minutes let iterations = 0 - + while (iterations < maxIterations) { const minute = testTime.getMinutes() const hour = testTime.getHours() const dayOfMonth = testTime.getDate() const month = testTime.getMonth() + 1 const dayOfWeek = testTime.getDay() - + if (matchesCronPart(minute, minutePart) && matchesCronPart(hour, hourPart) && matchesCronPart(dayOfMonth, dayPart) && @@ -315,12 +316,12 @@ function getNextCronTime(cronExpression: string, timezone?: string): Date { void task.destroy() return testTime } - + // Increment time intelligently based on cron pattern testTime = incrementTimeForCronPattern(testTime, cronParts) iterations++ } - + void task.destroy() throw new Error(`Could not calculate next execution time for cron expression: ${cronExpression} within reasonable timeframe`) } @@ -331,7 +332,7 @@ function getNextCronTime(cronExpression: string, timezone?: string): Date { function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Date { const [minutePart, hourPart, _dayPart, _monthPart, _weekdayPart] = cronParts const nextTime = new Date(currentTime) - + // If minute is specific (not wildcard), we can jump to next hour if (minutePart !== '*' && !minutePart.includes('/')) { const targetMinute = getNextValidCronValue(currentTime.getMinutes(), minutePart) @@ -343,7 +344,7 @@ function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Da } return nextTime } - + // If hour is specific and we're past it, jump to next day if (hourPart !== '*' && !hourPart.includes('/')) { const targetHour = getNextValidCronValue(currentTime.getHours(), hourPart) @@ -356,7 +357,7 @@ function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Da } return nextTime } - + // Default: increment by 1 minute nextTime.setTime(nextTime.getTime() + 60 * 1000) return nextTime @@ -367,7 +368,7 @@ function incrementTimeForCronPattern(currentTime: Date, cronParts: string[]): Da */ function getNextValidCronValue(currentValue: number, cronPart: string): number { if (cronPart === '*') {return currentValue + 1} - + // Handle specific values and ranges const values = parseCronPart(cronPart) return values.find(v => v > currentValue) || values[0] @@ -378,9 +379,9 @@ function getNextValidCronValue(currentValue: number, cronPart: string): number { */ function parseCronPart(cronPart: string): number[] { if (cronPart === '*') {return []} - + const values: number[] = [] - + // Handle comma-separated values if (cronPart.includes(',')) { cronPart.split(',').forEach(part => { @@ -388,7 +389,7 @@ function parseCronPart(cronPart: string): number[] { }) return values.sort((a, b) => a - b) } - + // Handle ranges if (cronPart.includes('-')) { const [start, end] = cronPart.split('-').map(n => parseInt(n, 10)) @@ -397,21 +398,21 @@ function parseCronPart(cronPart: string): number[] { } return values } - + // Handle step values if (cronPart.includes('/')) { const [range, step] = cronPart.split('/') const stepNum = parseInt(step, 10) - + if (range === '*') { // For wildcards with steps, return 
empty - handled elsewhere return [] } - + const baseValues = parseCronPart(range) return baseValues.filter((_, index) => index % stepNum === 0) } - + // Single value values.push(parseInt(cronPart, 10)) return values @@ -422,29 +423,29 @@ function parseCronPart(cronPart: string): number[] { */ function matchesCronPart(value: number, cronPart: string): boolean { if (cronPart === '*') {return true} - + // Handle step values (e.g., */5) if (cronPart.includes('/')) { const [range, step] = cronPart.split('/') const stepNum = parseInt(step, 10) - + if (range === '*') { return value % stepNum === 0 } } - + // Handle ranges (e.g., 1-5) if (cronPart.includes('-')) { const [start, end] = cronPart.split('-').map(n => parseInt(n, 10)) return value >= start && value <= end } - + // Handle comma-separated values (e.g., 1,3,5) if (cronPart.includes(',')) { const values = cronPart.split(',').map(n => parseInt(n, 10)) return values.includes(value) } - + // Handle single value const cronValue = parseInt(cronPart, 10) return value === cronValue @@ -468,7 +469,7 @@ export async function requeueCronJob( task: 'workflow-cron-executor', waitUntil: getNextCronTime(cronExpression, timezone) }) - + logger.debug({ nextRun: getNextCronTime(cronExpression, timezone), timezone: timezone || 'UTC', @@ -487,41 +488,41 @@ export async function requeueCronJob( */ export async function updateWorkflowCronJobs( workflowId: string, - payload: Payload, + payload: Payload, logger: Payload['logger'] ): Promise { try { // First, cancel any existing cron jobs for this workflow cancelWorkflowCronJobs(workflowId, payload, logger) - + // Get the workflow const workflow = await payload.findByID({ id: workflowId, collection: 'workflows', depth: 0 }) - + if (!workflow) { logger.warn({ workflowId }, 'Workflow not found for cron job update') return } - + const triggers = workflow.triggers as Array<{ cronExpression?: string timezone?: string type: string }> - + // Find all cron triggers for this workflow const cronTriggers = triggers?.filter(t => t.type === 'cron-trigger') || [] - + if (cronTriggers.length === 0) { logger.debug({ workflowId }, 'No cron triggers found for workflow') return } - + let scheduledJobs = 0 - + for (const trigger of cronTriggers) { if (trigger.cronExpression) { try { @@ -534,7 +535,7 @@ export async function updateWorkflowCronJobs( }, 'Invalid cron expression format') continue } - + // Validate timezone if provided if (trigger.timezone) { try { @@ -548,19 +549,19 @@ export async function updateWorkflowCronJobs( continue } } - + // Calculate next execution time const nextExecution = getNextCronTime(trigger.cronExpression, trigger.timezone) - + // Queue the job await payload.jobs.queue({ input: { cronExpression: trigger.cronExpression, timezone: trigger.timezone, workflowId }, task: 'workflow-cron-executor', waitUntil: nextExecution }) - + scheduledJobs++ - + logger.info({ cronExpression: trigger.cronExpression, nextExecution: nextExecution.toISOString(), @@ -579,7 +580,7 @@ export async function updateWorkflowCronJobs( } } } - + if (scheduledJobs > 0) { logger.info({ scheduledJobs, workflowId }, 'Updated cron jobs for workflow') } diff --git a/src/plugin/index.ts b/src/plugin/index.ts index 3eb67c6..8046424 100644 --- a/src/plugin/index.ts +++ b/src/plugin/index.ts @@ -27,6 +27,9 @@ const applyCollectionsConfig = (pluginOptions: WorkflowsPlugin ) } +// Track if hooks have been initialized to prevent double registration +let hooksInitialized = false + export const workflowsPlugin = (pluginOptions: 
WorkflowsPluginConfig) => (config: Config): Config => { @@ -42,6 +45,7 @@ export const workflowsPlugin = } const configLogger = getConfigLogger() + configLogger.info(`Configuring workflow plugin with ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers`) // Generate cron tasks for workflows with cron triggers generateCronTasks(config) @@ -61,27 +65,49 @@ export const workflowsPlugin = // Set up onInit to register collection hooks and initialize features const incomingOnInit = config.onInit config.onInit = async (payload) => { + configLogger.info(`onInit called - hooks already initialized: ${hooksInitialized}, collections: ${Object.keys(payload.collections).length}`) + + // Prevent double initialization in dev mode + if (hooksInitialized) { + configLogger.warn('Hooks already initialized, skipping to prevent duplicate registration') + return + } + // Execute any existing onInit functions first if (incomingOnInit) { + configLogger.debug('Executing existing onInit function') await incomingOnInit(payload) } // Initialize the logger with the payload instance const logger = initializeLogger(payload) + logger.info('Logger initialized with payload instance') + + // Log collection trigger configuration + logger.info(`Plugin configuration: ${Object.keys(pluginOptions.collectionTriggers || {}).length} collection triggers, ${pluginOptions.steps?.length || 0} steps`) // Create workflow executor instance const executor = new WorkflowExecutor(payload, logger) // Initialize hooks + logger.info('Initializing collection hooks...') initCollectionHooks(pluginOptions, payload, logger, executor) + + logger.info('Initializing global hooks...') initGlobalHooks(payload, logger, executor) + + logger.info('Initializing workflow hooks...') initWorkflowHooks(payload, logger) + + logger.info('Initializing step tasks...') initStepTasks(pluginOptions, payload, logger) // Register cron jobs for workflows with cron triggers + logger.info('Registering cron jobs...') await registerCronJobs(payload, logger) - logger.info('Plugin initialized successfully') + logger.info('Plugin initialized successfully - all hooks registered') + hooksInitialized = true } return config diff --git a/src/plugin/init-collection-hooks.ts b/src/plugin/init-collection-hooks.ts index 47eac15..80dc3a5 100644 --- a/src/plugin/init-collection-hooks.ts +++ b/src/plugin/init-collection-hooks.ts @@ -5,10 +5,21 @@ import type { WorkflowExecutor } from "../core/workflow-executor.js" import type {CollectionTriggerConfigCrud, WorkflowsPluginConfig} from "./config-types.js" export function initCollectionHooks(pluginOptions: WorkflowsPluginConfig, payload: Payload, logger: Payload['logger'], executor: WorkflowExecutor) { + + if (!pluginOptions.collectionTriggers || Object.keys(pluginOptions.collectionTriggers).length === 0) { + logger.warn('No collection triggers configured in plugin options') + return + } + + logger.info({ + configuredCollections: Object.keys(pluginOptions.collectionTriggers), + availableCollections: Object.keys(payload.collections) + }, 'Starting collection hook registration') // Add hooks to configured collections for (const [collectionSlug, triggerConfig] of Object.entries(pluginOptions.collectionTriggers)) { if (!triggerConfig) { + logger.debug({collectionSlug}, 'Skipping collection with falsy trigger config') continue } @@ -29,7 +40,7 @@ export function initCollectionHooks(pluginOptions: WorkflowsPl collection.config.hooks.afterChange.push(async (change) => { const operation = change.operation as 'create' | 
'update' logger.debug({ - collection: change.collection.slug, + slug: change.collection.slug, operation, }, 'Collection hook triggered') @@ -48,7 +59,7 @@ export function initCollectionHooks(pluginOptions: WorkflowsPl collection.config.hooks.afterRead = collection.config.hooks.afterRead || [] collection.config.hooks.afterRead.push(async (change) => { logger.debug({ - collection: change.collection.slug, + slug: change.collection.slug, operation: 'read', }, 'Collection hook triggered') @@ -67,7 +78,7 @@ export function initCollectionHooks(pluginOptions: WorkflowsPl collection.config.hooks.afterDelete = collection.config.hooks.afterDelete || [] collection.config.hooks.afterDelete.push(async (change) => { logger.debug({ - collection: change.collection.slug, + slug: change.collection.slug, operation: 'delete', }, 'Collection hook triggered') @@ -83,9 +94,19 @@ export function initCollectionHooks(pluginOptions: WorkflowsPl } if (collection) { - logger.info({collectionSlug}, 'Collection hooks registered') + logger.info({ + collectionSlug, + hooksRegistered: { + afterChange: crud.update || crud.create, + afterRead: crud.read, + afterDelete: crud.delete + } + }, 'Collection hooks registered successfully') } else { - logger.warn({collectionSlug}, 'Collection not found for trigger configuration') + logger.error({ + collectionSlug, + availableCollections: Object.keys(payload.collections) + }, 'Collection not found for trigger configuration - check collection slug spelling') } } } diff --git a/src/plugin/init-webhook.ts b/src/plugin/init-webhook.ts index 80d31f0..98d2a0b 100644 --- a/src/plugin/init-webhook.ts +++ b/src/plugin/init-webhook.ts @@ -45,7 +45,7 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'): ) } - // Create workflow executor for this request + // Create a workflow executor for this request const logger = initializeLogger(req.payload) const executor = new WorkflowExecutor(req.payload, logger) @@ -77,22 +77,33 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'): // Check trigger condition if present if (matchingTrigger?.condition) { + logger.debug({ + condition: matchingTrigger.condition, + path, + webhookData: JSON.stringify(webhookData).substring(0, 200), + headers: Object.keys(context.trigger.headers || {}), + workflowId: workflow.id, + workflowName: workflow.name + }, 'Evaluating webhook trigger condition') + const conditionMet = executor.evaluateCondition(matchingTrigger.condition, context) - + if (!conditionMet) { logger.info({ condition: matchingTrigger.condition, path, + webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200), workflowId: workflow.id, workflowName: workflow.name }, 'Webhook trigger condition not met, skipping workflow') - - return { status: 'skipped', workflowId: workflow.id, reason: 'Condition not met' } + + return { reason: 'Condition not met', status: 'skipped', workflowId: workflow.id } } - + logger.info({ condition: matchingTrigger.condition, path, + webhookDataSnapshot: JSON.stringify(webhookData).substring(0, 200), workflowId: workflow.id, workflowName: workflow.name }, 'Webhook trigger condition met') @@ -149,11 +160,11 @@ export function initWebhookEndpoint(config: Config, webhookPrefix = 'webhook'): path: `${normalizedPrefix}/:path` } - // Check if webhook endpoint already exists to avoid duplicates - const existingEndpoint = config.endpoints?.find(endpoint => + // Check if the webhook endpoint already exists to avoid duplicates + const existingEndpoint = 
config.endpoints?.find(endpoint => endpoint.path === webhookEndpoint.path && endpoint.method === webhookEndpoint.method ) - + if (!existingEndpoint) { // Combine existing endpoints with the webhook endpoint config.endpoints = [...(config.endpoints || []), webhookEndpoint] diff --git a/src/plugin/logger.ts b/src/plugin/logger.ts index c7fdfc3..8fc8144 100644 --- a/src/plugin/logger.ts +++ b/src/plugin/logger.ts @@ -1,25 +1,27 @@ import type { Payload } from 'payload' // Global logger instance - use Payload's logger type -let pluginLogger: Payload['logger'] | null = null +let pluginLogger: null | Payload['logger'] = null /** * Simple config-time logger for use during plugin configuration * Uses console with plugin prefix since Payload logger isn't available yet */ const configLogger = { - debug: (message: string, ...args: any[]) => { - if (process.env.NODE_ENV === 'development') { - console.log(`[payload-automation] ${message}`, ...args) - } - }, - error: (message: string, ...args: any[]) => { - console.error(`[payload-automation] ${message}`, ...args) - }, - info: (message: string, ...args: any[]) => { + debug: (message: string, ...args: T[]) => { + if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return} console.log(`[payload-automation] ${message}`, ...args) }, - warn: (message: string, ...args: any[]) => { + error: (message: string, ...args: T[]) => { + if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return} + console.error(`[payload-automation] ${message}`, ...args) + }, + info: (message: string, ...args: T[]) => { + if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return} + console.log(`[payload-automation] ${message}`, ...args) + }, + warn: (message: string, ...args: T[]) => { + if (!process.env.PAYLOAD_AUTOMATION_CONFIG_LOGGING) {return} console.warn(`[payload-automation] ${message}`, ...args) } } @@ -38,6 +40,7 @@ export function getConfigLogger() { export function initializeLogger(payload: Payload): Payload['logger'] { // Create a child logger with plugin identification pluginLogger = payload.logger.child({ + level: process.env.PAYLOAD_AUTOMATION_LOGGING || 'silent', plugin: '@xtr-dev/payload-automation' }) return pluginLogger diff --git a/src/steps/create-document-handler.ts b/src/steps/create-document-handler.ts index 08a5d95..59b636c 100644 --- a/src/steps/create-document-handler.ts +++ b/src/steps/create-document-handler.ts @@ -5,9 +5,9 @@ export const createDocumentHandler: TaskHandler<'create-document'> = async ({ in throw new Error('No input provided') } - const { collection, data, draft, locale } = input + const { collectionSlug, data, draft, locale } = input - if (!collection || typeof collection !== 'string') { + if (!collectionSlug || typeof collectionSlug !== 'string') { throw new Error('Collection slug is required') } @@ -19,7 +19,7 @@ export const createDocumentHandler: TaskHandler<'create-document'> = async ({ in const parsedData = typeof data === 'string' ? 
JSON.parse(data) : data const result = await req.payload.create({ - collection, + collection: collectionSlug, data: parsedData, draft: draft || false, locale: locale || undefined, diff --git a/src/steps/create-document.ts b/src/steps/create-document.ts index 0356983..aea3800 100644 --- a/src/steps/create-document.ts +++ b/src/steps/create-document.ts @@ -7,7 +7,7 @@ export const CreateDocumentStepTask = { handler: createDocumentHandler, inputSchema: [ { - name: 'collection', + name: 'collectionSlug', type: 'text', admin: { description: 'The collection slug to create a document in' diff --git a/src/steps/delete-document-handler.ts b/src/steps/delete-document-handler.ts index b8183e0..10aaea0 100644 --- a/src/steps/delete-document-handler.ts +++ b/src/steps/delete-document-handler.ts @@ -5,9 +5,9 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in throw new Error('No input provided') } - const { id, collection, where } = input + const { id, collectionSlug, where } = input - if (!collection || typeof collection !== 'string') { + if (!collectionSlug || typeof collectionSlug !== 'string') { throw new Error('Collection slug is required') } @@ -16,7 +16,7 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in if (id) { const result = await req.payload.delete({ id: id.toString(), - collection, + collection: collectionSlug, req }) @@ -38,7 +38,7 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in // First find the documents to delete const toDelete = await req.payload.find({ - collection, + collection: collectionSlug, limit: 1000, // Set a reasonable limit req, where: parsedWhere @@ -49,7 +49,7 @@ export const deleteDocumentHandler: TaskHandler<'delete-document'> = async ({ in for (const doc of toDelete.docs) { const result = await req.payload.delete({ id: doc.id, - collection, + collection: collectionSlug, req }) deleted.push(result) diff --git a/src/steps/delete-document.ts b/src/steps/delete-document.ts index 3b94b54..2b3c195 100644 --- a/src/steps/delete-document.ts +++ b/src/steps/delete-document.ts @@ -7,7 +7,7 @@ export const DeleteDocumentStepTask = { handler: deleteDocumentHandler, inputSchema: [ { - name: 'collection', + name: 'collectionSlug', type: 'text', admin: { description: 'The collection slug to delete from' diff --git a/src/steps/read-document-handler.ts b/src/steps/read-document-handler.ts index eafaed2..5c73e56 100644 --- a/src/steps/read-document-handler.ts +++ b/src/steps/read-document-handler.ts @@ -5,9 +5,9 @@ export const readDocumentHandler: TaskHandler<'read-document'> = async ({ input, throw new Error('No input provided') } - const { id, collection, depth, limit, locale, sort, where } = input + const { id, collectionSlug, depth, limit, locale, sort, where } = input - if (!collection || typeof collection !== 'string') { + if (!collectionSlug || typeof collectionSlug !== 'string') { throw new Error('Collection slug is required') } @@ -16,7 +16,7 @@ export const readDocumentHandler: TaskHandler<'read-document'> = async ({ input, if (id) { const result = await req.payload.findByID({ id: id.toString(), - collection, + collection: collectionSlug, depth: typeof depth === 'number' ? depth : undefined, locale: locale || undefined, req @@ -35,7 +35,7 @@ export const readDocumentHandler: TaskHandler<'read-document'> = async ({ input, const parsedWhere = where ? (typeof where === 'string' ? 
JSON.parse(where) : where) : {} const result = await req.payload.find({ - collection, + collection: collectionSlug, depth: typeof depth === 'number' ? depth : undefined, limit: typeof limit === 'number' ? limit : 10, locale: locale || undefined, diff --git a/src/steps/read-document.ts b/src/steps/read-document.ts index 678430d..d2ad185 100644 --- a/src/steps/read-document.ts +++ b/src/steps/read-document.ts @@ -7,7 +7,7 @@ export const ReadDocumentStepTask = { handler: readDocumentHandler, inputSchema: [ { - name: 'collection', + name: 'collectionSlug', type: 'text', admin: { description: 'The collection slug to read from' diff --git a/src/steps/update-document-handler.ts b/src/steps/update-document-handler.ts index d145fd1..5e7a5b3 100644 --- a/src/steps/update-document-handler.ts +++ b/src/steps/update-document-handler.ts @@ -5,9 +5,9 @@ export const updateDocumentHandler: TaskHandler<'update-document'> = async ({ in throw new Error('No input provided') } - const { id, collection, data, draft, locale } = input + const { id, collectionSlug, data, draft, locale } = input - if (!collection || typeof collection !== 'string') { + if (!collectionSlug || typeof collectionSlug !== 'string') { throw new Error('Collection slug is required') } @@ -24,7 +24,7 @@ export const updateDocumentHandler: TaskHandler<'update-document'> = async ({ in const result = await req.payload.update({ id: id.toString(), - collection, + collection: collectionSlug, data: parsedData, draft: draft || false, locale: locale || undefined, diff --git a/src/steps/update-document.ts b/src/steps/update-document.ts index 1e3a292..1c6de02 100644 --- a/src/steps/update-document.ts +++ b/src/steps/update-document.ts @@ -7,7 +7,7 @@ export const UpdateDocumentStepTask = { handler: updateDocumentHandler, inputSchema: [ { - name: 'collection', + name: 'collectionSlug', type: 'text', admin: { description: 'The collection slug to update a document in'
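
Note on the condition semantics that the new debug logging surfaces: evaluateCondition() runs the trigger or step condition as a JSONPath expression against the execution context and coerces the result to a boolean (array results count as true only when non-empty with a truthy first element). A minimal standalone sketch of that behaviour, assuming the jsonpath-plus package implied by the JSONPath({ json, path, wrap: false }) call signature and an illustrative context shape:

    // Sketch only: mirrors the boolean coercion in evaluateCondition(); the
    // context literal below is illustrative, not the plugin's exact type.
    import { JSONPath } from 'jsonpath-plus'

    const context = {
      steps: {},
      trigger: {
        type: 'collection-trigger',
        operation: 'update',
        doc: { id: 'post-1', status: 'published' }
      }
    }

    function evaluate(condition: string): boolean {
      const result = JSONPath({ json: context, path: condition, wrap: false })
      return Array.isArray(result)
        ? result.length > 0 && Boolean(result[0])
        : Boolean(result)
    }

    console.log(evaluate('$.trigger.doc.status'))   // true  -> 'published' is truthy
    console.log(evaluate('$.trigger.doc.archived')) // false -> unmatched path resolves to undefined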
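
The cron changes keep the minute-scanning approach of getNextCronTime(). A condensed sketch of that idea, useful for sanity-checking expressions such as */15 * * * *; it reuses the matching rules of matchesCronPart() but deliberately omits the node-cron task, the incrementTimeForCronPattern() fast-forwarding, and timezone handling:

    // Sketch only: walk forward one minute at a time (server-local time) and
    // return the first Date whose fields satisfy every part of the 5-part expression.
    function matchesPart(value: number, part: string): boolean {
      if (part === '*') { return true }
      if (part.includes('/')) {
        const [range, step] = part.split('/')
        if (range === '*') { return value % parseInt(step, 10) === 0 }
      }
      if (part.includes('-')) {
        const [start, end] = part.split('-').map(n => parseInt(n, 10))
        return value >= start && value <= end
      }
      if (part.includes(',')) {
        return part.split(',').map(n => parseInt(n, 10)).includes(value)
      }
      return value === parseInt(part, 10)
    }

    function nextRun(cronExpression: string, from = new Date()): Date {
      const parts = cronExpression.trim().split(/\s+/)
      if (parts.length !== 5) { throw new Error(`Invalid cron format: ${cronExpression}. Expected 5 parts.`) }
      const [minute, hour, dayOfMonth, month, weekday] = parts
      const t = new Date(from.getTime() + 60_000)
      t.setSeconds(0, 0)
      for (let i = 0; i < 2 * 365 * 24 * 60; i++) {
        if (matchesPart(t.getMinutes(), minute) &&
            matchesPart(t.getHours(), hour) &&
            matchesPart(t.getDate(), dayOfMonth) &&
            matchesPart(t.getMonth() + 1, month) &&
            matchesPart(t.getDay(), weekday)) {
          return new Date(t)
        }
        t.setTime(t.getTime() + 60_000)
      }
      throw new Error(`Could not calculate next execution time for ${cronExpression}`)
    }

    console.log(nextRun('*/15 * * * *').toISOString()) // next quarter-hour boundary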
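
For reference, a workflow document that exercises the renamed fields might look like the sketch below. Only the pieces visible in this diff are taken as given (trigger type, collectionSlug, cronExpression, timezone, condition, and the step inputs collectionSlug/data, with $-prefixed strings resolved as JSONPath); the workflow name and the step wrapper fields (name, task, input) are assumed for illustration and are not confirmed by this change:

    // Hypothetical workflow document; fields marked "assumed" are guesses about the schema.
    const publishAuditWorkflow = {
      name: 'Publish audit trail',                  // assumed field
      triggers: [
        {
          type: 'collection-trigger',
          collectionSlug: 'posts',                  // renamed from `collection` in this PR
          condition: '$.trigger.doc.status'         // truthy JSONPath result => workflow runs
        },
        {
          type: 'cron-trigger',
          cronExpression: '0 3 * * *',              // 5-part expression, validated by the scheduler
          timezone: 'Europe/Amsterdam'
        }
      ],
      steps: [
        {
          name: 'write-audit-entry',                // assumed step shape
          task: 'create-document',                  // assumed reference to the create-document task
          input: {
            collectionSlug: 'audit-log',            // renamed step input field
            data: '$.trigger.doc'                   // $-prefixed strings resolve via resolveStepInput()
          }
        }
      ]
    }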