diff --git a/.eslintignore b/.eslintignore
index a01da97f19..4b80d2afbe 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,5 +1,6 @@
 html.js
 **/lib
+packages/bodiless-backend/bin
 packages/gatsby-theme-bodiless/dist
 packages/gatsby-theme-bodiless/cjs
 packages/gatsby-theme-bodiless/index.js
diff --git a/.github/workflows/build-test-lint-simple.yml b/.github/workflows/build-test-lint-simple.yml
index bf5d01a3fa..d22779ffd9 100644
--- a/.github/workflows/build-test-lint-simple.yml
+++ b/.github/workflows/build-test-lint-simple.yml
@@ -3,10 +3,7 @@ env:
   CI: true
   FORCE_COLOR: 1
   NODE_OPTIONS: --max_old_space_size=4096
-on:
-  pull_request:
-    branches:
-      - main
+on: workflow_dispatch
 jobs:
   setup:
     name: Setup, Build, Lint and Test
diff --git a/.vscode/launch.json b/.vscode/launch.json
index dea589bdff..6c2923351a 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -2,6 +2,8 @@
   // Use IntelliSense to learn about possible attributes.
   // Hover to view descriptions of existing attributes.
   // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+  // Ref:
+  // https://code.visualstudio.com/docs/editor/variables-reference
   "version": "0.2.0",
   "compounds": [
     // Launches the Gatsby stack and opens Chrome.
@@ -103,7 +105,7 @@
       "sourceMaps": false
     },
     {
-      "name": "Jest Current File",
+      "name": "Jest Current File (Windows)",
       "type": "node",
       "request": "launch",
       "program": "${workspaceFolder}/node_modules/.bin/jest",
@@ -116,6 +118,19 @@
         "program": "${workspaceFolder}/node_modules/jest/bin/jest",
       }
     },
+    {
+      "name": "Jest Current File (Mac/Linux)",
+      "type": "node",
+      "request": "launch",
+      "program": "${workspaceFolder}/node_modules/.bin/jest",
+      "args": [
+        "${relativeFile}"
+      ],
+      "console": "integratedTerminal",
+      "internalConsoleOptions": "neverOpen",
+      // Update the node version below if v16.13.0 is not available locally.
+ "runtimeExecutable": "${userHome}/.nvm/versions/node/v16.13.0/bin/node", + }, { "name": "Launch Chrome", "type": "chrome", diff --git a/package-lock.json b/package-lock.json index 68066523c9..cb92cf8ce5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -22,6 +22,9 @@ "@types/cheerio": "^0.22.22", "@types/common-tags": "^1.8.0", "@types/copyfiles": "^2.1.1", + "@types/dotenv": "^8.2.0", + "@types/express": "^4.17.17", + "@types/formidable": "^1.2.5", "@types/fs-extra": "^8.0.0", "@types/glob": "^7.1.1", "@types/html2canvas": "0.0.35", @@ -29,14 +32,17 @@ "@types/lodash.flow": "^3.5.6", "@types/lunr": "^2.3.3", "@types/mime": "^2.0.1", + "@types/morgan": "^1.9.4", "@types/node": "^16", "@types/react-burger-menu": "^2.6.0", "@types/react-helmet": "^6.1.0", "@types/react-tag-autocomplete": "^6.0.0", + "@types/rimraf": "^3.0.0", "@types/semver": "5.5.0", "@types/supertest": "^2.0.8", "@types/tailwindcss": "^3.0.10", "@types/tar": "^6.1.1", + "@types/tmp": "^0.2.3", "@types/walk": "^2.3.0", "@types/webpack": "^5.0.0", "arg": "^4.1.1", @@ -108,6 +114,7 @@ "process": "^0.11.10", "prop-types": "^15.7.2", "pure-react-carousel": "^1.27.6", + "qs": "^6.11.0", "query-string": "^6.13.2", "rc-tooltip": "^5.1.1", "re-resizable": "^4.11.0", @@ -156,7 +163,7 @@ "@babel/plugin-proposal-decorators": "^7.4.0", "@babel/plugin-transform-modules-commonjs": "7.5.0", "@babel/runtime": "^7.4.2", - "@playwright/test": "1.22", + "@playwright/test": "^1.30.0", "@types/enzyme": "^3.9.1", "@types/jest": "^24.0.18", "@types/lodash": "^4.14.158", @@ -11013,12 +11020,13 @@ } }, "node_modules/@playwright/test": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.22.1.tgz", - "integrity": "sha512-8ouMBUboYslHom41W8bnSEn0TwlAMHhCACwOZeuiAgzukj7KobpZ+UBwrGE0jJ0UblJbKAQNRHXL+z7sDSkb6g==", + "version": "1.30.0", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.30.0.tgz", + "integrity": "sha512-SVxkQw1xvn/Wk/EvBnqWIq6NLo1AppwbYOjNLmyU0R1RoQ3rLEBtmjTnElcnz8VEtn11fptj1ECxK0tgURhajw==", "dev": true, "dependencies": { - "playwright-core": "1.22.1" + "@types/node": "*", + "playwright-core": "1.30.0" }, "bin": { "playwright": "cli.js" @@ -11448,6 +11456,15 @@ "@babel/types": "^7.3.0" } }, + "node_modules/@types/body-parser": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, "node_modules/@types/cacheable-request": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", @@ -11482,6 +11499,14 @@ "resolved": "https://registry.npmjs.org/@types/configstore/-/configstore-2.1.1.tgz", "integrity": "sha512-YY+hm3afkDHeSM2rsFXxeZtu0garnusBWNG1+7MknmDWQHqcH2w21/xOU9arJUi8ch4qyFklidANLCu3ihhVwQ==" }, + "node_modules/@types/connect": { + "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/cookie": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz", @@ -11507,6 +11532,15 @@ "resolved": "https://registry.npmjs.org/@types/debug/-/debug-0.0.30.tgz", "integrity": 
"sha512-orGL5LXERPYsLov6CWs3Fh6203+dXzJkR7OnddIr2514Hsecwc8xRpzCapshBbKFImCsvS/mk6+FWiN5LyZJAQ==" }, + "node_modules/@types/dotenv": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@types/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-ylSC9GhfRH7m1EUXBXofhgx4lUWmFeQDINW5oLuS+gxWdfUeW4zJdeVTYVkexEW+e2VUvlZR2kGnGGipAWR7kw==", + "deprecated": "This is a stub types definition. dotenv provides its own type definitions, so you do not need this installed.", + "dependencies": { + "dotenv": "*" + } + }, "node_modules/@types/enzyme": { "version": "3.10.12", "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.10.12.tgz", @@ -11540,6 +11574,35 @@ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==" }, + "node_modules/@types/express": { + "version": "4.17.17", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz", + "integrity": "sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.17.33", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.33.tgz", + "integrity": "sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*" + } + }, + "node_modules/@types/formidable": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@types/formidable/-/formidable-1.2.5.tgz", + "integrity": "sha512-zu3mQJa4hDNubEMViSj937602XdDGzK7Q5pJ5QmLUbNxclbo9tZGt5jtwM352ssZ+pqo5V4H14TBvT/ALqQQcA==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/fs-extra": { "version": "8.1.2", "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.2.tgz", @@ -11741,6 +11804,14 @@ "@types/node": "*" } }, + "node_modules/@types/morgan": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.4.tgz", + "integrity": "sha512-cXoc4k+6+YAllH3ZHmx4hf7La1dzUk6keTR4bF4b4Sc0mZxU/zK4wO7l+ZzezXm/jkYj/qC+uYGZrarZdIVvyQ==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/ms": { "version": "0.7.31", "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz", @@ -11799,6 +11870,16 @@ "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.5.tgz", "integrity": "sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ==" }, + "node_modules/@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" + }, + "node_modules/@types/range-parser": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" + }, "node_modules/@types/rc-tooltip": { "version": "3.7.6", "resolved": "https://registry.npmjs.org/@types/rc-tooltip/-/rc-tooltip-3.7.6.tgz", @@ -11880,9 +11961,9 @@ } }, "node_modules/@types/rimraf": { - "version": "2.0.5", - "resolved": 
"https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", - "integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==", "dependencies": { "@types/glob": "*", "@types/node": "*" @@ -11906,6 +11987,15 @@ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, + "node_modules/@types/serve-static": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", + "dependencies": { + "@types/mime": "*", + "@types/node": "*" + } + }, "node_modules/@types/sharp": { "version": "0.30.2", "resolved": "https://registry.npmjs.org/@types/sharp/-/sharp-0.30.2.tgz", @@ -11994,9 +12084,9 @@ } }, "node_modules/@types/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-dDZH/tXzwjutnuk4UacGgFRwV+JSLaXL1ikvidfJprkb7L9Nx1njcRHHmi3Dsvt7pgqqTEeucQuOrWHPFgzVHA==" }, "node_modules/@types/uglify-js": { "version": "3.13.2", @@ -14080,6 +14170,20 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -17817,6 +17921,20 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==" }, + "node_modules/devcert/node_modules/@types/rimraf": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", + "integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "dependencies": { + "@types/glob": "*", + "@types/node": "*" + } + }, + "node_modules/devcert/node_modules/@types/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + }, "node_modules/devcert/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", @@ -20249,26 +20367,6 @@ } ] }, - "node_modules/express/node_modules/body-parser": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.2.tgz", - "integrity": 
"sha512-SAAwOxgoCKMGs9uUAUFHygfLAyaniaoun6I8mFY9pRAJL9+Kec34aU+oIjDhTycub1jozEfEwx1W1IuOYxVSFw==", - "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "~1.1.2", - "http-errors": "1.8.1", - "iconv-lite": "0.4.24", - "on-finished": "~2.3.0", - "qs": "6.9.7", - "raw-body": "2.4.3", - "type-is": "~1.6.18" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/express/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -20299,6 +20397,20 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, + "node_modules/express/node_modules/qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/express/node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -37389,9 +37501,9 @@ "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==" }, "node_modules/playwright-core": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.22.1.tgz", - "integrity": "sha512-H+ZUVYnceWNXrRf3oxTEKAr81QzFsCKu5Fp//fEjQvqgKkfA1iX3E9DBrPJpPNOrgVzcE+IqeI0fDmYJe6Ynnw==", + "version": "1.30.0", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.30.0.tgz", + "integrity": "sha512-7AnRmTCf+GVYhHbLJsGUtskWTE33SwMZkybJ0v6rqR1boxq2x36U7p1vDRV7HO2IwTZgmycracLxPEJI49wu4g==", "dev": true, "bin": { "playwright": "cli.js" @@ -39043,9 +39155,9 @@ } }, "node_modules/qs": { - "version": "6.10.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", - "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", "dependencies": { "side-channel": "^1.0.4" }, @@ -41481,66 +41593,6 @@ "node": ">= 0.8.0" } }, - "node_modules/serve-static/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/serve-static/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - }, - "node_modules/serve-static/node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/serve-static/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" - }, - 
"node_modules/serve-static/node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", - "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/serve-static/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "engines": { - "node": ">= 0.8" - } - }, "node_modules/session-file-store": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/session-file-store/-/session-file-store-1.5.0.tgz", @@ -56113,12 +56165,13 @@ } }, "@playwright/test": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.22.1.tgz", - "integrity": "sha512-8ouMBUboYslHom41W8bnSEn0TwlAMHhCACwOZeuiAgzukj7KobpZ+UBwrGE0jJ0UblJbKAQNRHXL+z7sDSkb6g==", + "version": "1.30.0", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.30.0.tgz", + "integrity": "sha512-SVxkQw1xvn/Wk/EvBnqWIq6NLo1AppwbYOjNLmyU0R1RoQ3rLEBtmjTnElcnz8VEtn11fptj1ECxK0tgURhajw==", "dev": true, "requires": { - "playwright-core": "1.22.1" + "@types/node": "*", + "playwright-core": "1.30.0" } }, "@root/walk": { @@ -56480,6 +56533,15 @@ "@babel/types": "^7.3.0" } }, + "@types/body-parser": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", + "requires": { + "@types/connect": "*", + "@types/node": "*" + } + }, "@types/cacheable-request": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", @@ -56514,6 +56576,14 @@ "resolved": "https://registry.npmjs.org/@types/configstore/-/configstore-2.1.1.tgz", "integrity": "sha512-YY+hm3afkDHeSM2rsFXxeZtu0garnusBWNG1+7MknmDWQHqcH2w21/xOU9arJUi8ch4qyFklidANLCu3ihhVwQ==" }, + "@types/connect": { + "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", + "requires": { + "@types/node": "*" + } + }, "@types/cookie": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz", @@ -56539,6 +56609,14 @@ "resolved": "https://registry.npmjs.org/@types/debug/-/debug-0.0.30.tgz", "integrity": "sha512-orGL5LXERPYsLov6CWs3Fh6203+dXzJkR7OnddIr2514Hsecwc8xRpzCapshBbKFImCsvS/mk6+FWiN5LyZJAQ==" }, + "@types/dotenv": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@types/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-ylSC9GhfRH7m1EUXBXofhgx4lUWmFeQDINW5oLuS+gxWdfUeW4zJdeVTYVkexEW+e2VUvlZR2kGnGGipAWR7kw==", + "requires": { + "dotenv": "*" + } + }, "@types/enzyme": { "version": "3.10.12", "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.10.12.tgz", @@ -56572,6 +56650,35 @@ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", "integrity": 
"sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==" }, + "@types/express": { + "version": "4.17.17", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz", + "integrity": "sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==", + "requires": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "@types/express-serve-static-core": { + "version": "4.17.33", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.33.tgz", + "integrity": "sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==", + "requires": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*" + } + }, + "@types/formidable": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@types/formidable/-/formidable-1.2.5.tgz", + "integrity": "sha512-zu3mQJa4hDNubEMViSj937602XdDGzK7Q5pJ5QmLUbNxclbo9tZGt5jtwM352ssZ+pqo5V4H14TBvT/ALqQQcA==", + "requires": { + "@types/node": "*" + } + }, "@types/fs-extra": { "version": "8.1.2", "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.2.tgz", @@ -56773,6 +56880,14 @@ "@types/node": "*" } }, + "@types/morgan": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.4.tgz", + "integrity": "sha512-cXoc4k+6+YAllH3ZHmx4hf7La1dzUk6keTR4bF4b4Sc0mZxU/zK4wO7l+ZzezXm/jkYj/qC+uYGZrarZdIVvyQ==", + "requires": { + "@types/node": "*" + } + }, "@types/ms": { "version": "0.7.31", "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz", @@ -56830,6 +56945,16 @@ "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.5.tgz", "integrity": "sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ==" }, + "@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" + }, + "@types/range-parser": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" + }, "@types/rc-tooltip": { "version": "3.7.6", "resolved": "https://registry.npmjs.org/@types/rc-tooltip/-/rc-tooltip-3.7.6.tgz", @@ -56913,9 +57038,9 @@ } }, "@types/rimraf": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", - "integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==", "requires": { "@types/glob": "*", "@types/node": "*" @@ -56939,6 +57064,15 @@ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, + "@types/serve-static": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", + "requires": { 
+ "@types/mime": "*", + "@types/node": "*" + } + }, "@types/sharp": { "version": "0.30.2", "resolved": "https://registry.npmjs.org/@types/sharp/-/sharp-0.30.2.tgz", @@ -57027,9 +57161,9 @@ } }, "@types/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-dDZH/tXzwjutnuk4UacGgFRwV+JSLaXL1ikvidfJprkb7L9Nx1njcRHHmi3Dsvt7pgqqTEeucQuOrWHPFgzVHA==" }, "@types/uglify-js": { "version": "3.13.2", @@ -58640,6 +58774,14 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "requires": { + "side-channel": "^1.0.4" + } } } }, @@ -61551,6 +61693,20 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==" }, + "@types/rimraf": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", + "integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "requires": { + "@types/glob": "*", + "@types/node": "*" + } + }, + "@types/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + }, "debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", @@ -63372,22 +63528,6 @@ "vary": "~1.1.2" }, "dependencies": { - "body-parser": { - "version": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.2.tgz", - "integrity": "sha512-SAAwOxgoCKMGs9uUAUFHygfLAyaniaoun6I8mFY9pRAJL9+Kec34aU+oIjDhTycub1jozEfEwx1W1IuOYxVSFw==", - "requires": { - "bytes": "3.1.2", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "~1.1.2", - "http-errors": "1.8.1", - "iconv-lite": "0.4.24", - "on-finished": "~2.3.0", - "qs": "6.9.7", - "raw-body": "2.4.3", - "type-is": "~1.6.18" - } - }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -63415,6 +63555,14 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, + "qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "requires": { + "side-channel": "^1.0.4" + } + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -76300,9 +76448,9 @@ "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==" }, "playwright-core": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.22.1.tgz", - "integrity": "sha512-H+ZUVYnceWNXrRf3oxTEKAr81QzFsCKu5Fp//fEjQvqgKkfA1iX3E9DBrPJpPNOrgVzcE+IqeI0fDmYJe6Ynnw==", + "version": "1.30.0", + "resolved": 
"https://registry.npmjs.org/playwright-core/-/playwright-core-1.30.0.tgz", + "integrity": "sha512-7AnRmTCf+GVYhHbLJsGUtskWTE33SwMZkybJ0v6rqR1boxq2x36U7p1vDRV7HO2IwTZgmycracLxPEJI49wu4g==", "dev": true }, "please-upgrade-node": { @@ -77422,9 +77570,9 @@ } }, "qs": { - "version": "6.10.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", - "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", "requires": { "side-channel": "^1.0.4" } @@ -79267,58 +79415,6 @@ "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.18.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - }, - "dependencies": { - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - } - } - }, - "mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" - }, - "ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" - }, - "send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", - "requires": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" - } - }, - "statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" - } } }, "session-file-store": { diff --git a/package.json b/package.json index dd2a1cb687..766b636507 100644 --- a/package.json +++ b/package.json @@ -81,7 +81,7 @@ "@babel/plugin-proposal-decorators": "^7.4.0", "@babel/plugin-transform-modules-commonjs": "7.5.0", "@babel/runtime": "^7.4.2", - "@playwright/test": "1.22", + "@playwright/test": "^1.30.0", "@types/enzyme": "^3.9.1", "@types/jest": "^24.0.18", "@types/lodash": "^4.14.158", diff --git a/packages/bodiless-backend/.gitignore b/packages/bodiless-backend/.gitignore index 4c49bd78f1..382d377bdf 100644 --- a/packages/bodiless-backend/.gitignore +++ b/packages/bodiless-backend/.gitignore @@ -1 +1,2 @@ .env +lib diff --git a/packages/bodiless-backend/__tests__/clonePage.test.ts b/packages/bodiless-backend/__tests__/clonePage.test.ts index 458d636245..7451b09e3a 100644 --- a/packages/bodiless-backend/__tests__/clonePage.test.ts +++ b/packages/bodiless-backend/__tests__/clonePage.test.ts @@ -13,25 +13,31 @@ */ import request from 'supertest'; +import type { Express } from 'express'; +import Backend from '../src/backend'; const origin = '/from'; 
 const destination = '/to';
 const backendPrefix = '/prefix';
 const backendFilePath = '/files';

+process.env.GATSBY_BACKEND_PREFIX = backendPrefix;
+process.env.BODILESS_BACKEND_DATA_PAGE_PATH = backendFilePath;
+
 const mockPageClone = jest.fn();

 jest.mock('../src/page', () => (pagePath: any) => ({
   path: pagePath,
-  copyDirectory: pagePath === destination
-    ? mockPageClone.mockResolvedValue(true)
-    : mockPageClone.mockRejectedValue(false),
+  copyDirectory: (
+    (pagePath === destination)
+      ? mockPageClone.mockResolvedValue({data: true})
+      : mockPageClone.mockRejectedValue({data: false})
+  ),
   setBasePath: () => true,
 }));

-const getApp = () => {
-  // eslint-disable-next-line global-require
-  const Backend = require('../src/backend');
+// get Express app
+const getApp = (): Express => {
   const backend = new Backend();
   return backend.getApp();
 };
@@ -41,20 +47,24 @@ describe('Clone page endpoint', () => {
   // clearing mocks
   beforeEach(() => {
     jest.resetModules();
-    process.env.GATSBY_BACKEND_PREFIX = backendPrefix;
-    process.env.BODILESS_BACKEND_DATA_PAGE_PATH = backendFilePath;
     mockPageClone.mockReset();
   });

-  const performRequest = (app$: any, data: any) => request(app$)
+  const performCloneRequest = (
+    app$: Express, data?: (string | object)
+  ) => request(app$)
     .post(`${backendPrefix}/clone`)
-    .send(data);
+    .send(data)
+    .on('error', (err: any) => {
+      // Some errors might be expected for testing.
+      console.log(err.error);
+    });

-  describe('when the page is cloned succefully', () => {
-    const data = { origin, destination };
-    it('cloned page should be writen to file system', async () => {
+  describe('when the page is cloned successfully', () => {
+    const data = { origin, destination };
+    it('cloned page should be written to file system', async () => {
       const app = getApp();
-      await performRequest(app, data);
+      await performCloneRequest(app, data);
       expect(mockPageClone).toHaveBeenCalledTimes(1);
       const resolved = await mockPageClone.mock.instances[0];
       expect(resolved.path).toBe(destination);
@@ -62,14 +72,14 @@ describe('Clone page endpoint', () => {
     it('should get the correct parameters', async () => {
       const app = getApp();
-      await performRequest(app, data);
+      await performCloneRequest(app, data);
       expect(mockPageClone.mock.calls[0][0]).toBe(origin);
       expect(mockPageClone.mock.calls[0][1]).toBe(destination);
     });

     it('should respond with 200 status', async () => {
       const app = getApp();
-      const result = await performRequest(app, data);
+      const result = await performCloneRequest(app, data);
       expect(result.status).toEqual(200);
     });
   });
@@ -78,7 +88,7 @@ describe('Clone page endpoint', () => {
     const data = { origin, destination: '/page/error' };
     it('should respond with 500 status', async () => {
       const app = getApp();
-      const result = await performRequest(app, data);
+      const result = await performCloneRequest(app, data);
       expect(result.status).toEqual(500);
     });
   });
diff --git a/packages/bodiless-backend/__tests__/createPage.test.ts b/packages/bodiless-backend/__tests__/createPage.test.ts
index 54724f35c4..36a6cc6724 100644
--- a/packages/bodiless-backend/__tests__/createPage.test.ts
+++ b/packages/bodiless-backend/__tests__/createPage.test.ts
@@ -14,6 +14,7 @@
 import path from 'path';
 import request from 'supertest';
+import Backend from '../src/backend';

 const backendPrefix = '/prefix';
 const backendFilePath = '/files';

@@ -27,8 +28,6 @@ jest.mock('../src/page', () => (pagePath: any) => ({
 }));

 const getApp = () => {
-  // eslint-disable-next-line global-require
-  const Backend = require('../src/backend');
   const backend = new Backend();
   return backend.getApp();
 };
@@ -43,7 +42,7 @@ describe('Create page endpoint', () => {
     mockPageWrite.mockReset();
   });

-  describe('when the page is created succefully', () => {
+  describe('when the page is created successfully', () => {
     const page = 'products';
     const template = '_default';
     const performRequest = (app$: any) => request(app$)
@@ -53,7 +52,7 @@ describe('Create page endpoint', () => {
         template,
       });
     describe('index.json containing template', () => {
-      it('should be writen to file system', async () => {
+      it('should be written to file system', async () => {
        const app = getApp();
        await performRequest(app);
        expect(mockPageWrite).toHaveBeenCalledTimes(1);
diff --git a/packages/bodiless-backend/__tests__/deleteContent.test.ts b/packages/bodiless-backend/__tests__/deleteContent.test.ts
index 70187c56a2..27fe3b1ec8 100644
--- a/packages/bodiless-backend/__tests__/deleteContent.test.ts
+++ b/packages/bodiless-backend/__tests__/deleteContent.test.ts
@@ -13,21 +13,26 @@
  */

 import request from 'supertest';
+import Backend from '../src/backend';

 const backendPrefix = '/prefix';
 const backendFilePath = '/files';

 const mockPageDelete = jest.fn();
-const mockPage = jest.fn().mockImplementation(() => ({
-  delete: mockPageDelete.mockResolvedValue(true),
-}));
+const mockPage = jest.fn();

-jest.mock('../src/page', () => mockPage);
+jest.mock('../src/page', () => (
+  function Page(pagePath: string) {
+    mockPage(pagePath);
+    return {
+      file: `${pagePath}.json`,
+      path: pagePath,
+      delete: mockPageDelete.mockResolvedValue('ok'),
+    };
+  }));
 jest.mock('../src/logger');

 const getApp = () => {
-  // eslint-disable-next-line global-require
-  const Backend = require('../src/backend');
   const backend = new Backend();
   return backend.getApp();
 };
@@ -46,10 +51,16 @@ describe('delete content endpoint', () => {
     .delete(`${backendPrefix}/content/${filePath}`);

   it('should invoke local file deletion', async () => {
+    jest.setTimeout(30000);
     const app = getApp();
     const filePath = 'test';
-    await performRequest(app, filePath);
-    expect(mockPage.mock.calls[0][0]).toBe(filePath);
-    expect(mockPageDelete).toHaveBeenCalledTimes(1);
+    try {
+      await performRequest(app, filePath);
+      expect(mockPage.mock.calls[0][0]).toBe(filePath);
+      expect(mockPageDelete).toHaveBeenCalledTimes(1);
+    } catch (error: any) {
+      console.log('error:', error);
+      throw new Error(error);
+    }
   });
 });
diff --git a/packages/bodiless-backend/__tests__/getChanges.test.ts b/packages/bodiless-backend/__tests__/getChanges.test.ts
index 2b76d8b244..20a86851bc 100644
--- a/packages/bodiless-backend/__tests__/getChanges.test.ts
+++ b/packages/bodiless-backend/__tests__/getChanges.test.ts
@@ -12,10 +12,11 @@
  * limitations under the License.
  */

+import Git from '../src/tools/git';
+import GitCmd from '../src/gitCmd';
 import { cloneGitFixture, cleanGitFixture } from './tools';

-const { getChanges } = require('../src/git');
-const GitCmd = require('../src/GitCmd');
+const { getChanges } = Git;

 describe('getChanges', () => {
   beforeEach(cloneGitFixture('get-changes', 'test-upstream-changes'));
@@ -32,12 +33,16 @@

   it('lists no changes when there is no upstream branch', async () => {
     jest.setTimeout(30000);
-    await GitCmd.cmd().add('reset', '--hard', 'test-upstream-changes-local').exec();
-    await GitCmd.cmd().add('checkout', '-b', 'foo').exec();
-    const result = await getChanges();
-    expect(result.upstream.branch).toBeNull();
-    expect(result.upstream.commits).toHaveLength(0);
-    expect(result.upstream.files).toHaveLength(0);
+    try {
+      await GitCmd.cmd().add('reset', '--hard', 'test-upstream-changes-local').exec();
+      await GitCmd.cmd().add('checkout', '-b', 'foo').exec();
+      const result = await getChanges();
+      expect(result.upstream.branch).toBeNull();
+      expect(result.upstream.commits).toHaveLength(0);
+      expect(result.upstream.files).toHaveLength(0);
+    } catch (error) {
+      throw new Error(`${error}`);
+    }
   });

   it('lists upstream changes when they exist', async () => {
diff --git a/packages/bodiless-backend/__tests__/getConflict.test.ts b/packages/bodiless-backend/__tests__/getConflict.test.ts
index a0566ffd5f..6f5d0f2580 100644
--- a/packages/bodiless-backend/__tests__/getConflict.test.ts
+++ b/packages/bodiless-backend/__tests__/getConflict.test.ts
@@ -12,10 +12,11 @@
  * limitations under the License.
  */

+import Git from '../src/tools/git';
+import GitCmd from '../src/gitCmd';
 import { cloneGitFixture, cleanGitFixture } from './tools';

-const { getConflicts, getUpstreamTrackingBranch } = require('../src/git');
-const GitCmd = require('../src/GitCmd');
+const { getConflicts, getUpstreamTrackingBranch } = Git;

 describe('getConflicts', () => {
   beforeEach(cloneGitFixture('get-conflicts', 'feat/foo-test-1'));
diff --git a/packages/bodiless-backend/__tests__/tools.ts b/packages/bodiless-backend/__tests__/tools.ts
index 8796eb884b..f5e12029da 100644
--- a/packages/bodiless-backend/__tests__/tools.ts
+++ b/packages/bodiless-backend/__tests__/tools.ts
@@ -13,19 +13,15 @@
  */

 import path from 'path';
+import rimraf from 'rimraf';
 import { mkdirSync } from 'fs';
-import GitCmd from '../src/GitCmd';
-
-// eslint-disable-next-line import/no-extraneous-dependencies
-const rimraf = require('rimraf');
+import GitCmd from '../src/gitCmd';

 const originalCwd = process.cwd();

-export const resolveRelativeToMe = (...segments: string[]) => {
-  const scriptName = path.basename(__filename);
-  const scriptPath = require.resolve(`./${scriptName}`);
-  return path.resolve(path.dirname(scriptPath), ...segments);
-};
+export const resolveRelativeToMe = (...segments: string[]) => (
+  path.resolve(path.dirname(__filename), ...segments)
+);

 export const cloneGitFixture = (repo: string, branch: string) => async () => {
   const tmp = resolveRelativeToMe(`tmp-${repo}`);
diff --git a/packages/bodiless-backend/bin/bodiless-backend b/packages/bodiless-backend/bin/bodiless-backend
index b2572d6630..35fef5ede5 100755
--- a/packages/bodiless-backend/bin/bodiless-backend
+++ b/packages/bodiless-backend/bin/bodiless-backend
@@ -1,3 +1,3 @@
 #!/usr/bin/env node

-require('../src/server.js');
+require('../lib/server.js');
diff --git a/packages/bodiless-backend/dotenv.d.ts b/packages/bodiless-backend/dotenv.d.ts
new file mode 100644
index 0000000000..0113d06fa4
--- /dev/null
+++ b/packages/bodiless-backend/dotenv.d.ts
@@ -0,0 +1,62 @@
+// TypeScript Version: 3.0
+/// <reference types="node" />
+
+export interface DotenvParseOptions {
+  /**
+   * You may turn on logging to help debug why certain keys or values
+   * are not being set as you expect.
+   */
+  debug?: boolean;
+}
+
+export interface DotenvParseOutput {
+  [name: string]: string;
+}
+
+/**
+ * Parses a string or buffer in the .env file format into an object.
+ *
+ * @param src - contents to be parsed
+ * @param options - additional options
+ * @returns an object with keys and values based on `src`
+ */
+export function parse(
+  src: string | Buffer,
+  options?: DotenvParseOptions
+): DotenvParseOutput;
+
+export interface DotenvConfigOptions {
+  /**
+   * You may specify a custom path if your file containing environment
+   * variables is located elsewhere.
+   */
+  path?: string;
+
+  /**
+   * You may specify the encoding of your file containing environment variables.
+   */
+  encoding?: string;
+
+  /**
+   * You may turn on logging to help debug why certain keys or values are
+   * not being set as you expect.
+   */
+  debug?: boolean;
+}
+
+export interface DotenvConfigOutput {
+  error?: Error;
+  parsed?: DotenvParseOutput;
+}
+
+/**
+ * Loads `.env` file contents into {@link https://nodejs.org/api/process.html#process_process_env `process.env`}.
+ * Example: 'KEY=value' becomes { parsed: { KEY: 'value' } }
+ *
+ * @param options - controls behavior
+ * @returns an object with a `parsed` key if successful or `error` key if an error occurred
+ *
+ */
+export function config(options?: DotenvConfigOptions): DotenvConfigOutput;
+/** @deprecated since v7.0.0 Use config instead. */
+export const load: typeof config;
diff --git a/packages/bodiless-backend/package.json b/packages/bodiless-backend/package.json
index e658ae34b4..ce49e06838 100644
--- a/packages/bodiless-backend/package.json
+++ b/packages/bodiless-backend/package.json
@@ -6,11 +6,17 @@
   "files": [
     "/bodiless.docs.json",
     "/getDocs.js",
-    "/src"
+    "/lib"
   ],
   "bin": {
     "bodiless-backend": "./bin/bodiless-backend"
   },
+  "scripts": {
+    "build": "run-p build:lib",
+    "build:lib": "tsc -p ./tsconfig.json",
+    "build:watch": "npm run build:lib -- --watch",
+    "clean": "rimraf \"lib/*\" && rimraf tsconfig.tsbuildinfo && rimraf \"doc/api\""
+  },
   "dependencies": {
     "@bodiless/cli": "^1.0.0-rc.22",
     "body-parser": "^1.18.3",
@@ -20,6 +26,7 @@
     "formidable": "^1.2.1",
     "fs-extra": "^8.1.0",
     "lodash": "^4.17.19",
+    "qs": "^6.11.0",
     "replace-in-file": "^6.2.0",
     "rimraf": "^2.6.3",
     "tmp": "^0.0.33",
@@ -32,7 +39,16 @@
   "author": "Dewen Li ",
   "license": "Apache-2.0",
   "devDependencies": {
+    "@types/copyfiles": "^2.1.1",
+    "@types/dotenv": "^8.2.0",
+    "@types/express": "^4.17.17",
+    "@types/formidable": "^1.2.5",
+    "@types/fs-extra": "^8.0.0",
+    "@types/morgan": "^1.9.4",
+    "@types/react": "^17.0.39",
+    "@types/rimraf": "^3.0.0",
     "@types/supertest": "^2.0.8",
+    "@types/tmp": "^0.2.3",
     "morgan": "^1.9.1",
     "morgan-body": "^2.4.8",
     "supertest": "^4.0.2"
diff --git a/packages/bodiless-backend/replace-in-file.d.ts b/packages/bodiless-backend/replace-in-file.d.ts
new file mode 100644
index 0000000000..5f72dd57d9
--- /dev/null
+++ b/packages/bodiless-backend/replace-in-file.d.ts
@@ -0,0 +1,43 @@
+declare module 'replace-in-file' {
+  export function replaceInFile(config: ReplaceInFileConfig): Promise<ReplaceResult[]>;
+  export function replaceInFile(
+    config: ReplaceInFileConfig, cb: (error: Error, results: ReplaceResult[]) => void): void;
+  export default replaceInFile;
+
+  namespace replaceInFile {
+    export function sync(config: ReplaceInFileConfig): ReplaceResult[];
+    export function replaceInFileSync(config: ReplaceInFileConfig): ReplaceResult[];
+    export function replaceInFile(config: ReplaceInFileConfig): Promise<ReplaceResult[]>;
+    export function replaceInFile(
+      config: ReplaceInFileConfig, cb: (error: Error, results: ReplaceResult[]) => void): void;
+  }
+
+  export function sync(config: ReplaceInFileConfig): ReplaceResult[];
+  export function replaceInFileSync(config: ReplaceInFileConfig): ReplaceResult[];
+
+  export type From = string | RegExp | FromCallback;
+  export type To = string | ToCallback;
+
+  export interface ReplaceInFileConfig {
+    files: string | string[];
+    ignore?: string | string[];
+    from: From | Array<From>;
+    to: To | Array<To>;
+    countMatches?: boolean;
+    allowEmptyPaths?: boolean,
+    disableGlobs?: boolean,
+    encoding?: string,
+    dry?:boolean
+    glob?:object
+  }
+
+  export interface ReplaceResult {
+    file: string;
+    hasChanged: boolean;
+    numMatches?: number,
+    numReplacements?: number,
+  }
+}
+
+type FromCallback = (file: string) => string | RegExp | string[] | RegExp[];
+type ToCallback = (match: string, file: string) => string | string[];
diff --git a/packages/bodiless-backend/src/backend.js b/packages/bodiless-backend/src/backend.js
deleted file mode 100644
index 9b9a2f072c..0000000000
--- a/packages/bodiless-backend/src/backend.js
+++ /dev/null
@@ -1,869 +0,0 @@
-/**
- * Copyright © 2019 Johnson & Johnson
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -/* eslint no-console: 0 */ -/* eslint global-require: 0 */ -const express = require('express'); -const bodyParser = require('body-parser'); -const { spawn } = require('child_process'); -const formidable = require('formidable'); -const tmp = require('tmp'); -const path = require('path'); -const uniq = require('lodash/uniq'); -const Page = require('./page'); -const GitCmd = require('./GitCmd'); -const { getChanges, getConflicts, mergeMain } = require('./git'); -const { copyAllFiles, copyFile, moveFile } = require('./fileHelper'); -const Logger = require('./logger'); - -const backendPrefix = process.env.GATSBY_BACKEND_PREFIX || '/___backend'; -const backendFilePath = process.env.BODILESS_BACKEND_DATA_FILE_PATH || ''; -const defaultBackendPagePath = path.resolve(backendFilePath, 'pages'); -const defaultBackendSitePath = path.resolve(backendFilePath, 'site'); -const backendPagePath = process.env.BODILESS_BACKEND_DATA_PAGE_PATH || defaultBackendPagePath; -const backendStaticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; -const backendPublicPath = process.env.BODILESS_BACKEND_PUBLIC_PAGE_PATH || 'public/page-data'; -const isExtendedLogging = (process.env.BODILESS_BACKEND_EXTENDED_LOGGING_ENABLED || '0') === '1'; -const canCommit = (process.env.BODILESS_BACKEND_COMMIT_ENABLED || '0') === '1'; -const canSave = (process.env.BODILESS_BACKEND_SAVE_ENABLED || '1') === '1'; - -const logger = new Logger('BACKEND'); - -const isMorganEnabled = () => isExtendedLogging; -/* -This Class holds all of the interaction with Git -*/ -class Git { - static setCurrent(branch) { - return Git.cmd() - .add('checkout', branch) - .exec(); - } - - static getCurrent() { - return Git.cmd() - .add('rev-parse', '--abbrev-ref', 'HEAD') - .exec() - .catch(data => logger.log(data)) - .then(data => data.stdout); - } - - static list() { - return new Promise(resolve => { - const cmdName = path.join(__dirname, 'getBranches.sh'); - const cmd = spawn('bash', [cmdName]); - const results = []; - cmd.stdout.on('data', data => { - const values = data.toString().split('||'); - if (values.length === 4) { - results.push({ - isCurrent: Boolean(Number.parseInt(values[0], 10)), - name: values[1].trim(), - description: values[2].trim(), - lastCommitMessage: values[3].trim(), - }); - } - }); - cmd.stdout.on('close', () => { - resolve(results); - }); - }); - } -} - -/* -This Class lets us build and execute a GitCommit -*/ -class GitCommit { - constructor() { - try { - // If App git path is specified, switch to the path. - if (process.env.APP_GIT_PATH) { - process.chdir(process.env.APP_GIT_PATH); - } - } catch (err) { - logger.error(`chdir: ${err}`); - } - this.files = []; - this.remote = 'origin'; - } - - addDirectory(...dirs) { - this.files.push(...dirs); - return this; - } - - addPaths(...paths) { - this.files.push(...paths.map(p => `${backendFilePath}/${p}.json`)); - return this; - } - - addFiles(...files) { - this.files.push(...files.map(p => `${backendStaticPath}/${p}`)); - return this; - } - - async pull() { - const { remote } = this; - await GitCmd.cmd() - .add('fetch', remote) - .exec(); - - // Check if there are any unstaged files left before rebasing. - const dirty = await GitCmd.cmd() - .add('diff', '--quiet') - .exec(); - if (dirty.code) { - await GitCmd.cmd() - .add('add', '--all') - .exec(); - await GitCmd.cmd() - .add('commit', '-m', 'TEMPORARY COMMIT') - .exec(); - } - - // Get current branch name. 
- const data = await GitCmd.cmd() - .add('symbolic-ref', '--short', 'HEAD') - .exec(); - const branch = data.stdout.trim(); - - let result; - try { - result = await GitCmd.cmd() - .add('rebase', `${remote}/${branch}`, '-s', 'recursive', '-X', 'theirs') - .exec(); - } catch (rebaseErr) { - // Set another http.status code for unstaged changes? - // const unstaged = /You have unstaged changes/i.test(rebaseErr.message); - - // Set HTTP response status code to 409 if a conflict is found during rebase. - if (/could not apply/i.test(rebaseErr.message)) { - rebaseErr.code = '409'; - - // Abort rebase only if it's in progress (i.e. merge conflict). - try { - logger.log('Found error during rebase, attempting to abort rebase.'); - await GitCmd.cmd() - .add('rebase', '--abort') - .exec(); - } catch (abortErr) { - logger.log('Found error while attempting to abort rebase.'); - logger.error(abortErr); - } - } else { - rebaseErr.code = '500'; - } - throw rebaseErr; - } finally { - // If there was a temporary commit, rewind working directory back one commit. - if (dirty.code && (result.stdout.search('Already applied') === -1)) { - await GitCmd.cmd() - .add('reset', 'HEAD^') - .exec(); - } - } - return result; - } - - async commit(message, author) { - const { remote } = this; - - await this.pull(); - - // Stage user files specified by front-end (src/data, /static, etc.). - await GitCmd.cmd() - .add('add') - .addFiles(...this.files) - .exec(); - - // Check if we have any staged files to be committed. - let hasChanges = true; - try { - const resDiff = await GitCmd.cmd() - .add('diff', '--cached', '--exit-code') - .exec(); - - if (resDiff.code === 0) { - hasChanges = false; - } - } catch (errDiff) { - hasChanges = true; - } - if (!hasChanges) { - const errNoChange = new Error('No changes found for this commit.'); - errNoChange.code = 405; - throw errNoChange; - } - - // Commit the staged files.. - const commitCmd = GitCmd.cmd(); - commitCmd.add('commit', '-m', message); - // If we have an author, add it to the commit. - if (author) { - commitCmd.add('--author', author); - } - commitCmd.addFiles(...this.files); - const res = await commitCmd.exec(); - - try { - // Push changes after successful rebase. - await GitCmd.cmd() - .add('push', remote) - .exec(); - } catch (pushError) { - // Walk back last commit, and put it's contents into the working directory. 
- GitCmd.cmd() - .add('reset', '--mixed', 'HEAD^') - .exec(); - throw pushError; - } - - // return commit command response to front-end if successful - return res; - } - - amend() { - // we have to tell git we intend to add our files - return Git.cmd() - .add('add', '--intent-to-add') - .addFiles(...this.files) - .exec() - .then( - Git.cmd() - .add('commit') - .add('--amend', '--no-edit') - .addFiles(...this.files) - .exec(), - ); - } -} - -class Backend { - constructor() { - this.app = express(); - this.app.use(bodyParser.json()); - if (isMorganEnabled()) { - const morgan = require('morgan'); - const morganBody = require('morgan-body'); - this.app.use(morgan(':method :url :status :res[content-length] - :response-time ms')); - morganBody(this.app); - } - this.app.use((req, res, next) => { - res.header( - 'Access-Control-Allow-Headers', - 'Origin, X-Requested-With, Content-Type, Accept', - ); - res.header('Content-Type', 'application/json'); - next(); - }); - this.setRoute(`${backendPrefix}/changes`, Backend.getChanges); - this.setRoute(`${backendPrefix}/changes/conflicts`, Backend.getConflicts); - this.setRoute(`${backendPrefix}/get/commits`, Backend.getLatestCommits); - this.setRoute(`${backendPrefix}/change/amend`, Backend.setChangeAmend); - this.setRoute(`${backendPrefix}/change/commit`, Backend.setChangeCommit); - this.setRoute(`${backendPrefix}/change/push`, Backend.setChangePush); - this.setRoute(`${backendPrefix}/change/reset`, Backend.setChangeReset); - this.setRoute(`${backendPrefix}/change/pull`, Backend.setChangePull); - this.setRoute(`${backendPrefix}/merge/main`, Backend.mergeMain); - this.setRoute(`${backendPrefix}/asset/*`, Backend.setAsset); - this.setRoute(`${backendPrefix}/set/current`, Backend.setSetCurrent); - this.setRoute(`${backendPrefix}/set/list`, Backend.setSetList); - this.setRoute(`${backendPrefix}/content/*`, Backend.setContent); - this.setRoute(`${backendPrefix}/log`, Backend.log); - this.setRoute(`${backendPrefix}/pages`, Backend.setPages); - this.setRoute(`${backendPrefix}/clone`, Backend.clonePage); - this.setRoute(`${backendPrefix}/remove/*`, Backend.removePage); - this.setRoute(`${backendPrefix}/directory/child/*`, Backend.directoryChild); - this.setRoute(`${backendPrefix}/directory/exists/*`, Backend.directoryExists); - this.setRoute(`${backendPrefix}/file/remove/*`, Backend.removeFile); - this.setRoute(`${backendPrefix}/assets/remove/*`, Backend.removeAssets); - this.setRoute(`${backendPrefix}/assets/copy`, Backend.copyAssets); - this.setRoute(`${backendPrefix}/assets/move`, Backend.moveAssets); - } - - setRoute(route, action) { - action.bind(this)(this.app.route(route)); - } - - getApp() { - return this.app; - } - - static exitWithErrorResponse(error, res) { - logger.error(error); - if (Number(error.code) >= 300) { - res.status(Number(error.code)); - } else { - res.status(500); - } - // End response process to prevent any further queued promises/events from responding. - res.send(Backend.sanitizeOutput(error.message)).end(); - } - - static ensureCommitEnabled(res) { - // Exit with HTTP 405 "Method Not Allowed" if git commits are disabled. - if (!canCommit) { - const error = new Error( - 'Your current environment does not allow saving content.', - ); - error.code = 405; - Backend.exitWithErrorResponse(error, res); - return false; - } - return true; - } - - static ensureSaveEnabled(res) { - // Exit with HTTP 405 "Method Not Allowed" if git commits are disabled. 
- if (!canSave) { - const error = new Error( - 'Your current environment does not allow saving content.', - ); - error.code = 405; - Backend.exitWithErrorResponse(error, res); - return false; - } - return true; - } - - static getChanges(route) { - route.get(async (req, res) => { - try { - const status = await getChanges(); - res.send(status); - } catch (error) { - logger.log(error); - error.code = 500; - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static getConflicts(route) { - route.get(async (req, res) => { - const target = req.query.target || undefined; - try { - const conflicts = await getConflicts(target); - const pages = uniq(conflicts.files.filter(file => (file.search(backendPagePath) !== -1)) - .map(file => ( - path.dirname(file).replace(backendPagePath, '').replace(/^\/|\/$/g, '') || 'homepage' - ))); - const site = uniq(conflicts.files.filter( - file => (file.search(defaultBackendSitePath) !== -1), - ).map(file => ( - path.dirname(file).replace(defaultBackendSitePath, '').replace(/^\/|\/$/g, '') || 'site' - ))); - res.send({ ...conflicts, pages, site }); - } catch (error) { - logger.log(error); - error.code = 500; - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static getLatestCommits(route) { - route.post(async (req, res) => { - try { - await GitCmd.cmd().add('fetch', '--all'); - const gitLog = await GitCmd.cmd() - .add('log', '--pretty=format:%H%n%ad%n%an%n%s%n') - .exec(); - res.send(gitLog); - } catch (error) { - res.send(error.info); - } - }); - } - - static setChangeReset(route) { - route.post(async (req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - logger.log('Start reset'); - try { - // Clean up untracked files. - if (backendFilePath && backendStaticPath) { - // Clean up public folder. - const gitStatus = await GitCmd.cmd() - .add('status', '--porcelain', backendPagePath) - .exec(); - const gitRootRelPath = await GitCmd.cmd() - .add('rev-parse', '--show-cdup') - .exec(); - const reGetDeletedAndUntracked = /(?<= D |\?\? ).*/gm; - const deletedAndUntracked = gitStatus.stdout.match(reGetDeletedAndUntracked); - if (deletedAndUntracked !== null) { - const dataPagePath = path.join(backendFilePath, 'pages'); - const obsoletePublicPages = deletedAndUntracked.map(gitPath => { - const publicPagePath = gitPath.replace(dataPagePath, backendPublicPath); - // Get absolute path considering location of .git folder - return path.resolve( - gitRootRelPath.stdout.trim(), - publicPagePath, - ); - }); - // Have to loop through every path since 'git clean' can work incorrectly when passing - // all the paths at once. - await Promise.all(obsoletePublicPages.map( - async (gitPath) => GitCmd.cmd().add('clean', '-dfx').addFiles(gitPath).exec(), - )); - } - // Clean up data folder. - await Promise.all([backendFilePath, backendStaticPath].map( - async (gitPath) => GitCmd.cmd().add('clean', '-df').addFiles(gitPath).exec(), - )); - } - // Discard changes in existing files. - const cleanExisting = await GitCmd.cmd() - .add('reset', '--hard', 'HEAD') - .exec(); - res.send(cleanExisting.stdout); - } catch (error) { - // Need to inform user of merge operation fails. - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static setChangePull(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - logger.log('Start pull'); - new GitCommit() - .pull() - .then(data => res.send(data.stdout)) - // Need to inform user of merge operation fails. 
- .catch(error => Backend.exitWithErrorResponse(error, res)); - }); - } - - static mergeMain(route) { - route.post(async (req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - try { - const status = await mergeMain(); - res.send(status); - } catch (error) { - logger.log(error); - error.code = 500; - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static setChangeAmend(route) { - route.post((req, res) => { - logger.log('Start amend'); - logger.log(req.body.paths); - Git.commit() - .addPaths(...req.body.paths) - .amend() - .then(data => res.send(data.stdout)) - .catch(data => logger.log(data)); - }); - } - - static setChangeCommit(route) { - route.post((req, res) => { - if (!Backend.ensureCommitEnabled(res)) return; - logger.log(`Start committing: ${req.body.message}`); - const { author } = req.body; - const files = req.body.files || []; - const dirs = req.body.dirs || []; - new GitCommit() - .addDirectory(...dirs) - .addPaths(...req.body.paths) - .addFiles(...files) - .commit(`[CONTENT] ${req.body.message}`, author) - // .then(Git.cmd().add('push').exec()) - .then(data => { - res.send(data.stdout); - }) - // Need to inform user of merge operation fails. - .catch(error => Backend.exitWithErrorResponse(error, res)); - }); - } - - static setChangePush(route) { - route.post((req, res) => { - if (!Backend.ensureCommitEnabled(res)) return; - logger.log('Start push'); - new GitCmd() - .add('symbolic-ref', '--short', 'HEAD') - .exec() - .then(data => { - const branch = data.stdout.trim(); - logger.log(`Branch = ${branch}`); - Git.cmd() - .add('rebase', `origin/${branch}`) - .exec() - .then( - Git.cmd() - .add('push', 'origin', branch) - .exec(), - ) - .then(addData => res.send(addData.stdout)) - .catch(addData => logger.error(addData)); - }) - .catch(data => logger.log(data)); - }); - } - - static log(route) { - route.post((req, res) => { - new Logger(req.body.id).print(req.body.message, req.body.severity); - res.send('success'); - }); - } - - static setAsset(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const baseResourcePath = Backend.getPath(req); - const tmpDir = tmp.dirSync({ mode: '0755', unsafeCleanup: true, prefix: 'backendTmpDir_' }); - const form = formidable({ multiples: true, uploadDir: tmpDir.name }); - - form.parse(req, (err, fields, files) => { - const { nodePath } = fields; - copyAllFiles(files, baseResourcePath, nodePath).then((filesPath) => { - res.json({ filesPath }); - }).catch(copyErr => { - console.log(copyErr); - res.send(copyErr); - }); - }); - }); - } - - static setSetCurrent(route) { - route - .get((req, res) => { - logger.log('Start get current set'); - Git.getCurrent().then(data => res.send(data)); - }) - .post((req, res) => { - logger.log(`Start Post current Set:${req.body}`); - Git.setCurrent(req.body.name) - .then(Git.list()) - .then(data => { - res.send(data); - }) - .catch(reason => { - logger.log(reason); - }); - }); - } - - static setSetList(route) { - route.get((req, res) => { - logger.log('Start Get Set List'); - Git.list().then(data => res.send(data)); - }); - } - - static setContent(route) { - route - .get((req, res) => { - // @todo: refactor 2nd argument. - logger.log(req); - const page = Backend.getPage(Backend.getPath(req)); - logger.log(`Start get content for:${page.file}`); - page - .read() - .then(data => { - res.send(data); - }) - .catch(() => res.send({})); - }) - .post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - // @todo: refactor 2nd argument. 
- const page = Backend.getPage(Backend.getPath(req)); - logger.log(`Start post content for:${page.file}`); - page - .write(req.body) - .then(data => { - logger.log('Sending', data); - res.send(data); - }) - .catch(reason => { - logger.log(reason); - res.send({}); - }); - }) - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const page = Backend.getPage(Backend.getPath(req)); - logger.log(`Start deletion for:${page.file}`); - page - .delete() - .then(data => { - logger.log('Sending', data); - res.send(data); - }) - .catch(reason => { - logger.log(reason); - res.send({}); - }); - }); - } - - static getPath(req) { - const prefixCount = backendPrefix.split('/').filter(Boolean).length + 1; - logger.log(req.originalUrl); - return req.originalUrl - .replace(/\/*$/, '') - .replace(/^\/*/, '') - .split('/') - .splice(prefixCount) - .join('/'); - } - - static getPage(pagePath) { - return new Page(pagePath); - } - - static removePage(route) { - route - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - page.setBasePath(backendPagePath); - - logger.log(`Start deleting page:${page.directory}`); - - page - .deleteDirectory() - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static removeFile(route) { - route - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - page.setBasePath(backendPagePath); - const origin = `./src/data/pages/${pagePath}index.json`; - logger.log(`Start deleting file: ${origin}`); - - page - .removeFile(origin) - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static directoryChild(route) { - route - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - - page.setBasePath(backendPagePath); - - logger.log(`Start verify page child directory: ${page.directory}`); - - page - .hasChildDirectory() - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static directoryExists(route) { - route - .delete((req, res) => { - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - - page.setBasePath(backendPagePath); - - logger.log(`Start verifying new page exists: ${page.directory}`); - - page - .directoryExists(page.directory) - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static setPages(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const { body } = req; - const pagePath = body.path || ''; - const template = body.template || '_default'; - const filePath = path.join(pagePath, 'index'); - const pageContent = { - '#template': template, - }; - const page = Backend.getPage(filePath); - page.setBasePath(backendPagePath); - logger.log(`Start creating page for:${page.file}`); - if (page.exists) { - res.status(409); - res.send(`Error: page ${pagePath} already exists`); - return; - } - page - .write(pageContent) - .then(data => { - logger.log('Sending', data); - res.status(201); - res.send(data); - }) - .catch(reason => { - logger.log(reason); - res.send({}); - }); - }); - } - - static clonePage(route) { - route.post(async (req, res) => { - if 
(!Backend.ensureSaveEnabled(res)) return; - const { body: { origin, destination } } = req; - const page = Backend.getPage(destination); - page.setBasePath(backendPagePath); - - logger.log(`Start cloning page for:${destination}`); - - page - .copyDirectory(origin, destination) - .then(data => { - if (data) { - logger.log(data); - res.send(data); - } else { - res.send({}); - } - }) - .catch(reason => { - logger.log(reason); - res.status(500).send(`${reason}`); - }); - }); - } - - static removeAssets(route) { - route.delete(async (req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const origin = req.params[0]; - const page = Backend.getPage(origin); - - logger.log(`Start removing assets for:${origin}`); - - const originPath = origin.replace(/\/$/, ''); - const originStaticPath = path.join(backendStaticPath, '/images/pages', originPath); - - page - .removePageAssets(originStaticPath) - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static copyAssets(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const { - body: { - path_from: pathFrom, path_to: pathTo, - } - } = req; - const assetStaticPathFrom = path.join(backendStaticPath, pathFrom); - const assetStaticPathTo = path.join(backendStaticPath, pathTo); - logger.log(`Copy assets from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`); - try { - copyFile(assetStaticPathFrom, assetStaticPathTo); - setTimeout(() => { - res.send({status: 'success'}); - }, 500); - } catch (error) { - logger.log(error); - res.status(500).send(error); - } - }); - } - - static moveAssets(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const { - body: { - path_from: pathFrom, path_to: pathTo, - } - } = req; - const assetStaticPathFrom = path.join(backendStaticPath, pathFrom); - const assetStaticPathTo = path.join(backendStaticPath, pathTo); - logger.log(`Move asset from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`); - try { - moveFile(assetStaticPathFrom, assetStaticPathTo); - setTimeout(() => { - res.send({status: 'success'}); - }, 500); - } catch (error) { - logger.log(error); - res.status(500).send(error); - } - }); - } - - static sanitizeOutput(data) { - return data.replace(/(http|https):\/\/[^@]+:[^@]+@/gi, '$1://****:****@'); - } - - start(port) { - logger.log('Start'); - this.app.listen(port, () => logger.log(`Backend listening on Port: ${port}`)); - } -} - -module.exports = Backend; diff --git a/packages/bodiless-backend/src/backend.ts b/packages/bodiless-backend/src/backend.ts new file mode 100644 index 0000000000..ed1f722112 --- /dev/null +++ b/packages/bodiless-backend/src/backend.ts @@ -0,0 +1,928 @@ +/** + * Copyright © 2019 Johnson & Johnson + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import express from 'express'; +import bodyParser from 'body-parser'; +import { spawn } from 'child_process'; +import formidable from 'formidable'; +import tmp from 'tmp'; +import path from 'path'; +import uniq from 'lodash/uniq'; +import morgan from 'morgan'; +import morganBody from 'morgan-body'; +import type { + Express, IRoute, Request, Response, +} from 'express'; +import Page from './page'; +import GitCmd, { GitCmdError } from './gitCmd'; +import gitUtil from './tools/git'; +import { copyAllFiles, copyFile, moveFile } from './fileHelper'; +import Logger from './logger'; +import type { GitInfoType } from './gitCmd'; +import type { GitUtil } from './tools/git'; + +const logger = new Logger('BACKEND'); + +type GitBranchInfoType = { + isCurrent: boolean; + name: string; + description: string; + lastCommitMessage: string; +}; + +/* +This Class holds all of the interaction with Git +*/ +class Git { + // static setCurrent(branch: string) { + // return Git.cmd() + // .add('checkout', branch) + // .exec(); + // } + + // static getCurrent() { + // return Git.cmd() + // .add('rev-parse', '--abbrev-ref', 'HEAD') + // .exec() + // .catch(data => logger.log(data)) + // .then(data => data.stdout); + // } + + static list() { + return new Promise((resolve) => { + const cmdName = path.join(__dirname, 'getBranches.sh'); + const cmd = spawn('bash', [cmdName]); + const results: GitBranchInfoType[] = []; + cmd.stdout.on('data', (data) => { + const values = data.toString().split('||'); + if (values.length === 4) { + results.push({ + isCurrent: Boolean(Number.parseInt(values[0], 10)), + name: values[1].trim(), + description: values[2].trim(), + lastCommitMessage: values[3].trim(), + }); + } + }); + cmd.stdout.on('close', () => { + resolve(results); + }); + }); + } +} + +/* +This Class lets us build and execute a GitCommit +*/ +class GitCommit { + files: string[] = []; + + remote: string; + + staticPath: string; + + filePath: string; + + constructor() { + try { + // If App git path is specified, switch to the path. + if (process.env.APP_GIT_PATH) { + process.chdir(process.env.APP_GIT_PATH); + } + } catch (err) { + logger.error(`chdir: ${err}`); + } + this.files = []; + this.remote = 'origin'; + this.filePath = ''; + this.staticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; + } + + addDirectory(...dirs: string[]) { + this.files.push(...dirs); + return this; + } + + addPaths(...paths: string[]) { + this.files.push(...paths.map((p) => `${this.filePath}/${p}.json`)); + return this; + } + + addFiles(...files: string[]) { + this.files.push(...files.map((p) => `${this.staticPath}/${p}`)); + return this; + } + + async pull() { + const { remote } = this; + await GitCmd.cmd().add('fetch', remote).exec(); + + // Check if there are any un-staged files left before rebasing. + const dirty = await GitCmd.cmd().add('diff', '--quiet').exec(); + if (dirty.code) { + await GitCmd.cmd().add('add', '--all').exec(); + await GitCmd.cmd().add('commit', '-m', 'TEMPORARY COMMIT').exec(); + } + + // Get current branch name. + const data: GitInfoType = await GitCmd.cmd() + .add('symbolic-ref', '--short', 'HEAD') + .exec(); + const branch = data.stdout.trim(); + + let result; + try { + result = await GitCmd.cmd() + .add('rebase', `${remote}/${branch}`, '-s', 'recursive', '-X', 'theirs') + .exec(); + } catch (rebaseErr: any) { + // Set another http.status code for unstaged changes? 
+      // const unstaged = /You have unstaged changes/i.test(rebaseErr.message);
+
+      // Set HTTP response status code to 409 if a conflict is found during rebase.
+      if (/could not apply/i.test(rebaseErr.message)) {
+        rebaseErr.code = '409';
+
+        // Abort rebase only if it's in progress (i.e. merge conflict).
+        try {
+          logger.log('Found error during rebase, attempting to abort rebase.');
+          await GitCmd.cmd().add('rebase', '--abort').exec();
+        } catch (abortErr: any) {
+          logger.log('Found error while attempting to abort rebase.');
+          logger.error(abortErr);
+        }
+      } else {
+        rebaseErr.code = '500';
+      }
+      throw rebaseErr;
+    } finally {
+      // If there was a temporary commit, rewind working directory back one commit.
+      if (dirty.code && result?.stdout.search('Already applied') === -1) {
+        await GitCmd.cmd().add('reset', 'HEAD^').exec();
+      }
+    }
+    return result;
+  }
+
+  async commit(message: string, author: string) {
+    const { remote } = this;
+
+    await this.pull();
+
+    // Stage user files specified by front-end (src/data, /static, etc.).
+    await GitCmd.cmd()
+      .add('add')
+      .addFiles(...this.files)
+      .exec();
+
+    // Check if we have any staged files to be committed.
+    let hasChanges = true;
+    try {
+      const resDiff = await GitCmd.cmd()
+        .add('diff', '--cached', '--exit-code')
+        .exec();
+
+      if (resDiff.code === 0) {
+        hasChanges = false;
+      }
+    } catch (errDiff) {
+      hasChanges = true;
+    }
+    if (!hasChanges) {
+      const errNoChange = new GitCmdError('No changes found for this commit.');
+      errNoChange.code = 405;
+      throw errNoChange;
+    }
+
+    // Commit the staged files.
+    const commitCmd = GitCmd.cmd();
+    commitCmd.add('commit', '-m', message);
+    // If we have an author, add it to the commit.
+    if (author) {
+      commitCmd.add('--author', author);
+    }
+    commitCmd.addFiles(...this.files);
+    const res = await commitCmd.exec();
+
+    try {
+      // Push changes after successful rebase.
+      await GitCmd.cmd().add('push', remote).exec();
+    } catch (pushError) {
+      // Walk back the last commit and put its contents into the working directory.
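+      // (A mixed reset keeps the committed changes in the working tree, so the user's
+      // content presumably survives the failed push and can be committed again later.)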
+ GitCmd.cmd().add('reset', '--mixed', 'HEAD^').exec(); + throw pushError; + } + + // return commit command response to front-end if successful + return res; + } + + // amend() { + // // we have to tell git we intend to add our files + // return Git.cmd() + // .add('add', '--intent-to-add') + // .addFiles(...this.files) + // .exec() + // .then( + // Git.cmd() + // .add('commit') + // .add('--amend', '--no-edit') + // .addFiles(...this.files) + // .exec(), + // ); + // } +} + +class Backend { + app: Express; + + git: GitUtil; + + prefix: string = '/___backend'; + + filePath: string = ''; + + defaultPagePath: string = ''; + + defaultSitePath: string = ''; + + pagePath: string = ''; + + staticPath: string = ''; + + publicPath: string = ''; + + isExtendedLogging: boolean = false; + + canCommit: boolean = false; + + canSave: boolean = true; + + initConf() { + this.prefix = process.env.GATSBY_BACKEND_PREFIX || '/___backend'; + this.filePath = process.env.BODILESS_BACKEND_DATA_FILE_PATH || ''; + this.defaultPagePath = path.resolve(this.filePath, 'pages'); + this.defaultSitePath = path.resolve(this.filePath, 'site'); + this.pagePath = process.env.BODILESS_BACKEND_DATA_PAGE_PATH || this.defaultPagePath; + this.staticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; + this.publicPath = process.env.BODILESS_BACKEND_PUBLIC_PAGE_PATH || 'public/page-data'; + this.isExtendedLogging = (process.env.BODILESS_BACKEND_EXTENDED_LOGGING_ENABLED || '0') === '1'; + this.canCommit = (process.env.BODILESS_BACKEND_COMMIT_ENABLED || '0') === '1'; + this.canSave = (process.env.BODILESS_BACKEND_SAVE_ENABLED || '1') === '1'; + } + + isMorganEnabled = () => this.isExtendedLogging; + + constructor() { + this.initConf(); + this.app = express(); + this.app.use(bodyParser.json()); + this.git = gitUtil; + if (this.isMorganEnabled()) { + this.app.use( + morgan(':method :url :status :res[content-length] - :response-time ms'), + ); + morganBody(this.app); + } + this.app.use((req, res, next) => { + res.header( + 'Access-Control-Allow-Headers', + 'Origin, X-Requested-With, Content-Type, Accept', + ); + res.header('Content-Type', 'application/json'); + next(); + }); + this.setRoute(`${this.prefix}/changes`, this.getChanges); + this.setRoute(`${this.prefix}/changes/conflicts`, this.getConflicts); + this.setRoute(`${this.prefix}/get/commits`, Backend.getLatestCommits); + // this.setRoute(`${this.prefix}/change/amend`, this.setChangeAmend); + this.setRoute(`${this.prefix}/change/commit`, this.setChangeCommit); + // this.setRoute(`${this.prefix}/change/push`, this.setChangePush); + this.setRoute(`${this.prefix}/change/reset`, this.setChangeReset); + this.setRoute(`${this.prefix}/change/pull`, this.setChangePull); + this.setRoute(`${this.prefix}/merge/main`, this.mergeMain); + this.setRoute(`${this.prefix}/asset/*`, this.setAsset); + // this.setRoute(`${this.prefix}/set/current`, Backend.setSetCurrent); + this.setRoute(`${this.prefix}/set/list`, Backend.setSetList); + this.setRoute(`${this.prefix}/content/*`, this.setContent); + this.setRoute(`${this.prefix}/log`, Backend.log); + this.setRoute(`${this.prefix}/pages`, this.setPages); + this.setRoute(`${this.prefix}/clone`, this.clonePage); + this.setRoute(`${this.prefix}/remove/*`, this.removePage); + this.setRoute(`${this.prefix}/directory/child/*`, this.directoryChild); + this.setRoute( + `${this.prefix}/directory/exists/*`, + Backend.directoryExists, + ); + this.setRoute(`${this.prefix}/file/remove/*`, this.removeFile); + this.setRoute(`${this.prefix}/assets/remove/*`, 
this.removeAssets);
+    this.setRoute(`${this.prefix}/assets/copy`, this.copyAssets);
+    this.setRoute(`${this.prefix}/assets/move`, this.moveAssets);
+  }
+
+  setRoute(route: string, action: (r: IRoute) => void) {
+    action.bind(this)(this.app.route(route));
+  }
+
+  getApp() {
+    return this.app;
+  }
+
+  static exitWithErrorResponse(error: GitCmdError, res: Response) {
+    logger.error(error.message);
+    if (Number(error.code) >= 300) {
+      res.status(Number(error.code));
+    } else {
+      res.status(500);
+    }
+    // End response process to prevent any further queued promises/events from responding.
+    res.send({error: Backend.sanitizeOutput(error.message)}).end();
+  }
+
+  ensureCommitEnabled(res: Response) {
+    // Exit with HTTP 405 "Method Not Allowed" if git commits are disabled.
+    if (!this.canCommit) {
+      const error = new GitCmdError(
+        'Your current environment does not allow saving content.',
+      );
+      error.code = 405;
+      Backend.exitWithErrorResponse(error, res);
+      return false;
+    }
+    return true;
+  }
+
+  ensureSaveEnabled(res: Response) {
+    // Exit with HTTP 405 "Method Not Allowed" if content saving is disabled.
+    if (!this.canSave) {
+      const error = new GitCmdError(
+        'Your current environment does not allow saving content.',
+      );
+      error.code = 405;
+      Backend.exitWithErrorResponse(error, res);
+      return false;
+    }
+    return true;
+  }
+
+  getChanges(route: IRoute) {
+    route.get(async (req: Request, res: Response) => {
+      try {
+        const status = await this.git.getChanges();
+        res.send(status);
+      } catch (error: any) {
+        logger.log(error);
+        error.code = 500;
+        Backend.exitWithErrorResponse(error, res);
+      }
+    });
+  }
+
+  getConflicts(route: IRoute) {
+    route.get(async (req: Request, res: Response) => {
+      const { target: targetQs = undefined } = req.query;
+
+      let target: string | undefined;
+      if (typeof targetQs === 'string') {
+        target = targetQs;
+      }
+
+      try {
+        const conflicts = await this.git.getConflicts(target);
+        const pages = uniq(
+          conflicts.files
+            .filter((file) => file.search(this.pagePath) !== -1)
+            .map(
+              (file) => path
+                .dirname(file)
+                .replace(this.pagePath, '')
+                .replace(/^\/|\/$/g, '') || 'homepage',
+            ),
+        );
+        const site = uniq(
+          conflicts.files
+            .filter((file) => file.search(this.defaultSitePath) !== -1)
+            .map(
+              (file) => path
+                .dirname(file)
+                .replace(this.defaultSitePath, '')
+                .replace(/^\/|\/$/g, '') || 'site',
+            ),
+        );
+        res.send({ ...conflicts, pages, site });
+      } catch (error: any) {
+        logger.log(error);
+        error.code = 500;
+        Backend.exitWithErrorResponse(error, res);
+      }
+    });
+  }
+
+  // @todo: remove static and convert cmd.
+  static getLatestCommits(route: IRoute) {
+    route.post(async (req: Request, res: Response) => {
+      try {
+        await GitCmd.cmd().add('fetch', '--all').exec();
+        const gitLog = await GitCmd.cmd()
+          .add('log', '--pretty=format:%H%n%ad%n%an%n%s%n')
+          .exec();
+        res.send(gitLog);
+      } catch (error: any) {
+        res.send(error.info);
+      }
+    });
+  }
+
+  setChangeReset(route: IRoute) {
+    route.post(async (req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      logger.log('Start reset');
+      try {
+        // Clean up untracked files.
+        if (this.filePath && this.staticPath) {
+          // Clean up public folder.
+          const gitStatus = await GitCmd.cmd()
+            .add('status', '--porcelain', this.pagePath)
+            .exec();
+          const gitRootRelPath = await GitCmd.cmd()
+            .add('rev-parse', '--show-cdup')
+            .exec();
+          const reGetDeletedAndUntracked = /(?<= D |\?\?
).*/gm; + const deletedAndUntracked = gitStatus.stdout.match( + reGetDeletedAndUntracked, + ); + if (deletedAndUntracked !== null) { + const dataPagePath = path.join(this.filePath, 'pages'); + const obsoletePublicPages = deletedAndUntracked.map((gitPath) => { + const publicPagePath = gitPath.replace( + dataPagePath, + this.publicPath, + ); + // Get absolute path considering location of .git folder + return path.resolve(gitRootRelPath.stdout.trim(), publicPagePath); + }); + // Have to loop through every path since 'git clean' can work incorrectly when passing + // all the paths at once. + await Promise.all( + obsoletePublicPages.map( + async (gitPath) => GitCmd.cmd().add('clean', '-dfx').addFiles(gitPath).exec() + ), + ); + } + // Clean up data folder. + await Promise.all( + [this.filePath, this.staticPath].map( + async (gitPath) => GitCmd.cmd().add('clean', '-df').addFiles(gitPath).exec(), + ), + ); + } + // Discard changes in existing files. + const cleanExisting = await GitCmd.cmd() + .add('reset', '--hard', 'HEAD') + .exec(); + res.send({output: cleanExisting.stdout}); + } catch (error: any) { + // Need to inform user of merge operation fails. + Backend.exitWithErrorResponse(error, res); + } + }); + } + + setChangePull(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + logger.log('Start pull'); + new GitCommit() + .pull() + .then((data) => res.send({output: data.stdout})) + // Need to inform user of merge operation fails. + .catch((error) => Backend.exitWithErrorResponse(error, res)); + }); + } + + mergeMain(route: IRoute) { + route.post(async (req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + try { + const status = await this.git.mergeMain(); + res.send(status); + } catch (error: any) { + logger.log(error); + error.code = 500; + Backend.exitWithErrorResponse(error, res); + } + }); + } + + // @todo: do we need this? + // static setChangeAmend(route: IRoute) { + // route.post((req: Request, res: Response) => { + // logger.log('Start amend'); + // logger.log(req.body.paths); + // GitCommit.commit() + // .addPaths(...req.body.paths) + // .amend() + // .then(data => res.send(data.stdout)) + // .catch(data => logger.log(data)); + // }); + // } + + setChangeCommit(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureCommitEnabled(res)) return; + logger.log(`Start committing: ${req.body.message}`); + const { author } = req.body; + const files = req.body.files || []; + const dirs = req.body.dirs || []; + new GitCommit() + .addDirectory(...dirs) + .addPaths(...req.body.paths) + .addFiles(...files) + .commit(`[CONTENT] ${req.body.message}`, author) + // .then(Git.cmd().add('push').exec()) + .then((data) => { + res.send({output: data.stdout}); + }) + // Need to inform user of merge operation fails. + .catch((error) => Backend.exitWithErrorResponse(error, res)); + }); + } + + // @todo: !!! do we need this? 
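+  // (The disabled handler below rebased onto origin/<branch> and pushed directly;
+  // that rebase-and-push flow now appears to be covered by GitCommit.pull() and commit().)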
+ // static setChangePush(route: IRoute) { + // route.post((req: Request, res: Response) => { + // if (!Backend.ensureCommitEnabled(res)) return; + // logger.log('Start push'); + // new GitCmd() + // .add('symbolic-ref', '--short', 'HEAD') + // .exec() + // .then(data => { + // const branch = data.stdout.trim(); + // logger.log(`Branch = ${branch}`); + // Git.cmd() + // .add('rebase', `origin/${branch}`) + // .exec() + // .then( + // Git.cmd() + // .add('push', 'origin', branch) + // .exec(), + // ) + // .then(addData => res.send(addData.stdout)) + // .catch(addData => logger.error(addData)); + // }) + // .catch(data => logger.log(data)); + // }); + // } + + static log(route: IRoute) { + route.post((req: Request, res: Response) => { + new Logger(req.body.id).print(req.body.message, req.body.severity); + res.send({status: 'success'}); + }); + } + + setAsset(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const baseResourcePath = this.getPath(req); + const tmpDir = tmp.dirSync({ + mode: 0o755, + unsafeCleanup: true, + prefix: 'backendTmpDir_', + }); + const form = formidable({ multiples: true, uploadDir: tmpDir.name }); + + form.parse(req, (err, fields, files) => { + const { nodePath } = fields; + copyAllFiles(files, baseResourcePath, nodePath as string) + .then((filesPath) => { + res.json({ filesPath }); + }) + .catch((copyErr) => { + console.log(copyErr); + res.send(copyErr); + }); + }); + }); + } + + // static setSetCurrent(route: IRoute) { + // route + // .get((req: Request, res: Response) => { + // logger.log('Start get current set'); + // Git.getCurrent().then(data => res.send(data)); + // }) + // .post((req: Request, res: Response) => { + // logger.log(`Start Post current Set:${req.body}`); + // Git.setCurrent(req.body.name) + // .then(Git.list()) + // .then(data => { + // res.send(data); + // }) + // .catch(reason => { + // logger.log(reason); + // }); + // }); + // } + + static setSetList(route: IRoute) { + route.get((req: Request, res: Response) => { + logger.log('Start Get Set List'); + Git.list().then((data) => res.send(data)); + }); + } + + setContent(route: IRoute) { + route + .get((req: Request, res: Response) => { + const page = Backend.getPage(this.getPath(req)); + logger.log(`Start get content for:${page.file}`); + page + .read() + .then((data) => { + res.send({data}); + }) + .catch(() => res.send({})); + }) + .post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + // @todo: refactor 2nd argument. 
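+      // (getPath(req) strips the backend prefix and the endpoint segment from
+      // req.originalUrl, leaving the content node path.)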
+      const page = Backend.getPage(this.getPath(req));
+      logger.log(`Start post content for:${page.file}`);
+      page
+        .write(req.body)
+        .then((data) => {
+          logger.log('Sending', data);
+          res.send({data});
+        })
+        .catch((error: any) => {
+          logger.log(error);
+          res.status(500).send({error});
+        });
+    })
+    .delete((req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      const page = Backend.getPage(this.getPath(req));
+      logger.log(`Start deletion for:${page.file}`);
+      page
+        .delete()
+        .then((data) => {
+          logger.log('Sending', data);
+          res.send({data});
+        })
+        .catch((error: any) => {
+          logger.log(error);
+          res.status(501).send({error});
+        });
+    });
+  }
+
+  getPath(req: Request) {
+    const prefixCount = this.prefix.split('/').filter(Boolean).length + 1;
+    logger.log(req.originalUrl);
+    return req.originalUrl
+      .replace(/\/*$/, '')
+      .replace(/^\/*/, '')
+      .split('/')
+      .splice(prefixCount)
+      .join('/');
+  }
+
+  static getPage(pagePath: string) {
+    return new Page(pagePath);
+  }
+
+  removePage(route: IRoute) {
+    route.delete((req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      const pagePath = req.params[0];
+      const page = Backend.getPage(pagePath);
+      page.setBasePath(this.pagePath);
+
+      logger.log(`Start deleting page:${page.directory}`);
+
+      page.deleteDirectory().then((error: any) => {
+        if (error) {
+          logger.log(error);
+          res.send(error);
+        } else {
+          res.send({});
+        }
+      });
+    });
+  }
+
+  removeFile(route: IRoute) {
+    route.delete((req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      const pagePath = req.params[0];
+      const page = Backend.getPage(pagePath);
+      page.setBasePath(this.pagePath);
+      const origin = `./src/data/pages/${pagePath}index.json`;
+      logger.log(`Start deleting file: ${origin}`);
+
+      page.removeFile(origin).then((error: any) => {
+        if (error) {
+          logger.log(error);
+          res.send(error);
+        } else {
+          res.send({});
+        }
+      });
+    });
+  }
+
+  directoryChild(route: IRoute) {
+    route.delete((req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      const pagePath = req.params[0];
+      const page = Backend.getPage(pagePath);
+
+      page.setBasePath(this.pagePath);
+
+      logger.log(`Start verify page child directory: ${page.directory}`);
+
+      page.hasChildDirectory().then((error: any) => {
+        if (error) {
+          logger.log(error);
+          res.send(error);
+        } else {
+          res.send({});
+        }
+      });
+    });
+  }
+
+  static directoryExists(route: IRoute) {
+    route.delete((req: Request, res: Response) => {
+      const pagePath = req.params[0];
+      const page = Backend.getPage(pagePath);
+
+      page.setBasePath(pagePath);
+
+      logger.log(`Start verifying new page exists: ${page.directory}`);
+
+      page.directoryExists(page.directory).then((error: any) => {
+        if (error) {
+          logger.log(error);
+          res.send(error);
+        } else {
+          res.send({});
+        }
+      });
+    });
+  }
+
+  setPages(route: IRoute) {
+    route.post((req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      const { body } = req;
+      const pagePath = body.path || '';
+      const template = body.template || '_default';
+      const filePath = path.join(pagePath, 'index');
+      const pageContent = {
+        '#template': template,
+      };
+      const page = Backend.getPage(filePath);
+      page.setBasePath(this.pagePath);
+      logger.log(`Start creating page for:${page.file}`);
+      if (page.exists) {
+        res.status(409);
+        res.send({error: `Error: page ${pagePath} already exists`});
+        return;
+      }
+      page
+        .write(pageContent)
+        .then((data: any) => {
+          logger.log('Sending', data);
+          res.status(201);
+          res.send(data);
+        })
+
.catch((reason) => { + logger.log(reason); + res.send({}); + }); + }); + } + + clonePage(route: IRoute) { + route.post(async (req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const { + body: { origin, destination }, + } = req; + const page = Backend.getPage(destination); + page.setBasePath(this.pagePath); + + logger.log(`Start cloning page for:${destination}`); + + page + .copyDirectory(origin, destination) + .then((data) => { + if (data) { + logger.log(JSON.stringify(data)); + res.send(data); + } else { + res.send({}); + } + }) + .catch((reason) => { + res.status(500).send({err: `${reason}`}); + }); + }); + } + + removeAssets(route: IRoute) { + route.delete(async (req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const origin = req.params[0]; + const page = Backend.getPage(origin); + + logger.log(`Start removing assets for:${origin}`); + + const originPath = origin.replace(/\/$/, ''); + const originStaticPath = path.join( + this.staticPath, + '/images/pages', + originPath, + ); + + page.removePageAssets(originStaticPath).then((error) => { + if (error) { + logger.log(error); + res.send({error}); + } else { + res.send({}); + } + }); + }); + } + + copyAssets(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const { + body: { path_from: pathFrom, path_to: pathTo }, + } = req; + const assetStaticPathFrom = path.join(this.staticPath, pathFrom); + const assetStaticPathTo = path.join(this.staticPath, pathTo); + logger.log( + `Copy assets from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`, + ); + try { + copyFile(assetStaticPathFrom, assetStaticPathTo); + setTimeout(() => { + res.send({ status: 'success' }); + }, 500); + } catch (error: any) { + logger.log(error); + res.status(500).send(error); + } + }); + } + + moveAssets(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const { + body: { path_from: pathFrom, path_to: pathTo }, + } = req; + const assetStaticPathFrom = path.join(this.staticPath, pathFrom); + const assetStaticPathTo = path.join(this.staticPath, pathTo); + logger.log( + `Move asset from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`, + ); + try { + moveFile(assetStaticPathFrom, assetStaticPathTo); + setTimeout(() => { + res.send({ status: 'success' }); + }, 500); + } catch (error: any) { + logger.log(error); + res.status(500).send(error); + } + }); + } + + static sanitizeOutput(data: string) { + return data.replace(/(http|https):\/\/[^@]+:[^@]+@/gi, '$1://****:****@'); + } + + start(port: string | number) { + logger.log('Start'); + this.app.listen(port, () => logger.log(`Backend listening on Port: ${port}`)); + } +} + +export default Backend; diff --git a/packages/bodiless-backend/src/fileHelper.js b/packages/bodiless-backend/src/fileHelper.ts similarity index 69% rename from packages/bodiless-backend/src/fileHelper.js rename to packages/bodiless-backend/src/fileHelper.ts index 54ab752e84..1ca3f47c39 100644 --- a/packages/bodiless-backend/src/fileHelper.js +++ b/packages/bodiless-backend/src/fileHelper.ts @@ -12,14 +12,15 @@ * limitations under the License. 
*/ -const fs = require('fs'); -const fse = require('fs-extra'); -const path = require('path'); -const crypto = require('crypto'); +import fs from 'fs'; +import fse from 'fs-extra'; +import path from 'path'; +import crypto from 'crypto'; +import type { Files, File } from 'formidable'; const backendStaticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; -const copyFilePromise = (from, to) => new Promise((resolve, reject) => { +const copyFilePromise = (from: string, to: string) => new Promise((resolve, reject) => { fs.copyFile(from, to, copyErr => { if (copyErr) reject(copyErr); fs.unlinkSync(from); @@ -27,9 +28,9 @@ const copyFilePromise = (from, to) => new Promise((resolve, reject) => { }); }); -const generateHash = str => crypto.createHash('md5').update(str).digest('hex'); +const generateHash = (str: string) => crypto.createHash('md5').update(str).digest('hex'); -const isImage = fileType => { +const isImage = (fileType: string) => { const imageFileTypes = [ 'image/jpeg', 'image/png', @@ -40,12 +41,18 @@ const isImage = fileType => { return imageFileTypes.includes(fileType); }; -const copyAllFiles = (files, baseResourcePath, nodePath) => { - const allFiles = []; - Object.keys(files).forEach(key => allFiles.push(files[key])); +const copyAllFiles = (files: Files, baseResourcePath: string, nodePath: string) => { + const allFiles: File[] = []; + Object.keys(files).forEach(key => { + if (Array.isArray(files[key])) { + allFiles.push(...files[key] as File[]); + } else { + allFiles.push(files[key] as File); + } + }); - return Promise.all(allFiles.map(file => { - const baseDir = isImage(file.type) ? 'images' : 'files'; + return Promise.all(allFiles.map((file: File) => { + const baseDir = isImage(file.type || '') ? 'images' : 'files'; const distFolderPath = path.join( backendStaticPath, baseDir, @@ -57,7 +64,7 @@ const copyAllFiles = (files, baseResourcePath, nodePath) => { fs.mkdirSync(distFolderPath, { recursive: true }); } - return copyFilePromise(file.path, path.join(distFolderPath, file.name)); + return copyFilePromise(file.path, path.join(distFolderPath, file.name || '')); })); }; @@ -71,10 +78,10 @@ const copyAllFiles = (files, baseResourcePath, nodePath) => { * @param pathFrom string - source file path * @param pathTo string - destination file path */ -const copyFile = (pathFrom, pathTo) => { +const copyFile = (pathFrom: string, pathTo: string) => { try { fse.copySync(pathFrom, pathTo); - } catch (err) { + } catch (err: any) { throw new Error(`Failed to copy file from ${pathFrom} to ${pathTo}: ${err.message}`); } }; @@ -85,15 +92,15 @@ const copyFile = (pathFrom, pathTo) => { * @param pathFrom string - source file path * @param pathTo string - destination file path */ -const moveFile = (pathFrom, pathTo) => { +const moveFile = (pathFrom: string, pathTo: string) => { try { fse.moveSync(pathFrom, pathTo, { overwrite: true }); - } catch (err) { + } catch (err: any) { throw new Error(`Failed to move file from ${pathFrom} to ${pathTo}: ${err.message}`); } }; -module.exports = { +export { copyAllFiles, moveFile, copyFile, diff --git a/packages/bodiless-backend/src/GitCmd.js b/packages/bodiless-backend/src/gitCmd.ts similarity index 69% rename from packages/bodiless-backend/src/GitCmd.js rename to packages/bodiless-backend/src/gitCmd.ts index 51090e17c3..b8524fc1cf 100644 --- a/packages/bodiless-backend/src/GitCmd.js +++ b/packages/bodiless-backend/src/gitCmd.ts @@ -12,14 +12,36 @@ * limitations under the License. 
*/ -const { spawn } = require('child_process'); -const Logger = require('./logger'); +import { spawn } from 'child_process'; +import type { SpawnOptionsWithoutStdio } from 'child_process'; +import Logger from './logger'; const logger = new Logger('BACKEND'); + +export type GitInfoType = { + stdout: string, + stderr: string, + code: number | null; +}; + +export class GitCmdError extends Error { + code: number | null = 0; + + info?: GitInfoType; +} + /* This Class wraps spawn and lets us build out git commands with standard responses */ class GitCmd { + cmd: string = ''; + + params: string[] = []; + + files: string[] = []; + + options?: SpawnOptionsWithoutStdio; + constructor() { this.cmd = 'git'; this.params = []; @@ -27,17 +49,17 @@ class GitCmd { this.options = {}; } - add(...params) { + add(...params: string[]) { this.params.push(...params); return this; } - set(options) { + set(options: SpawnOptionsWithoutStdio) { this.options = { ...this.options, ...options }; return this; } - addFiles(...files) { + addFiles(...files: string[]) { this.files.push(...files); // const rawFiles = [...arguments] // this.files.push(...rawFiles.map((file) => file.replace(/ /,'\ '))) @@ -46,12 +68,17 @@ class GitCmd { spawn() { const args = [...this.params, ...this.files]; - logger.log([`Spawning command: ${this.cmd}`, ...args, Date.now(), process.cwd()]); + logger.log( + `Spawning command: ${this.cmd}`, + ...args, + Date.now().toString(), + process.cwd() + ); return spawn(this.cmd, args, this.options); } exec() { - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { const cmd = this.spawn(); let stderr = ''; let stdout = ''; @@ -67,7 +94,11 @@ class GitCmd { stderr += err.message; }); cmd.on('close', code => { - logger.log(stdout, stderr, code); + logger.log( + stdout, + stderr, + `${code || 0}`, + ); if (code === 0) { resolve({ stdout, stderr, code }); return; @@ -78,8 +109,8 @@ class GitCmd { return; } - const error = new Error(`${stderr}`); - error.code = `${code}`; + const error = new GitCmdError(`${stderr}`); + error.code = code; error.info = { stdout, stderr, code }; reject(error); }); @@ -91,4 +122,4 @@ class GitCmd { } } -module.exports = GitCmd; +export default GitCmd; diff --git a/packages/bodiless-backend/src/logger.js b/packages/bodiless-backend/src/logger.ts similarity index 83% rename from packages/bodiless-backend/src/logger.js rename to packages/bodiless-backend/src/logger.ts index 816b32fee3..1c4a83e8a4 100644 --- a/packages/bodiless-backend/src/logger.js +++ b/packages/bodiless-backend/src/logger.ts @@ -14,11 +14,13 @@ */ class Logger { - constructor(prefix) { + prefix: string = ''; + + constructor(prefix: string) { this.prefix = `[${prefix.toUpperCase()}]`; } - print(message, severity) { + print(message: string[], severity?: string) { const fullMessage = [this.prefix, new Date().toISOString(), ...message]; switch (severity) { case 'error': @@ -32,17 +34,17 @@ class Logger { } } - log(...args) { + log(...args: string[]) { this.print(args); } - error(...args) { + error(...args: string[]) { this.print(args, 'error'); } - warn(...args) { + warn(...args: string[]) { this.print(args, 'warn'); } } -module.exports = Logger; +export default Logger; diff --git a/packages/bodiless-backend/src/page.js b/packages/bodiless-backend/src/page.ts similarity index 71% rename from packages/bodiless-backend/src/page.js rename to packages/bodiless-backend/src/page.ts index 29d67c16ff..63cc2b9a00 100644 --- a/packages/bodiless-backend/src/page.js +++ 
b/packages/bodiless-backend/src/page.ts @@ -12,12 +12,14 @@ * limitations under the License. */ -const fs = require('fs'); -const fse = require('fs-extra'); -const path = require('path'); -const os = require('os'); -const replace = require('replace-in-file'); -const Logger = require('./logger'); +import fs from 'fs'; +import fse from 'fs-extra'; +import path from 'path'; +import os from 'os'; +import replace from 'replace-in-file'; +import type { Dirent, PathLike } from 'fs'; +import type { ReplaceInFileConfig, ReplaceResult } from 'replace-in-file'; +import Logger from './logger'; const logger = new Logger('BACKEND'); @@ -25,12 +27,12 @@ const backendFilePath = process.env.BODILESS_BACKEND_DATA_FILE_PATH || ''; const backendStaticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; const IMG_ASSETS_PATH = `/images${path.sep}pages`; -const getDirectories = (dir) => fs +const getDirectories = (dir: string) => fs .readdirSync(dir) .filter((file) => fs.statSync(`${dir}/${file}`).isDirectory()); -// once we on node > 10.12.0 +// @todo: update to fs.mkdir - once we on node > 10.12.0 // we can leverage fs.mkdir since it supports { recursive: true } -function ensureDirectoryExistence(filePath) { +function ensureDirectoryExistence(filePath: string): void { const dirname = path.dirname(filePath); if (fs.existsSync(dirname)) { return; @@ -40,9 +42,13 @@ function ensureDirectoryExistence(filePath) { } class Page { - supportedExtensions = ['json', 'tsx', 'jsx', 'js']; + path: string = ''; - constructor(pagePath) { + basePath: string = ''; + + extensions = ['json', 'tsx', 'jsx', 'js']; + + constructor(pagePath: string) { this.path = pagePath; } @@ -50,12 +56,12 @@ class Page { return this.basePath || backendFilePath; } - setBasePath(basePath) { + setBasePath(basePath: string) { this.basePath = basePath; } get supportedExtensions() { - return this.supportedExtensions; + return this.extensions; } get exists() { @@ -74,86 +80,95 @@ class Page { } read() { - const readPromise = new Promise((resolve) => { - fs.readFile(this.file, (err, data) => { - if (err) logger.log(err); - resolve(data || {}); - }); + return new Promise((resolve) => { + fs.readFile( + this.file, + 'utf8', + (err: NodeJS.ErrnoException | null, data: string) => { + if (err) logger.log(err.message); + resolve(data); + } + ); }); - return readPromise; } - write(data) { - const readPromise = new Promise((resolve, reject) => { + write(data: any) { + return new Promise((resolve, reject) => { ensureDirectoryExistence(this.file); fs.writeFile(this.file, JSON.stringify(data, null, 2), (err) => { if (err) { reject(err); } - resolve(this); + resolve('ok'); }); }); - return readPromise; } delete() { - const readPromise = new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { ensureDirectoryExistence(this.file); fs.unlink(this.file, (err) => { if (err) { reject(err); } - resolve(this); + resolve('ok'); }); }); - return readPromise; } - static dirHasSubObjects(dirPath, objType) { - return new Promise((resove) => { + static dirHasSubObjects(dirPath: string, objType?: string) { + // @todo: add reject + return new Promise((resolve) => { try { - fs.readdir(dirPath, { withFileTypes: true }, (err, files) => { - if (err) { - return resove([]); - } - - const filteredObjects = files.filter((item) => { - if (objType === 'file') { - return item.isFile(); - } - if (objType === 'directory') { - return item.isDirectory(); + fs.readdir( + dirPath, + { withFileTypes: true }, + (err: NodeJS.ErrnoException | null, files: Dirent[]) => { + if 
(err) { + return resolve([]); } - return true; - }); - if (!filteredObjects.length) { - return resove([]); + const filteredObjects = files.filter((item) => { + if (objType === 'file') { + return item.isFile(); + } + if (objType === 'directory') { + return item.isDirectory(); + } + return true; + }); + + if (!filteredObjects.length) { + return resolve([]); + } + return resolve(filteredObjects); } - return resove(filteredObjects); - }); + ); } catch (error) { - resove([]); + resolve([]); } }); } - static dirHasFiles(dirPath) { + static dirHasFiles(dirPath: string): Promise { return Page.dirHasSubObjects(dirPath, 'file'); } - static dirHasDirectories(dirPath) { + static dirHasDirectories(dirPath: string) { return Page.dirHasSubObjects(dirPath, 'directory'); } - static rmDirectories(destinationPath, dirPaths) { - const dels = []; + static rmDirectories(destinationPath: string, dirPaths: Dirent[]) { + const dels: Promise[] = []; dirPaths.forEach((dir) => { dels.push( - new Promise((resove) => { - fse.remove(`${destinationPath}/${dir.name}`, (err) => { - if (err) return console.error(err); - return resove(); + new Promise((resolve, reject) => { + fse.remove(`${destinationPath}/${dir.name}`, (err: any) => { + if (err) { + console.error(err); + return reject(err); + } + return resolve('ok'); }); }), ); @@ -161,18 +176,22 @@ class Page { return Promise.resolve(Promise.all(dels)); } - static jsFilesPathResolve(originPath, destinationPath, files) { - const actions = []; + static jsFilesPathResolve( + originPath: string, + destinationPath: string, + files: Dirent[] + ) { + const actions: Promise[] = []; const reg = /from ('|")(\.\..*)('|")/g; - const readF = (file) => new Promise((resolve, reject) => { + const readF = (file: Dirent) => new Promise((resolve, reject) => { const filePath = `${destinationPath}/${file.name}`; fs.readFile(filePath, 'utf8', (err, content) => { if (err) return reject(); - const matchs = content.match(reg); - if (!matchs.length) return reject(); + const matches = content.match(reg); + if (!matches?.length) return reject(); let newContent = content; - matchs.forEach((item) => { + matches.forEach((item) => { const delimiter = item[item.search(/'|"/)]; const oldPath = item.split(' ')[1].replace(/'|"/g, ''); const from = path.dirname(filePath); @@ -186,7 +205,7 @@ class Page { }); fs.writeFile(filePath, newContent, (writeErr) => { if (writeErr) return reject(); - return resolve(); + return resolve('ok'); }); return true; }); @@ -199,19 +218,19 @@ class Page { return Promise.resolve(Promise.all(actions)); } - async copyDirectory(origin, destination) { - const bp = this.basePath; - const originPath = `${bp}${origin}`.replace(/\/$/, ''); - const destinationPath = `${bp}${destination}`.replace(/\/$/, ''); - + async copyDirectory(origin: string, destination: string): Promise { + const {basePath} = this; + const originPath = path.resolve(basePath, origin).replace(/\/$/, ''); + const destinationPath = path.resolve(basePath, destination).replace(/\/$/, ''); const isDestinationPathExists = await Page.dirHasFiles(destinationPath); + if (isDestinationPathExists.length) { - return Promise.reject(new Error(`page ${destination} already exists`)); + return Promise.reject(new Error(`page ${destinationPath} already exists`)); } const isOriginPathExists = await Page.dirHasFiles(originPath); if (!isOriginPathExists.length) { - return Promise.reject(new Error(`page ${origin} is not exists`)); + return Promise.reject(new Error(`page ${originPath} does not exist`)); } // Make sure the destination tree 
exist @@ -222,10 +241,10 @@ class Page { isOriginPathExists.map((file) => { const from = `${originPath}/${file.name}`; const to = `${destinationPath}/${file.name}`; - return new Promise((resove, reject) => { + return new Promise((resolve, reject) => { fse.copy(from, to, (err) => { if (err) return reject(err); - return resove(); + return resolve('ok'); }); }); }), @@ -251,14 +270,23 @@ class Page { // Clone Image assets Page.clonePageImgAssets(origin, destination, this.basePath); - return 'success'; + return {status: 'ok'}; } - static clonePageImgAssets(origin, destination, basePath) { + static clonePageImgAssets( + origin: string, + destination: string, + basePath: string + ) { Page.clonePageAssets(origin, destination, basePath, IMG_ASSETS_PATH); } - static async clonePageAssets(origin, destination, basePath, target) { + static async clonePageAssets( + origin: string, + destination: string, + basePath: string, + target: string + ) { const originPath = origin.replace(/\/$/, ''); const originPathCrossPlatform = os.platform() === 'win32' ? originPath.replace('/', '\\\\') @@ -305,7 +333,7 @@ class Page { return new Promise((resolve, reject) => { fse.copy(fromPath, toPath, (err) => { if (err) return reject(err); - return resolve(); + return resolve('ok'); }); }); }), @@ -332,10 +360,10 @@ class Page { // - '/images/pages/example2' const toPath = `${imgAssetsPath}${destinationPathCrossPlatform}`; - const options = { + const options: ReplaceInFileConfig = { files: fileToBeUpdated, from: fromPath, - to: match => match.replace(fromPath, toPath), + to: (match) => match.replace(fromPath, toPath), }; return Page.updateFileContent(options); }), @@ -343,20 +371,20 @@ class Page { } } } - } catch (err) { + } catch (err: any) { if (err) logger.log(err); } return 'success'; } - static updateFileContent(options) { - return new Promise((resolve) => { + static updateFileContent(options: ReplaceInFileConfig) { + return new Promise((resolve, reject) => { try { - replace(options, (error, results) => { + replace.replaceInFile(options, (error: Error, results: ReplaceResult[]) => { if (error) { - return resolve([]); + reject(error); } - return resolve(results); + resolve(results); }); } catch (error) { resolve([]); @@ -364,7 +392,7 @@ class Page { }); } - directoryExists(newDirectory) { + directoryExists(newDirectory: PathLike) { const readPromise = new Promise((resolve) => { fs.access(newDirectory, err => { if (!err) { @@ -376,7 +404,7 @@ class Page { return readPromise; } - removeFile(origin) { + removeFile(origin: string) { const readPromise = new Promise((resolve, reject) => { fs.unlink(origin, err => { if (err) { @@ -415,8 +443,8 @@ class Page { hasChildDirectory() { const readPromise = new Promise((resolve) => { - const subdirs = getDirectories(this.directory); - if (subdirs.length !== 0) { + const subDirs = getDirectories(this.directory); + if (subDirs.length !== 0) { resolve( 'The page cannot be deleted it has child pages. 
To delete this page, first delete or move all child pages, and retry.', ); @@ -428,16 +456,16 @@ class Page { } // eslint-disable-next-line class-methods-use-this - removePageAssets(path) { - return new Promise((resolve, reject) => { + removePageAssets(path: string) { + return new Promise((resolve, reject) => { fse.remove(path, err => { if (err) { reject(err); } - resolve(); + resolve('ok'); }); }); } } -module.exports = Page; +export default Page; diff --git a/packages/bodiless-backend/src/server.js b/packages/bodiless-backend/src/server.ts similarity index 84% rename from packages/bodiless-backend/src/server.js rename to packages/bodiless-backend/src/server.ts index fe13514382..7c91a57366 100644 --- a/packages/bodiless-backend/src/server.js +++ b/packages/bodiless-backend/src/server.ts @@ -12,15 +12,14 @@ * limitations under the License. */ +import dotenv from 'dotenv'; +import Backend from './backend'; + // Use the same .env file as gatsby develop. -require('dotenv').config({ +dotenv.config({ path: '.env.development', }); -const express = require('express'); -const Backend = require('./backend'); - const backendPort = process.env.BODILESS_BACKEND_PORT || 8001; - -const backend = new Backend(express()); +const backend = new Backend(); backend.start(backendPort); diff --git a/packages/bodiless-backend/src/git.js b/packages/bodiless-backend/src/tools/git.ts similarity index 82% rename from packages/bodiless-backend/src/git.js rename to packages/bodiless-backend/src/tools/git.ts index 7d3b437cdb..828b8e55f5 100644 --- a/packages/bodiless-backend/src/git.js +++ b/packages/bodiless-backend/src/tools/git.ts @@ -11,14 +11,43 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -const path = require('path'); -const util = require('util'); -const os = require('os'); -const rimraf = require('rimraf'); -const { v1 } = require('uuid'); -const copyfiles = require('copyfiles'); -const GitCmd = require('./GitCmd'); -const Logger = require('./logger'); +import path from 'path'; +import util from 'util'; +import os from 'os'; +import rimraf from 'rimraf'; +import { v1 } from 'uuid'; +import copyfiles from 'copyfiles'; +import GitCmd, { GitInfoType } from '../gitCmd'; +import Logger from '../logger'; + +export type GitBranchInfo = { + branch: string | null, + commits: string[], + files: string[], +}; + +export type GitChanges = { + upstream: GitBranchInfo, + production: GitBranchInfo, + local: GitBranchInfo, +}; + +export type GitConflictInfo = { + hasConflict: boolean, + files: string[], + target: string, +}; + +export type GitUtil = { + getCurrentBranch: () => Promise, + getUpstreamBranch: (branch: string, remote?: string) => Promise, + getUpstreamTrackingBranch: (branch: string) => Promise, + getChanges: () => Promise, + getConflicts: (branch?: string) => Promise, + getMergeBase: (b1: string, b2: string) => Promise, + compare: (show: string, comparedTo: string) => Promise<{commits: string[], files: string[]}>, + mergeMain: () => Promise<{}>, +}; /** * Returns the name of the current branch as a string. @@ -32,13 +61,13 @@ const getCurrentBranch = async () => { * Verify the existence of an upstream branch. * @todo: replace with getUpstreamTrackingBranch? 
*/ -const getUpstreamBranch = async (branch, remote = 'origin') => { +const getUpstreamBranch = async (branch: string, remote = 'origin'): Promise => { try { await GitCmd.cmd().add('ls-remote', '--heads', '--exit-code', remote, branch).exec(); return `${remote}/${branch}`; - } catch (e) { + } catch (e: any) { // Catch only the error where the upstream branch doesn't exist. - if (e.code === '2') return undefined; + if (e.code === 2) return ''; throw e; } }; @@ -48,7 +77,7 @@ const getUpstreamBranch = async (branch, remote = 'origin') => { * * @param {string} local branch name. */ -const getUpstreamTrackingBranch = async branch => { +const getUpstreamTrackingBranch = async (branch: string) => { const result = await GitCmd.cmd() .add('for-each-ref', '--format="%(upstream:short)"', `refs/heads/${branch}`) .exec(); @@ -58,15 +87,15 @@ const getUpstreamTrackingBranch = async branch => { /** * Returns the merge-base between two branches. */ -const getMergeBase = async (a, b) => { +const getMergeBase = async (a: string, b: string) => { const mergeBase = await GitCmd.cmd() .add('merge-base', a, b) .exec(); return mergeBase.stdout.trim(); }; -const getGitCmdOutputString = result => result.stdout.trim().replace('\n$', ''); -const getGitCmdOutputArray = result => ( +const getGitCmdOutputString = (result: GitInfoType) => result.stdout.trim().replace('\n$', ''); +const getGitCmdOutputArray = (result: GitInfoType) => ( result.stdout.trim().split('\n').map(l => l.trim()).filter(l => l.length > 0) ); @@ -77,7 +106,7 @@ const getGitCmdOutputArray = result => ( * @param show The branch whose commits to show. * @param comparedTo The branch to compare it to. */ -const compare = async (show, comparedTo) => { +const compare = async (show: string, comparedTo: string) => { const mergeBase = await getMergeBase(show, comparedTo); const commitsPromise = GitCmd.cmd() .add('rev-list', '--oneline', '--left-only', `${show}...${comparedTo}`) @@ -150,7 +179,7 @@ const getChanges = async () => { }, }; return status; - } catch (e) { + } catch (e: any) { throw new Error(`Error occurred: ${e.message}`); } }; @@ -161,7 +190,7 @@ const getChanges = async () => { * @param {string} url - Repo url. * @param {array} options - Clone options [branch|directory]. */ -const clone = async (url, options = {}) => { +const clone = async (url: string, options: {branch?: string, directory?: string} = {}) => { const logger = new Logger('BACKEND'); let result = await GitCmd.cmd().add('config', '--get', 'user.name').exec(); const configName = result.stdout.trim().replace('\n', ''); @@ -173,7 +202,7 @@ const clone = async (url, options = {}) => { cmd.add('--config', `user.name=${configName}`); if (options.branch) cmd.add('-b', options.branch); if (options.directory) cmd.add(options.directory); - logger.log([`Clone to path: ${options.directory}`]); + logger.log(`Clone to path: ${options?.directory}`); return cmd.exec(); }; @@ -185,7 +214,7 @@ const clone = async (url, options = {}) => { * * @return {object} Results. 
 */
-const getConflicts = async (target = 'upstream') => {
+const getConflicts = async (target: string = 'upstream') => {
   // const remoteUrl = await getRemote('origin');
   const logger = new Logger('BACKEND');
   const tmpDir = path.resolve(process.env.BODILESS_BACKEND_TMP || os.tmpdir(), v1());
@@ -201,12 +230,12 @@ const getConflicts = async (target = 'upstream') => {
   const rootResult = await GitCmd.cmd().add('rev-parse', '--show-toplevel').exec();
   const rootDir = getGitCmdOutputString(rootResult);
-  logger.log([`Repo root: ${rootDir}`]);
+  logger.log(`Repo root: ${rootDir}`);

   let workBranch = '';
   let targetBranch = '';
   let uncommittedResult;
-  let files = [];
+  let files: string[] = [];

   switch (target) {
     case 'edit':
       targetBranch = `origin-${branch}`;
@@ -249,14 +278,17 @@ const getConflicts = async (target = 'upstream') => {
   await clone(rootDir, { directory: tmpDir, branch: targetBranch });

   process.chdir(tmpDir);

-  const copyfilesPromised = util.promisify(copyfiles);
+  const copyfilesPromised = util.promisify(copyfiles);
   if (files.length) {
-    logger.log([`Copy Files: ${files} ${tmpDir}`, process.cwd()]);
+    logger.log(`Copy Files: ${files} ${tmpDir}`, process.cwd());
     try {
       const result = await copyfilesPromised(
         [...files, tmpDir],
-        { error: true, up: (rootDir.match(/\//g) || []).length + 1 },
+        {
+          error: true,
+          up: (rootDir.match(/\//g) || []).length + 1
+        },
       );

       logger.log(`Result: ${result}`);
@@ -266,12 +298,12 @@ const getConflicts = async (target = 'upstream') => {
       await GitCmd.cmd()
         .add('commit', '-m', 'TEMPORARY COMMIT')
         .exec();
-    } catch (e) {
+    } catch (e: any) {
       logger.error(e);
     }
   }

-  let conflictFiles = [];
+  let conflictFiles: string[] = [];
   try {
     await GitCmd.cmd()
       .add('merge', '--no-commit', '--no-ff', 'origin/origin-main')
@@ -356,7 +388,7 @@ const mergeMain = async () => {
     await GitCmd.cmd()
       .add('push')
       .exec();
-  } catch (e) {
+  } catch (e: any) {
     logger.error(e);
   }

@@ -371,7 +403,7 @@ const mergeMain = async () => {
   return {};
 };

-module.exports = {
+const gitUtil: GitUtil = {
   getCurrentBranch,
   getUpstreamBranch,
   getUpstreamTrackingBranch,
@@ -381,3 +413,5 @@ module.exports = {
   compare,
   mergeMain,
 };
+
+export default gitUtil;
diff --git a/packages/bodiless-backend/tsconfig.json b/packages/bodiless-backend/tsconfig.json
new file mode 100644
index 0000000000..b8867aff8d
--- /dev/null
+++ b/packages/bodiless-backend/tsconfig.json
@@ -0,0 +1,18 @@
+{
+  "extends": "../../tsconfig.settings.json",
+  "compilerOptions": {
+    "rootDir": "src",
+    "module": "CommonJS",
+    "outDir": "lib",
+    "baseUrl": "."
+  },
+  "include": [
+    "src"
+  ],
+  "exclude": [
+    "lib",
+    "bin",
+    "node_modules",
+    "__tests__/**/*.*"
+  ]
+}
diff --git a/playwright.config.ts b/playwright.config.ts
index ae12573b27..573d2eaaae 100644
--- a/playwright.config.ts
+++ b/playwright.config.ts
@@ -14,7 +14,7 @@ import { devices } from '@playwright/test';
 const config: PlaywrightTestConfig = {
   testDir: './playwright/tests',
   /* Maximum time one test can run for. */
-  timeout: 30 * 1000,
+  timeout: 60 * 1000,
   expect: {
     /**
      * Maximum time expect() should wait for the condition to be met.
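
The hunks above convert packages/bodiless-backend/src/git.js into a typed module (tools/git.ts) with a single default export. The snippet below is an illustrative sketch only, not part of the patch: the consumer file name is hypothetical, and it assumes getChanges resolves to the exported GitChanges shape (the Promise type parameters on GitUtil were reconstructed from the implementations shown in the diff).

    // consumer.ts (hypothetical): consuming the typed git util from the same package.
    import gitUtil, { GitChanges } from './tools/git';

    const reportChanges = async (): Promise<void> => {
      // Assumed to resolve to the exported GitChanges shape.
      const changes: GitChanges = await gitUtil.getChanges();
      console.log('Current branch:', changes.local.branch);
      console.log('Locally changed files:', changes.local.files);
    };

    reportChanges().catch(console.error);
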
diff --git a/playwright/pages/base-page.ts b/playwright/pages/base-page.ts
index 134bbb273a..682c120991 100644
--- a/playwright/pages/base-page.ts
+++ b/playwright/pages/base-page.ts
@@ -133,8 +133,8 @@ export class BasePage {

   async isImageVisible(imageXpath: string) {
     expect(await this.page.locator(imageXpath).isVisible()).toBeTruthy();
-    const imageDimentions = await this.page.locator(imageXpath).boundingBox();
-    expect(imageDimentions!.width).toBeGreaterThan(0);
-    expect(imageDimentions!.height).toBeGreaterThan(0);
+    const imageDimensions = await this.page.locator(imageXpath).boundingBox();
+    expect(imageDimensions!.width).toBeGreaterThan(0);
+    expect(imageDimensions!.height).toBeGreaterThan(0);
   }
 }
diff --git a/playwright/tests/smoke/editorMenu.spec.ts b/playwright/tests/smoke/editorMenu.spec.ts
index e482c0cdbb..435e7171cf 100644
--- a/playwright/tests/smoke/editorMenu.spec.ts
+++ b/playwright/tests/smoke/editorMenu.spec.ts
@@ -13,7 +13,7 @@
  */
 // editorMenu.spec.ts
 'use strict';
-import { expect, Page, test } from '@playwright/test';
+import { expect, Page, test, BrowserContext } from '@playwright/test';
 import { EditorMenuPage } from '../../pages/editor-menu-page';

 async function checkEditorMenuButtons(page: Page, editorMenuPage: EditorMenuPage) {
@@ -34,7 +34,7 @@ async function checkAddNewPageButton(page: Page, editorMenuPage: EditorMenuPage)

 test.describe('Editor Menu (left and right)', () => {
   let page: Page;
-  let context:any;
+  let context: BrowserContext;
   let editorMenuPage: EditorMenuPage;
   test.beforeAll(async ({ browser }) => {
     context = await browser.newContext();
diff --git a/playwright/tests/smoke/flowContainer.spec.ts b/playwright/tests/smoke/flowContainer.spec.ts
index 53c4bb3cff..58d1f2dd47 100644
--- a/playwright/tests/smoke/flowContainer.spec.ts
+++ b/playwright/tests/smoke/flowContainer.spec.ts
@@ -102,7 +102,7 @@ test.describe('Flow container', async () => {
     ]);
     const containerWidth = await page.locator(flowContainerPage.flowContainer33Width).boundingBox();
     const contentfulWidth = await page.locator(flowContainerPage.elementInside33Width).boundingBox();
-    const ratio = Math.floor((contentfulWidth.width + 40) / containerWidth.width * 100);
+    const ratio = Math.floor(((contentfulWidth?.width || 0) + 40) / (containerWidth?.width || 0) * 100);
     expect.soft(ratio).toBeCloseTo(32);
   });

diff --git a/tsconfig.settings.json b/tsconfig.settings.json
index 134b951fa7..bdf6236f56 100644
--- a/tsconfig.settings.json
+++ b/tsconfig.settings.json
@@ -27,6 +27,8 @@
       "--vital--/*": ["__vital__/src/*"],
       "@bodiless/accordion": ["bodiless-accordion/src"],
       "@bodiless/accordion/*": ["bodiless-accordion/src/*"],
+      "@bodiless/backend": ["bodiless-backend/src"],
+      "@bodiless/backend/*": ["bodiless-backend/src/*"],
       "@bodiless/bv": ["bodiless-bv/src"],
       "@bodiless/bv/*": ["bodiless-bv/src/*"],
       "@bodiless/card": ["bodiless-card/src"],
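
The base-page.ts and flowContainer.spec.ts hunks above both handle the fact that Playwright's locator.boundingBox() resolves to null when an element is not rendered. A minimal sketch of that null-safe pattern follows; it is illustrative only and not part of the patch, and the selector and helper name are hypothetical.

    import { expect, Page } from '@playwright/test';

    // Asserts that an element is visible and has a non-zero rendered size.
    const expectVisibleWithSize = async (page: Page, selector: string) => {
      const box = await page.locator(selector).boundingBox();
      // boundingBox() resolves to null for elements that are not visible.
      expect(box).not.toBeNull();
      expect(box!.width).toBeGreaterThan(0);
      expect(box!.height).toBeGreaterThan(0);
    };
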