From dfc56bedb519e590eccb69e0bb30bda74810dfb3 Mon Sep 17 00:00:00 2001 From: Dewen Li Date: Wed, 3 May 2023 16:10:28 -0400 Subject: [PATCH] feat(backend): Convert backend to TS upgrade/next (#2058) * feat(backend): Convert Bodiless backend package to TypeScript * fix backend test * push build test * lock update; react type version upgrade --- .eslintignore | 1 + .vscode/launch.json | 17 +- package-lock.json | 256 ++++- package.json | 2 +- packages/bodiless-backend/.gitignore | 1 + .../__tests__/clonePage.test.ts | 42 +- .../__tests__/createPage.test.ts | 7 +- .../__tests__/deleteContent.test.ts | 29 +- .../__tests__/fileHelper.test.ts | 8 +- .../__tests__/getChanges.test.ts | 21 +- .../__tests__/getConflict.test.ts | 5 +- packages/bodiless-backend/__tests__/tools.ts | 14 +- .../bodiless-backend/bin/bodiless-backend | 2 +- packages/bodiless-backend/package.json | 18 +- packages/bodiless-backend/src/backend.js | 869 ---------------- packages/bodiless-backend/src/backend.ts | 932 ++++++++++++++++++ .../src/{fileHelper.js => fileHelper.ts} | 47 +- .../src/{GitCmd.js => gitCmd.ts} | 53 +- .../src/{logger.js => logger.ts} | 14 +- .../bodiless-backend/src/{page.js => page.ts} | 231 +++-- .../src/{server.js => server.ts} | 11 +- .../src/{git.js => tools/git.ts} | 92 +- packages/bodiless-backend/tsconfig.json | 18 + playwright/pages/base-page.ts | 6 +- .../tests/smoke-deprecated/editorMenu.spec.ts | 8 +- .../smoke-deprecated/flowContainer.spec.ts | 2 +- tsconfig.settings.json | 2 + 27 files changed, 1591 insertions(+), 1117 deletions(-) delete mode 100644 packages/bodiless-backend/src/backend.js create mode 100644 packages/bodiless-backend/src/backend.ts rename packages/bodiless-backend/src/{fileHelper.js => fileHelper.ts} (65%) rename packages/bodiless-backend/src/{GitCmd.js => gitCmd.ts} (69%) rename packages/bodiless-backend/src/{logger.js => logger.ts} (83%) rename packages/bodiless-backend/src/{page.js => page.ts} (66%) rename packages/bodiless-backend/src/{server.js => server.ts} (84%) rename packages/bodiless-backend/src/{git.js => tools/git.ts} (82%) create mode 100644 packages/bodiless-backend/tsconfig.json diff --git a/.eslintignore b/.eslintignore index 2c46975a4e..5f9f16767b 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,6 +1,7 @@ html.js **/lib **/doc +packages/bodiless-backend/bin packages/gatsby-theme-bodiless/dist packages/gatsby-theme-bodiless/cjs packages/gatsby-theme-bodiless/index.js diff --git a/.vscode/launch.json b/.vscode/launch.json index dea589bdff..6c2923351a 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -2,6 +2,8 @@ // Use IntelliSense to learn about possible attributes. // Hover to view descriptions of existing attributes. // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + // Ref: + // https://code.visualstudio.com/docs/editor/variables-reference "version": "0.2.0", "compounds": [ // Launches the Gatsby stack and opens Chrome. 
@@ -103,7 +105,7 @@ "sourceMaps": false }, { - "name": "Jest Current File", + "name": "Jest Current File (Windows)", "type": "node", "request": "launch", "program": "${workspaceFolder}/node_modules/.bin/jest", @@ -116,6 +118,19 @@ "program": "${workspaceFolder}/node_modules/jest/bin/jest", } }, + { + "name": "Jest Current File (Mac/Linux)", + "type": "node", + "request": "launch", + "program": "${workspaceFolder}/node_modules/.bin/jest", + "args": [ + "${relativeFile}" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen", + // Update the node version below if v16.13.0 is not available locally. + "runtimeExecutable": "${userHome}/.nvm/versions/node/v16.13.0/bin/node", + }, { "name": "Launch Chrome", "type": "chrome", diff --git a/package-lock.json b/package-lock.json index 6f2d0deb37..64cbeb32e6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -26,6 +26,9 @@ "@types/common-tags": "^1.8.0", "@types/copyfiles": "^2.1.1", "@types/crypto-js": "4.1.1", + "@types/dotenv": "^8.2.0", + "@types/express": "^4.17.17", + "@types/formidable": "^1.2.5", "@types/fs-extra": "^8.0.0", "@types/glob": "^7.1.1", "@types/html2canvas": "0.0.35", @@ -33,12 +36,15 @@ "@types/lodash.flow": "^3.5.6", "@types/lunr": "^2.3.3", "@types/mime": "^2.0.1", + "@types/morgan": "^1.9.4", "@types/node": "^16", "@types/react-burger-menu": "^2.6.0", "@types/react-helmet": "^6.1.0", + "@types/rimraf": "^3.0.0", "@types/semver": "5.5.0", "@types/supertest": "^2.0.8", "@types/tar": "^6.1.1", + "@types/tmp": "^0.2.3", "@types/walk": "^2.3.0", "@types/webpack": "^5.0.0", "@uiw/react-md-editor": "^3.19.7", @@ -111,6 +117,7 @@ "process": "^0.11.10", "prop-types": "^15.7.2", "pure-react-carousel": "^1.27.6", + "qs": "^6.11.0", "query-string": "^6.13.2", "rc-tooltip": "^5.1.1", "re-resizable": "^4.11.0", @@ -159,7 +166,7 @@ "@babel/plugin-transform-modules-commonjs": "7.5.0", "@babel/runtime": "^7.4.2", "@cfaester/enzyme-adapter-react-18": "^0.7.0", - "@playwright/test": "^1.29.1", + "@playwright/test": "^1.30.0", "@types/enzyme": "^3.9.1", "@types/jest": "^24.0.18", "@types/lodash": "^4.14.158", @@ -13042,6 +13049,15 @@ "@babel/types": "^7.3.0" } }, + "node_modules/@types/body-parser": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, "node_modules/@types/cacheable-request": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", @@ -13071,6 +13087,14 @@ "resolved": "https://registry.npmjs.org/@types/configstore/-/configstore-2.1.1.tgz", "integrity": "sha512-YY+hm3afkDHeSM2rsFXxeZtu0garnusBWNG1+7MknmDWQHqcH2w21/xOU9arJUi8ch4qyFklidANLCu3ihhVwQ==" }, + "node_modules/@types/connect": { + "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/cookie": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz", @@ -13104,6 +13128,15 @@ "resolved": "https://registry.npmjs.org/@types/debug/-/debug-0.0.30.tgz", "integrity": "sha512-orGL5LXERPYsLov6CWs3Fh6203+dXzJkR7OnddIr2514Hsecwc8xRpzCapshBbKFImCsvS/mk6+FWiN5LyZJAQ==" }, + 
"node_modules/@types/dotenv": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@types/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-ylSC9GhfRH7m1EUXBXofhgx4lUWmFeQDINW5oLuS+gxWdfUeW4zJdeVTYVkexEW+e2VUvlZR2kGnGGipAWR7kw==", + "deprecated": "This is a stub types definition. dotenv provides its own type definitions, so you do not need this installed.", + "dependencies": { + "dotenv": "*" + } + }, "node_modules/@types/enzyme": { "version": "3.10.13", "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.10.13.tgz", @@ -13148,6 +13181,36 @@ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz", "integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==" }, + "node_modules/@types/express": { + "version": "4.17.17", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz", + "integrity": "sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.17.34", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.34.tgz", + "integrity": "sha512-fvr49XlCGoUj2Pp730AItckfjat4WNb0lb3kfrLWffd+RLeoGAMsq7UOy04PAPtoL01uKwcp6u8nhzpgpDYr3w==", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/formidable": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@types/formidable/-/formidable-1.2.5.tgz", + "integrity": "sha512-zu3mQJa4hDNubEMViSj937602XdDGzK7Q5pJ5QmLUbNxclbo9tZGt5jtwM352ssZ+pqo5V4H14TBvT/ALqQQcA==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/fs-extra": { "version": "8.1.2", "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.2.tgz", @@ -13331,6 +13394,14 @@ "@types/node": "*" } }, + "node_modules/@types/morgan": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.4.tgz", + "integrity": "sha512-cXoc4k+6+YAllH3ZHmx4hf7La1dzUk6keTR4bF4b4Sc0mZxU/zK4wO7l+ZzezXm/jkYj/qC+uYGZrarZdIVvyQ==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/ms": { "version": "0.7.31", "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz", @@ -13399,6 +13470,16 @@ "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" }, + "node_modules/@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" + }, + "node_modules/@types/range-parser": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" + }, "node_modules/@types/rc-tooltip": { "version": "3.7.7", "resolved": "https://registry.npmjs.org/@types/rc-tooltip/-/rc-tooltip-3.7.7.tgz", @@ -13473,9 +13554,9 @@ "dev": true }, "node_modules/@types/rimraf": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", - 
"integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==", "dependencies": { "@types/glob": "*", "@types/node": "*" @@ -13499,6 +13580,29 @@ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, + "node_modules/@types/send": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.1.tgz", + "integrity": "sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q==", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/send/node_modules/@types/mime": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", + "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" + }, + "node_modules/@types/serve-static": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.1.tgz", + "integrity": "sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==", + "dependencies": { + "@types/mime": "*", + "@types/node": "*" + } + }, "node_modules/@types/sharp": { "version": "0.30.5", "resolved": "https://registry.npmjs.org/@types/sharp/-/sharp-0.30.5.tgz", @@ -13568,9 +13672,9 @@ } }, "node_modules/@types/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-dDZH/tXzwjutnuk4UacGgFRwV+JSLaXL1ikvidfJprkb7L9Nx1njcRHHmi3Dsvt7pgqqTEeucQuOrWHPFgzVHA==" }, "node_modules/@types/unist": { "version": "2.0.6", @@ -19588,6 +19692,20 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==" }, + "node_modules/devcert/node_modules/@types/rimraf": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", + "integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "dependencies": { + "@types/glob": "*", + "@types/node": "*" + } + }, + "node_modules/devcert/node_modules/@types/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + }, "node_modules/devcert/node_modules/debug": { "version": "3.2.7", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", @@ -60461,6 +60579,15 @@ "@babel/types": "^7.3.0" } }, + "@types/body-parser": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", + "requires": { + "@types/connect": "*", + "@types/node": "*" + } + }, "@types/cacheable-request": { "version": "6.0.3", 
"resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", @@ -60490,6 +60617,14 @@ "resolved": "https://registry.npmjs.org/@types/configstore/-/configstore-2.1.1.tgz", "integrity": "sha512-YY+hm3afkDHeSM2rsFXxeZtu0garnusBWNG1+7MknmDWQHqcH2w21/xOU9arJUi8ch4qyFklidANLCu3ihhVwQ==" }, + "@types/connect": { + "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", + "requires": { + "@types/node": "*" + } + }, "@types/cookie": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.4.1.tgz", @@ -60523,6 +60658,14 @@ "resolved": "https://registry.npmjs.org/@types/debug/-/debug-0.0.30.tgz", "integrity": "sha512-orGL5LXERPYsLov6CWs3Fh6203+dXzJkR7OnddIr2514Hsecwc8xRpzCapshBbKFImCsvS/mk6+FWiN5LyZJAQ==" }, + "@types/dotenv": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@types/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-ylSC9GhfRH7m1EUXBXofhgx4lUWmFeQDINW5oLuS+gxWdfUeW4zJdeVTYVkexEW+e2VUvlZR2kGnGGipAWR7kw==", + "requires": { + "dotenv": "*" + } + }, "@types/enzyme": { "version": "3.10.13", "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.10.13.tgz", @@ -60569,6 +60712,36 @@ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz", "integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==" }, + "@types/express": { + "version": "4.17.17", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz", + "integrity": "sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==", + "requires": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "@types/express-serve-static-core": { + "version": "4.17.34", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.34.tgz", + "integrity": "sha512-fvr49XlCGoUj2Pp730AItckfjat4WNb0lb3kfrLWffd+RLeoGAMsq7UOy04PAPtoL01uKwcp6u8nhzpgpDYr3w==", + "requires": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "@types/formidable": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@types/formidable/-/formidable-1.2.5.tgz", + "integrity": "sha512-zu3mQJa4hDNubEMViSj937602XdDGzK7Q5pJ5QmLUbNxclbo9tZGt5jtwM352ssZ+pqo5V4H14TBvT/ALqQQcA==", + "requires": { + "@types/node": "*" + } + }, "@types/fs-extra": { "version": "8.1.2", "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-8.1.2.tgz", @@ -60752,6 +60925,14 @@ "@types/node": "*" } }, + "@types/morgan": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.4.tgz", + "integrity": "sha512-cXoc4k+6+YAllH3ZHmx4hf7La1dzUk6keTR4bF4b4Sc0mZxU/zK4wO7l+ZzezXm/jkYj/qC+uYGZrarZdIVvyQ==", + "requires": { + "@types/node": "*" + } + }, "@types/ms": { "version": "0.7.31", "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz", @@ -60819,6 +61000,16 @@ "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" }, + "@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": 
"sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" + }, + "@types/range-parser": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" + }, "@types/rc-tooltip": { "version": "3.7.7", "resolved": "https://registry.npmjs.org/@types/rc-tooltip/-/rc-tooltip-3.7.7.tgz", @@ -60893,9 +61084,9 @@ "dev": true }, "@types/rimraf": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", - "integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==", "requires": { "@types/glob": "*", "@types/node": "*" @@ -60919,6 +61110,31 @@ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" }, + "@types/send": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.1.tgz", + "integrity": "sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q==", + "requires": { + "@types/mime": "^1", + "@types/node": "*" + }, + "dependencies": { + "@types/mime": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", + "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" + } + } + }, + "@types/serve-static": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.1.tgz", + "integrity": "sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==", + "requires": { + "@types/mime": "*", + "@types/node": "*" + } + }, "@types/sharp": { "version": "0.30.5", "resolved": "https://registry.npmjs.org/@types/sharp/-/sharp-0.30.5.tgz", @@ -60988,9 +61204,9 @@ } }, "@types/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-dDZH/tXzwjutnuk4UacGgFRwV+JSLaXL1ikvidfJprkb7L9Nx1njcRHHmi3Dsvt7pgqqTEeucQuOrWHPFgzVHA==" }, "@types/unist": { "version": "2.0.6", @@ -65609,6 +65825,20 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==" }, + "@types/rimraf": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-2.0.5.tgz", + "integrity": "sha512-YyP+VfeaqAyFmXoTh3HChxOQMyjByRMsHU7kc5KOJkSlXudhMhQIALbYV7rHh/l8d2lX3VUQzprrcAgWdRuU8g==", + "requires": { + "@types/glob": "*", + "@types/node": "*" + } + }, + "@types/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-gVC1InwyVrO326wbBZw+AO3u2vRXz/iRWq9jYhpG4W8LXyIgDv3ZmcLQ5Q4Gs+gFMyqx+viFoFT+l3p61QFCmQ==" + }, "debug": { "version": "3.2.7", "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", diff --git a/package.json b/package.json index 4ed3d2e369..fe208d7119 100644 --- a/package.json +++ b/package.json @@ -85,7 +85,7 @@ "@babel/plugin-transform-modules-commonjs": "7.5.0", "@babel/runtime": "^7.4.2", "@applitools/eyes-playwright": "^1.13.0", - "@playwright/test": "^1.29.1", + "@playwright/test": "^1.30.0", "@types/enzyme": "^3.9.1", "@types/jest": "^24.0.18", "@types/lodash": "^4.14.158", diff --git a/packages/bodiless-backend/.gitignore b/packages/bodiless-backend/.gitignore index 4c49bd78f1..382d377bdf 100644 --- a/packages/bodiless-backend/.gitignore +++ b/packages/bodiless-backend/.gitignore @@ -1 +1,2 @@ .env +lib diff --git a/packages/bodiless-backend/__tests__/clonePage.test.ts b/packages/bodiless-backend/__tests__/clonePage.test.ts index 458d636245..7451b09e3a 100644 --- a/packages/bodiless-backend/__tests__/clonePage.test.ts +++ b/packages/bodiless-backend/__tests__/clonePage.test.ts @@ -13,25 +13,31 @@ */ import request from 'supertest'; +import type { Express } from 'express'; +import Backend from '../src/backend'; const origin = '/from'; const destination = '/to'; const backendPrefix = '/prefix'; const backendFilePath = '/files'; +process.env.GATSBY_BACKEND_PREFIX = backendPrefix; +process.env.BODILESS_BACKEND_DATA_PAGE_PATH = backendFilePath; + const mockPageClone = jest.fn(); jest.mock('../src/page', () => (pagePath: any) => ({ path: pagePath, - copyDirectory: pagePath === destination - ? mockPageClone.mockResolvedValue(true) - : mockPageClone.mockRejectedValue(false), + copyDirectory: ( + (pagePath === destination) + ? mockPageClone.mockResolvedValue({data: true}) + : mockPageClone.mockRejectedValue({data: false}) + ), setBasePath: () => true, })); -const getApp = () => { - // eslint-disable-next-line global-require - const Backend = require('../src/backend'); +// get Express app +const getApp = (): Express => { const backend = new Backend(); return backend.getApp(); }; @@ -41,20 +47,24 @@ describe('Clone page endpoint', () => { // clearing mocks beforeEach(() => { jest.resetModules(); - process.env.GATSBY_BACKEND_PREFIX = backendPrefix; - process.env.BODILESS_BACKEND_DATA_PAGE_PATH = backendFilePath; mockPageClone.mockReset(); }); - const performRequest = (app$: any, data: any) => request(app$) + const performCloneRequest = ( + app$: Express, data?: (string | object) + ) => request(app$) .post(`${backendPrefix}/clone`) - .send(data); + .send(data) + .on('error', (err: any) => { + // Some errors might be expected for testing. 
+ console.log(err.error); + }); - describe('when the page is cloned succefully', () => { + describe('when the page is cloned successfully', () => { const data = { origin, destination }; - it('cloned page should be writen to file system', async () => { + it('cloned page should be written to file system', async () => { const app = getApp(); - await performRequest(app, data); + await performCloneRequest(app, data); expect(mockPageClone).toHaveBeenCalledTimes(1); const resolved = await mockPageClone.mock.instances[0]; expect(resolved.path).toBe(destination); @@ -62,14 +72,14 @@ describe('Clone page endpoint', () => { it('should get the correct parameters', async () => { const app = getApp(); - await performRequest(app, data); + await performCloneRequest(app, data); expect(mockPageClone.mock.calls[0][0]).toBe(origin); expect(mockPageClone.mock.calls[0][1]).toBe(destination); }); it('should respond with 200 status', async () => { const app = getApp(); - const result = await performRequest(app, data); + const result = await performCloneRequest(app, data); expect(result.status).toEqual(200); }); }); @@ -78,7 +88,7 @@ describe('Clone page endpoint', () => { const data = { origin, destination: '/page/error' }; it('should respond with 500 status', async () => { const app = getApp(); - const result = await performRequest(app, data); + const result = await performCloneRequest(app, data); expect(result.status).toEqual(500); }); }); diff --git a/packages/bodiless-backend/__tests__/createPage.test.ts b/packages/bodiless-backend/__tests__/createPage.test.ts index 54724f35c4..36a6cc6724 100644 --- a/packages/bodiless-backend/__tests__/createPage.test.ts +++ b/packages/bodiless-backend/__tests__/createPage.test.ts @@ -14,6 +14,7 @@ import path from 'path'; import request from 'supertest'; +import Backend from '../src/backend'; const backendPrefix = '/prefix'; const backendFilePath = '/files'; @@ -27,8 +28,6 @@ jest.mock('../src/page', () => (pagePath: any) => ({ })); const getApp = () => { - // eslint-disable-next-line global-require - const Backend = require('../src/backend'); const backend = new Backend(); return backend.getApp(); }; @@ -43,7 +42,7 @@ describe('Create page endpoint', () => { mockPageWrite.mockReset(); }); - describe('when the page is created succefully', () => { + describe('when the page is created successfully', () => { const page = 'products'; const template = '_default'; const performRequest = (app$: any) => request(app$) @@ -53,7 +52,7 @@ describe('Create page endpoint', () => { template, }); describe('index.json containing template', () => { - it('should be writen to file system', async () => { + it('should be written to file system', async () => { const app = getApp(); await performRequest(app); expect(mockPageWrite).toHaveBeenCalledTimes(1); diff --git a/packages/bodiless-backend/__tests__/deleteContent.test.ts b/packages/bodiless-backend/__tests__/deleteContent.test.ts index 70187c56a2..27fe3b1ec8 100644 --- a/packages/bodiless-backend/__tests__/deleteContent.test.ts +++ b/packages/bodiless-backend/__tests__/deleteContent.test.ts @@ -13,21 +13,26 @@ */ import request from 'supertest'; +import Backend from '../src/backend'; const backendPrefix = '/prefix'; const backendFilePath = '/files'; const mockPageDelete = jest.fn(); -const mockPage = jest.fn().mockImplementation(() => ({ - delete: mockPageDelete.mockResolvedValue(true), -})); +const mockPage = jest.fn(); -jest.mock('../src/page', () => mockPage); +jest.mock('../src/page', () => ( + function Page(pagePath: string) { + 
mockPage(pagePath); + return { + file: `${pagePath}.json`, + path: pagePath, + delete: mockPageDelete.mockResolvedValue('ok'), + }; + })); jest.mock('../src/logger'); const getApp = () => { - // eslint-disable-next-line global-require - const Backend = require('../src/backend'); const backend = new Backend(); return backend.getApp(); }; @@ -46,10 +51,16 @@ describe('delete content endpoint', () => { .delete(`${backendPrefix}/content/${filePath}`); it('should invoke local file deletion', async () => { + jest.setTimeout(30000); const app = getApp(); const filePath = 'test'; - await performRequest(app, filePath); - expect(mockPage.mock.calls[0][0]).toBe(filePath); - expect(mockPageDelete).toHaveBeenCalledTimes(1); + try { + await performRequest(app, filePath); + expect(mockPage.mock.calls[0][0]).toBe(filePath); + expect(mockPageDelete).toHaveBeenCalledTimes(1); + } catch (error: any) { + console.log('error:', error); + throw new Error(error); + } }); }); diff --git a/packages/bodiless-backend/__tests__/fileHelper.test.ts b/packages/bodiless-backend/__tests__/fileHelper.test.ts index 1103e9bdec..dff61752fb 100644 --- a/packages/bodiless-backend/__tests__/fileHelper.test.ts +++ b/packages/bodiless-backend/__tests__/fileHelper.test.ts @@ -15,6 +15,7 @@ import request from 'supertest'; import MD5 from 'crypto-js/md5'; import { tmpdir } from 'os'; +import Backend from '../src/backend'; import { resolveRelativeToMe } from './tools'; const backendPrefix = '/prefix'; @@ -22,22 +23,19 @@ const backendStaticPath = tmpdir(); const filePath = resolveRelativeToMe('fixtures', 'images', 'image.png'); const getApp = () => { - // eslint-disable-next-line global-require - const Backend = require('../src/backend'); const backend = new Backend(); return backend.getApp(); }; describe('Create fileHelper endpoint', () => { - // preparing environment variables - // clearing mocks + // preparing environment variables; clearing mocks beforeEach(() => { jest.resetModules(); process.env.GATSBY_BACKEND_PREFIX = backendPrefix; process.env.BODILESS_BACKEND_STATIC_PATH = backendStaticPath; }); - describe('when the image is uploaded succefully', () => { + describe('when the image is uploaded successfully', () => { const nodePath = 'Page$homepage$309e2660-767a-11ea-8222-6fba863d924a$image'; const file = ''; const performRequest = (app$: any) => request(app$) diff --git a/packages/bodiless-backend/__tests__/getChanges.test.ts b/packages/bodiless-backend/__tests__/getChanges.test.ts index 2b76d8b244..20a86851bc 100644 --- a/packages/bodiless-backend/__tests__/getChanges.test.ts +++ b/packages/bodiless-backend/__tests__/getChanges.test.ts @@ -12,10 +12,11 @@ * limitations under the License. 
*/ +import Git from '../src/tools/git'; +import GitCmd from '../src/gitCmd'; import { cloneGitFixture, cleanGitFixture } from './tools'; -const { getChanges } = require('../src/git'); -const GitCmd = require('../src/GitCmd'); +const { getChanges } = Git; describe('getChanges', () => { beforeEach(cloneGitFixture('get-changes', 'test-upstream-changes')); @@ -32,12 +33,16 @@ describe('getChanges', () => { it('lists no changes when there is no upstream branch', async () => { jest.setTimeout(30000); - await GitCmd.cmd().add('reset', '--hard', 'test-upstream-changes-local').exec(); - await GitCmd.cmd().add('checkout', '-b', 'foo').exec(); - const result = await getChanges(); - expect(result.upstream.branch).toBeNull(); - expect(result.upstream.commits).toHaveLength(0); - expect(result.upstream.files).toHaveLength(0); + try { + await GitCmd.cmd().add('reset', '--hard', 'test-upstream-changes-local').exec(); + await GitCmd.cmd().add('checkout', '-b', 'foo').exec(); + const result = await getChanges(); + expect(result.upstream.branch).toBeNull(); + expect(result.upstream.commits).toHaveLength(0); + expect(result.upstream.files).toHaveLength(0); + } catch (error) { + throw new Error(`${error}`); + } }); it('lists upstream changes when they exist', async () => { diff --git a/packages/bodiless-backend/__tests__/getConflict.test.ts b/packages/bodiless-backend/__tests__/getConflict.test.ts index a0566ffd5f..6f5d0f2580 100644 --- a/packages/bodiless-backend/__tests__/getConflict.test.ts +++ b/packages/bodiless-backend/__tests__/getConflict.test.ts @@ -12,10 +12,11 @@ * limitations under the License. */ +import Git from '../src/tools/git'; +import GitCmd from '../src/gitCmd'; import { cloneGitFixture, cleanGitFixture } from './tools'; -const { getConflicts, getUpstreamTrackingBranch } = require('../src/git'); -const GitCmd = require('../src/GitCmd'); +const { getConflicts, getUpstreamTrackingBranch } = Git; describe('getConflicts', () => { beforeEach(cloneGitFixture('get-conflicts', 'feat/foo-test-1')); diff --git a/packages/bodiless-backend/__tests__/tools.ts b/packages/bodiless-backend/__tests__/tools.ts index 8796eb884b..f5e12029da 100644 --- a/packages/bodiless-backend/__tests__/tools.ts +++ b/packages/bodiless-backend/__tests__/tools.ts @@ -13,19 +13,15 @@ */ import path from 'path'; +import rimraf from 'rimraf'; import { mkdirSync } from 'fs'; -import GitCmd from '../src/GitCmd'; - -// eslint-disable-next-line import/no-extraneous-dependencies -const rimraf = require('rimraf'); +import GitCmd from '../src/gitCmd'; const originalCwd = process.cwd(); -export const resolveRelativeToMe = (...segments: string[]) => { - const scriptName = path.basename(__filename); - const scriptPath = require.resolve(`./${scriptName}`); - return path.resolve(path.dirname(scriptPath), ...segments); -}; +export const resolveRelativeToMe = (...segments: string[]) => ( + path.resolve(path.dirname(__filename), ...segments) +); export const cloneGitFixture = (repo: string, branch: string) => async () => { const tmp = resolveRelativeToMe(`tmp-${repo}`); diff --git a/packages/bodiless-backend/bin/bodiless-backend b/packages/bodiless-backend/bin/bodiless-backend index b2572d6630..35fef5ede5 100755 --- a/packages/bodiless-backend/bin/bodiless-backend +++ b/packages/bodiless-backend/bin/bodiless-backend @@ -1,3 +1,3 @@ #!/usr/bin/env node -require('../src/server.js'); +require('../lib/server.js'); diff --git a/packages/bodiless-backend/package.json b/packages/bodiless-backend/package.json index a58d27ab87..ac1e998635 100644 --- 
a/packages/bodiless-backend/package.json +++ b/packages/bodiless-backend/package.json @@ -6,11 +6,17 @@ "files": [ "/bodiless.docs.json", "/getDocs.js", - "/src" + "/lib" ], "bin": { "bodiless-backend": "./bin/bodiless-backend" }, + "scripts": { + "build": "run-p build:lib", + "build:lib": "tsc -p ./tsconfig.json", + "build:watch": "npm run build:lib -- --watch", + "clean": "rimraf \"lib/*\" && rimraf tsconfig.tsbuildinfo && rimraf \"doc/api\"" + }, "dependencies": { "@bodiless/cli": "^1.0.0-rc.37", "body-parser": "^1.18.3", @@ -21,6 +27,7 @@ "formidable": "^1.2.1", "fs-extra": "^8.1.0", "lodash": "^4.17.19", + "qs": "^6.11.0", "replace-in-file": "^6.2.0", "rimraf": "^2.6.3", "tmp": "^0.0.33", @@ -33,8 +40,17 @@ "author": "Dewen Li ", "license": "Apache-2.0", "devDependencies": { + "@types/copyfiles": "^2.1.1", + "@types/dotenv": "^8.2.0", + "@types/express": "^4.17.17", + "@types/formidable": "^1.2.5", + "@types/fs-extra": "^8.0.0", + "@types/morgan": "^1.9.4", + "@types/react": "^18.2.0", + "@types/rimraf": "^3.0.0", "@types/crypto-js": "4.1.1", "@types/supertest": "^2.0.8", + "@types/tmp": "^0.2.3", "morgan": "^1.9.1", "morgan-body": "^2.4.8", "supertest": "^4.0.2" diff --git a/packages/bodiless-backend/src/backend.js b/packages/bodiless-backend/src/backend.js deleted file mode 100644 index 9b9a2f072c..0000000000 --- a/packages/bodiless-backend/src/backend.js +++ /dev/null @@ -1,869 +0,0 @@ -/** - * Copyright © 2019 Johnson & Johnson - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/* eslint no-console: 0 */ -/* eslint global-require: 0 */ -const express = require('express'); -const bodyParser = require('body-parser'); -const { spawn } = require('child_process'); -const formidable = require('formidable'); -const tmp = require('tmp'); -const path = require('path'); -const uniq = require('lodash/uniq'); -const Page = require('./page'); -const GitCmd = require('./GitCmd'); -const { getChanges, getConflicts, mergeMain } = require('./git'); -const { copyAllFiles, copyFile, moveFile } = require('./fileHelper'); -const Logger = require('./logger'); - -const backendPrefix = process.env.GATSBY_BACKEND_PREFIX || '/___backend'; -const backendFilePath = process.env.BODILESS_BACKEND_DATA_FILE_PATH || ''; -const defaultBackendPagePath = path.resolve(backendFilePath, 'pages'); -const defaultBackendSitePath = path.resolve(backendFilePath, 'site'); -const backendPagePath = process.env.BODILESS_BACKEND_DATA_PAGE_PATH || defaultBackendPagePath; -const backendStaticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; -const backendPublicPath = process.env.BODILESS_BACKEND_PUBLIC_PAGE_PATH || 'public/page-data'; -const isExtendedLogging = (process.env.BODILESS_BACKEND_EXTENDED_LOGGING_ENABLED || '0') === '1'; -const canCommit = (process.env.BODILESS_BACKEND_COMMIT_ENABLED || '0') === '1'; -const canSave = (process.env.BODILESS_BACKEND_SAVE_ENABLED || '1') === '1'; - -const logger = new Logger('BACKEND'); - -const isMorganEnabled = () => isExtendedLogging; -/* -This Class holds all of the interaction with Git -*/ -class Git { - static setCurrent(branch) { - return Git.cmd() - .add('checkout', branch) - .exec(); - } - - static getCurrent() { - return Git.cmd() - .add('rev-parse', '--abbrev-ref', 'HEAD') - .exec() - .catch(data => logger.log(data)) - .then(data => data.stdout); - } - - static list() { - return new Promise(resolve => { - const cmdName = path.join(__dirname, 'getBranches.sh'); - const cmd = spawn('bash', [cmdName]); - const results = []; - cmd.stdout.on('data', data => { - const values = data.toString().split('||'); - if (values.length === 4) { - results.push({ - isCurrent: Boolean(Number.parseInt(values[0], 10)), - name: values[1].trim(), - description: values[2].trim(), - lastCommitMessage: values[3].trim(), - }); - } - }); - cmd.stdout.on('close', () => { - resolve(results); - }); - }); - } -} - -/* -This Class lets us build and execute a GitCommit -*/ -class GitCommit { - constructor() { - try { - // If App git path is specified, switch to the path. - if (process.env.APP_GIT_PATH) { - process.chdir(process.env.APP_GIT_PATH); - } - } catch (err) { - logger.error(`chdir: ${err}`); - } - this.files = []; - this.remote = 'origin'; - } - - addDirectory(...dirs) { - this.files.push(...dirs); - return this; - } - - addPaths(...paths) { - this.files.push(...paths.map(p => `${backendFilePath}/${p}.json`)); - return this; - } - - addFiles(...files) { - this.files.push(...files.map(p => `${backendStaticPath}/${p}`)); - return this; - } - - async pull() { - const { remote } = this; - await GitCmd.cmd() - .add('fetch', remote) - .exec(); - - // Check if there are any unstaged files left before rebasing. - const dirty = await GitCmd.cmd() - .add('diff', '--quiet') - .exec(); - if (dirty.code) { - await GitCmd.cmd() - .add('add', '--all') - .exec(); - await GitCmd.cmd() - .add('commit', '-m', 'TEMPORARY COMMIT') - .exec(); - } - - // Get current branch name. 
- const data = await GitCmd.cmd() - .add('symbolic-ref', '--short', 'HEAD') - .exec(); - const branch = data.stdout.trim(); - - let result; - try { - result = await GitCmd.cmd() - .add('rebase', `${remote}/${branch}`, '-s', 'recursive', '-X', 'theirs') - .exec(); - } catch (rebaseErr) { - // Set another http.status code for unstaged changes? - // const unstaged = /You have unstaged changes/i.test(rebaseErr.message); - - // Set HTTP response status code to 409 if a conflict is found during rebase. - if (/could not apply/i.test(rebaseErr.message)) { - rebaseErr.code = '409'; - - // Abort rebase only if it's in progress (i.e. merge conflict). - try { - logger.log('Found error during rebase, attempting to abort rebase.'); - await GitCmd.cmd() - .add('rebase', '--abort') - .exec(); - } catch (abortErr) { - logger.log('Found error while attempting to abort rebase.'); - logger.error(abortErr); - } - } else { - rebaseErr.code = '500'; - } - throw rebaseErr; - } finally { - // If there was a temporary commit, rewind working directory back one commit. - if (dirty.code && (result.stdout.search('Already applied') === -1)) { - await GitCmd.cmd() - .add('reset', 'HEAD^') - .exec(); - } - } - return result; - } - - async commit(message, author) { - const { remote } = this; - - await this.pull(); - - // Stage user files specified by front-end (src/data, /static, etc.). - await GitCmd.cmd() - .add('add') - .addFiles(...this.files) - .exec(); - - // Check if we have any staged files to be committed. - let hasChanges = true; - try { - const resDiff = await GitCmd.cmd() - .add('diff', '--cached', '--exit-code') - .exec(); - - if (resDiff.code === 0) { - hasChanges = false; - } - } catch (errDiff) { - hasChanges = true; - } - if (!hasChanges) { - const errNoChange = new Error('No changes found for this commit.'); - errNoChange.code = 405; - throw errNoChange; - } - - // Commit the staged files.. - const commitCmd = GitCmd.cmd(); - commitCmd.add('commit', '-m', message); - // If we have an author, add it to the commit. - if (author) { - commitCmd.add('--author', author); - } - commitCmd.addFiles(...this.files); - const res = await commitCmd.exec(); - - try { - // Push changes after successful rebase. - await GitCmd.cmd() - .add('push', remote) - .exec(); - } catch (pushError) { - // Walk back last commit, and put it's contents into the working directory. 
- GitCmd.cmd() - .add('reset', '--mixed', 'HEAD^') - .exec(); - throw pushError; - } - - // return commit command response to front-end if successful - return res; - } - - amend() { - // we have to tell git we intend to add our files - return Git.cmd() - .add('add', '--intent-to-add') - .addFiles(...this.files) - .exec() - .then( - Git.cmd() - .add('commit') - .add('--amend', '--no-edit') - .addFiles(...this.files) - .exec(), - ); - } -} - -class Backend { - constructor() { - this.app = express(); - this.app.use(bodyParser.json()); - if (isMorganEnabled()) { - const morgan = require('morgan'); - const morganBody = require('morgan-body'); - this.app.use(morgan(':method :url :status :res[content-length] - :response-time ms')); - morganBody(this.app); - } - this.app.use((req, res, next) => { - res.header( - 'Access-Control-Allow-Headers', - 'Origin, X-Requested-With, Content-Type, Accept', - ); - res.header('Content-Type', 'application/json'); - next(); - }); - this.setRoute(`${backendPrefix}/changes`, Backend.getChanges); - this.setRoute(`${backendPrefix}/changes/conflicts`, Backend.getConflicts); - this.setRoute(`${backendPrefix}/get/commits`, Backend.getLatestCommits); - this.setRoute(`${backendPrefix}/change/amend`, Backend.setChangeAmend); - this.setRoute(`${backendPrefix}/change/commit`, Backend.setChangeCommit); - this.setRoute(`${backendPrefix}/change/push`, Backend.setChangePush); - this.setRoute(`${backendPrefix}/change/reset`, Backend.setChangeReset); - this.setRoute(`${backendPrefix}/change/pull`, Backend.setChangePull); - this.setRoute(`${backendPrefix}/merge/main`, Backend.mergeMain); - this.setRoute(`${backendPrefix}/asset/*`, Backend.setAsset); - this.setRoute(`${backendPrefix}/set/current`, Backend.setSetCurrent); - this.setRoute(`${backendPrefix}/set/list`, Backend.setSetList); - this.setRoute(`${backendPrefix}/content/*`, Backend.setContent); - this.setRoute(`${backendPrefix}/log`, Backend.log); - this.setRoute(`${backendPrefix}/pages`, Backend.setPages); - this.setRoute(`${backendPrefix}/clone`, Backend.clonePage); - this.setRoute(`${backendPrefix}/remove/*`, Backend.removePage); - this.setRoute(`${backendPrefix}/directory/child/*`, Backend.directoryChild); - this.setRoute(`${backendPrefix}/directory/exists/*`, Backend.directoryExists); - this.setRoute(`${backendPrefix}/file/remove/*`, Backend.removeFile); - this.setRoute(`${backendPrefix}/assets/remove/*`, Backend.removeAssets); - this.setRoute(`${backendPrefix}/assets/copy`, Backend.copyAssets); - this.setRoute(`${backendPrefix}/assets/move`, Backend.moveAssets); - } - - setRoute(route, action) { - action.bind(this)(this.app.route(route)); - } - - getApp() { - return this.app; - } - - static exitWithErrorResponse(error, res) { - logger.error(error); - if (Number(error.code) >= 300) { - res.status(Number(error.code)); - } else { - res.status(500); - } - // End response process to prevent any further queued promises/events from responding. - res.send(Backend.sanitizeOutput(error.message)).end(); - } - - static ensureCommitEnabled(res) { - // Exit with HTTP 405 "Method Not Allowed" if git commits are disabled. - if (!canCommit) { - const error = new Error( - 'Your current environment does not allow saving content.', - ); - error.code = 405; - Backend.exitWithErrorResponse(error, res); - return false; - } - return true; - } - - static ensureSaveEnabled(res) { - // Exit with HTTP 405 "Method Not Allowed" if git commits are disabled. 
- if (!canSave) { - const error = new Error( - 'Your current environment does not allow saving content.', - ); - error.code = 405; - Backend.exitWithErrorResponse(error, res); - return false; - } - return true; - } - - static getChanges(route) { - route.get(async (req, res) => { - try { - const status = await getChanges(); - res.send(status); - } catch (error) { - logger.log(error); - error.code = 500; - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static getConflicts(route) { - route.get(async (req, res) => { - const target = req.query.target || undefined; - try { - const conflicts = await getConflicts(target); - const pages = uniq(conflicts.files.filter(file => (file.search(backendPagePath) !== -1)) - .map(file => ( - path.dirname(file).replace(backendPagePath, '').replace(/^\/|\/$/g, '') || 'homepage' - ))); - const site = uniq(conflicts.files.filter( - file => (file.search(defaultBackendSitePath) !== -1), - ).map(file => ( - path.dirname(file).replace(defaultBackendSitePath, '').replace(/^\/|\/$/g, '') || 'site' - ))); - res.send({ ...conflicts, pages, site }); - } catch (error) { - logger.log(error); - error.code = 500; - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static getLatestCommits(route) { - route.post(async (req, res) => { - try { - await GitCmd.cmd().add('fetch', '--all'); - const gitLog = await GitCmd.cmd() - .add('log', '--pretty=format:%H%n%ad%n%an%n%s%n') - .exec(); - res.send(gitLog); - } catch (error) { - res.send(error.info); - } - }); - } - - static setChangeReset(route) { - route.post(async (req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - logger.log('Start reset'); - try { - // Clean up untracked files. - if (backendFilePath && backendStaticPath) { - // Clean up public folder. - const gitStatus = await GitCmd.cmd() - .add('status', '--porcelain', backendPagePath) - .exec(); - const gitRootRelPath = await GitCmd.cmd() - .add('rev-parse', '--show-cdup') - .exec(); - const reGetDeletedAndUntracked = /(?<= D |\?\? ).*/gm; - const deletedAndUntracked = gitStatus.stdout.match(reGetDeletedAndUntracked); - if (deletedAndUntracked !== null) { - const dataPagePath = path.join(backendFilePath, 'pages'); - const obsoletePublicPages = deletedAndUntracked.map(gitPath => { - const publicPagePath = gitPath.replace(dataPagePath, backendPublicPath); - // Get absolute path considering location of .git folder - return path.resolve( - gitRootRelPath.stdout.trim(), - publicPagePath, - ); - }); - // Have to loop through every path since 'git clean' can work incorrectly when passing - // all the paths at once. - await Promise.all(obsoletePublicPages.map( - async (gitPath) => GitCmd.cmd().add('clean', '-dfx').addFiles(gitPath).exec(), - )); - } - // Clean up data folder. - await Promise.all([backendFilePath, backendStaticPath].map( - async (gitPath) => GitCmd.cmd().add('clean', '-df').addFiles(gitPath).exec(), - )); - } - // Discard changes in existing files. - const cleanExisting = await GitCmd.cmd() - .add('reset', '--hard', 'HEAD') - .exec(); - res.send(cleanExisting.stdout); - } catch (error) { - // Need to inform user of merge operation fails. - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static setChangePull(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - logger.log('Start pull'); - new GitCommit() - .pull() - .then(data => res.send(data.stdout)) - // Need to inform user of merge operation fails. 
- .catch(error => Backend.exitWithErrorResponse(error, res)); - }); - } - - static mergeMain(route) { - route.post(async (req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - try { - const status = await mergeMain(); - res.send(status); - } catch (error) { - logger.log(error); - error.code = 500; - Backend.exitWithErrorResponse(error, res); - } - }); - } - - static setChangeAmend(route) { - route.post((req, res) => { - logger.log('Start amend'); - logger.log(req.body.paths); - Git.commit() - .addPaths(...req.body.paths) - .amend() - .then(data => res.send(data.stdout)) - .catch(data => logger.log(data)); - }); - } - - static setChangeCommit(route) { - route.post((req, res) => { - if (!Backend.ensureCommitEnabled(res)) return; - logger.log(`Start committing: ${req.body.message}`); - const { author } = req.body; - const files = req.body.files || []; - const dirs = req.body.dirs || []; - new GitCommit() - .addDirectory(...dirs) - .addPaths(...req.body.paths) - .addFiles(...files) - .commit(`[CONTENT] ${req.body.message}`, author) - // .then(Git.cmd().add('push').exec()) - .then(data => { - res.send(data.stdout); - }) - // Need to inform user of merge operation fails. - .catch(error => Backend.exitWithErrorResponse(error, res)); - }); - } - - static setChangePush(route) { - route.post((req, res) => { - if (!Backend.ensureCommitEnabled(res)) return; - logger.log('Start push'); - new GitCmd() - .add('symbolic-ref', '--short', 'HEAD') - .exec() - .then(data => { - const branch = data.stdout.trim(); - logger.log(`Branch = ${branch}`); - Git.cmd() - .add('rebase', `origin/${branch}`) - .exec() - .then( - Git.cmd() - .add('push', 'origin', branch) - .exec(), - ) - .then(addData => res.send(addData.stdout)) - .catch(addData => logger.error(addData)); - }) - .catch(data => logger.log(data)); - }); - } - - static log(route) { - route.post((req, res) => { - new Logger(req.body.id).print(req.body.message, req.body.severity); - res.send('success'); - }); - } - - static setAsset(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const baseResourcePath = Backend.getPath(req); - const tmpDir = tmp.dirSync({ mode: '0755', unsafeCleanup: true, prefix: 'backendTmpDir_' }); - const form = formidable({ multiples: true, uploadDir: tmpDir.name }); - - form.parse(req, (err, fields, files) => { - const { nodePath } = fields; - copyAllFiles(files, baseResourcePath, nodePath).then((filesPath) => { - res.json({ filesPath }); - }).catch(copyErr => { - console.log(copyErr); - res.send(copyErr); - }); - }); - }); - } - - static setSetCurrent(route) { - route - .get((req, res) => { - logger.log('Start get current set'); - Git.getCurrent().then(data => res.send(data)); - }) - .post((req, res) => { - logger.log(`Start Post current Set:${req.body}`); - Git.setCurrent(req.body.name) - .then(Git.list()) - .then(data => { - res.send(data); - }) - .catch(reason => { - logger.log(reason); - }); - }); - } - - static setSetList(route) { - route.get((req, res) => { - logger.log('Start Get Set List'); - Git.list().then(data => res.send(data)); - }); - } - - static setContent(route) { - route - .get((req, res) => { - // @todo: refactor 2nd argument. - logger.log(req); - const page = Backend.getPage(Backend.getPath(req)); - logger.log(`Start get content for:${page.file}`); - page - .read() - .then(data => { - res.send(data); - }) - .catch(() => res.send({})); - }) - .post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - // @todo: refactor 2nd argument. 
- const page = Backend.getPage(Backend.getPath(req)); - logger.log(`Start post content for:${page.file}`); - page - .write(req.body) - .then(data => { - logger.log('Sending', data); - res.send(data); - }) - .catch(reason => { - logger.log(reason); - res.send({}); - }); - }) - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const page = Backend.getPage(Backend.getPath(req)); - logger.log(`Start deletion for:${page.file}`); - page - .delete() - .then(data => { - logger.log('Sending', data); - res.send(data); - }) - .catch(reason => { - logger.log(reason); - res.send({}); - }); - }); - } - - static getPath(req) { - const prefixCount = backendPrefix.split('/').filter(Boolean).length + 1; - logger.log(req.originalUrl); - return req.originalUrl - .replace(/\/*$/, '') - .replace(/^\/*/, '') - .split('/') - .splice(prefixCount) - .join('/'); - } - - static getPage(pagePath) { - return new Page(pagePath); - } - - static removePage(route) { - route - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - page.setBasePath(backendPagePath); - - logger.log(`Start deleting page:${page.directory}`); - - page - .deleteDirectory() - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static removeFile(route) { - route - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - page.setBasePath(backendPagePath); - const origin = `./src/data/pages/${pagePath}index.json`; - logger.log(`Start deleting file: ${origin}`); - - page - .removeFile(origin) - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static directoryChild(route) { - route - .delete((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - - page.setBasePath(backendPagePath); - - logger.log(`Start verify page child directory: ${page.directory}`); - - page - .hasChildDirectory() - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static directoryExists(route) { - route - .delete((req, res) => { - const pagePath = req.params[0]; - const page = Backend.getPage(pagePath); - - page.setBasePath(backendPagePath); - - logger.log(`Start verifying new page exists: ${page.directory}`); - - page - .directoryExists(page.directory) - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static setPages(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const { body } = req; - const pagePath = body.path || ''; - const template = body.template || '_default'; - const filePath = path.join(pagePath, 'index'); - const pageContent = { - '#template': template, - }; - const page = Backend.getPage(filePath); - page.setBasePath(backendPagePath); - logger.log(`Start creating page for:${page.file}`); - if (page.exists) { - res.status(409); - res.send(`Error: page ${pagePath} already exists`); - return; - } - page - .write(pageContent) - .then(data => { - logger.log('Sending', data); - res.status(201); - res.send(data); - }) - .catch(reason => { - logger.log(reason); - res.send({}); - }); - }); - } - - static clonePage(route) { - route.post(async (req, res) => { - if 
(!Backend.ensureSaveEnabled(res)) return; - const { body: { origin, destination } } = req; - const page = Backend.getPage(destination); - page.setBasePath(backendPagePath); - - logger.log(`Start cloning page for:${destination}`); - - page - .copyDirectory(origin, destination) - .then(data => { - if (data) { - logger.log(data); - res.send(data); - } else { - res.send({}); - } - }) - .catch(reason => { - logger.log(reason); - res.status(500).send(`${reason}`); - }); - }); - } - - static removeAssets(route) { - route.delete(async (req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const origin = req.params[0]; - const page = Backend.getPage(origin); - - logger.log(`Start removing assets for:${origin}`); - - const originPath = origin.replace(/\/$/, ''); - const originStaticPath = path.join(backendStaticPath, '/images/pages', originPath); - - page - .removePageAssets(originStaticPath) - .then(error => { - if (error) { - logger.log(error); - res.send(error); - } else { - res.send({}); - } - }); - }); - } - - static copyAssets(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const { - body: { - path_from: pathFrom, path_to: pathTo, - } - } = req; - const assetStaticPathFrom = path.join(backendStaticPath, pathFrom); - const assetStaticPathTo = path.join(backendStaticPath, pathTo); - logger.log(`Copy assets from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`); - try { - copyFile(assetStaticPathFrom, assetStaticPathTo); - setTimeout(() => { - res.send({status: 'success'}); - }, 500); - } catch (error) { - logger.log(error); - res.status(500).send(error); - } - }); - } - - static moveAssets(route) { - route.post((req, res) => { - if (!Backend.ensureSaveEnabled(res)) return; - const { - body: { - path_from: pathFrom, path_to: pathTo, - } - } = req; - const assetStaticPathFrom = path.join(backendStaticPath, pathFrom); - const assetStaticPathTo = path.join(backendStaticPath, pathTo); - logger.log(`Move asset from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`); - try { - moveFile(assetStaticPathFrom, assetStaticPathTo); - setTimeout(() => { - res.send({status: 'success'}); - }, 500); - } catch (error) { - logger.log(error); - res.status(500).send(error); - } - }); - } - - static sanitizeOutput(data) { - return data.replace(/(http|https):\/\/[^@]+:[^@]+@/gi, '$1://****:****@'); - } - - start(port) { - logger.log('Start'); - this.app.listen(port, () => logger.log(`Backend listening on Port: ${port}`)); - } -} - -module.exports = Backend; diff --git a/packages/bodiless-backend/src/backend.ts b/packages/bodiless-backend/src/backend.ts new file mode 100644 index 0000000000..5652129f3d --- /dev/null +++ b/packages/bodiless-backend/src/backend.ts @@ -0,0 +1,932 @@ +/** + * Copyright © 2019 Johnson & Johnson + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import express from 'express'; +import bodyParser from 'body-parser'; +import { spawn } from 'child_process'; +import formidable from 'formidable'; +import tmp from 'tmp'; +import path from 'path'; +import uniq from 'lodash/uniq'; +import morgan from 'morgan'; +import morganBody from 'morgan-body'; +import type { + Express, IRoute, Request, Response, +} from 'express'; +import Page from './page'; +import GitCmd, { GitCmdError } from './gitCmd'; +import gitUtil from './tools/git'; +import { copyAllFiles, copyFile, moveFile } from './fileHelper'; +import Logger from './logger'; +import type { GitInfoType } from './gitCmd'; +import type { GitUtil } from './tools/git'; + +const logger = new Logger('BACKEND'); + +type GitBranchInfoType = { + isCurrent: boolean; + name: string; + description: string; + lastCommitMessage: string; +}; + +/* +This Class holds all of the interaction with Git +*/ +class Git { + static setCurrent(branch: string) { + return GitCmd.cmd() + .add('checkout', branch) + .exec(); + } + + static getCurrent() { + return GitCmd.cmd() + .add('rev-parse', '--abbrev-ref', 'HEAD') + .exec() + .catch(data => logger.log(data)) + .then(data => data?.stdout); + } + + static list() { + return new Promise((resolve) => { + const cmdName = path.join(__dirname, 'getBranches.sh'); + const cmd = spawn('bash', [cmdName]); + const results: GitBranchInfoType[] = []; + cmd.stdout.on('data', (data) => { + const values = data.toString().split('||'); + if (values.length === 4) { + results.push({ + isCurrent: Boolean(Number.parseInt(values[0], 10)), + name: values[1].trim(), + description: values[2].trim(), + lastCommitMessage: values[3].trim(), + }); + } + }); + cmd.stdout.on('close', () => { + resolve(results); + }); + }); + } +} + +/* +This Class lets us build and execute a GitCommit +*/ +class GitCommit { + files: string[] = []; + + remote: string; + + staticPath: string; + + filePath: string; + + constructor() { + try { + // If App git path is specified, switch to the path. + if (process.env.APP_GIT_PATH) { + process.chdir(process.env.APP_GIT_PATH); + } + } catch (err) { + logger.error(`chdir: ${err}`); + } + this.files = []; + this.remote = 'origin'; + this.filePath = ''; + this.staticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; + } + + addDirectory(...dirs: string[]) { + this.files.push(...dirs); + return this; + } + + addPaths(...paths: string[]) { + this.files.push(...paths.map((p) => `${this.filePath}/${p}.json`)); + return this; + } + + addFiles(...files: string[]) { + this.files.push(...files.map((p) => `${this.staticPath}/${p}`)); + return this; + } + + async pull() { + const { remote } = this; + await GitCmd.cmd().add('fetch', remote).exec(); + + // Check if there are any un-staged files left before rebasing. + const dirty = await GitCmd.cmd().add('diff', '--quiet').exec(); + if (dirty.code) { + await GitCmd.cmd().add('add', '--all').exec(); + await GitCmd.cmd().add('commit', '-m', 'TEMPORARY COMMIT').exec(); + } + + // Get current branch name. + const data: GitInfoType = await GitCmd.cmd() + .add('symbolic-ref', '--short', 'HEAD') + .exec(); + const branch = data.stdout.trim(); + + let result; + try { + result = await GitCmd.cmd() + .add('rebase', `${remote}/${branch}`, '-s', 'recursive', '-X', 'theirs') + .exec(); + } catch (rebaseErr: any) { + // Set another http.status code for unstaged changes? + // const unstaged = /You have unstaged changes/i.test(rebaseErr.message); + + // Set HTTP response status code to 409 if a conflict is found during rebase. 
+      if (/could not apply/i.test(rebaseErr.message)) {
+        rebaseErr.code = '409';
+
+        // Abort rebase only if it's in progress (i.e. merge conflict).
+        try {
+          logger.log('Found error during rebase, attempting to abort rebase.');
+          await GitCmd.cmd().add('rebase', '--abort').exec();
+        } catch (abortErr: any) {
+          logger.log('Found error while attempting to abort rebase.');
+          logger.error(abortErr);
+        }
+      } else {
+        rebaseErr.code = '500';
+      }
+      throw rebaseErr;
+    } finally {
+      // If there was a temporary commit, rewind working directory back one commit.
+      if (dirty.code && result?.stdout.search('Already applied') === -1) {
+        await GitCmd.cmd().add('reset', 'HEAD^').exec();
+      }
+    }
+    return result;
+  }
+
+  async commit(message: string, author: string) {
+    const { remote } = this;
+
+    await this.pull();
+
+    // Stage user files specified by front-end (src/data, /static, etc.).
+    await GitCmd.cmd()
+      .add('add')
+      .addFiles(...this.files)
+      .exec();
+
+    // Check if we have any staged files to be committed.
+    let hasChanges = true;
+    try {
+      const resDiff = await GitCmd.cmd()
+        .add('diff', '--cached', '--exit-code')
+        .exec();
+
+      if (resDiff.code === 0) {
+        hasChanges = false;
+      }
+    } catch (errDiff) {
+      hasChanges = true;
+    }
+    if (!hasChanges) {
+      const errNoChange = new GitCmdError('No changes found for this commit.');
+      errNoChange.code = 405;
+      throw errNoChange;
+    }
+
+    // Commit the staged files.
+    const commitCmd = GitCmd.cmd();
+    commitCmd.add('commit', '-m', message);
+    // If we have an author, add it to the commit.
+    if (author) {
+      commitCmd.add('--author', author);
+    }
+    commitCmd.addFiles(...this.files);
+    const res = await commitCmd.exec();
+
+    try {
+      // Push changes after successful rebase.
+      await GitCmd.cmd().add('push', remote).exec();
+    } catch (pushError) {
+      // Walk back the last commit, and put its contents into the working directory.
+ GitCmd.cmd().add('reset', '--mixed', 'HEAD^').exec(); + throw pushError; + } + + // return commit command response to front-end if successful + return res; + } + + amend() { + // we have to tell git we intend to add our files + return GitCmd.cmd() + .add('add', '--intent-to-add') + .addFiles(...this.files) + .exec() + .then(() => { + GitCmd.cmd() + .add('commit') + .add('--amend', '--no-edit') + .addFiles(...this.files) + .exec(); + }); + } +} + +class Backend { + app: Express; + + git: GitUtil; + + prefix: string = '/___backend'; + + filePath: string = ''; + + defaultPagePath: string = ''; + + defaultSitePath: string = ''; + + pagePath: string = ''; + + staticPath: string = ''; + + publicPath: string = ''; + + isExtendedLogging: boolean = false; + + canCommit: boolean = false; + + canSave: boolean = true; + + initConf() { + this.prefix = process.env.GATSBY_BACKEND_PREFIX || '/___backend'; + this.filePath = process.env.BODILESS_BACKEND_DATA_FILE_PATH || ''; + this.defaultPagePath = path.resolve(this.filePath, 'pages'); + this.defaultSitePath = path.resolve(this.filePath, 'site'); + this.pagePath = process.env.BODILESS_BACKEND_DATA_PAGE_PATH || this.defaultPagePath; + this.staticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; + this.publicPath = process.env.BODILESS_BACKEND_PUBLIC_PAGE_PATH || 'public/page-data'; + this.isExtendedLogging = (process.env.BODILESS_BACKEND_EXTENDED_LOGGING_ENABLED || '0') === '1'; + this.canCommit = (process.env.BODILESS_BACKEND_COMMIT_ENABLED || '0') === '1'; + this.canSave = (process.env.BODILESS_BACKEND_SAVE_ENABLED || '1') === '1'; + } + + isMorganEnabled = () => this.isExtendedLogging; + + constructor() { + this.initConf(); + this.app = express(); + this.app.use(bodyParser.json()); + this.git = gitUtil; + if (this.isMorganEnabled()) { + this.app.use( + morgan(':method :url :status :res[content-length] - :response-time ms'), + ); + morganBody(this.app); + } + this.app.use((req, res, next) => { + res.header( + 'Access-Control-Allow-Headers', + 'Origin, X-Requested-With, Content-Type, Accept', + ); + res.header('Content-Type', 'application/json'); + next(); + }); + this.setRoute(`${this.prefix}/changes`, this.getChanges); + this.setRoute(`${this.prefix}/changes/conflicts`, this.getConflicts); + this.setRoute(`${this.prefix}/get/commits`, Backend.getLatestCommits); + // this.setRoute(`${this.prefix}/change/amend`, this.setChangeAmend); + this.setRoute(`${this.prefix}/change/commit`, this.setChangeCommit); + // this.setRoute(`${this.prefix}/change/push`, this.setChangePush); + this.setRoute(`${this.prefix}/change/reset`, this.setChangeReset); + this.setRoute(`${this.prefix}/change/pull`, this.setChangePull); + this.setRoute(`${this.prefix}/merge/main`, this.mergeMain); + this.setRoute(`${this.prefix}/asset/*`, this.setAsset); + // this.setRoute(`${this.prefix}/set/current`, Backend.setSetCurrent); + this.setRoute(`${this.prefix}/set/list`, Backend.setSetList); + this.setRoute(`${this.prefix}/content/*`, this.setContent); + this.setRoute(`${this.prefix}/log`, Backend.log); + this.setRoute(`${this.prefix}/pages`, this.setPages); + this.setRoute(`${this.prefix}/clone`, this.clonePage); + this.setRoute(`${this.prefix}/remove/*`, this.removePage); + this.setRoute(`${this.prefix}/directory/child/*`, this.directoryChild); + this.setRoute( + `${this.prefix}/directory/exists/*`, + this.directoryExists, + ); + this.setRoute(`${this.prefix}/file/remove/*`, this.removeFile); + this.setRoute(`${this.prefix}/assets/remove/*`, this.removeAssets); + 
    this.setRoute(`${this.prefix}/assets/copy`, this.copyAssets);
+    this.setRoute(`${this.prefix}/assets/move`, this.moveAssets);
+  }
+
+  setRoute(route: string, action: (r: IRoute) => void) {
+    action.bind(this)(this.app.route(route));
+  }
+
+  getApp() {
+    return this.app;
+  }
+
+  static exitWithErrorResponse(error: GitCmdError, res: Response) {
+    logger.error(error.message);
+    if (Number(error.code) >= 300) {
+      res.status(Number(error.code));
+    } else {
+      res.status(500);
+    }
+    // End response process to prevent any further queued promises/events from responding.
+    res.send({error: Backend.sanitizeOutput(error.message)}).end();
+  }
+
+  ensureCommitEnabled(res: Response) {
+    // Exit with HTTP 405 "Method Not Allowed" if git commits are disabled.
+    if (!this.canCommit) {
+      const error = new GitCmdError(
+        'Your current environment does not allow saving content.',
+      );
+      error.code = 405;
+      Backend.exitWithErrorResponse(error, res);
+      return false;
+    }
+    return true;
+  }
+
+  ensureSaveEnabled(res: Response) {
+    // Exit with HTTP 405 "Method Not Allowed" if saving content is disabled.
+    if (!this.canSave) {
+      const error = new GitCmdError(
+        'Your current environment does not allow saving content.',
+      );
+      error.code = 405;
+      Backend.exitWithErrorResponse(error, res);
+      return false;
+    }
+    return true;
+  }
+
+  getPath(req: Request) {
+    const prefixCount = this.prefix.split('/').filter(Boolean).length + 1;
+    logger.log(req.originalUrl);
+    return req.originalUrl
+      .replace(/\/*$/, '')
+      .replace(/^\/*/, '')
+      .split('/')
+      .splice(prefixCount)
+      .join('/');
+  }
+
+  static getPage(pagePath: string) {
+    return new Page(pagePath);
+  }
+
+  // *** Middleware ***
+  // @todo: move middleware to a new folder
+
+  getChanges(route: IRoute) {
+    route.get(async (req: Request, res) => {
+      try {
+        const status = await this.git.getChanges();
+        res.send(status);
+      } catch (error: any) {
+        logger.log(error);
+        error.code = 500;
+        Backend.exitWithErrorResponse(error, res);
+      }
+    });
+  }
+
+  getConflicts(route: IRoute) {
+    route.get(async (req: Request, res: Response) => {
+      const { targetQs = undefined } = req.query;
+
+      let target: string | undefined;
+      if (typeof targetQs === 'string') {
+        target = targetQs;
+      }
+
+      try {
+        const conflicts = await this.git.getConflicts(target);
+        const pages = uniq(
+          conflicts.files
+            .filter((file) => file.search(this.pagePath) !== -1)
+            .map(
+              (file) => path
+                .dirname(file)
+                .replace(this.pagePath, '')
+                .replace(/^\/|\/$/g, '') || 'homepage',
+            ),
+        );
+        const site = uniq(
+          conflicts.files
+            .filter((file) => file.search(this.defaultSitePath) !== -1)
+            .map(
+              (file) => path
+                .dirname(file)
+                .replace(this.defaultSitePath, '')
+                .replace(/^\/|\/$/g, '') || 'site',
+            ),
+        );
+        res.send({ ...conflicts, pages, site });
+      } catch (error: any) {
+        logger.log(error);
+        error.code = 500;
+        Backend.exitWithErrorResponse(error, res);
+      }
+    });
+  }
+
+  // @todo: remove static and convert cmd.
+  static getLatestCommits(route: IRoute) {
+    route.post(async (req: Request, res: Response) => {
+      try {
+        await GitCmd.cmd().add('fetch', '--all').exec();
+        const gitLog = await GitCmd.cmd()
+          .add('log', '--pretty=format:%H%n%ad%n%an%n%s%n')
+          .exec();
+        res.send(gitLog);
+      } catch (error: any) {
+        res.send(error.info);
+      }
+    });
+  }
+
+  setChangeReset(route: IRoute) {
+    route.post(async (req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      logger.log('Start reset');
+      try {
+        // Clean up untracked files.
+        if (this.filePath && this.staticPath) {
+          // Clean up public folder.
+          const gitStatus = await GitCmd.cmd()
+            .add('status', '--porcelain', this.pagePath)
+            .exec();
+          const gitRootRelPath = await GitCmd.cmd()
+            .add('rev-parse', '--show-cdup')
+            .exec();
+          const reGetDeletedAndUntracked = /(?<= D |\?\? ).*/gm;
+          const deletedAndUntracked = gitStatus.stdout.match(
+            reGetDeletedAndUntracked,
+          );
+          if (deletedAndUntracked !== null) {
+            const dataPagePath = path.join(this.filePath, 'pages');
+            const obsoletePublicPages = deletedAndUntracked.map((gitPath) => {
+              const publicPagePath = gitPath.replace(
+                dataPagePath,
+                this.publicPath,
+              );
+              // Get absolute path considering location of .git folder
+              return path.resolve(gitRootRelPath.stdout.trim(), publicPagePath);
+            });
+            // Have to loop through every path since 'git clean' can work incorrectly when passing
+            // all the paths at once.
+            await Promise.all(
+              obsoletePublicPages.map(
+                async (gitPath) => GitCmd.cmd().add('clean', '-dfx').addFiles(gitPath).exec()
+              ),
+            );
+          }
+          // Clean up data folder.
+          await Promise.all(
+            [this.filePath, this.staticPath].map(
+              async (gitPath) => GitCmd.cmd().add('clean', '-df').addFiles(gitPath).exec(),
+            ),
+          );
+        }
+        // Discard changes in existing files.
+        const cleanExisting = await GitCmd.cmd()
+          .add('reset', '--hard', 'HEAD')
+          .exec();
+        res.send({output: cleanExisting.stdout});
+      } catch (error: any) {
+        // Need to inform the user if the merge operation fails.
+        Backend.exitWithErrorResponse(error, res);
+      }
+    });
+  }
+
+  setChangePull(route: IRoute) {
+    route.post((req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      logger.log('Start pull');
+      new GitCommit()
+        .pull()
+        .then((data) => res.send({output: data.stdout}))
+        // Need to inform the user if the merge operation fails.
+        .catch((error) => Backend.exitWithErrorResponse(error, res));
+    });
+  }
+
+  mergeMain(route: IRoute) {
+    route.post(async (req: Request, res: Response) => {
+      if (!this.ensureSaveEnabled(res)) return;
+      try {
+        const status = await this.git.mergeMain();
+        res.send(status);
+      } catch (error: any) {
+        logger.log(error);
+        error.code = 500;
+        Backend.exitWithErrorResponse(error, res);
+      }
+    });
+  }
+
+  // @todo: do we need this?
+  // static setChangeAmend(route: IRoute) {
+  //   route.post((req: Request, res: Response) => {
+  //     logger.log('Start amend');
+  //     logger.log(req.body.paths);
+  //     GitCommit.commit()
+  //       .addPaths(...req.body.paths)
+  //       .amend()
+  //       .then(data => res.send(data.stdout))
+  //       .catch(data => logger.log(data));
+  //   });
+  // }
+
+  setChangeCommit(route: IRoute) {
+    route.post((req: Request, res: Response) => {
+      if (!this.ensureCommitEnabled(res)) return;
+      logger.log(`Start committing: ${req.body.message}`);
+      const { author } = req.body;
+      const files = req.body.files || [];
+      const dirs = req.body.dirs || [];
+      new GitCommit()
+        .addDirectory(...dirs)
+        .addPaths(...req.body.paths)
+        .addFiles(...files)
+        .commit(`[CONTENT] ${req.body.message}`, author)
+        // .then(Git.cmd().add('push').exec())
+        .then((data) => {
+          res.send({output: data.stdout});
+        })
+        // Need to inform the user if the merge operation fails.
+        .catch((error) => Backend.exitWithErrorResponse(error, res));
+    });
+  }
+
+  // @todo: !!! do we need this?
+ // setChangePush(route: IRoute) { + // route.post((req: Request, res: Response) => { + // if (!this.ensureCommitEnabled(res)) return; + // logger.log('Start push'); + // new GitCmd() + // .add('symbolic-ref', '--short', 'HEAD') + // .exec() + // .then(data => { + // const branch = data.stdout.trim(); + // logger.log(`Branch = ${branch}`); + // GitCmd.cmd() + // .add('rebase', `origin/${branch}`) + // .exec() + // .then(() => { + // GitCmd.cmd() + // .add('push', 'origin', branch) + // .exec(); + // }) + // .then(addData => res.send(addData.stdout)) + // .catch(addData => logger.error(addData)); + // }) + // .catch(data => logger.log(data)); + // }); + // } + + static log(route: IRoute) { + route.post((req: Request, res: Response) => { + new Logger(req.body.id).print(req.body.message, req.body.severity); + res.send({status: 'success'}); + }); + } + + setAsset(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const baseResourcePath = this.getPath(req); + const tmpDir = tmp.dirSync({ + mode: 0o755, + unsafeCleanup: true, + prefix: 'backendTmpDir_', + }); + const form = formidable({ multiples: true, uploadDir: tmpDir.name }); + + form.parse(req, (err, fields, files) => { + const { nodePath } = fields; + copyAllFiles(files, baseResourcePath, nodePath as string) + .then((filesPath) => { + res.json({ filesPath }); + }) + .catch((copyErr) => { + console.log(copyErr); + res.send(copyErr); + }); + }); + }); + } + + static setSetCurrent(route: IRoute) { + route + .get((req: Request, res: Response) => { + logger.log('Start get current set'); + Git.getCurrent().then(data => res.send(data)); + }) + .post((req: Request, res: Response) => { + logger.log(`Start Post current Set:${req.body}`); + Git.setCurrent(req.body.name) + .then(() => Git.list()) + .then(data => { + res.send(data); + }) + .catch(reason => { + logger.log(reason); + }); + }); + } + + static setSetList(route: IRoute) { + route.get((req: Request, res: Response) => { + logger.log('Start Get Set List'); + Git.list().then((data) => res.send(data)); + }); + } + + setContent(route: IRoute) { + route + .get((req: Request, res: Response) => { + const page = Backend.getPage(this.getPath(req)); + logger.log(`Start get content for:${page.file}`); + page + .read() + .then((data) => { + res.send({data}); + }) + .catch(() => res.send({})); + }) + .post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + // @todo: refactor 2nd argument. 
+ const page = Backend.getPage(this.getPath(req)); + logger.log(`Start post content for:${page.file}`); + page + .write(req.body) + .then((data) => { + logger.log('Sending', data); + res.send({data}); + }) + .catch((error: any) => { + logger.log(error); + res.status(500).send({error}); + }); + }) + .delete((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const page = Backend.getPage(this.getPath(req)); + logger.log(`Start deletion for:${page.file}`); + page + .delete() + .then((data) => { + logger.log('Sending', data); + res.send({data}); + }) + .catch((error: any) => { + logger.log(error); + res.status(501).send({error}); + }); + }); + } + + removePage(route: IRoute) { + route.delete((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const pagePath = req.params[0]; + const page = Backend.getPage(pagePath); + page.setBasePath(this.pagePath); + + logger.log(`Start deleting page:${page.directory}`); + + page.deleteDirectory().then((error: any) => { + if (error) { + logger.log(error); + res.send(error); + } else { + res.send({}); + } + }); + }); + } + + removeFile(route: IRoute) { + route.delete((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const pagePath = req.params[0]; + const page = Backend.getPage(pagePath); + page.setBasePath(this.pagePath); + // @todo: remove hard coded path + const origin = `./src/data/pages/${pagePath}index.json`; + logger.log(`Start deleting file: ${origin}`); + + page.removeFile(origin).then((error: any) => { + if (error) { + logger.log(error); + res.send(error); + } else { + res.send({}); + } + }); + }); + } + + directoryChild(route: IRoute) { + route.delete((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const pagePath = req.params[0]; + const page = Backend.getPage(pagePath); + + page.setBasePath(this.pagePath); + + logger.log(`Start verify page child directory: ${page.directory}`); + + page.hasChildDirectory().then((error: any) => { + if (error) { + logger.log(error); + res.send(error); + } else { + res.send({}); + } + }); + }); + } + + directoryExists(route: IRoute) { + route.delete((req: Request, res: Response) => { + const pagePath = req.params[0]; + const page = Backend.getPage(pagePath); + + page.setBasePath(this.pagePath); + + logger.log(`Start verifying new page exists: ${page.directory}`); + + page.directoryExists(page.directory).then((error: any) => { + if (error) { + logger.log(error); + res.send(error); + } else { + res.send({}); + } + }); + }); + } + + setPages(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const { body } = req; + const pagePath = body.path || ''; + const template = body.template || '_default'; + const filePath = path.join(pagePath, 'index'); + const pageContent = { + '#template': template, + }; + const page = Backend.getPage(filePath); + page.setBasePath(this.pagePath); + logger.log(`Start creating page for:${page.file}`); + if (page.exists) { + res.status(409); + res.send({error: `Error: page ${pagePath} already exists`}); + return; + } + page + .write(pageContent) + .then((data: any) => { + logger.log('Sending', data); + res.status(201); + res.send(data); + }) + .catch((reason) => { + logger.log(reason); + res.send({}); + }); + }); + } + + clonePage(route: IRoute) { + route.post(async (req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const { + body: { origin, destination }, + } = req; + const page = 
Backend.getPage(destination); + page.setBasePath(this.pagePath); + + logger.log(`Start cloning page for:${destination}`); + + page + .copyDirectory(origin, destination) + .then((data) => { + if (data) { + logger.log(JSON.stringify(data)); + res.send(data); + } else { + res.send({}); + } + }) + .catch((reason) => { + res.status(500).send({err: `${reason}`}); + }); + }); + } + + removeAssets(route: IRoute) { + route.delete(async (req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const origin = req.params[0]; + const page = Backend.getPage(origin); + + logger.log(`Start removing assets for:${origin}`); + + const originPath = origin.replace(/\/$/, ''); + const originStaticPath = path.join( + this.staticPath, + '/images/pages', + originPath, + ); + + page.removePageAssets(originStaticPath).then((error) => { + if (error) { + logger.log(error); + res.send({error}); + } else { + res.send({}); + } + }); + }); + } + + copyAssets(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const { + body: { path_from: pathFrom, path_to: pathTo }, + } = req; + const assetStaticPathFrom = path.join(this.staticPath, pathFrom); + const assetStaticPathTo = path.join(this.staticPath, pathTo); + logger.log( + `Copy assets from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`, + ); + try { + copyFile(assetStaticPathFrom, assetStaticPathTo); + setTimeout(() => { + res.send({ status: 'success' }); + }, 500); + } catch (error: any) { + logger.log(error); + res.status(500).send(error); + } + }); + } + + moveAssets(route: IRoute) { + route.post((req: Request, res: Response) => { + if (!this.ensureSaveEnabled(res)) return; + const { + body: { path_from: pathFrom, path_to: pathTo }, + } = req; + const assetStaticPathFrom = path.join(this.staticPath, pathFrom); + const assetStaticPathTo = path.join(this.staticPath, pathTo); + logger.log( + `Move asset from: ${assetStaticPathFrom} to ${assetStaticPathTo}, cwd: ${process.cwd()}`, + ); + try { + moveFile(assetStaticPathFrom, assetStaticPathTo); + setTimeout(() => { + res.send({ status: 'success' }); + }, 500); + } catch (error: any) { + logger.log(error); + res.status(500).send(error); + } + }); + } + + static sanitizeOutput(data: string) { + return data.replace(/(http|https):\/\/[^@]+:[^@]+@/gi, '$1://****:****@'); + } + + start(port: string | number) { + logger.log('Start'); + this.app.listen(port, () => logger.log(`Backend listening on Port: ${port}`)); + } +} + +export default Backend; diff --git a/packages/bodiless-backend/src/fileHelper.js b/packages/bodiless-backend/src/fileHelper.ts similarity index 65% rename from packages/bodiless-backend/src/fileHelper.js rename to packages/bodiless-backend/src/fileHelper.ts index 5be924b6d5..fbe0470f65 100644 --- a/packages/bodiless-backend/src/fileHelper.js +++ b/packages/bodiless-backend/src/fileHelper.ts @@ -12,24 +12,26 @@ * limitations under the License. 
*/ -const fs = require('fs'); -const fse = require('fs-extra'); -const path = require('path'); -const MD5 = require('crypto-js/md5'); +import fs from 'fs'; +import fse from 'fs-extra'; +import path from 'path'; +import MD5 from 'crypto-js/md5'; +import type { Files, File } from 'formidable'; -const backendStaticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; +const getStaticPath = () => (process.env.BODILESS_BACKEND_STATIC_PATH || ''); -const copyFilePromise = (from, to) => new Promise((resolve, reject) => { +const copyFilePromise = (from: string, to: string) => new Promise((resolve, reject) => { fs.copyFile(from, to, copyErr => { if (copyErr) reject(copyErr); fs.unlinkSync(from); + const backendStaticPath = getStaticPath(); resolve(`/${path.relative(backendStaticPath, to)}`); }); }); -const generateHash = str => MD5(str).toString(); +const generateHash = (str: string) => MD5(str).toString(); -const isImage = fileType => { +const isImage = (fileType: string) => { const imageFileTypes = [ 'image/jpeg', 'image/png', @@ -40,12 +42,19 @@ const isImage = fileType => { return imageFileTypes.includes(fileType); }; -const copyAllFiles = (files, baseResourcePath, nodePath) => { - const allFiles = []; - Object.keys(files).forEach(key => allFiles.push(files[key])); +const copyAllFiles = (files: Files, baseResourcePath: string, nodePath: string) => { + const allFiles: File[] = []; + Object.keys(files).forEach(key => { + if (Array.isArray(files[key])) { + allFiles.push(...files[key] as File[]); + } else { + allFiles.push(files[key] as File); + } + }); - return Promise.all(allFiles.map(file => { - const baseDir = isImage(file.type) ? 'images' : 'files'; + return Promise.all(allFiles.map((file: File) => { + const baseDir = isImage(file.type || '') ? 'images' : 'files'; + const backendStaticPath = getStaticPath(); const distFolderPath = path.join( backendStaticPath, baseDir, @@ -57,7 +66,7 @@ const copyAllFiles = (files, baseResourcePath, nodePath) => { fs.mkdirSync(distFolderPath, { recursive: true }); } - return copyFilePromise(file.path, path.join(distFolderPath, file.name)); + return copyFilePromise(file.path, path.join(distFolderPath, file.name || '')); })); }; @@ -71,10 +80,10 @@ const copyAllFiles = (files, baseResourcePath, nodePath) => { * @param pathFrom string - source file path * @param pathTo string - destination file path */ -const copyFile = (pathFrom, pathTo) => { +const copyFile = (pathFrom: string, pathTo: string) => { try { fse.copySync(pathFrom, pathTo); - } catch (err) { + } catch (err: any) { throw new Error(`Failed to copy file from ${pathFrom} to ${pathTo}: ${err.message}`); } }; @@ -85,15 +94,15 @@ const copyFile = (pathFrom, pathTo) => { * @param pathFrom string - source file path * @param pathTo string - destination file path */ -const moveFile = (pathFrom, pathTo) => { +const moveFile = (pathFrom: string, pathTo: string) => { try { fse.moveSync(pathFrom, pathTo, { overwrite: true }); - } catch (err) { + } catch (err: any) { throw new Error(`Failed to move file from ${pathFrom} to ${pathTo}: ${err.message}`); } }; -module.exports = { +export { copyAllFiles, moveFile, copyFile, diff --git a/packages/bodiless-backend/src/GitCmd.js b/packages/bodiless-backend/src/gitCmd.ts similarity index 69% rename from packages/bodiless-backend/src/GitCmd.js rename to packages/bodiless-backend/src/gitCmd.ts index 51090e17c3..b8524fc1cf 100644 --- a/packages/bodiless-backend/src/GitCmd.js +++ b/packages/bodiless-backend/src/gitCmd.ts @@ -12,14 +12,36 @@ * limitations under the License. 
*/ -const { spawn } = require('child_process'); -const Logger = require('./logger'); +import { spawn } from 'child_process'; +import type { SpawnOptionsWithoutStdio } from 'child_process'; +import Logger from './logger'; const logger = new Logger('BACKEND'); + +export type GitInfoType = { + stdout: string, + stderr: string, + code: number | null; +}; + +export class GitCmdError extends Error { + code: number | null = 0; + + info?: GitInfoType; +} + /* This Class wraps spawn and lets us build out git commands with standard responses */ class GitCmd { + cmd: string = ''; + + params: string[] = []; + + files: string[] = []; + + options?: SpawnOptionsWithoutStdio; + constructor() { this.cmd = 'git'; this.params = []; @@ -27,17 +49,17 @@ class GitCmd { this.options = {}; } - add(...params) { + add(...params: string[]) { this.params.push(...params); return this; } - set(options) { + set(options: SpawnOptionsWithoutStdio) { this.options = { ...this.options, ...options }; return this; } - addFiles(...files) { + addFiles(...files: string[]) { this.files.push(...files); // const rawFiles = [...arguments] // this.files.push(...rawFiles.map((file) => file.replace(/ /,'\ '))) @@ -46,12 +68,17 @@ class GitCmd { spawn() { const args = [...this.params, ...this.files]; - logger.log([`Spawning command: ${this.cmd}`, ...args, Date.now(), process.cwd()]); + logger.log( + `Spawning command: ${this.cmd}`, + ...args, + Date.now().toString(), + process.cwd() + ); return spawn(this.cmd, args, this.options); } exec() { - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { const cmd = this.spawn(); let stderr = ''; let stdout = ''; @@ -67,7 +94,11 @@ class GitCmd { stderr += err.message; }); cmd.on('close', code => { - logger.log(stdout, stderr, code); + logger.log( + stdout, + stderr, + `${code || 0}`, + ); if (code === 0) { resolve({ stdout, stderr, code }); return; @@ -78,8 +109,8 @@ class GitCmd { return; } - const error = new Error(`${stderr}`); - error.code = `${code}`; + const error = new GitCmdError(`${stderr}`); + error.code = code; error.info = { stdout, stderr, code }; reject(error); }); @@ -91,4 +122,4 @@ class GitCmd { } } -module.exports = GitCmd; +export default GitCmd; diff --git a/packages/bodiless-backend/src/logger.js b/packages/bodiless-backend/src/logger.ts similarity index 83% rename from packages/bodiless-backend/src/logger.js rename to packages/bodiless-backend/src/logger.ts index 816b32fee3..1c4a83e8a4 100644 --- a/packages/bodiless-backend/src/logger.js +++ b/packages/bodiless-backend/src/logger.ts @@ -14,11 +14,13 @@ */ class Logger { - constructor(prefix) { + prefix: string = ''; + + constructor(prefix: string) { this.prefix = `[${prefix.toUpperCase()}]`; } - print(message, severity) { + print(message: string[], severity?: string) { const fullMessage = [this.prefix, new Date().toISOString(), ...message]; switch (severity) { case 'error': @@ -32,17 +34,17 @@ class Logger { } } - log(...args) { + log(...args: string[]) { this.print(args); } - error(...args) { + error(...args: string[]) { this.print(args, 'error'); } - warn(...args) { + warn(...args: string[]) { this.print(args, 'warn'); } } -module.exports = Logger; +export default Logger; diff --git a/packages/bodiless-backend/src/page.js b/packages/bodiless-backend/src/page.ts similarity index 66% rename from packages/bodiless-backend/src/page.js rename to packages/bodiless-backend/src/page.ts index 29d67c16ff..f4347c3750 100644 --- a/packages/bodiless-backend/src/page.js +++ 
b/packages/bodiless-backend/src/page.ts @@ -12,25 +12,23 @@ * limitations under the License. */ -const fs = require('fs'); -const fse = require('fs-extra'); -const path = require('path'); -const os = require('os'); -const replace = require('replace-in-file'); -const Logger = require('./logger'); +import fs from 'fs'; +import fse from 'fs-extra'; +import path from 'path'; +import os from 'os'; +import replace from 'replace-in-file'; +import type { Dirent, PathLike } from 'fs'; +import type { ReplaceInFileConfig, ReplaceResult } from 'replace-in-file'; +import Logger from './logger'; const logger = new Logger('BACKEND'); -const backendFilePath = process.env.BODILESS_BACKEND_DATA_FILE_PATH || ''; -const backendStaticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; -const IMG_ASSETS_PATH = `/images${path.sep}pages`; - -const getDirectories = (dir) => fs +const getDirectories = (dir: string) => fs .readdirSync(dir) .filter((file) => fs.statSync(`${dir}/${file}`).isDirectory()); -// once we on node > 10.12.0 +// @todo: update to fs.mkdir - once we on node > 10.12.0 // we can leverage fs.mkdir since it supports { recursive: true } -function ensureDirectoryExistence(filePath) { +function ensureDirectoryExistence(filePath: string): void { const dirname = path.dirname(filePath); if (fs.existsSync(dirname)) { return; @@ -40,22 +38,34 @@ function ensureDirectoryExistence(filePath) { } class Page { - supportedExtensions = ['json', 'tsx', 'jsx', 'js']; + path: string = ''; + + basePath: string = ''; + + backendFilePath: string; + + backendStaticPath: string = process.env.BODILESS_BACKEND_STATIC_PATH || ''; - constructor(pagePath) { + static imgAssetsPath: string = `/images${path.sep}pages`; + + extensions = ['json', 'tsx', 'jsx', 'js']; + + constructor(pagePath: string) { this.path = pagePath; + this.backendFilePath = process.env.BODILESS_BACKEND_DATA_FILE_PATH || ''; + this.backendStaticPath = process.env.BODILESS_BACKEND_STATIC_PATH || ''; } getBasePath() { - return this.basePath || backendFilePath; + return this.basePath || this.backendFilePath; } - setBasePath(basePath) { + setBasePath(basePath: string) { this.basePath = basePath; } get supportedExtensions() { - return this.supportedExtensions; + return this.extensions; } get exists() { @@ -74,86 +84,94 @@ class Page { } read() { - const readPromise = new Promise((resolve) => { - fs.readFile(this.file, (err, data) => { - if (err) logger.log(err); - resolve(data || {}); - }); + return new Promise((resolve) => { + fs.readFile( + this.file, + 'utf8', + (err: NodeJS.ErrnoException | null, data: string) => { + if (err) logger.log(err.message); + resolve(data); + } + ); }); - return readPromise; } - write(data) { - const readPromise = new Promise((resolve, reject) => { + write(data: any) { + return new Promise((resolve, reject) => { ensureDirectoryExistence(this.file); fs.writeFile(this.file, JSON.stringify(data, null, 2), (err) => { if (err) { reject(err); } - resolve(this); + resolve('ok'); }); }); - return readPromise; } delete() { - const readPromise = new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { ensureDirectoryExistence(this.file); fs.unlink(this.file, (err) => { if (err) { reject(err); } - resolve(this); + resolve('ok'); }); }); - return readPromise; } - static dirHasSubObjects(dirPath, objType) { - return new Promise((resove) => { + static dirHasSubObjects(dirPath: string, objType?: string) { + // @todo: add reject + return new Promise((resolve) => { try { - fs.readdir(dirPath, { withFileTypes: true }, (err, 
files) => { - if (err) { - return resove([]); - } - - const filteredObjects = files.filter((item) => { - if (objType === 'file') { - return item.isFile(); - } - if (objType === 'directory') { - return item.isDirectory(); + fs.readdir( + dirPath, + { withFileTypes: true }, + (err: NodeJS.ErrnoException | null, files: Dirent[]) => { + if (err) { + return resolve([]); } - return true; - }); - if (!filteredObjects.length) { - return resove([]); + const filteredObjects = files.filter((item) => { + if (objType === 'file') { + return item.isFile(); + } + if (objType === 'directory') { + return item.isDirectory(); + } + return true; + }); + + if (!filteredObjects.length) { + return resolve([]); + } + return resolve(filteredObjects); } - return resove(filteredObjects); - }); + ); } catch (error) { - resove([]); + resolve([]); } }); } - static dirHasFiles(dirPath) { + static dirHasFiles(dirPath: string): Promise { return Page.dirHasSubObjects(dirPath, 'file'); } - static dirHasDirectories(dirPath) { + static dirHasDirectories(dirPath: string) { return Page.dirHasSubObjects(dirPath, 'directory'); } - static rmDirectories(destinationPath, dirPaths) { - const dels = []; + static rmDirectories(destinationPath: string, dirPaths: Dirent[]) { + const dels: Promise[] = []; dirPaths.forEach((dir) => { dels.push( - new Promise((resove) => { - fse.remove(`${destinationPath}/${dir.name}`, (err) => { - if (err) return console.error(err); - return resove(); + new Promise((resolve, reject) => { + fse.remove(`${destinationPath}/${dir.name}`, (err: any) => { + if (err) { + return reject(err); + } + return resolve('ok'); }); }), ); @@ -161,18 +179,22 @@ class Page { return Promise.resolve(Promise.all(dels)); } - static jsFilesPathResolve(originPath, destinationPath, files) { - const actions = []; + static jsFilesPathResolve( + originPath: string, + destinationPath: string, + files: Dirent[] + ) { + const actions: Promise[] = []; const reg = /from ('|")(\.\..*)('|")/g; - const readF = (file) => new Promise((resolve, reject) => { + const readF = (file: Dirent) => new Promise((resolve, reject) => { const filePath = `${destinationPath}/${file.name}`; fs.readFile(filePath, 'utf8', (err, content) => { if (err) return reject(); - const matchs = content.match(reg); - if (!matchs.length) return reject(); + const matches = content.match(reg); + if (!matches?.length) return reject(); let newContent = content; - matchs.forEach((item) => { + matches.forEach((item) => { const delimiter = item[item.search(/'|"/)]; const oldPath = item.split(' ')[1].replace(/'|"/g, ''); const from = path.dirname(filePath); @@ -186,7 +208,7 @@ class Page { }); fs.writeFile(filePath, newContent, (writeErr) => { if (writeErr) return reject(); - return resolve(); + return resolve('ok'); }); return true; }); @@ -199,19 +221,19 @@ class Page { return Promise.resolve(Promise.all(actions)); } - async copyDirectory(origin, destination) { - const bp = this.basePath; - const originPath = `${bp}${origin}`.replace(/\/$/, ''); - const destinationPath = `${bp}${destination}`.replace(/\/$/, ''); - + async copyDirectory(origin: string, destination: string): Promise { + const {basePath} = this; + const originPath = path.resolve(basePath, origin).replace(/\/$/, ''); + const destinationPath = path.resolve(basePath, destination).replace(/\/$/, ''); const isDestinationPathExists = await Page.dirHasFiles(destinationPath); + if (isDestinationPathExists.length) { - return Promise.reject(new Error(`page ${destination} already exists`)); + return Promise.reject(new 
Error(`page ${destinationPath} already exists`)); } const isOriginPathExists = await Page.dirHasFiles(originPath); if (!isOriginPathExists.length) { - return Promise.reject(new Error(`page ${origin} is not exists`)); + return Promise.reject(new Error(`page ${originPath} does not exist`)); } // Make sure the destination tree exist @@ -222,10 +244,10 @@ class Page { isOriginPathExists.map((file) => { const from = `${originPath}/${file.name}`; const to = `${destinationPath}/${file.name}`; - return new Promise((resove, reject) => { + return new Promise((resolve, reject) => { fse.copy(from, to, (err) => { if (err) return reject(err); - return resove(); + return resolve('ok'); }); }); }), @@ -249,16 +271,25 @@ class Page { } // Clone Image assets - Page.clonePageImgAssets(origin, destination, this.basePath); + this.clonePageImgAssets(origin, destination, this.basePath); - return 'success'; + return {status: 'ok'}; } - static clonePageImgAssets(origin, destination, basePath) { - Page.clonePageAssets(origin, destination, basePath, IMG_ASSETS_PATH); + clonePageImgAssets( + origin: string, + destination: string, + basePath: string + ) { + this.clonePageAssets(origin, destination, basePath, Page.imgAssetsPath); } - static async clonePageAssets(origin, destination, basePath, target) { + async clonePageAssets( + origin: string, + destination: string, + basePath: string, + target: string + ) { const originPath = origin.replace(/\/$/, ''); const originPathCrossPlatform = os.platform() === 'win32' ? originPath.replace('/', '\\\\') @@ -268,9 +299,9 @@ class Page { ? destinationPath.replace('/', '\\') : destinationPath; - const originStaticPath = path.join(backendStaticPath, target, originPath); + const originStaticPath = path.join(this.backendStaticPath, target, originPath); const destinationStaticPath = path.join( - backendStaticPath, + this.backendStaticPath, target, destinationPath, ); @@ -305,7 +336,7 @@ class Page { return new Promise((resolve, reject) => { fse.copy(fromPath, toPath, (err) => { if (err) return reject(err); - return resolve(); + return resolve('ok'); }); }); }), @@ -322,7 +353,7 @@ class Page { const fileToBeUpdated = path.join(destinationPagePath, item); // Make sure to not replace '/images/pages' part of the path // .e.g if the source page path is '/images'; - const imgAssetsPath = IMG_ASSETS_PATH.replace('\\', '\\\\\\\\'); + const imgAssetsPath = Page.imgAssetsPath.replace('\\', '\\\\\\\\'); // If homepage, originPathCrossPlatform will be empty, so this // template string handles both cases: // - '/images/pages' for homepage (/) @@ -332,10 +363,10 @@ class Page { // - '/images/pages/example2' const toPath = `${imgAssetsPath}${destinationPathCrossPlatform}`; - const options = { + const options: ReplaceInFileConfig = { files: fileToBeUpdated, from: fromPath, - to: match => match.replace(fromPath, toPath), + to: (match) => match.replace(fromPath, toPath), }; return Page.updateFileContent(options); }), @@ -343,20 +374,20 @@ class Page { } } } - } catch (err) { + } catch (err: any) { if (err) logger.log(err); } return 'success'; } - static updateFileContent(options) { - return new Promise((resolve) => { + static updateFileContent(options: ReplaceInFileConfig) { + return new Promise((resolve, reject) => { try { - replace(options, (error, results) => { + replace.replaceInFile(options, (error: Error, results: ReplaceResult[]) => { if (error) { - return resolve([]); + reject(error); } - return resolve(results); + resolve(results); }); } catch (error) { resolve([]); @@ -364,7 +395,7 @@ class 
Page { }); } - directoryExists(newDirectory) { + directoryExists(newDirectory: PathLike) { const readPromise = new Promise((resolve) => { fs.access(newDirectory, err => { if (!err) { @@ -376,7 +407,7 @@ class Page { return readPromise; } - removeFile(origin) { + removeFile(origin: string) { const readPromise = new Promise((resolve, reject) => { fs.unlink(origin, err => { if (err) { @@ -415,8 +446,8 @@ class Page { hasChildDirectory() { const readPromise = new Promise((resolve) => { - const subdirs = getDirectories(this.directory); - if (subdirs.length !== 0) { + const subDirs = getDirectories(this.directory); + if (subDirs.length !== 0) { resolve( 'The page cannot be deleted it has child pages. To delete this page, first delete or move all child pages, and retry.', ); @@ -428,16 +459,16 @@ class Page { } // eslint-disable-next-line class-methods-use-this - removePageAssets(path) { - return new Promise((resolve, reject) => { + removePageAssets(path: string) { + return new Promise((resolve, reject) => { fse.remove(path, err => { if (err) { reject(err); } - resolve(); + resolve('ok'); }); }); } } -module.exports = Page; +export default Page; diff --git a/packages/bodiless-backend/src/server.js b/packages/bodiless-backend/src/server.ts similarity index 84% rename from packages/bodiless-backend/src/server.js rename to packages/bodiless-backend/src/server.ts index fe13514382..7c91a57366 100644 --- a/packages/bodiless-backend/src/server.js +++ b/packages/bodiless-backend/src/server.ts @@ -12,15 +12,14 @@ * limitations under the License. */ +import dotenv from 'dotenv'; +import Backend from './backend'; + // Use the same .env file as gatsby develop. -require('dotenv').config({ +dotenv.config({ path: '.env.development', }); -const express = require('express'); -const Backend = require('./backend'); - const backendPort = process.env.BODILESS_BACKEND_PORT || 8001; - -const backend = new Backend(express()); +const backend = new Backend(); backend.start(backendPort); diff --git a/packages/bodiless-backend/src/git.js b/packages/bodiless-backend/src/tools/git.ts similarity index 82% rename from packages/bodiless-backend/src/git.js rename to packages/bodiless-backend/src/tools/git.ts index 7d3b437cdb..828b8e55f5 100644 --- a/packages/bodiless-backend/src/git.js +++ b/packages/bodiless-backend/src/tools/git.ts @@ -11,14 +11,43 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -const path = require('path'); -const util = require('util'); -const os = require('os'); -const rimraf = require('rimraf'); -const { v1 } = require('uuid'); -const copyfiles = require('copyfiles'); -const GitCmd = require('./GitCmd'); -const Logger = require('./logger'); +import path from 'path'; +import util from 'util'; +import os from 'os'; +import rimraf from 'rimraf'; +import { v1 } from 'uuid'; +import copyfiles from 'copyfiles'; +import GitCmd, { GitInfoType } from '../gitCmd'; +import Logger from '../logger'; + +export type GitBranchInfo = { + branch: string | null, + commits: string[], + files: string[], +}; + +export type GitChanges = { + upstream: GitBranchInfo, + production: GitBranchInfo, + local: GitBranchInfo, +}; + +export type GitConflictInfo = { + hasConflict: boolean, + files: string[], + target: string, +}; + +export type GitUtil = { + getCurrentBranch: () => Promise, + getUpstreamBranch: (branch: string, remote?: string) => Promise, + getUpstreamTrackingBranch: (branch: string) => Promise, + getChanges: () => Promise, + getConflicts: (branch?: string) => Promise, + getMergeBase: (b1: string, b2: string) => Promise, + compare: (show: string, comparedTo: string) => Promise<{commits: string[], files: string[]}>, + mergeMain: () => Promise<{}>, +}; /** * Returns the name of the current branch as a string. @@ -32,13 +61,13 @@ const getCurrentBranch = async () => { * Verify the existence of an upstream branch. * @todo: replace with getUpstreamTrackingBranch? */ -const getUpstreamBranch = async (branch, remote = 'origin') => { +const getUpstreamBranch = async (branch: string, remote = 'origin'): Promise => { try { await GitCmd.cmd().add('ls-remote', '--heads', '--exit-code', remote, branch).exec(); return `${remote}/${branch}`; - } catch (e) { + } catch (e: any) { // Catch only the error where the upstream branch doesn't exist. - if (e.code === '2') return undefined; + if (e.code === 2) return ''; throw e; } }; @@ -48,7 +77,7 @@ const getUpstreamBranch = async (branch, remote = 'origin') => { * * @param {string} local branch name. */ -const getUpstreamTrackingBranch = async branch => { +const getUpstreamTrackingBranch = async (branch: string) => { const result = await GitCmd.cmd() .add('for-each-ref', '--format="%(upstream:short)"', `refs/heads/${branch}`) .exec(); @@ -58,15 +87,15 @@ const getUpstreamTrackingBranch = async branch => { /** * Returns the merge-base between two branches. */ -const getMergeBase = async (a, b) => { +const getMergeBase = async (a: string, b: string) => { const mergeBase = await GitCmd.cmd() .add('merge-base', a, b) .exec(); return mergeBase.stdout.trim(); }; -const getGitCmdOutputString = result => result.stdout.trim().replace('\n$', ''); -const getGitCmdOutputArray = result => ( +const getGitCmdOutputString = (result: GitInfoType) => result.stdout.trim().replace('\n$', ''); +const getGitCmdOutputArray = (result: GitInfoType) => ( result.stdout.trim().split('\n').map(l => l.trim()).filter(l => l.length > 0) ); @@ -77,7 +106,7 @@ const getGitCmdOutputArray = result => ( * @param show The branch whose commits to show. * @param comparedTo The branch to compare it to. 
*/ -const compare = async (show, comparedTo) => { +const compare = async (show: string, comparedTo: string) => { const mergeBase = await getMergeBase(show, comparedTo); const commitsPromise = GitCmd.cmd() .add('rev-list', '--oneline', '--left-only', `${show}...${comparedTo}`) @@ -150,7 +179,7 @@ const getChanges = async () => { }, }; return status; - } catch (e) { + } catch (e: any) { throw new Error(`Error occurred: ${e.message}`); } }; @@ -161,7 +190,7 @@ const getChanges = async () => { * @param {string} url - Repo url. * @param {array} options - Clone options [branch|directory]. */ -const clone = async (url, options = {}) => { +const clone = async (url: string, options: {branch?: string, directory?: string} = {}) => { const logger = new Logger('BACKEND'); let result = await GitCmd.cmd().add('config', '--get', 'user.name').exec(); const configName = result.stdout.trim().replace('\n', ''); @@ -173,7 +202,7 @@ const clone = async (url, options = {}) => { cmd.add('--config', `user.name=${configName}`); if (options.branch) cmd.add('-b', options.branch); if (options.directory) cmd.add(options.directory); - logger.log([`Clone to path: ${options.directory}`]); + logger.log(`Clone to path: ${options?.directory}`); return cmd.exec(); }; @@ -185,7 +214,7 @@ const clone = async (url, options = {}) => { * * @return {object} Results. */ -const getConflicts = async (target = 'upstream') => { +const getConflicts = async (target: string = 'upstream') => { // const remoteUrl = await getRemote('origin'); const logger = new Logger('BACKEND'); const tmpDir = path.resolve(process.env.BODILESS_BACKEND_TMP || os.tmpdir(), v1()); @@ -201,12 +230,12 @@ const getConflicts = async (target = 'upstream') => { const rootResult = await GitCmd.cmd().add('rev-parse', '--show-toplevel').exec(); const rootDir = getGitCmdOutputString(rootResult); - logger.log([`Repo root: ${rootDir}`]); + logger.log(`Repo root: ${rootDir}`); let workBranch = ''; let targetBranch = ''; let uncommittedResult; - let files = []; + let files: string[] = []; switch (target) { case 'edit': targetBranch = `origin-${branch}`; @@ -249,14 +278,17 @@ const getConflicts = async (target = 'upstream') => { await clone(rootDir, { directory: tmpDir, branch: targetBranch }); process.chdir(tmpDir); - const copyfilesPromised = util.promisify(copyfiles); + const copyfilesPromised = util.promisify(copyfiles); if (files.length) { - logger.log([`Copy Files: ${files} ${tmpDir}`, process.cwd()]); + logger.log(`Copy Files: ${files} ${tmpDir}`, process.cwd()); try { const result = await copyfilesPromised( [...files, tmpDir], - { error: true, up: (rootDir.match(/\//g) || []).length + 1 }, + { + error: true, + up: (rootDir.match(/\//g) || []).length + 1 + }, ); logger.log(`Result: ${result}`); @@ -266,12 +298,12 @@ const getConflicts = async (target = 'upstream') => { await GitCmd.cmd() .add('commit', '-m', 'TEMPORARY COMMIT') .exec(); - } catch (e) { + } catch (e: any) { logger.error(e); } } - let conflictFiles = []; + let conflictFiles: string[] = []; try { await GitCmd.cmd() .add('merge', '--no-commit', '--no-ff', 'origin/origin-main') @@ -356,7 +388,7 @@ const mergeMain = async () => { await GitCmd.cmd() .add('push') .exec(); - } catch (e) { + } catch (e: any) { logger.error(e); } @@ -371,7 +403,7 @@ const mergeMain = async () => { return {}; }; -module.exports = { +const gitUtil: GitUtil = { getCurrentBranch, getUpstreamBranch, getUpstreamTrackingBranch, @@ -381,3 +413,5 @@ module.exports = { compare, mergeMain, }; + +export default gitUtil; diff --git 
a/packages/bodiless-backend/tsconfig.json b/packages/bodiless-backend/tsconfig.json new file mode 100644 index 0000000000..b8867aff8d --- /dev/null +++ b/packages/bodiless-backend/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": "../../tsconfig.settings.json", + "compilerOptions": { + "rootDir": "src", + "module": "CommonJS", + "outDir": "lib", + "baseUrl": "." + }, + "include": [ + "src" + ], + "exclude": [ + "lib", + "bin", + "node_modules", + "__tests__/**/*.*" + ] +} diff --git a/playwright/pages/base-page.ts b/playwright/pages/base-page.ts index e79199e1ab..110b992ad3 100644 --- a/playwright/pages/base-page.ts +++ b/playwright/pages/base-page.ts @@ -162,8 +162,8 @@ export class BasePage { async isImageVisible(imageXpath: string) { expect(await this.page.locator(imageXpath).isVisible()).toBeTruthy(); - const imageDimentions = await this.page.locator(imageXpath).boundingBox(); - expect(imageDimentions!.width).toBeGreaterThan(0); - expect(imageDimentions!.height).toBeGreaterThan(0); + const imageDimensions = await this.page.locator(imageXpath).boundingBox(); + expect(imageDimensions!.width).toBeGreaterThan(0); + expect(imageDimensions!.height).toBeGreaterThan(0); } } diff --git a/playwright/tests/smoke-deprecated/editorMenu.spec.ts b/playwright/tests/smoke-deprecated/editorMenu.spec.ts index ed3e8a6503..689e26b0c5 100644 --- a/playwright/tests/smoke-deprecated/editorMenu.spec.ts +++ b/playwright/tests/smoke-deprecated/editorMenu.spec.ts @@ -12,8 +12,10 @@ * limitations under the License. */ // editorMenu.spec.ts -/* eslint-disable jest/expect-expect */ -import { expect, Page, test } from '@playwright/test'; + +import { + expect, Page, test, BrowserContext +} from '@playwright/test'; import { EditorMenuPage } from '../../pages/editor-menu-page'; async function checkEditorMenuButtons(page: Page, editorMenuPage: EditorMenuPage) { @@ -34,7 +36,7 @@ async function checkAddNewPageButton(page: Page, editorMenuPage: EditorMenuPage) test.describe('Editor Menu (left and right)', () => { let page: Page; - let context:any; + let context: BrowserContext; let editorMenuPage: EditorMenuPage; test.beforeAll(async ({ browser }) => { context = await browser.newContext(); diff --git a/playwright/tests/smoke-deprecated/flowContainer.spec.ts b/playwright/tests/smoke-deprecated/flowContainer.spec.ts index 8538f11871..fab08bda04 100644 --- a/playwright/tests/smoke-deprecated/flowContainer.spec.ts +++ b/playwright/tests/smoke-deprecated/flowContainer.spec.ts @@ -103,7 +103,7 @@ test.describe('Flow container', async () => { ]); const containerWidth = await page.locator(flowContainerPage.flowContainer33Width).boundingBox(); const contentfulWidth = await page.locator(flowContainerPage.elementInside33Width).boundingBox(); - const ratio = Math.floor(((contentfulWidth.width + 40) / containerWidth.width) * 100); + const ratio = Math.floor((((contentfulWidth?.width || 0) + 40) / (containerWidth?.width || 0)) * 100); expect.soft(ratio).toBeCloseTo(32); }); diff --git a/tsconfig.settings.json b/tsconfig.settings.json index 84a58419dc..0ce25feb2b 100644 --- a/tsconfig.settings.json +++ b/tsconfig.settings.json @@ -27,6 +27,8 @@ "--vital--/*": ["__vital__/src/*"], "@bodiless/accordion": ["bodiless-accordion/src"], "@bodiless/accordion/*": ["bodiless-accordion/src/*"], + "@bodiless/backend": ["bodiless-backend/src"], + "@bodiless/backend/*": ["bodiless-backend/src/*"], "@bodiless/bv": ["bodiless-bv/src"], "@bodiless/bv/*": ["bodiless-bv/src/*"], "@bodiless/card": ["bodiless-card/src"],