diff --git a/.eslintrc b/.eslintrc
index f77da8144e9c..c507546dbfef 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -312,6 +312,12 @@
       "rules": {
         "max-len": "off"
       }
+    },
+    {
+      "files": ["scripts/**/*"],
+      "rules": {
+        "import/no-extraneous-dependencies": "off"
+      }
     }
   ]
-}
+}
\ No newline at end of file
diff --git a/jest.config.js b/jest.config.js
index 1e24444efd95..45bc1b4ace7e 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -1,10 +1,15 @@
-module.exports = {
+const config = {
+  transform: {
+    '^.+\\.[t|j]sx?$': ['babel-jest', { configFile: './tests/babel.test.config.cjs' }]
+  },
   verbose: true,
   collectCoverage: true,
   coverageReporters: ['text', 'lcov', 'json-summary'],
   coverageDirectory: 'coverage',
-  collectCoverageFrom: ['scripts/**/*.js'],
-  coveragePathIgnorePatterns: ['scripts/compose.js', 'scripts/tools/categorylist.js', 'scripts/tools/tags-color.js'],
-  // To disallow netlify edge function tests from running
-  testMatch: ['**/tests/**/*.test.*', '!**/netlify/**/*.test.*']
+  collectCoverageFrom: ['scripts/**/*.ts'],
+  coveragePathIgnorePatterns: ['scripts/compose.ts', 'scripts/tools/categorylist.ts', 'scripts/tools/tags-color.ts'],
+  testMatch: ['**/tests/**/*.test.*', '!**/netlify/**/*.test.*'],
+  moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json']
 };
+
+export default config;
diff --git a/next-i18next.config.cjs b/next-i18next.config.cjs
new file mode 100644
index 000000000000..6ee70f95d5fe
--- /dev/null
+++ b/next-i18next.config.cjs
@@ -0,0 +1,13 @@
+// The file is required to be named next-i18next.config.cjs so we can use it in next.config.js.
+// https://github.com/i18next/next-i18next/issues/2185#issuecomment-1618307556
+process.env.I18NEXT_DEFAULT_CONFIG_PATH = './next-i18next.config.cjs';
+
+module.exports = {
+  i18n: {
+    locales: ['en', 'de'],
+    defaultLocale: 'en',
+    namespaces: ['landing-page', 'common', 'tools'],
+    defaultNamespace: 'landing-page',
+    react: { useSuspense: false } // this line
+  }
+};
diff --git a/next-i18next.config.js b/next-i18next.config.js
deleted file mode 100644
index 2848266d6554..000000000000
--- a/next-i18next.config.js
+++ /dev/null
@@ -1,10 +0,0 @@
-module.exports = {
-  i18n: {
-    locales: ['en', 'de'],
-    defaultLocale : 'en',
-    namespaces: ['landing-page', 'common', 'tools'],
-    defaultNamespace: 'landing-page',
-    react: { useSuspense: false },// this line
-  },
-
-  };
diff --git a/package-lock.json b/package-lock.json index 5e84331a4117..996dc4005045 100644 --- a/package-lock.json +++ b/package-lock.json @@ -78,10 +78,12 @@ "swiper": "^11.0.7", "tailwind-merge": "^2.2.1", "tailwindcss": "^3.4.3", - "typescript": "^5.3.3", + "tsx": "^4.19.2", + "winston": "^3.17.0", "yaml": "^2.3.4" }, "devDependencies": { + "@babel/preset-typescript": "^7.26.0", "@chromatic-com/storybook": "^1.6.1", "@netlify/functions": "^2.6.0", "@netlify/plugin-nextjs": "^4.41.3", @@ -93,6 +95,9 @@ "@storybook/nextjs": "^8.2.4", "@storybook/react": "^8.2.4", "@storybook/test": "^8.2.4", + "@types/fs-extra": "^11.0.4", + "@types/inquirer": "^9.0.7", + "@types/jest": "^29.5.14", "@types/lodash": "^4.17.0", "@types/node": "^20", "@types/react": "^18.0.1", @@ -102,6 +107,7 @@ "@types/react-youtube-embed": "^1.0.4", "@typescript-eslint/eslint-plugin": "^6.21.0", "@typescript-eslint/parser": "^6.21.0", + "babel-plugin-transform-import-meta": "^2.2.1", "dedent": "^1.5.1", "eslint": "^8", "eslint-config-airbnb-typescript": "^17.1.0", @@ -122,7 +128,8 @@ "remark-cli": "^12.0.1", "remark-lint": "^10.0.0", "remark-mdx": "^3.0.1", - "storybook": "^8.2.4" + "storybook": "^8.2.4", +
"typescript": "^5.7.2" } }, "node_modules/@adobe/css-tools": { @@ -328,11 +335,12 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", "dependencies": { - "@babel/highlight": "^7.24.7", + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", "picocolors": "^1.0.0" }, "engines": { @@ -385,25 +393,26 @@ } }, "node_modules/@babel/generator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.7.tgz", - "integrity": "sha512-oipXieGC3i45Y1A41t4tAqpnEZWgB/lC6Ehh6+rOviR5XWpTtMmLN+fGjz9vOiNRt0p6RtO6DtD0pdU3vpqdSA==", + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.3.tgz", + "integrity": "sha512-6FF/urZvD0sTeO7k6/B15pMLC4CHUv1426lzr3N01aHJTl046uCAh9LXW/fzeXXjPNCJ6iABW5XaWOsIZB93aQ==", "dependencies": { - "@babel/types": "^7.24.7", + "@babel/parser": "^7.26.3", + "@babel/types": "^7.26.3", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^2.5.1" + "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.7.tgz", - "integrity": "sha512-BaDeOonYvhdKw+JoMVkAixAAJzG2jVPIwWoKBPdYuY9b452e2rPuI9QPYh3KpofZ3pW2akOmwZLOiOsHMiqRAg==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.25.9.tgz", + "integrity": "sha512-gv7320KBUFJz1RnylIg5WWYPRXKZ884AGkYpgpWW02TH66Dl+HaC1t1CKd0z3R4b6hdYEcmrNZHUmfCP+1u3/g==", "dependencies": { - "@babel/types": "^7.24.7" + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -445,18 +454,16 @@ } }, "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.7.tgz", - "integrity": "sha512-kTkaDl7c9vO80zeX1rJxnuRpEsD5tA81yh11X1gQo+PhSti3JS+7qeZo9U4RHobKRiFPKaGK3svUAeb8D0Q7eg==", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-member-expression-to-functions": "^7.24.7", - "@babel/helper-optimise-call-expression": "^7.24.7", - "@babel/helper-replace-supers": "^7.24.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.25.9.tgz", + "integrity": "sha512-UTZQMvt0d/rSz6KI+qdu7GQze5TIajwTS++GUozlw8VBJDEOAqSXwm1WvmYEZwqdqSGQshRocPDqrt4HBZB3fQ==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-member-expression-to-functions": "^7.25.9", + "@babel/helper-optimise-call-expression": "^7.25.9", + "@babel/helper-replace-supers": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9", + "@babel/traverse": 
"^7.25.9", "semver": "^6.3.1" }, "engines": { @@ -548,39 +555,37 @@ } }, "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.7.tgz", - "integrity": "sha512-LGeMaf5JN4hAT471eJdBs/GK1DoYIJ5GCtZN/EsL6KUiiDZOvO/eKE11AMZJa2zP4zk4qe9V2O/hxAmkRc8p6w==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.25.9.tgz", + "integrity": "sha512-wbfdZ9w5vk0C0oyHqAJbc62+vet5prjj01jjJ8sKn3j9h3MQQlflEdXYvuqRWjHnM12coDEqiC1IRCi0U/EKwQ==", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", - "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.7.tgz", - "integrity": "sha512-1fuJEwIrp+97rM4RWdO+qrRsZlAeL1lQJoPqtCYWv0NL115XM93hIH4CSRln2w52SqvmY5hqdtauB6QFCDiZNQ==", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-module-imports": "^7.24.7", - "@babel/helper-simple-access": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7" + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -590,20 +595,20 @@ } }, "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.7.tgz", - "integrity": "sha512-jKiTsW2xmWwxT1ixIdfXUZp+P5yURx2suzLZr5Hi64rURpDYdMW0pv+Uf17EYk2Rd428Lx4tLsnjGJzYKDM/6A==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.25.9.tgz", + "integrity": "sha512-FIpuNaz5ow8VyrYcnXQTDRGvV6tTjkNtCK/RYNDXGSLlUD6cBuQTSw43CShGxjvfBTfcUA/r6UhUCbtYqkhcuQ==", "dependencies": { - "@babel/types": "^7.24.7" + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz", - "integrity": 
"sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz", + "integrity": "sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw==", "engines": { "node": ">=6.9.0" } @@ -625,13 +630,13 @@ } }, "node_modules/@babel/helper-replace-supers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.7.tgz", - "integrity": "sha512-qTAxxBM81VEyoAY0TtLrx1oAEJc09ZK67Q9ljQToqCnA+55eNwCORaxlKyu+rNfX86o8OXRUSNUnrtsAZXM9sg==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.25.9.tgz", + "integrity": "sha512-IiDqTOTBQy0sWyeXyGSC5TBJpGFXBkRynjBeXsvbhQFKj2viwJC76Epz35YLU1fpe/Am6Vppb7W7zM4fPQzLsQ==", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-member-expression-to-functions": "^7.24.7", - "@babel/helper-optimise-call-expression": "^7.24.7" + "@babel/helper-member-expression-to-functions": "^7.25.9", + "@babel/helper-optimise-call-expression": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -640,25 +645,13 @@ "@babel/core": "^7.0.0" } }, - "node_modules/@babel/helper-simple-access": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", - "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", - "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.7.tgz", - "integrity": "sha512-IO+DLT3LQUElMbpzlatRASEyQtfhSE0+m465v++3jyyXeBTBUjtVZg28/gHeV5mrTJqvEKhKroBGAvhW+qPHiQ==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.25.9.tgz", + "integrity": "sha512-K4Du3BFa3gvyhzgPcntrkDgZzQaq6uozzcpGbOO1OEJaI+EJdqWIMTLgFgQf6lrfiDFo5FU+BxKepI9RmZqahA==", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -676,25 +669,25 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.7.tgz", - "integrity": "sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", - "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "version": "7.25.9", + "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.7.tgz", - "integrity": "sha512-yy1/KvjhV/ZCL+SM7hBrvnZJ3ZuT9OuZgIJAGpPEToANvc3iM6iDvBnRjtElWibHU6n8/LPR/EjX9EtIEYO3pw==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", "engines": { "node": ">=6.9.0" } @@ -725,88 +718,13 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/highlight": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", - "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", - "dependencies": { - "@babel/helper-validator-identifier": "^7.24.7", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/highlight/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - }, - "node_modules/@babel/highlight/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/highlight/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/supports-color": { - "version": "5.5.0", - "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "node_modules/@babel/parser": { + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.3.tgz", + "integrity": "sha512-WJ/CvmY8Mea8iDXo6a7RK2wbmJITT5fN3BEkRuFlxVyNx8jOKIIhmC4fSkTcPcf8JyavbBwIe6OpiCOBXt/IcA==", "dependencies": { - "has-flag": "^3.0.0" + "@babel/types": "^7.26.3" }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", - "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", "bin": { "parser": "bin/babel-parser.js" }, @@ -1020,11 +938,11 @@ } }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.7.tgz", - "integrity": "sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", + "integrity": "sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1128,11 +1046,11 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz", - "integrity": "sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", + "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1506,13 +1424,12 @@ } }, "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.7.tgz", - "integrity": "sha512-iFI8GDxtevHJ/Z22J5xQpVqFLlMNstcLXh994xifFwxxGslr2ZXXLWgtBeLctOD63UFDArdvN6Tg8RFw+aEmjQ==", + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.26.3.tgz", + "integrity": "sha512-MgR55l4q9KddUDITEzEFYn5ZsGDXMSsU9E+kh7fjRXTIC3RHqfCo8RPRbyReYJh44HQ/yomFkqbOFohXvDCiIQ==", "dependencies": { - "@babel/helper-module-transforms": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-simple-access": "^7.24.7" + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1930,14 +1847,15 @@ } }, "node_modules/@babel/plugin-transform-typescript": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.24.7.tgz", - "integrity": "sha512-iLD3UNkgx2n/HrjBesVbYX6j0yqn/sJktvbtKKgcaLIQ4bTTQ8obAypc1VpyHPD2y4Phh9zHOaAt8e/L14wCpw==", + "version": "7.26.3", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.26.3.tgz", + "integrity": "sha512-6+5hpdr6mETwSKjmJUdYw0EIkATiQhnELWlE3kJFBwSg/BGIVwVaVbX+gOXBCdc7Ln1RXZxyWGecIXhUfnl7oA==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-create-class-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-typescript": "^7.24.7" + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-create-class-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9", + "@babel/plugin-syntax-typescript": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2157,15 +2075,15 @@ } }, "node_modules/@babel/preset-typescript": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.24.7.tgz", - "integrity": "sha512-SyXRe3OdWwIwalxDg5UtJnJQO+YPcTfwiIY2B0Xlddh9o7jpWLvv8X1RthIeDOxQ+O1ML5BLPCONToObyVQVuQ==", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.26.0.tgz", + "integrity": "sha512-NMk1IGZ5I/oHhoXEElcm+xUnL/szL6xflkFZmoEU9xj1qSJXpiS7rsspYo92B4DRCDvZn2erT5LdsCeXAKNCkg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-validator-option": "^7.24.7", - "@babel/plugin-syntax-jsx": "^7.24.7", - "@babel/plugin-transform-modules-commonjs": "^7.24.7", - "@babel/plugin-transform-typescript": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "@babel/plugin-syntax-jsx": "^7.25.9", + "@babel/plugin-transform-modules-commonjs": "^7.25.9", + "@babel/plugin-transform-typescript": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2334,31 +2252,28 @@ } }, "node_modules/@babel/template": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", - "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", + "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/code-frame": "^7.25.9", + "@babel/parser": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.7.tgz", - "integrity": "sha512-yb65Ed5S/QAcewNPh0nZczy9JdYXkkAbIsEo+P7BE7yO3txAY30Y/oPa3QkQ5It3xVG2kpKMg9MsdxZaO31uKA==", - "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.7", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-hoist-variables": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7", + "version": "7.26.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.4.tgz", + "integrity": "sha512-fH+b7Y4p3yqvApJALCPJcwb0/XaOSgtK4pzV6WVjPR5GLFQBRI7pfoX2V2iM48NXvX07NUxxm1Vw98YjqTcU5w==", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.3", + "@babel/parser": "^7.26.3", + "@babel/template": "^7.25.9", + "@babel/types": "^7.26.3", "debug": 
"^4.3.1", "globals": "^11.1.0" }, @@ -2367,13 +2282,12 @@ } }, "node_modules/@babel/types": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.7.tgz", - "integrity": "sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==", + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.3.tgz", + "integrity": "sha512-vN5p+1kl59GVKMvTHt55NzzmYVxprfJD+ql7U9NFIfKCBkYE55LYtS+WtPlaYOyzydrKI8Nezd+aZextrd+FMA==", "dependencies": { - "@babel/helper-string-parser": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2413,6 +2327,24 @@ "yarn": ">=1.22.18" } }, + "node_modules/@colors/colors": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", + "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@dabh/diagnostics": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", + "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", + "dependencies": { + "colorspace": "1.1.x", + "enabled": "2.0.x", + "kuler": "^2.0.0" + } + }, "node_modules/@dagrejs/dagre": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/@dagrejs/dagre/-/dagre-1.1.4.tgz", @@ -2747,6 +2679,21 @@ "node": ">=12" } }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz", + "integrity": "sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/openbsd-x64": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", @@ -7483,6 +7430,16 @@ "@types/send": "*" } }, + "node_modules/@types/fs-extra": { + "version": "11.0.4", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz", + "integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==", + "dev": true, + "dependencies": { + "@types/jsonfile": "*", + "@types/node": "*" + } + }, "node_modules/@types/geojson": { "version": "7946.0.15", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.15.tgz", @@ -7526,6 +7483,16 @@ "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==" }, + "node_modules/@types/inquirer": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/@types/inquirer/-/inquirer-9.0.7.tgz", + "integrity": "sha512-Q0zyBupO6NxGRZut/JdmqYKOnN95Eg5V8Csg3PGKkP+FnvsUZx1jAyK7fztIszxxMuoBA6E3KXWvdZVXIpx60g==", + "dev": true, + "dependencies": { + "@types/through": "*", + "rxjs": "^7.2.0" + } + }, "node_modules/@types/is-empty": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/@types/is-empty/-/is-empty-1.2.3.tgz", @@ -7556,6 +7523,48 @@ "@types/istanbul-lib-report": "*" } }, + "node_modules/@types/jest": { + "version": "29.5.14", + 
"resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/jest/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@types/jest/node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@types/jest/node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true + }, "node_modules/@types/js-yaml": { "version": "4.0.9", "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", @@ -7572,6 +7581,15 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "node_modules/@types/jsonfile": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz", + "integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/lodash": { "version": "4.17.6", "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.6.tgz", @@ -7778,6 +7796,20 @@ "integrity": "sha512-hcZhlNvMkQG/k1vcZ6yHOl6WAYftQ2MLfTHcYRZ2xYZFD8tGVnE3qFV0lj1smQeDSR7/yY0PyuUalauf33bJeA==", "dev": true }, + "node_modules/@types/through": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/through/-/through-0.0.33.tgz", + "integrity": "sha512-HsJ+z3QuETzP3cswwtzt2vEIiHBk/dCcHGhbmG5X3ecnwFD/lPrMpliGXxSCg03L9AhrdwA4Oz/qfspkDW+xGQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/triple-beam": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz", + "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==" + }, "node_modules/@types/unist": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", @@ -8796,6 +8828,11 @@ "astring": "bin/astring" } }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==" + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -9157,6 +9194,19 @@ "resolved": 
"https://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz", "integrity": "sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw==" }, + "node_modules/babel-plugin-transform-import-meta": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-import-meta/-/babel-plugin-transform-import-meta-2.2.1.tgz", + "integrity": "sha512-AxNh27Pcg8Kt112RGa3Vod2QS2YXKKJ6+nSvRtv7qQTJAdx0MZa4UHZ4lnxHUWA2MNbLuZQv5FVab4P1CoLOWw==", + "dev": true, + "dependencies": { + "@babel/template": "^7.4.4", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@babel/core": "^7.10.0" + } + }, "node_modules/babel-preset-current-node-syntax": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", @@ -10339,7 +10389,6 @@ "version": "1.9.1", "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", - "dev": true, "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" @@ -10356,6 +10405,37 @@ "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", "dev": true }, + "node_modules/colorspace": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", + "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", + "dependencies": { + "color": "^3.1.3", + "text-hex": "1.0.x" + } + }, + "node_modules/colorspace/node_modules/color": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", + "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", + "dependencies": { + "color-convert": "^1.9.3", + "color-string": "^1.6.0" + } + }, + "node_modules/colorspace/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/colorspace/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -11968,6 +12048,11 @@ "node": ">= 4" } }, + "node_modules/enabled": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", + "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==" + }, "node_modules/encodeurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", @@ -13844,6 +13929,11 @@ "walk-up-path": "^3.0.1" } }, + "node_modules/fecha": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz", + "integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==" + }, "node_modules/fetch-blob": { "version": "3.2.0", "resolved": 
"https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", @@ -13997,6 +14087,11 @@ "node": ">=0.4.0" } }, + "node_modules/fn.name": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", + "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" + }, "node_modules/follow-redirects": { "version": "1.15.6", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", @@ -14513,7 +14608,6 @@ "version": "4.7.5", "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.5.tgz", "integrity": "sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==", - "dev": true, "dependencies": { "resolve-pkg-maps": "^1.0.0" }, @@ -17572,14 +17666,14 @@ } }, "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", "bin": { "jsesc": "bin/jsesc" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/json-bigint": { @@ -17712,6 +17806,11 @@ "node": ">= 8" } }, + "node_modules/kuler": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", + "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" + }, "node_modules/language-subtag-registry": { "version": "0.3.23", "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.23.tgz", @@ -18060,6 +18159,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/logform": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.7.0.tgz", + "integrity": "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==", + "dependencies": { + "@colors/colors": "1.6.0", + "@types/triple-beam": "^1.3.2", + "fecha": "^4.2.0", + "ms": "^2.1.1", + "safe-stable-stringify": "^2.3.1", + "triple-beam": "^1.3.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, "node_modules/long": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", @@ -22915,6 +23030,14 @@ "wrappy": "1" } }, + "node_modules/one-time": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", + "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", + "dependencies": { + "fn.name": "1.x.x" + } + }, "node_modules/onetime": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", @@ -26095,7 +26218,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", - "dev": true, "funding": { "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" } @@ -26367,7 +26489,15 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/safer-buffer": { + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + 
"integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" @@ -26645,9 +26775,9 @@ } }, "node_modules/semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "bin": { "semver": "bin/semver.js" }, @@ -26950,7 +27080,6 @@ "version": "0.2.2", "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", - "dev": true, "dependencies": { "is-arrayish": "^0.3.1" } @@ -26958,8 +27087,7 @@ "node_modules/simple-swizzle/node_modules/is-arrayish": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", - "dev": true + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" }, "node_modules/sisteransi": { "version": "1.0.5", @@ -27024,6 +27152,14 @@ "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" }, + "node_modules/stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==", + "engines": { + "node": "*" + } + }, "node_modules/stack-utils": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", @@ -28466,6 +28602,11 @@ "b4a": "^1.6.4" } }, + "node_modules/text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" + }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -28554,14 +28695,6 @@ "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", "dev": true }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "engines": { - "node": ">=4" - } - }, "node_modules/to-object-path": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", @@ -28630,6 +28763,14 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/triple-beam": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.4.1.tgz", + "integrity": 
"sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==", + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/trough": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", @@ -28739,6 +28880,407 @@ "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", "dev": true }, + "node_modules/tsx": { + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.19.2.tgz", + "integrity": "sha512-pOUl6Vo2LUq/bSa8S5q7b91cgNSjctn9ugq/+Mvow99qW6x/UZYwzxy/3NmqoT66eHYfCVvFvACC58UBPFf28g==", + "dependencies": { + "esbuild": "~0.23.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/tsx/node_modules/@esbuild/aix-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", + "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz", + "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz", + "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz", + "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz", + "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz", + "integrity": "sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz", + "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==", + "cpu": [ + "arm64" + ], + 
"optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz", + "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz", + "integrity": "sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz", + "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz", + "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-loong64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz", + "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz", + "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==", + "cpu": [ + "mips64el" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz", + "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-riscv64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz", + "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-s390x": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz", + "integrity": 
"sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz", + "integrity": "sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", + "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz", + "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/sunos-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", + "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz", + "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz", + "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz", + "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/esbuild": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.23.1.tgz", + "integrity": "sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==", + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.23.1", + "@esbuild/android-arm": "0.23.1", + "@esbuild/android-arm64": "0.23.1", + "@esbuild/android-x64": "0.23.1", + "@esbuild/darwin-arm64": "0.23.1", + "@esbuild/darwin-x64": 
"0.23.1", + "@esbuild/freebsd-arm64": "0.23.1", + "@esbuild/freebsd-x64": "0.23.1", + "@esbuild/linux-arm": "0.23.1", + "@esbuild/linux-arm64": "0.23.1", + "@esbuild/linux-ia32": "0.23.1", + "@esbuild/linux-loong64": "0.23.1", + "@esbuild/linux-mips64el": "0.23.1", + "@esbuild/linux-ppc64": "0.23.1", + "@esbuild/linux-riscv64": "0.23.1", + "@esbuild/linux-s390x": "0.23.1", + "@esbuild/linux-x64": "0.23.1", + "@esbuild/netbsd-x64": "0.23.1", + "@esbuild/openbsd-arm64": "0.23.1", + "@esbuild/openbsd-x64": "0.23.1", + "@esbuild/sunos-x64": "0.23.1", + "@esbuild/win32-arm64": "0.23.1", + "@esbuild/win32-ia32": "0.23.1", + "@esbuild/win32-x64": "0.23.1" + } + }, "node_modules/tty-browserify": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", @@ -28886,9 +29428,9 @@ "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" }, "node_modules/typescript": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz", - "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", + "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -30331,6 +30873,66 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/winston": { + "version": "3.17.0", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.17.0.tgz", + "integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==", + "dependencies": { + "@colors/colors": "^1.6.0", + "@dabh/diagnostics": "^2.0.2", + "async": "^3.2.3", + "is-stream": "^2.0.0", + "logform": "^2.7.0", + "one-time": "^1.0.0", + "readable-stream": "^3.4.0", + "safe-stable-stringify": "^2.3.1", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.9.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/winston-transport": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.9.0.tgz", + "integrity": "sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A==", + "dependencies": { + "logform": "^2.7.0", + "readable-stream": "^3.6.2", + "triple-beam": "^1.3.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/winston-transport/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/winston/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", 
diff --git a/package.json b/package.json
index 22c6192d66fa..b9a73f415712 100644
--- a/package.json
+++ b/package.json
@@ -3,14 +3,15 @@
   "version": "0.1.0",
   "description": "AsyncAPI website",
   "private": true,
+  "type": "module",
   "scripts": {
     "dev": "npm run build-scripts && next dev",
     "build": "npm run build-scripts && next build",
     "test": "jest --passWithNoTests",
-    "build:pages": "node scripts/build-pages.js && npm run format:mdx",
-    "build:posts": "node scripts/index.js",
+    "build:pages": "tsx scripts/build-pages.ts && npm run format:mdx",
+    "build:posts": "tsx scripts/index.ts",
     "build-scripts": "npm run build:pages && npm run lint:mdx && npm run build:posts",
-    "write:blog": "node ./scripts/compose.js",
+    "write:blog": "tsx ./scripts/compose.ts",
     "start": "npx serve@latest out",
     "export": "next export",
     "lint": "next lint",
@@ -18,13 +19,13 @@
     "format:mdx": "prettier --write \"**/*.mdx\"",
     "lint:mdx": "remark \"**/*.mdx\"",
     "generate:assets": "echo \"No assets to configure\"",
-    "generate:meetings": "node scripts/build-meetings.js",
-    "generate:dashboard": "node scripts/dashboard/build-dashboard.js",
-    "generate:videos": "node scripts/build-newsroom-videos.js",
-    "generate:tools": "node scripts/build-tools.js",
+    "generate:meetings": "tsx scripts/build-meetings.ts",
+    "generate:dashboard": "tsx scripts/dashboard/build-dashboard.ts",
+    "generate:videos": "tsx scripts/build-newsroom-videos.ts",
+    "generate:tools": "tsx scripts/build-tools.ts",
     "test:netlify": "deno test --allow-env --trace-ops netlify/**/*.test.ts",
-    "test:md": "node scripts/markdown/check-markdown.js",
-    "test:editlinks": "node scripts/markdown/check-edit-links.js",
+    "test:md": "tsx scripts/markdown/check-markdown.ts",
+    "test:editlinks": "tsx scripts/markdown/check-edit-links.ts",
     "dev:storybook": "storybook dev -p 6006",
     "build:storybook": "storybook build"
   },
@@ -115,10 +116,12 @@
     "swiper": "^11.0.7",
     "tailwind-merge": "^2.2.1",
     "tailwindcss": "^3.4.3",
-    "typescript": "^5.3.3",
+    "tsx": "^4.19.2",
+    "winston": "^3.17.0",
     "yaml": "^2.3.4"
   },
   "devDependencies": {
+    "@babel/preset-typescript": "^7.26.0",
     "@chromatic-com/storybook": "^1.6.1",
     "@netlify/functions": "^2.6.0",
     "@netlify/plugin-nextjs": "^4.41.3",
@@ -130,6 +133,9 @@
     "@storybook/nextjs": "^8.2.4",
     "@storybook/react": "^8.2.4",
     "@storybook/test": "^8.2.4",
+    "@types/fs-extra": "^11.0.4",
+    "@types/inquirer": "^9.0.7",
+    "@types/jest": "^29.5.14",
     "@types/lodash": "^4.17.0",
     "@types/node": "^20",
     "@types/react": "^18.0.1",
@@ -139,6 +145,7 @@
     "@types/react-youtube-embed": "^1.0.4",
     "@typescript-eslint/eslint-plugin": "^6.21.0",
     "@typescript-eslint/parser": "^6.21.0",
+    "babel-plugin-transform-import-meta": "^2.2.1",
     "dedent": "^1.5.1",
     "eslint": "^8",
     "eslint-config-airbnb-typescript": "^17.1.0",
@@ -159,6 +166,7 @@
     "remark-cli": "^12.0.1",
     "remark-lint": "^10.0.0",
     "remark-mdx": "^3.0.1",
-    "storybook": "^8.2.4"
+    "storybook": "^8.2.4",
+    "typescript": "^5.7.2"
   }
 }
diff --git a/pages/_document.tsx b/pages/_document.tsx
index b220a1b44841..fecd327edff1 100644
--- a/pages/_document.tsx
+++ b/pages/_document.tsx
@@ -1,7 +1,7 @@
 import Document, { Head, Html, Main, NextScript } from 'next/document';
 import React from 'react';
 
-import i18nextConfig from '../next-i18next.config';
+import i18nextConfig from '../next-i18next.config.cjs';
 
 class MyDocument extends Document {
   static async getInitialProps(ctx: any) {
diff --git a/postcss.config.js b/postcss.config.cjs
similarity index 100%
rename from postcss.config.js
rename to postcss.config.cjs
diff --git a/scripts/adopters/index.js b/scripts/adopters/index.js
a/scripts/adopters/index.js b/scripts/adopters/index.js deleted file mode 100644 index 6a11697ad68f..000000000000 --- a/scripts/adopters/index.js +++ /dev/null @@ -1,6 +0,0 @@ -const { resolve } = require('path'); -const writeJSON = require('../utils/readAndWriteJson.js') - -module.exports = async function buildAdoptersList() { - writeJSON('config/adopters.yml',resolve(__dirname, '../../config', 'adopters.json')); -}; diff --git a/scripts/adopters/index.ts b/scripts/adopters/index.ts new file mode 100644 index 000000000000..07f7bb7ebca5 --- /dev/null +++ b/scripts/adopters/index.ts @@ -0,0 +1,15 @@ +import { dirname, resolve } from 'path'; +import { fileURLToPath } from 'url'; + +import { writeJSON } from '../utils/readAndWriteJson'; + +const currentFilePath = fileURLToPath(import.meta.url); +const currentDirPath = dirname(currentFilePath); + +/** + * Builds the adopters list by converting a YAML file to JSON and writing it to a specified path. + * @returns {Promise<void>} + */ +export async function buildAdoptersList() { + writeJSON('config/adopters.yml', resolve(currentDirPath, '../../config', 'adopters.json')); +} diff --git a/scripts/build-docs.js b/scripts/build-docs.js deleted file mode 100644 index ac47b6751cee..000000000000 --- a/scripts/build-docs.js +++ /dev/null @@ -1,182 +0,0 @@ -const sortBy = require('lodash/sortBy') -function buildNavTree(navItems) { - try { - const tree = { - 'welcome': { - item: { title: 'Welcome', weight: 0, isRootSection: true, isSection: true, rootSectionId: 'welcome', sectionWeight: 0, slug: '/docs' }, - children: {} - } - } - - //first we make sure that list of items lists main section items and then sub sections, documents last - const sortedItems = sortBy(navItems, ['isRootSection', 'weight', 'isSection']); - - sortedItems.forEach(item => { - //identify main sections - if (item.isRootSection) { - tree[item.rootSectionId] = { item, children: {} } - } - - //identify subsections - if (item.parent) { - if (!tree[item.parent]) { - throw new Error(`Parent section ${item.parent} not found for item ${item.title}`); - } - tree[item.parent].children[item.sectionId] = { item, children: [] }; - } - - if (!item.isSection) { - if (item.sectionId) { - let section = tree[item.rootSectionId]?.children[item.sectionId]; - if (!section) { - tree[item.rootSectionId].children[item.sectionId] = { item, children: [] }; - } - tree[item.rootSectionId].children[item.sectionId].children.push(item); - } else { - tree[item.rootSectionId].children[item.title] = { item }; - } - } - }); - - for (const [rootKey, rootValue] of Object.entries(tree)) { - const allChildren = rootValue.children; - const allChildrenKeys = Object.keys(allChildren); - - rootValue.children = allChildrenKeys - .sort((prev, next) => { - return allChildren[prev].item.weight - allChildren[next].item.weight; - }) - .reduce((obj, key) => { - obj[key] = allChildren[key]; - return obj; - }, {}); - - //handling subsections - if (allChildrenKeys.length > 1) { - for (const key of allChildrenKeys) { - if (allChildren[key].children) { - allChildren[key].children.sort((prev, next) => { - return prev.weight - next.weight; - }); - } - - // point in slug for specification subgroup to the latest specification version - if (rootKey === 'reference' && key === 'specification') { - allChildren[key].item.href = allChildren[key].children.find(c => c.isPrerelease === undefined).slug; - } - } - } - } - - return tree; - - } catch (err) { - throw new Error(`Failed to build navigation tree: ${err.message}`); - } -} - -// A recursion function, 
works on the logic of Depth First Search to traverse all the root and child posts of the -// DocTree to get sequential order of the Doc Posts -const convertDocPosts = (docObject) => { - try { - let docsArray = [] - // certain entries in the DocPosts are either a parent to many posts or itself a post. - docsArray.push(docObject?.item || docObject) - if (docObject.children) { - let children = docObject.children - Object.keys(children).forEach((child) => { - let docChildArray = convertDocPosts(children[child]) - docsArray = [...docsArray, ...docChildArray] - }) - } - return docsArray - } - catch (err) { - throw new Error('Error in convertDocPosts:', err); - } -} - - -function addDocButtons(docPosts, treePosts) { - let structuredPosts = []; - let rootSections = []; - - try { - // Traversing the whole DocTree and storing each post inside them in sequential order - Object.keys(treePosts).forEach((rootElement) => { - structuredPosts.push(treePosts[rootElement].item); - if (treePosts[rootElement].children) { - let children = treePosts[rootElement].children; - Object.keys(children).forEach((child) => { - let docChildArray = convertDocPosts(children[child]); - structuredPosts = [...structuredPosts, ...docChildArray]; - }); - } - }); - - // Appending the content of welcome page of Docs from the posts.json - structuredPosts[0] = docPosts.filter(p => p.slug === '/docs')[0]; - - // Traversing the structuredPosts in order to add `nextPage` and `prevPage` details for each page - let countDocPages = structuredPosts.length; - structuredPosts = structuredPosts.map((post, index) => { - // post item specifying the root Section or sub-section in the docs are excluded as - // they doesn't comprise any Doc Page or content to be shown in website. - if (post?.isRootSection || post?.isSection || index == 0) { - if (post?.isRootSection || index == 0) - rootSections.push(post.title) - return post - } - - let nextPage = {}, prevPage = {} - let docPost = post; - - // checks whether the next page for the current docPost item exists or not - if (index + 1 < countDocPages) { - // checks whether the next item inside structuredPosts is a rootElement or a sectionElement - // if yes, it goes again to a next to next item in structuredPosts to link the nextPage - if (!structuredPosts[index + 1].isRootElement && !structuredPosts[index + 1].isSection) { - nextPage = { - title: structuredPosts[index + 1].title, - href: structuredPosts[index + 1].slug - } - } else { - nextPage = { - title: `${structuredPosts[index + 1].title} - ${structuredPosts[index + 2].title}`, - href: structuredPosts[index + 2].slug - } - } - docPost = { ...docPost, nextPage } - } - - // checks whether the previous page for the current docPost item exists or not - if (index > 0) { - // checks whether the previous item inside structuredPosts is a rootElement or a sectionElement - // if yes, it goes again to a next previous item in structuredPosts to link the prevPage - if (!structuredPosts[index - 1]?.isRootElement && !structuredPosts[index - 1]?.isSection) { - prevPage = { - title: structuredPosts[index - 1].title, - href: structuredPosts[index - 1].slug - } - docPost = { ...docPost, prevPage } - } else { - // additonal check for the first page of Docs so that it doesn't give any Segementation fault - if (index - 2 >= 0) { - prevPage = { - title: `${structuredPosts[index - 1]?.isRootSection ? 
rootSections[rootSections.length - 2] : rootSections[rootSections.length - 1]} - ${structuredPosts[index - 2].title}`, - href: structuredPosts[index - 2].slug - }; - docPost = { ...docPost, prevPage }; - } - } - } - return docPost; - }); - - } catch (err) { - throw new Error("An error occurred while adding doc buttons:", err); - } - return structuredPosts; -} - -module.exports = { buildNavTree, addDocButtons, convertDocPosts } \ No newline at end of file diff --git a/scripts/build-docs.ts b/scripts/build-docs.ts new file mode 100644 index 000000000000..c6b52f5873cd --- /dev/null +++ b/scripts/build-docs.ts @@ -0,0 +1,247 @@ +import lodash from 'lodash'; + +import type { NavTree, NavTreeItem, RecursiveChildren } from '@/types/scripts/build-docs'; +import type { Details, NavigationPage } from '@/types/scripts/build-posts-list'; + +const { sortBy } = lodash; + +/** + * Builds a navigation tree from the given navigation items. + * + * @param {Details[]} navItems - The navigation items to build the tree from. + * @returns {NavTree} - The built navigation tree. + * @throws {Error} - Throws an error if there is an issue during the tree building process. + */ +function buildNavTree(navItems: Details[]) { + try { + const tree: NavTree = { + welcome: { + item: { + title: 'Welcome', + weight: 0, + isRootSection: true, + isSection: true, + rootSectionId: 'welcome', + sectionWeight: 0, + slug: '/docs' + }, + children: {} as RecursiveChildren + } + }; + + // first we make sure that list of items lists main section items and then sub sections, documents last + const sortedItems = sortBy(navItems, ['isRootSection', 'weight', 'isSection']); + + sortedItems.forEach((item) => { + // identify main sections + if (item.isRootSection) { + tree[item.rootSectionId!] = { item, children: {} }; + } + + // identify subsections + if (item.parent) { + if (!tree[item.parent]) { + throw new Error(`Parent section ${item.parent} not found for item ${item.title}`); + } + + (tree[item.parent].children as RecursiveChildren)[item.sectionId!] = { item, children: [] as Details[] }; + } + + if (!item.isSection) { + const rootSectionChildren = tree[item.rootSectionId!].children as RecursiveChildren; + + if (item.sectionId) { + const section = rootSectionChildren[item.sectionId]; + + if (!section) { + rootSectionChildren[item.sectionId] = { + item, + children: [] as Details[] + }; + } + (rootSectionChildren[item.sectionId].children! as Details[]).push(item); + } else { + rootSectionChildren[item.title] = { item }; + } + } + }); + + for (const [rootKey, rootValue] of Object.entries(tree)) { + const allChildren = rootValue.children as RecursiveChildren; + const allChildrenKeys = Object.keys(allChildren as RecursiveChildren); + + rootValue.children = allChildrenKeys + .sort((prev, next) => { + return allChildren[prev]!.item.weight! - allChildren[next]!.item.weight!; + }) + .reduce( + ( + obj: { + [key: string]: NavTreeItem; + }, + key + ) => { + // eslint-disable-next-line no-param-reassign + obj[key] = allChildren[key]; + + return obj; + }, + {} + ); + + // handling subsections + if (allChildrenKeys.length > 1) { + for (const key of allChildrenKeys) { + const childrenOfAllChildren = allChildren[key].children as Details[]; + + /* istanbul ignore else */ + // eslint-disable-next-line max-depth + if (childrenOfAllChildren) { + childrenOfAllChildren!.sort((prev, next) => { + return prev.weight! 
- next.weight!; + }); + } + + // point in slug for specification subgroup to the latest specification version + // eslint-disable-next-line max-depth + if (rootKey === 'reference' && key === 'specification') { + allChildren[key].item.href = childrenOfAllChildren.find((c) => c.isPrerelease === undefined)!.slug; + } + } + } + } + + return tree; + } catch (err) { + throw new Error(`Failed to build navigation tree: ${err}`); + } +} + +/** + * Recursively converts document posts to a sequential array. + * + * @param {NavTree | Details} docObject - The document object to convert. + * @returns {Details[]} - The sequential array of document posts. + * @throws {Error} - Throws an error if there is an issue during the conversion process. + */ +const convertDocPosts = (docObject: NavTree | Details) => { + try { + let docsArray: Details[] = []; + + // certain entries in the DocPosts are either a parent to many posts or are themselves posts. + + docsArray.push(docObject?.item || docObject); + if (docObject.children) { + const { children } = docObject; + + Object.keys(children).forEach((child) => { + const docChildArray = convertDocPosts(children[child] as Details); + + docsArray = [...docsArray, ...docChildArray]; + }); + } + + return docsArray; + } catch (err) { + throw new Error(`Error in convertDocPosts: ${err}`); + } +}; + +/** + * Adds navigation buttons to the document posts. + * + * @param {Details[]} docPosts - The document posts to add buttons to. + * @param {NavTree} treePosts - The navigation tree of the document posts. + * @returns {Details[]} - The document posts with added navigation buttons. + * @throws {Error} - Throws an error if there is an issue during the button adding process. + */ +function addDocButtons(docPosts: Details[], treePosts: NavTree) { + let structuredPosts: Details[] = []; + const rootSections: string[] = []; + + try { + // Traversing the whole DocTree and storing each post inside it in sequential order + Object.keys(treePosts).forEach((rootElement) => { + structuredPosts.push(treePosts[rootElement].item); + if (treePosts[rootElement].children) { + const { children } = treePosts[rootElement]; + const childrenTyped = children as NavTree | Details; + + Object.keys(childrenTyped).forEach((child) => { + const docChildArray = convertDocPosts(childrenTyped[child]); + + structuredPosts = [...structuredPosts, ...docChildArray]; + }); + } + }); + + // Appending the content of the welcome page of Docs from the posts.json + [structuredPosts[0]] = docPosts.filter((p) => p.slug === '/docs'); + + // Traversing the structuredPosts in order to add `nextPage` and `prevPage` details for each page + const countDocPages = structuredPosts.length; + + structuredPosts = structuredPosts.map((post, index) => { + // post items specifying the root Section or sub-section in the docs are excluded as + // they don't comprise any Doc Page or content to be shown on the website. 
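+ // For example (illustrative): with the sequential order [root "Concepts", page "Producer", page "Consumer"], + // "Producer" gets nextPage = "Consumer" and "Consumer" gets prevPage = "Producer", while the root + // section entry is skipped by the check below and only recorded in rootSections.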
+ if (post?.isRootSection || post?.isSection || index === 0) { + if (post?.isRootSection || index === 0) rootSections.push(post.title); + + return post; + } + + let nextPage = {} as NavigationPage; + let prevPage = {} as NavigationPage; + let docPost = post as Details; + + // checks whether the next page for the current docPost item exists or not + if (index + 1 < countDocPages) { + // checks whether the next item inside structuredPosts is a rootElement or a sectionElement + // if yes, it goes again to a next to next item in structuredPosts to link the nextPage + if (!structuredPosts[index + 1].isRootElement && !structuredPosts[index + 1].isSection) { + nextPage = { + title: structuredPosts[index + 1].title, + href: structuredPosts[index + 1].slug + }; + } else { + nextPage = { + title: `${structuredPosts[index + 1].title} - ${structuredPosts[index + 2].title}`, + href: structuredPosts[index + 2].slug + }; + } + + docPost = { ...docPost, nextPage }; + } + + // checks whether the previous page for the current docPost item exists or not + /* istanbul ignore else */ + if (index > 0) { + // checks whether the previous item inside structuredPosts is a rootElement or a sectionElement + // if yes, it goes again to a next previous item in structuredPosts to link the prevPage + /* istanbul ignore else */ + if (!structuredPosts[index - 1]?.isRootElement && !structuredPosts[index - 1]?.isSection) { + prevPage = { + title: structuredPosts[index - 1].title, + href: structuredPosts[index - 1].slug + }; + docPost = { ...docPost, prevPage } as Details; + } else if (index - 2 >= 0) { + // additional check for the first page of Docs so that it doesn't give any Segmentation fault + prevPage = { + title: `${structuredPosts[index - 1]?.isRootSection ? rootSections[rootSections.length - 2] : rootSections[rootSections.length - 1]} - ${structuredPosts[index - 2].title}`, + href: structuredPosts[index - 2].slug + }; + docPost = { ...docPost, prevPage } as Details; + } + } + + return docPost; + }); + } catch (err) { + throw new Error(`An error occurred while adding doc buttons: ${err}`); + } + + return structuredPosts; +} + +export { addDocButtons, buildNavTree, convertDocPosts }; diff --git a/scripts/build-meetings.js b/scripts/build-meetings.js deleted file mode 100644 index ee95803d9d44..000000000000 --- a/scripts/build-meetings.js +++ /dev/null @@ -1,67 +0,0 @@ -const { writeFileSync } = require('fs'); -const { resolve } = require('path'); -const { google } = require('googleapis'); - -async function buildMeetings(writePath) { - let auth; - let calendar; - - try { - auth = new google.auth.GoogleAuth({ - scopes: ['https://www.googleapis.com/auth/calendar'], - credentials: process.env.CALENDAR_SERVICE_ACCOUNT ? 
JSON.parse(process.env.CALENDAR_SERVICE_ACCOUNT) : undefined, - }); - - calendar = google.calendar({ version: 'v3', auth }); - - } catch (err) { - throw new Error(`Authentication failed: ${err.message}`); - } - - let eventsItems; - - try { - //cron job runs this always on midnight - const currentTime = new Date(Date.now()).toISOString(); - const timeMin = new Date( - Date.parse(currentTime) - 100 * 24 * 60 * 60 * 1000 - ).toISOString(); - const timeMax = new Date( - Date.parse(currentTime) + 30 * 24 * 60 * 60 * 1000 - ).toISOString(); - - const eventsList = await calendar.events.list({ - calendarId: process.env.CALENDAR_ID, - timeMax: timeMax, - timeMin: timeMin, - }); - - eventsItems = eventsList.data.items.map((e) => { - return { - title: e.summary, - calLink: e.htmlLink, - url: - e.extendedProperties?.private && - `https://github.com/asyncapi/community/issues/${e.extendedProperties.private.ISSUE_ID}`, - banner: - e.extendedProperties?.private && e.extendedProperties.private.BANNER, - date: new Date(e.start.dateTime), - }; - }); - - const eventsForHuman = JSON.stringify(eventsItems, null, ' '); - console.log('The following events got fetched', eventsForHuman); - - writeFileSync(writePath, eventsForHuman); - - } catch (err) { - throw new Error(`Failed to fetch or process events: ${err.message}`); - } -} - -/* istanbul ignore next */ -if (require.main === module) { - buildMeetings(resolve(__dirname, '../config', 'meetings.json')); -} - -module.exports = { buildMeetings }; diff --git a/scripts/build-meetings.ts b/scripts/build-meetings.ts new file mode 100644 index 000000000000..a3b4193a02a4 --- /dev/null +++ b/scripts/build-meetings.ts @@ -0,0 +1,82 @@ +import { writeFileSync } from 'fs'; +import { google } from 'googleapis'; +import { dirname, resolve } from 'path'; +import { fileURLToPath } from 'url'; + +import { logger } from './utils/logger'; + +const currentFilePath = fileURLToPath(import.meta.url); +const currentDirPath = dirname(currentFilePath); + +/** + * Fetches upcoming meetings from Google Calendar and writes the data to a specified path. + * + * @param {string} writePath - The path to write the meeting data. + * @throws {Error} - Throws an error if there is an issue during the fetch or write process. + */ +async function buildMeetings(writePath: string) { + let auth; + let calendar; + + try { + auth = new google.auth.GoogleAuth({ + scopes: ['https://www.googleapis.com/auth/calendar'], + credentials: process.env.CALENDAR_SERVICE_ACCOUNT ? 
JSON.parse(process.env.CALENDAR_SERVICE_ACCOUNT) : undefined + }); + + calendar = google.calendar({ version: 'v3', auth }); + } catch (err) { + throw new Error(`Authentication failed: ${err}`); + } + + let eventsItems; + + try { + // cron job runs this always on midnight + const currentTime = new Date(Date.now()).toISOString(); + const timeMin = new Date(Date.parse(currentTime) - 100 * 24 * 60 * 60 * 1000).toISOString(); + const timeMax = new Date(Date.parse(currentTime) + 30 * 24 * 60 * 60 * 1000).toISOString(); + + const eventsList = await calendar.events.list({ + calendarId: process.env.CALENDAR_ID, + timeMax, + timeMin + }); + + // check if the response is valid and not undefined + if (!eventsList.data.items || !Array.isArray(eventsList.data.items)) { + throw new Error('Invalid data structure received from Google Calendar API'); + } + + eventsItems = eventsList.data.items.map((e) => { + if (!e.start || !e.start.dateTime) { + throw new Error('start.dateTime is missing in the event'); + } + + return { + title: e.summary, + calLink: e.htmlLink, + url: + e.extendedProperties?.private && + `https://github.com/asyncapi/community/issues/${e.extendedProperties.private.ISSUE_ID}`, + banner: e.extendedProperties?.private && e.extendedProperties.private.BANNER, + date: new Date(e.start.dateTime) + }; + }); + + const eventsForHuman = JSON.stringify(eventsItems, null, ' '); + + logger.info(`The following events got fetched: ${eventsForHuman}`); + + writeFileSync(writePath, eventsForHuman); + } catch (err) { + throw new Error(`Failed to fetch or process events: ${(err as Error).message}`); + } +} + +/* istanbul ignore next */ +if (process.argv[1] === fileURLToPath(import.meta.url)) { + buildMeetings(resolve(currentDirPath, '../config', 'meetings.json')); +} + +export { buildMeetings }; diff --git a/scripts/build-newsroom-videos.js b/scripts/build-newsroom-videos.js deleted file mode 100644 index 383927765d36..000000000000 --- a/scripts/build-newsroom-videos.js +++ /dev/null @@ -1,51 +0,0 @@ -const { writeFileSync } = require('fs-extra'); -const { resolve } = require('path'); -const fetch = require('node-fetch-2'); - -async function buildNewsroomVideos(writePath) { - try { - const response = await fetch('https://youtube.googleapis.com/youtube/v3/search?' + new URLSearchParams({ - key: process.env.YOUTUBE_TOKEN, - part: 'snippet', - channelId: 'UCIz9zGwDLbrYQcDKVXdOstQ', - eventType: 'completed', - type: 'video', - order: 'Date', - maxResults: 5, - })); - - if (!response.ok) { - throw new Error(`HTTP error! 
with status code: ${response.status}`); - } - - const data = await response.json(); - console.log(data); - - if (!data.items || !Array.isArray(data.items)) { - throw new Error('Invalid data structure received from YouTube API'); - } - - const videoDataItems = data.items.map((video) => ({ - image_url: video.snippet.thumbnails.high.url, - title: video.snippet.title, - description: video.snippet.description, - videoId: video.id.videoId, - })); - - const videoData = JSON.stringify(videoDataItems, null, ' '); - console.log('The following are the Newsroom Youtube videos: ', videoData); - - writeFileSync(writePath, videoData); - - return videoData; - } catch (err) { - throw new Error(`Failed to build newsroom videos: ${err.message}`); - } -} - -/* istanbul ignore next */ -if (require.main === module) { - buildNewsroomVideos(resolve(__dirname, '../config', 'newsroom_videos.json')) -} - -module.exports = { buildNewsroomVideos }; diff --git a/scripts/build-newsroom-videos.ts b/scripts/build-newsroom-videos.ts new file mode 100644 index 000000000000..f7e3e4e463b4 --- /dev/null +++ b/scripts/build-newsroom-videos.ts @@ -0,0 +1,73 @@ +import { writeFileSync } from 'fs'; +import type { youtube_v3 } from 'googleapis'; +import fetch from 'node-fetch-2'; +import { dirname, resolve } from 'path'; +import process from 'process'; +import { fileURLToPath } from 'url'; + +import { logger } from './utils/logger'; + +const currentFilePath = fileURLToPath(import.meta.url); +const currentDirPath = dirname(currentFilePath); + +/** + * Fetches the latest YouTube videos from the AsyncAPI channel and writes the data to a specified path. + * + * @param {string} writePath - The path to write the video data. + * @returns {Promise<string>} - A promise that resolves to the video data in JSON format. + * @throws {Error} - Throws an error if there is an issue during the fetch or write process. + */ +async function buildNewsroomVideos(writePath: string) { + try { + if (!process.env.YOUTUBE_TOKEN) { + throw new Error('YOUTUBE_TOKEN environment variable is required'); + } + const response = await fetch( + `https://youtube.googleapis.com/youtube/v3/search?${new URLSearchParams({ + key: process.env.YOUTUBE_TOKEN!, + part: 'snippet', + channelId: 'UCIz9zGwDLbrYQcDKVXdOstQ', + eventType: 'completed', + type: 'video', + order: 'Date', + maxResults: '5' + })}` + ); + + if (!response.ok) { + throw new Error(`HTTP error! 
with status code: ${response.status}`); + } + + const data = await response.json(); + + if (!data.items || !Array.isArray(data.items)) { + throw new Error('Invalid data structure received from YouTube API'); + } + + const videoDataItems = data.items.map((video: youtube_v3.Schema$SearchResult) => { + return { + image_url: video.snippet?.thumbnails?.high?.url, + title: video.snippet?.title, + description: video.snippet?.description, + videoId: video.id?.videoId + }; + }); + + const videoData = JSON.stringify(videoDataItems, null, ' '); + + logger.info(`The following are the Newsroom Youtube videos: ${videoData}`); + + writeFileSync(writePath, videoData); + + return videoData; + } catch (err) { + throw new Error(`Failed to build newsroom videos: ${(err as Error).message}`); + } +} + +/* istanbul ignore next */ +if (process.argv[1] === fileURLToPath(import.meta.url)) { + buildNewsroomVideos(resolve(currentDirPath, '../config', 'newsroom_videos.json')); +} + +export { buildNewsroomVideos }; diff --git a/scripts/build-pages.js b/scripts/build-pages.ts similarity index 62% rename from scripts/build-pages.js rename to scripts/build-pages.ts index de46315bc326..d1a0ec8e568b 100644 --- a/scripts/build-pages.js +++ b/scripts/build-pages.ts @@ -1,30 +1,47 @@ -const fs = require('fs'); -const path = require('path'); +import type { PathLike } from 'fs'; +import fs from 'fs'; +import path from 'path'; const SRC_DIR = 'markdown'; const TARGET_DIR = 'pages'; const capitalizeTags = ['table', 'tr', 'td', 'th', 'thead', 'tbody']; -// Check if target directory doesn't exist then create it -function ensureDirectoryExists(directory) { +/** + * Ensures that the specified directory exists. If it doesn't, creates it. + * + * @param {PathLike} directory - The directory path to check or create. + */ +export function ensureDirectoryExists(directory: PathLike) { if (!fs.existsSync(directory)) { fs.mkdirSync(directory, { recursive: true }); } } - ensureDirectoryExists(TARGET_DIR); -function capitalizeJsxTags(content) { - return content.replace(/<\/?(\w+)/g, function (match, letter) { +/** + * Capitalizes JSX tags in the provided content string. + * + * @param {string} content - The content string to process. + * @returns {string} - The content string with capitalized JSX tags. + */ +export function capitalizeJsxTags(content: string) { + return content.replace(/<\/?(\w+)/g, function (match: string, letter: string): string { if (capitalizeTags.includes(letter.toLowerCase())) { return `<${match[1] === '/' ? '/' : ''}${letter[0].toUpperCase()}${letter.slice(1)}`; } + return match; }); } -function copyAndRenameFiles(srcDir, targetDir) { +/** + * Copies and renames files from the source directory to the target directory. + * + * @param {string} srcDir - The source directory. + * @param {string} targetDir - The target directory. 
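+ * @example + * // Illustrative call, mirroring the module-level invocation below: + * copyAndRenameFiles('markdown', 'pages');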
+ */ +export function copyAndRenameFiles(srcDir: string, targetDir: string) { // Read all files and directories from source directory const entries = fs.readdirSync(srcDir, { withFileTypes: true }); @@ -32,6 +49,8 @@ function copyAndRenameFiles(srcDir, targetDir) { const srcPath = path.join(srcDir, entry.name); const targetPath = path.join(targetDir, entry.name); + /* istanbul ignore else */ + if (entry.isDirectory()) { // If entry is a directory, create it in target directory and recurse if (!fs.existsSync(targetPath)) { @@ -60,5 +79,3 @@ function copyAndRenameFiles(srcDir, targetDir) { } copyAndRenameFiles(SRC_DIR, TARGET_DIR); - -module.exports = { copyAndRenameFiles,capitalizeJsxTags, ensureDirectoryExists } \ No newline at end of file diff --git a/scripts/build-post-list.js b/scripts/build-post-list.js deleted file mode 100644 index 44b7273c2106..000000000000 --- a/scripts/build-post-list.js +++ /dev/null @@ -1,209 +0,0 @@ -const { readdir, stat, pathExists, readFile, writeFile } = require('fs-extra') -const { basename, join, normalize, sep, posix, relative, parse } = require('path') -const frontMatter = require('gray-matter') -const toc = require('markdown-toc') -const readingTime = require('reading-time') -const { markdownToTxt } = require('markdown-to-txt') -const { buildNavTree, addDocButtons } = require('./build-docs') - -let specWeight = 100 -const result = { - docs: [], - blog: [], - about: [], - docsTree: {} -} -const releaseNotes = [] - -const addItem = (details) => { - if (!details || typeof details.slug !== 'string') { - throw new Error('Invalid details object provided to addItem'); - } - const sectionMap = { - '/docs': 'docs', - '/blog': 'blog', - '/about': 'about' - }; - const section = Object.keys(sectionMap).find(key => details.slug.startsWith(key)); - if (section) { - result[sectionMap[section]].push(details); - } -}; - -function getVersionDetails(slug, weight) { - const fileBaseName = basename(slug); - const versionName = fileBaseName.split('-')[0]; - return { - title: versionName.startsWith('v') - ? 
capitalize(versionName.slice(1)) - : capitalize(versionName), - weight - }; -} - -/** - * Builds a list of posts from the specified directories and writes it to a file - * @param {Array<Array<string>>} postDirectories - Array of [directory, slug] tuples - * @param {string} basePath - Base path for resolving relative paths - * @param {string} writeFilePath - Path where the output JSON will be written - * @throws {Error} If required parameters are missing or if any operation fails - * @returns {Promise<void>} - */ -async function buildPostList(postDirectories, basePath, writeFilePath) { - try { - - if (!basePath) { - throw new Error('Error while building post list: basePath is required'); - } - - if (!writeFilePath) { - throw new Error('Error while building post list: writeFilePath is required'); - } - - if (postDirectories.length === 0) { - throw new Error('Error while building post list: postDirectories array is empty'); - } - const normalizedBasePath = normalize(basePath) - await walkDirectories(postDirectories, result, normalizedBasePath) - const treePosts = buildNavTree(result.docs.filter((p) => p.slug.startsWith('/docs/'))) - result.docsTree = treePosts - result.docs = addDocButtons(result.docs, treePosts) - await writeFile(writeFilePath, JSON.stringify(result, null, ' ')) - } catch (error) { - throw new Error(`Error while building post list: ${error.message}`, { cause: error }); - } -} - -function handleSpecificationVersion(details, fileBaseName) { - if (fileBaseName.includes('next-spec') || fileBaseName.includes('next-major-spec')) { - details.isPrerelease = true; - details.title += " (Pre-release)"; - } - if (fileBaseName.includes('explorer')) { - details.title += " - Explorer"; - } - return details; -} - -async function walkDirectories( - directories, - resultObj, - basePath, - sectionTitle, - sectionId, - rootSectionId, - sectionWeight = 0 -) { - for (let dir of directories) { - const directory = posix.normalize(dir[0]); - const sectionSlug = dir[1] || ''; - const files = await readdir(directory) - - for (let file of files) { - let details; - const fileName = normalize(join(directory, file)); - const fileNameWithSection = normalize(join(fileName, '_section.mdx')) - const slug = `/${normalize(relative(basePath, fileName)).replace(/\\/g, '/')}` - const slugElements = slug.split('/') - - if (await isDirectory(fileName)) { - if (await pathExists(fileNameWithSection)) { - // Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057 - details = frontMatter(await readFile(fileNameWithSection, 'utf-8'), {}).data - details.title = details.title || capitalize(basename(fileName)) - } else { - details = { - title: capitalize(basename(fileName)), - } - } - details.isSection = true - if (slugElements.length > 3) { - details.parent = slugElements[slugElements.length - 2] - details.sectionId = slugElements[slugElements.length - 1] - } - if (!details.parent) { - details.isRootSection = true - details.rootSectionId = slugElements[slugElements.length - 1] - } - details.sectionWeight = sectionWeight - details.slug = slug - addItem(details) - const rootId = details.parent || details.rootSectionId - await walkDirectories([[fileName, slug]], resultObj, basePath, details.title, details.sectionId, rootId, details.sectionWeight) - } else if (file.endsWith('.mdx') && !fileName.endsWith(sep + '_section.mdx')) { - const fileContent = await readFile(fileName, 'utf-8') - // Passing a second argument to frontMatter disables cache. 
See https://github.com/asyncapi/website/issues/1057 - const { data, content } = frontMatter(fileContent, {}) - details = data - details.toc = toc(content, { slugify: slugifyToC }).json - details.readingTime = Math.ceil(readingTime(content).minutes) - details.excerpt = details.excerpt || markdownToTxt(content).substr(0, 200) - details.sectionSlug = sectionSlug || slug.replace(/\.mdx$/, '') - details.sectionWeight = sectionWeight - details.sectionTitle = sectionTitle - details.sectionId = sectionId - details.rootSectionId = rootSectionId - details.id = fileName.replace(/\\/g, '/') - details.isIndex = fileName.endsWith(join('index.mdx')) - details.slug = details.isIndex ? sectionSlug : slug.replace(/\.mdx$/, '') - if (details.slug.includes('/reference/specification/') && !details.title) { - const fileBaseName = basename(details.slug) - const versionDetails = getVersionDetails(details.slug, specWeight--); - details.title = versionDetails.title; - details.weight = versionDetails.weight; - - if (releaseNotes.includes(details.title)) { - details.releaseNoteLink = `/blog/release-notes-${details.title}` - } - - details = handleSpecificationVersion(details, fileBaseName); - } - - // To create a list of available ReleaseNotes list, which will be used to add details.releaseNoteLink attribute. - if (file.startsWith('release-notes') && dir[1] === '/blog') { - const { name } = parse(file); - const version = name.split('-').pop(); - releaseNotes.push(version); - } - - addItem(details) - } - } - } -} - -// Matches heading IDs in two formats: -// 1. {#my-heading-id} -// 2. <a name="my-heading-id"></a> -const HEADING_ID_REGEX = /[\s]*(?:\{#([a-zA-Z0-9\-_]+)\}|<a[\s]+name="([a-zA-Z0-9\-_]+)")/ - -function slugifyToC(str) { - let slug = '' - const idMatch = str.match(HEADING_ID_REGEX) - const [, headingId, anchorId] = idMatch || [] - slug = (headingId || anchorId || '').trim() - return slug -} - -async function isDirectory(dir) { - return (await stat(dir)).isDirectory() -} - -function capitalize(text) { - return text - .split(/[\s-]/) - .map(word => word.charAt(0).toUpperCase() + word.slice(1)) - .join(' '); -} - -module.exports = { slugifyToC, buildPostList, addItem } diff --git a/scripts/build-post-list.ts b/scripts/build-post-list.ts new file mode 100644 index 000000000000..6f91a1d9ebba --- /dev/null +++ b/scripts/build-post-list.ts @@ -0,0 +1,284 @@ +/* eslint-disable no-await-in-loop */ +/* eslint-disable max-depth */ +import type { PathLike } from 'fs'; +import { readdir, readFile, stat, writeFile } from 'fs/promises'; +import { pathExists } from 'fs-extra'; +import frontMatter from 'gray-matter'; +import { markdownToTxt } from 'markdown-to-txt'; +import toc from 'markdown-toc'; +import { basename, join, normalize, parse, posix, relative, sep } from 'path'; +import readingTime from 'reading-time'; + +import type { Details, Result } from '@/types/scripts/build-posts-list'; + +import { addDocButtons, buildNavTree } from './build-docs'; + +let specWeight = 100; +const finalResult: Result = { + docs: [], + blog: [], + about: [], + docsTree: {} +}; +const releaseNotes: (string | undefined)[] = []; +// Matches heading IDs in two formats: +// 1. {#my-heading-id} +// 2. <a name="my-heading-id"></a> +const HEADING_ID_REGEX = /[\s]*(?:\{#([a-zA-Z0-9\-_]+)\}|<a[\s]+name="([a-zA-Z0-9\-_]+)")/; + +/** + * Extracts a heading ID for the table of contents from a markdown heading. + * + * @param {string} str - The heading text to slugify. + * @returns {string} - The extracted heading ID, or an empty string if none is found. + */ +export function slugifyToC(str: string) { + let slug = ''; + const idMatch = str.match(HEADING_ID_REGEX); + const [, headingId, anchorId] = idMatch || []; + + slug = (headingId || anchorId || '').trim(); + + return slug; +} + +/** + * Capitalizes the first letter of each word in the given text. + * + * @param {string} text - The text to capitalize. + * @returns {string} - The capitalized text. + */ +function capitalize(text: string) { + return text + .split(/[\s-]/) + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); +} + +/** + * Adds an item to the final result based on its details. + * + * @param {Details} details - The details of the item to add. + * @throws {Error} - Throws an error if the details object is invalid. 
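+ * @example + * // Hypothetical details object (fields are illustrative): + * addItem({ title: 'My Post', slug: '/blog/my-post' } as Details);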
+ */ +export const addItem = (details: Details) => { + if (!details || typeof details.slug !== 'string') { + throw new Error('Invalid details object provided to addItem'); + } + const sectionMap: { + [key: string]: 'docs' | 'blog' | 'about'; + } = { + '/docs': 'docs', + '/blog': 'blog', + '/about': 'about' + }; + const section = Object.keys(sectionMap).find((key) => details.slug!.startsWith(key)); + + /* istanbul ignore else */ + + if (section) { + finalResult[sectionMap[section]].push(details); + } +}; + +/** + * Gets version details based on the slug and weight. + * + * @param {string} slug - The slug of the item. + * @param {number} weight - The weight of the item. + * @returns {object} - The version details. + */ +function getVersionDetails(slug: string, weight: number) { + const fileBaseName = basename(slug); + const versionName = fileBaseName.split('-')[0]; + + return { + title: versionName.startsWith('v') ? capitalize(versionName.slice(1)) : capitalize(versionName), + weight + }; +} + +/** + * Handles specification version details. + * + * @param {Details} details - The details of the item. + * @param {string} fileBaseName - The base name of the file. + * @returns {Details} - The updated details. + */ +function handleSpecificationVersion(details: Details, fileBaseName: string) { + const detailsObj = details; + + if (fileBaseName.includes('next-spec') || fileBaseName.includes('next-major-spec')) { + detailsObj.isPrerelease = true; + detailsObj.title += ' (Pre-release)'; + } + if (fileBaseName.includes('explorer')) { + detailsObj.title += ' - Explorer'; + } + + return detailsObj; +} + +/** + * Checks if the given path is a directory. + * + * @param {PathLike} dir - The path to check. + * @returns {Promise<boolean>} - A promise that resolves to true if the path is a directory, false otherwise. + */ +async function isDirectory(dir: PathLike) { + return (await stat(dir)).isDirectory(); +} + +/** + * Walks through directories and processes files. + * + * @param {string[][]} directories - The directories to walk through. + * @param {Result} resultObj - The result object to store the processed data. + * @param {string} basePath - The base path for the directories. + * @param {string} [sectionTitle] - The title of the section. + * @param {string} [sectionId] - The ID of the section. + * @param {string} [rootSectionId] - The root ID of the section. + * @param {number} [sectionWeight=0] - The weight of the section. + */ +async function walkDirectories( + directories: string[][], + resultObj: Result, + basePath: string, + sectionTitle?: string, + sectionId?: string | undefined, + rootSectionId?: string | undefined, + sectionWeight = 0 +) { + for (const dir of directories) { + const directory = posix.normalize(dir[0]); + /* istanbul ignore next */ + const sectionSlug = dir[1] || ''; + const files = await readdir(directory); + + for (const file of files) { + let details: Details; + const fileName = normalize(join(directory, file)); + const fileNameWithSection = normalize(join(fileName, '_section.mdx')); + const slug = `/${normalize(relative(basePath, fileName)).replace(/\\/g, '/')}`; + const slugElements = slug.split('/'); + + if (await isDirectory(fileName)) { + if (await pathExists(fileNameWithSection)) { + // Passing a second argument to frontMatter disables cache. 
See https://github.com/asyncapi/website/issues/1057 + details = frontMatter(await readFile(fileNameWithSection, 'utf-8'), {}).data as Details; + /* istanbul ignore next */ + details.title = details.title || capitalize(basename(fileName)); + } else { + details = { + title: capitalize(basename(fileName)) + }; + } + details.isSection = true; + if (slugElements.length > 3) { + details.parent = slugElements[slugElements.length - 2]; + details.sectionId = slugElements[slugElements.length - 1]; + } + if (!details.parent) { + details.isRootSection = true; + details.rootSectionId = slugElements[slugElements.length - 1]; + } + details.sectionWeight = sectionWeight; + details.slug = slug; + addItem(details); + const rootId = details.parent || details.rootSectionId; + + await walkDirectories( + [[fileName, slug]], + resultObj, + basePath, + details.title, + details.sectionId, + rootId, + details.sectionWeight + ); + } else if (file.endsWith('.mdx') && !fileName.endsWith(`${sep}_section.mdx`)) { + const fileContent = await readFile(fileName, 'utf-8'); + // Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057 + const { data, content } = frontMatter(fileContent, {}); + + details = data as Details; + details.toc = toc(content, { slugify: slugifyToC }).json; + details.readingTime = Math.ceil(readingTime(content).minutes); + details.excerpt = details.excerpt || markdownToTxt(content).substr(0, 200); + /* istanbul ignore next */ + details.sectionSlug = sectionSlug || slug.replace(/\.mdx$/, ''); + details.sectionWeight = sectionWeight; + details.sectionTitle = sectionTitle; + details.sectionId = sectionId; + details.rootSectionId = rootSectionId; + details.id = fileName.replace(/\\/g, '/'); + details.isIndex = fileName.endsWith(join('index.mdx')); + details.slug = details.isIndex ? sectionSlug : slug.replace(/\.mdx$/, ''); + if (details.slug.includes('/reference/specification/') && !details.title) { + const fileBaseName = basename(details.slug); + const versionDetails = getVersionDetails(details.slug, specWeight--); + + details.title = versionDetails.title; + details.weight = versionDetails.weight; + + if (releaseNotes.includes(details.title)) { + details.releaseNoteLink = `/blog/release-notes-${details.title}`; + } + + details = handleSpecificationVersion(details, fileBaseName); + } + + // To create a list of available ReleaseNotes list, which will be used to add details.releaseNoteLink attribute. + if (file.startsWith('release-notes') && dir[1] === '/blog') { + const { name } = parse(file); + const version = name.split('-').pop(); + + releaseNotes.push(version); + } + + addItem(details); + } + } + } +} +// Builds a list of posts from the specified directories and writes it to a file +/** + * Builds a list of posts from the specified directories and writes it to a file. + * + * @param {string[][]} postDirectories - The directories containing the posts. + * @param {string} basePath - The base path for the directories. + * @param {string} writeFilePath - The path to write the resulting post list. + * @returns {Promise} - A promise that resolves when the post list is built and written. + * @throws {Error} - Throws an error if there is an issue during the build process. 
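+ * @example + * // Hypothetical invocation (directories and output path are illustrative): + * await buildPostList([['pages/docs', '/docs'], ['pages/blog', '/blog']], 'pages', 'config/posts.json');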
+ */ +export async function buildPostList( + postDirectories: string[][], + basePath: string, + writeFilePath: string +): Promise<void> { + try { + if (!basePath) { + throw new Error('Error while building post list: basePath is required'); + } + if (!writeFilePath) { + throw new Error('Error while building post list: writeFilePath is required'); + } + if (postDirectories.length === 0) { + throw new Error('Error while building post list: postDirectories array is empty'); + } + const normalizedBasePath = normalize(basePath); + + await walkDirectories(postDirectories, finalResult, normalizedBasePath); + const treePosts = buildNavTree(finalResult.docs.filter((p) => p.slug!.startsWith('/docs/'))); + + finalResult.docsTree = treePosts; + finalResult.docs = addDocButtons(finalResult.docs, treePosts); + await writeFile(writeFilePath, JSON.stringify(finalResult, null, ' ')); + } catch (error) { + throw new Error(`Error while building post list: ${(error as Error).message}`, { cause: error }); + } +} diff --git a/scripts/build-rss.js b/scripts/build-rss.js deleted file mode 100644 index 64da620fdb9b..000000000000 --- a/scripts/build-rss.js +++ /dev/null @@ -1,111 +0,0 @@ -const fs = require('fs').promises -const json2xml = require('jgexml/json2xml') - -function getAllPosts() { - return require('../config/posts.json'); -} - -function clean(s) { - s = s.split('&lt;span&gt;').join('') - s = s.split('&amp;').join('&') - s = s.split('&#39;').join("'") - s = s.split('&lt;').join('<') - s = s.split('&gt;').join('>') - s = s.split('&quot;').join('"') - return s -} - -module.exports = async function rssFeed(type, title, desc, outputPath) { - try { - - let posts = getAllPosts()[`${type}`] - const missingDatePosts = posts.filter(post => !post.date); - posts = posts.filter(post => post.date); - posts.sort((i1, i2) => { - const i1Date = new Date(i1.date); - const i2Date = new Date(i2.date); - if (i1.featured && !i2.featured) return -1; - if (!i1.featured && i2.featured) return 1; - return i2Date - i1Date; - }); - - if (missingDatePosts.length > 0) { - throw new Error(`Missing date in posts: ${missingDatePosts.map(p => p.title || p.slug).join(', ')}`); - } - - const base = 'https://www.asyncapi.com' - const tracking = '?utm_source=rss'; - - const feed = {} - const rss = {} - rss['@version'] = '2.0' - rss["@xmlns:atom"] = 'http://www.w3.org/2005/Atom' - rss.channel = {} - rss.channel.title = title - rss.channel.link = `${base}/${outputPath}` - rss.channel["atom:link"] = {} - rss.channel["atom:link"]["@rel"] = 'self' - rss.channel["atom:link"]["@href"] = rss.channel.link - rss.channel["atom:link"]["@type"] = 'application/rss+xml' - rss.channel.description = desc - rss.channel.language = 'en-gb'; - rss.channel.copyright = 'Made with :love: by the AsyncAPI Initiative.'; - rss.channel.webMaster = 'info@asyncapi.io (AsyncAPI Initiative)' - rss.channel.pubDate = new Date().toUTCString() - rss.channel.generator = 'next.js' - rss.channel.item = [] - - const invalidPosts = posts.filter(post => - !post.title || !post.slug || !post.excerpt || !post.date - ); - - if (invalidPosts.length > 0) { - throw new Error(`Missing required fields in posts: ${invalidPosts.map(p => p.title || p.slug).join(', ')}`); - } - - const mimeTypes = { - '.jpeg': 'image/jpeg', - '.jpg': 'image/jpeg', - '.png': 'image/png', - '.svg': 'image/svg+xml', - '.webp': 'image/webp', - '.gif': 'image/gif', - '.bmp': 'image/bmp', - '.tiff': 'image/tiff', - '.ico': 'image/x-icon' - }; - - for (let post of posts) { - const link = `${base}${post.slug}${tracking}`; - const { title, excerpt, 
date } = post; - const pubDate = new Date(date).toUTCString(); - const description = clean(excerpt); - const guid = { '@isPermaLink': true, '': link }; - const item = { - title, - description, - link, - category: type, - guid, - pubDate - }; - if (post.cover) { - const fileExtension = post.cover.substring(post.cover.lastIndexOf('.')).toLowerCase(); - const mimeType = mimeTypes[fileExtension] || 'image/jpeg'; - item.enclosure = { - "@url": base + post.cover, - "@length": 15026, // dummy value, anything works - "@type": mimeType - }; - } - rss.channel.item.push(item) - } - - feed.rss = rss - - const xml = json2xml.getXml(feed, '@', '', 2); - await fs.writeFile(`./public/${outputPath}`, xml, 'utf8'); - } catch (err) { - throw new Error(`Failed to generate RSS feed: ${err.message}`); - } -}; diff --git a/scripts/build-rss.ts b/scripts/build-rss.ts new file mode 100644 index 000000000000..111d82679234 --- /dev/null +++ b/scripts/build-rss.ts @@ -0,0 +1,144 @@ +import fs from 'fs/promises'; +import json2xml from 'jgexml/json2xml'; + +import type { BlogPostTypes, RSS, RSSItemType } from '@/types/scripts/build-rss'; + +/** + * Retrieves all blog posts from the configuration file. + * + * @returns {Promise} - A promise that resolves to the list of all blog posts. + */ +async function getAllPosts() { + const posts = ((await import('../config/posts.json')) as any).default; + + return posts; +} + +/** + * Cleans a string by replacing HTML entities with their corresponding characters. + * + * @param {string} s - The string to clean. + * @returns {string} - The cleaned string. + */ +function clean(s: string) { + let cleanS = s; + + cleanS = cleanS.split('&lt;span&gt;').join(''); + cleanS = cleanS.split('&amp;').join('&'); + cleanS = cleanS.split('&#39;').join("'"); + cleanS = cleanS.split('&lt;').join('<'); + cleanS = cleanS.split('&gt;').join('>'); + cleanS = cleanS.split('&quot;').join('"'); + + return cleanS; +} + +/** + * Generates an RSS feed for the specified blog post type. + * + * @param {BlogPostTypes} type - The type of blog posts to include in the RSS feed. + * @param {string} rssTitle - The title of the RSS feed. + * @param {string} desc - The description of the RSS feed. + * @param {string} outputPath - The output path for the generated RSS feed file. + * @throws {Error} - Throws an error if there is an issue during the RSS feed generation. 
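+ * @example + * // Hypothetical invocation (arguments are illustrative): + * await rssFeed('blog', 'AsyncAPI Blog RSS', 'Latest posts from the AsyncAPI blog.', 'rss.xml');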
+ */ +export async function rssFeed(type: BlogPostTypes, rssTitle: string, desc: string, outputPath: string) { + try { + let posts = (await getAllPosts())[`${type}`] as any[]; + const missingDatePosts = posts.filter((post) => !post.date); + + posts = posts.filter((post) => post.date); + posts.sort((i1, i2) => { + const i1Date = new Date(i1.date); + const i2Date = new Date(i2.date); + + if (i1.featured && !i2.featured) return -1; + if (!i1.featured && i2.featured) return 1; + + return i2Date.getTime() - i1Date.getTime(); + }); + /* istanbul ignore next */ + if (missingDatePosts.length > 0) { + throw new Error(`Missing date in posts: ${missingDatePosts.map((p) => p.title || p.slug).join(', ')}`); + } + + const base = 'https://www.asyncapi.com'; + const tracking = '?utm_source=rss'; + + const feed = {} as { rss: RSS }; + const rss = {} as RSS; + + rss['@version'] = '2.0'; + rss['@xmlns:atom'] = 'http://www.w3.org/2005/Atom'; + rss.channel = {} as RSS['channel']; + rss.channel.title = rssTitle; + rss.channel.link = `${base}/${outputPath}`; + rss.channel['atom:link'] = {} as RSS['channel']['atom:link']; + rss.channel['atom:link']['@rel'] = 'self'; + rss.channel['atom:link']['@href'] = rss.channel.link; + rss.channel['atom:link']['@type'] = 'application/rss+xml'; + rss.channel.description = desc; + rss.channel.language = 'en-gb'; + rss.channel.copyright = 'Made with :love: by the AsyncAPI Initiative.'; + rss.channel.webMaster = 'info@asyncapi.io (AsyncAPI Initiative)'; + rss.channel.pubDate = new Date().toUTCString(); + rss.channel.generator = 'next.js'; + rss.channel.item = []; + + const invalidPosts = posts.filter((post) => !post.title || !post.slug || !post.excerpt || !post.date); + + if (invalidPosts.length > 0) { + throw new Error(`Missing required fields in posts: ${invalidPosts.map((p) => p.title || p.slug).join(', ')}`); + } + const mimeTypes: { + [key: string]: string; + } = { + '.jpeg': 'image/jpeg', + '.jpg': 'image/jpeg', + '.png': 'image/png', + '.svg': 'image/svg+xml', + '.webp': 'image/webp', + '.gif': 'image/gif', + '.bmp': 'image/bmp', + '.tiff': 'image/tiff', + '.ico': 'image/x-icon' + }; + + for (const post of posts) { + const link = `${base}${post.slug}${tracking}`; + const { title, excerpt, date } = post; + const pubDate = new Date(date).toUTCString(); + const description = clean(excerpt); + const guid = { '@isPermaLink': true, '': link }; + const item: RSSItemType = { + title, + description, + link, + category: type, + guid, + pubDate + } as RSSItemType; + + if (post.cover) { + const fileExtension = post.cover.substring(post.cover.lastIndexOf('.')).toLowerCase(); + /* istanbul ignore next */ + const mimeType = mimeTypes[fileExtension] || 'image/jpeg'; + + item.enclosure = { + '@url': base + post.cover, + '@length': 15026, // dummy value, anything works + '@type': mimeType + }; + } + rss.channel.item.push(item); + } + + feed.rss = rss; + + const xml = json2xml.getXml(feed, '@', '', 2); + + await fs.writeFile(`./public/${outputPath}`, xml, 'utf8'); + } catch (err) { + throw new Error(`Failed to generate RSS feed: ${(err as Error).message}`); + } +} diff --git a/scripts/build-tools.js b/scripts/build-tools.js deleted file mode 100644 index c5cce74a7cb1..000000000000 --- a/scripts/build-tools.js +++ /dev/null @@ -1,30 +0,0 @@ -const { getData } = require('./tools/extract-tools-github'); -const { convertTools } = require('./tools/tools-object'); -const { combineTools } = require('./tools/combine-tools'); -const fs = require('fs-extra'); -const { resolve } = require('path'); - 
-const buildTools = async (automatedToolsPath, manualToolsPath, toolsPath, tagsPath) => { - try { - let githubExtractData = await getData(); - let automatedTools = await convertTools(githubExtractData); - - await fs.writeFile(automatedToolsPath, JSON.stringify(automatedTools, null, ' ')); - - await combineTools(automatedTools, require(manualToolsPath), toolsPath, tagsPath); - } catch (err) { - throw new Error(`An error occurred while building tools: ${err.message}`); - } -}; - -/* istanbul ignore next */ -if (require.main === module) { - const automatedToolsPath = resolve(__dirname, '../config', 'tools-automated.json'); - const manualToolsPath = resolve(__dirname, '../config', 'tools-manual.json'); - const toolsPath = resolve(__dirname, '../config', 'tools.json'); - const tagsPath = resolve(__dirname, '../config', 'all-tags.json'); - - buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath); -} - -module.exports = { buildTools }; diff --git a/scripts/build-tools.ts b/scripts/build-tools.ts new file mode 100644 index 000000000000..1cdc96bd3f9a --- /dev/null +++ b/scripts/build-tools.ts @@ -0,0 +1,46 @@ +import fs from 'fs-extra'; +import { dirname, resolve } from 'path'; +import { fileURLToPath } from 'url'; + +import { combineTools } from './tools/combine-tools'; +import { getData } from './tools/extract-tools-github'; +import { convertTools } from './tools/tools-object'; + +const currentFilePath = fileURLToPath(import.meta.url); +const currentDirPath = dirname(currentFilePath); + +/** + * Builds the tools by combining automated and manual tools data, and writes the result to the specified paths. + * + * @param {string} automatedToolsPath - The path to write the automated tools data. + * @param {string} manualToolsPath - The path to read the manual tools data. + * @param {string} toolsPath - The path to write the combined tools data. + * @param {string} tagsPath - The path to write the tags data. + * @throws {Error} - Throws an error if there is an issue during the build process. 
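+ * @example + * // Mirrors the main-module invocation below (paths resolved relative to this script): + * await buildTools(resolve(currentDirPath, '../config', 'tools-automated.json'), resolve(currentDirPath, '../config', 'tools-manual.json'), resolve(currentDirPath, '../config', 'tools.json'), resolve(currentDirPath, '../config', 'all-tags.json'));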
+ */ +async function buildTools(automatedToolsPath: string, manualToolsPath: string, toolsPath: string, tagsPath: string) { + try { + const githubExtractData = await getData(); + const automatedTools = await convertTools(githubExtractData); + + await fs.writeFile(automatedToolsPath, JSON.stringify(automatedTools, null, ' ')); + + const manualTools = JSON.parse(await fs.readFile(manualToolsPath, 'utf-8')); + + await combineTools(automatedTools, manualTools, toolsPath, tagsPath); + } catch (err) { + throw new Error(`An error occurred while building tools: ${(err as Error).message}`); + } +} + +/* istanbul ignore next */ +if (process.argv[1] === fileURLToPath(import.meta.url)) { + const automatedToolsPath = resolve(currentDirPath, '../config', 'tools-automated.json'); + const manualToolsPath = resolve(currentDirPath, '../config', 'tools-manual.json'); + const toolsPath = resolve(currentDirPath, '../config', 'tools.json'); + const tagsPath = resolve(currentDirPath, '../config', 'all-tags.json'); + + buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath); +} + +export { buildTools }; diff --git a/scripts/casestudies/index.js b/scripts/casestudies/index.js deleted file mode 100644 index 77695e06fd38..000000000000 --- a/scripts/casestudies/index.js +++ /dev/null @@ -1,19 +0,0 @@ -const { readdir, writeFile, readFile } = require('fs').promises; -const { convertToJson } = require('../../scripts/utils'); - -module.exports = async function buildCaseStudiesList(dirWithCaseStudy, writeFilePath) { - try { - let files = await readdir(dirWithCaseStudy); - let caseStudiesList = []; - for (let file of files) { - const caseStudyFileName = [dirWithCaseStudy, file].join('/'); - const caseStudyContent = await readFile(caseStudyFileName, 'utf-8'); - const jsonContent = convertToJson(caseStudyContent); - - caseStudiesList.push(jsonContent); - await writeFile(writeFilePath, JSON.stringify(caseStudiesList)) - } - } catch (err) { - throw new Error(err); - } -}; diff --git a/scripts/casestudies/index.ts b/scripts/casestudies/index.ts new file mode 100644 index 000000000000..d1edc4f2e1cb --- /dev/null +++ b/scripts/casestudies/index.ts @@ -0,0 +1,33 @@ +import { readdir, readFile, writeFile } from 'fs/promises'; +import { join } from 'path'; + +import { convertToJson } from '../utils'; + +/** + * Builds a list of case studies from files in a directory and writes it to a specified file. + * @param {string} dirWithCaseStudy - The directory containing case study files. + * @param {string} writeFilePath - The path to write the case studies list to. + * @returns {Promise} - The list of case studies. + */ +export async function buildCaseStudiesList(dirWithCaseStudy: string, writeFilePath: string) { + try { + const files = await readdir(dirWithCaseStudy); + + // Process all files in parallel using Promise.all + const caseStudiesList = await Promise.all( + files.map(async (file) => { + const caseStudyFileName = join(dirWithCaseStudy, file); + const caseStudyContent = await readFile(caseStudyFileName, 'utf-8'); + + return convertToJson(caseStudyContent); + }) + ); + + // Write the complete list once after all files are processed + await writeFile(writeFilePath, JSON.stringify(caseStudiesList)); + + return caseStudiesList; + } catch (err) { + throw new Error(err instanceof Error ? 
err.message : String(err)); + } +} diff --git a/scripts/compose.js b/scripts/compose.ts similarity index 72% rename from scripts/compose.js rename to scripts/compose.ts index 8c4f0e3a4a36..4a08f8bf75e6 100644 --- a/scripts/compose.js +++ b/scripts/compose.ts @@ -2,25 +2,41 @@ * Script based on https://github.com/timlrx/tailwind-nextjs-starter-blog/blob/master/scripts/compose.js */ -const fs = require('fs') -const inquirer = require('inquirer') -const dedent = require('dedent') -const moment = require('moment') - -const genFrontMatter = (answers) => { - let d = new Date() - const date = [ - d.getFullYear(), - ('0' + (d.getMonth() + 1)).slice(-2), - ('0' + d.getDate()).slice(-2), - ].join('-') - const tagArray = answers.tags.split(',') - tagArray.forEach((tag, index) => (tagArray[index] = tag.trim())) - const tags = "'" + tagArray.join("','") + "'" +import dedent from 'dedent'; +import fs from 'fs'; +import inquirer from 'inquirer'; +import moment from 'moment'; + +import { logger } from './utils/logger'; + +/** + * Type definition for the answers from the compose prompt. + */ +type ComposePromptType = { + title: string; + excerpt: string; + tags: string; + type: string; + canonical: string; +}; + +/** + * Generates the front matter for a blog post based on the provided answers. + * + * @param {ComposePromptType} answers - The answers from the compose prompt. + * @returns {string} - The generated front matter. + */ +function genFrontMatter(answers: ComposePromptType) { + const tagArray = answers.tags.split(','); + + tagArray.forEach((tag: string, index: number) => { + tagArray[index] = tag.trim(); + }); + const tags = `'${tagArray.join("','")}'`; let frontMatter = dedent`--- title: ${answers.title ? answers.title : 'Untitled'} - date: ${moment().format("YYYY-MM-DDTh:mm:ssZ")} + date: ${moment().format('YYYY-MM-DDTh:mm:ssZ')} type: ${answers.type} canonical: ${answers.canonical ? answers.canonical : ''} tags: [${answers.tags ? tags : ''}] @@ -90,11 +106,11 @@ const genFrontMatter = (answers) => {
- ` + `; - frontMatter = frontMatter + '\n---' + frontMatter += '\n---'; - return frontMatter + return frontMatter; } inquirer @@ -102,52 +118,53 @@ inquirer { name: 'title', message: 'Enter post title:', - type: 'input', + type: 'input' }, { name: 'excerpt', message: 'Enter post excerpt:', - type: 'input', + type: 'input' }, { name: 'tags', message: 'Any Tags? Separate them with , or leave empty if no tags.', - type: 'input', + type: 'input' }, { name: 'type', message: 'Enter the post type:', type: 'list', - choices: ['Communication', 'Community', 'Engineering', 'Marketing', 'Strategy', 'Video'], + choices: ['Communication', 'Community', 'Engineering', 'Marketing', 'Strategy', 'Video'] }, { name: 'canonical', message: 'Enter the canonical URL if any:', - type: 'input', - }, + type: 'input' + } ]) - .then((answers) => { + .then((answers: ComposePromptType) => { // Remove special characters and replace space with - const fileName = answers.title .toLowerCase() .replace(/[^a-zA-Z0-9 ]/g, '') .replace(/ /g, '-') - .replace(/-+/g, '-') - const frontMatter = genFrontMatter(answers) - const filePath = `pages/blog/${fileName ? fileName : 'untitled'}.md` + .replace(/-+/g, '-'); + const frontMatter = genFrontMatter(answers); + const filePath = `pages/blog/${fileName || 'untitled'}.md`; + fs.writeFile(filePath, frontMatter, { flag: 'wx' }, (err) => { if (err) { - throw err + throw err; } else { - console.log(`Blog post generated successfully at ${filePath}`) + logger.info(`Blog post generated successfully at ${filePath}`); } - }) + }); }) .catch((error) => { - console.error(error) + logger.error(error); if (error.isTtyError) { - console.log("Prompt couldn't be rendered in the current environment") + logger.error("Prompt couldn't be rendered in the current environment"); } else { - console.log('Something went wrong, sorry!') + logger.error('Something went wrong, sorry!'); } - }) + }); diff --git a/scripts/dashboard/build-dashboard.js b/scripts/dashboard/build-dashboard.js deleted file mode 100644 index 066cf8b84c94..000000000000 --- a/scripts/dashboard/build-dashboard.js +++ /dev/null @@ -1,184 +0,0 @@ -const { writeFile } = require('fs-extra'); -const { resolve } = require('path'); -const { graphql } = require('@octokit/graphql'); -const { Queries } = require('./issue-queries'); - -/** - * Introduces a delay in the execution flow. - * @param {number} ms - The number of milliseconds to pause. - * @returns {Promise} A promise that resolves after the specified delay. - */ -async function pause(ms) { - return new Promise((res) => { - setTimeout(res, ms); - }); -} - -async function getDiscussions(query, pageSize, endCursor = null) { - try { - const result = await graphql(query, { - first: pageSize, - after: endCursor, - headers: { - authorization: `token ${process.env.GITHUB_TOKEN}` - } - }); - - if (result.rateLimit.remaining <= 100) { - console.log( - `[WARNING] GitHub GraphQL rateLimit`, - `cost = ${result.rateLimit.cost}`, - `limit = ${result.rateLimit.limit}`, - `remaining = ${result.rateLimit.remaining}`, - `resetAt = ${result.rateLimit.resetAt}` - ); - } - - await pause(500); - - const { hasNextPage } = result.search.pageInfo; - - if (!hasNextPage) { - return result.search.nodes; - } - return result.search.nodes.concat(await getDiscussions(query, pageSize, result.search.pageInfo.endCursor)); - } catch (e) { - console.error(e); - return Promise.reject(e); - } -} - -async function getDiscussionByID(isPR, id) { - try { - const result = await graphql(isPR ? 
Queries.pullRequestById : Queries.issueById, { - id, - headers: { - authorization: `token ${process.env.GITHUB_TOKEN}` - } - }); - - return result; - } catch (e) { - console.error(e); - return Promise.reject(e); - } -} - -async function processHotDiscussions(batch) { - return Promise.all( - batch.map(async (discussion) => { - try { - const isPR = discussion.__typename === 'PullRequest'; - if (discussion.comments.pageInfo.hasNextPage) { - const fetchedDiscussion = await getDiscussionByID(isPR, discussion.id); - discussion = fetchedDiscussion.node; - } - - const interactionsCount = - discussion.reactions.totalCount + - discussion.comments.totalCount + - discussion.comments.nodes.reduce((acc, curr) => acc + curr.reactions.totalCount, 0); - - const finalInteractionsCount = isPR - ? interactionsCount + - discussion.reviews.totalCount + - discussion.reviews.nodes.reduce((acc, curr) => acc + curr.comments.totalCount, 0) - : interactionsCount; - - return { - id: discussion.id, - isPR, - isAssigned: !!discussion.assignees.totalCount, - title: discussion.title, - author: discussion.author ? discussion.author.login : '', - resourcePath: discussion.resourcePath, - repo: `asyncapi/${discussion.repository.name}`, - labels: discussion.labels ? discussion.labels.nodes : [], - score: finalInteractionsCount / (monthsSince(discussion.timelineItems.updatedAt) + 2) ** 1.8 - }; - } catch (e) { - console.error(`there were some issues while parsing this item: ${JSON.stringify(discussion)}`); - throw e; - } - }) - ); -} - -async function getHotDiscussions(discussions) { - const result = []; - const batchSize = 5; - - for (let i = 0; i < discussions.length; i += batchSize) { - const batch = discussions.slice(i, i + batchSize); - const batchResults = await processHotDiscussions(batch); - await pause(1000); - result.push(...batchResults); - } - - result.sort((ElemA, ElemB) => ElemB.score - ElemA.score); - const filteredResult = result.filter((issue) => issue.author !== 'asyncapi-bot'); - return filteredResult.slice(0, 12); -} - -async function writeToFile(content, writePath) { - try { - await writeFile(writePath, JSON.stringify(content, null, ' ')); - } catch (error) { - console.error('Failed to write dashboard data:', { - error: error.message, - writePath - }); - throw error; - } -} - -async function mapGoodFirstIssues(issues) { - return issues.map((issue) => ({ - id: issue.id, - title: issue.title, - isAssigned: !!issue.assignees.totalCount, - resourcePath: issue.resourcePath, - repo: `asyncapi/${issue.repository.name}`, - author: issue.author.login, - area: getLabel(issue, 'area/') || 'Unknown', - labels: issue.labels.nodes.filter( - (label) => !label.name.startsWith('area/') && !label.name.startsWith('good first issue') - ) - })); -} - -function getLabel(issue, filter) { - const result = issue.labels.nodes.find((label) => label.name.startsWith(filter)); - return result?.name.split('/')[1]; -} - -function monthsSince(date) { - const seconds = Math.floor((new Date() - new Date(date)) / 1000); - // 2592000 = number of seconds in a month = 30 * 24 * 60 * 60 - const months = seconds / 2592000; - return Math.floor(months); -} - -async function start(writePath) { - try { - const issues = await getDiscussions(Queries.hotDiscussionsIssues, 20); - const PRs = await getDiscussions(Queries.hotDiscussionsPullRequests, 20); - const rawGoodFirstIssues = await getDiscussions(Queries.goodFirstIssues, 20); - const discussions = issues.concat(PRs); - const [hotDiscussions, goodFirstIssues] = await Promise.all([ - 
getHotDiscussions(discussions),
-      mapGoodFirstIssues(rawGoodFirstIssues)
-    ]);
-    return await writeToFile({ hotDiscussions, goodFirstIssues }, writePath);
-  } catch (e) {
-    console.log('There were some issues parsing data from github.');
-    console.log(e);
-  }
-}
-
-/* istanbul ignore next */
-if (require.main === module) {
-  start(resolve(__dirname, '..', '..', 'dashboard.json'));
-}
-
-module.exports = { getLabel, monthsSince, mapGoodFirstIssues, getHotDiscussions, getDiscussionByID, getDiscussions, writeToFile, start, processHotDiscussions, pause };
diff --git a/scripts/dashboard/build-dashboard.ts b/scripts/dashboard/build-dashboard.ts
new file mode 100644
index 000000000000..b6ecbfd914d9
--- /dev/null
+++ b/scripts/dashboard/build-dashboard.ts
@@ -0,0 +1,280 @@
+import { graphql } from '@octokit/graphql';
+import { writeFile } from 'fs/promises';
+import { dirname, resolve } from 'path';
+import { fileURLToPath } from 'url';
+
+import type {
+  Discussion,
+  GoodFirstIssues,
+  HotDiscussionsIssuesNode,
+  HotDiscussionsPullRequestsNode,
+  IssueById,
+  MappedIssue,
+  ProcessedDiscussion,
+  PullRequestById
+} from '@/types/scripts/dashboard';
+
+import { pause } from '../utils';
+import { logger } from '../utils/logger';
+import { Queries } from './issue-queries';
+
+const currentFilePath = fileURLToPath(import.meta.url);
+const currentDirPath = dirname(currentFilePath);
+
+/**
+ * Calculates the number of months since a given date.
+ * @param {string} date - The date to calculate from.
+ * @returns {number} - The number of months since the date.
+ */
+function monthsSince(date: string) {
+  const seconds = Math.floor((new Date().valueOf() - new Date(date).valueOf()) / 1000);
+  // 2592000 = number of seconds in a month = 30 * 24 * 60 * 60
+  const months = seconds / 2592000;
+
+  return Math.floor(months);
+}
+
+/**
+ * Retrieves a label from an issue based on a filter.
+ * @param {GoodFirstIssues} issue - The issue to retrieve the label from.
+ * @param {string} filter - The filter to apply to the label.
+ * @returns {string | undefined} - The label if found, otherwise undefined.
+ */
+function getLabel(issue: GoodFirstIssues, filter: string) {
+  const result = issue.labels.nodes.find((label) => label.name.startsWith(filter));
+
+  return result?.name.split('/')[1];
+}
+
+/**
+ * Fetches discussions from GitHub GraphQL API.
+ * @param {string} query - The GraphQL query to execute.
+ * @param {number} pageSize - The number of results per page.
+ * @param {null | string} [endCursor=null] - The cursor for pagination.
+ * @returns {Promise<any>} - The fetched discussions.
+ */
+async function getDiscussions(
+  query: string,
+  pageSize: number,
+  endCursor: null | string = null
+): Promise<any> {
+  try {
+    const result: Discussion = await graphql(query, {
+      first: pageSize,
+      after: endCursor,
+      headers: {
+        authorization: `token ${process.env.GITHUB_TOKEN}`
+      }
+    });
+
+    if (result.rateLimit.remaining <= 100) {
+      logger.warn(
+        'GitHub GraphQL rateLimit \n' +
+          `cost = ${result.rateLimit.cost}\n` +
+          `limit = ${result.rateLimit.limit}\n` +
+          `remaining = ${result.rateLimit.remaining}\n` +
+          `resetAt = ${result.rateLimit.resetAt}`
+      );
+    }
+
+    await pause(500);
+
+    const { hasNextPage } = result.search.pageInfo;
+
+    if (!hasNextPage) {
+      return result.search.nodes;
+    }
+
+    return result.search.nodes.concat(await getDiscussions(query, pageSize, result.search.pageInfo.endCursor));
+  } catch (e) {
+    logger.error(e);
+
+    return Promise.reject(e);
+  }
+}
+
+/**
+ * Fetches a discussion by its ID.
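+ * Selects between the pullRequestById and issueById queries based on the isPR flag; used by
+ * processHotDiscussions to refetch a discussion whose comment list is paginated.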
+ * @param {boolean} isPR - Whether the discussion is a pull request.
+ * @param {string} id - The ID of the discussion.
+ * @returns {Promise<PullRequestById | IssueById>} - The fetched discussion.
+ */
+async function getDiscussionByID(isPR: boolean, id: string): Promise<PullRequestById | IssueById> {
+  try {
+    const result: PullRequestById | IssueById = await graphql(isPR ? Queries.pullRequestById : Queries.issueById, {
+      id,
+      headers: {
+        authorization: `token ${process.env.GITHUB_TOKEN}`
+      }
+    });
+
+    return result;
+  } catch (e) {
+    logger.error(e);
+
+    return Promise.reject(e);
+  }
+}
+
+/**
+ * Processes a batch of hot discussions.
+ * @param {HotDiscussionsIssuesNode[]} batch - The batch of discussions to process.
+ * @returns {Promise<ProcessedDiscussion[]>} - The processed discussions.
+ */
+async function processHotDiscussions(batch: HotDiscussionsIssuesNode[]) {
+  return Promise.all(
+    batch.map(async (discussion) => {
+      try {
+        // eslint-disable-next-line no-underscore-dangle
+        const isPR = discussion.__typename === 'PullRequest';
+
+        if (discussion.comments.pageInfo?.hasNextPage) {
+          const fetchedDiscussion = await getDiscussionByID(isPR, discussion.id);
+
+          // eslint-disable-next-line no-param-reassign
+          discussion = fetchedDiscussion.node;
+        }
+
+        const interactionsCount =
+          discussion.reactions.totalCount +
+          discussion.comments.totalCount +
+          discussion.comments.nodes.reduce((acc, curr) => acc + curr.reactions.totalCount, 0);
+
+        const finalInteractionsCount = isPR
+          ? interactionsCount +
+            discussion.reviews.totalCount +
+            discussion.reviews.nodes!.reduce((acc, curr) => acc + curr.comments.totalCount, 0)
+          : interactionsCount;
+
+        /* istanbul ignore next */
+
+        return {
+          id: discussion.id,
+          isPR,
+          isAssigned: !!discussion.assignees.totalCount,
+          title: discussion.title,
+          author: discussion.author ? discussion.author.login : '',
+          resourcePath: discussion.resourcePath,
+          repo: `asyncapi/${discussion.repository.name}`,
+          labels: discussion.labels ? discussion.labels.nodes : [],
+          score: finalInteractionsCount / (monthsSince(discussion.timelineItems.updatedAt) + 2) ** 1.8
+        };
+      } catch (e) {
+        logger.error(`there were some issues while parsing this item: ${JSON.stringify(discussion)}`);
+        throw e;
+      }
+    })
+  );
+}
+
+/**
+ * Retrieves and processes hot discussions.
+ * @param {HotDiscussionsIssuesNode[]} discussions - The discussions to process.
+ * @returns {Promise<ProcessedDiscussion[]>} - The processed hot discussions.
+ */
+async function getHotDiscussions(discussions: HotDiscussionsIssuesNode[]) {
+  const result: ProcessedDiscussion[] = [];
+  const batchSize = 5;
+
+  for (let i = 0; i < discussions.length; i += batchSize) {
+    const batch = discussions.slice(i, i + batchSize);
+    // eslint-disable-next-line no-await-in-loop
+    const batchResults = await processHotDiscussions(batch);
+
+    // eslint-disable-next-line no-await-in-loop
+    await pause(1000);
+    result.push(...batchResults);
+  }
+
+  result.sort((ElemA, ElemB) => ElemB.score - ElemA.score);
+  const filteredResult = result.filter((issue) => issue.author !== 'asyncapi-bot');
+
+  return filteredResult.slice(0, 12);
+}
+
+/**
+ * Writes content to a file.
+ * @param {object} content - The content to write.
+ * @param {ProcessedDiscussion[]} content.hotDiscussions - The hot discussions to write.
+ * @param {MappedIssue[]} content.goodFirstIssues - The good first issues to write.
+ * @param {string} writePath - The path to write the file to.
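+ * @example
+ * // Illustrative call; the payload and path are made-up values:
+ * await writeToFile({ hotDiscussions: [], goodFirstIssues: [] }, 'dashboard.json');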
+ * @returns {Promise<void>}
+ */
+async function writeToFile(
+  content: {
+    hotDiscussions: ProcessedDiscussion[];
+    goodFirstIssues: MappedIssue[];
+  },
+  writePath: string
+) {
+  try {
+    await writeFile(writePath, JSON.stringify(content, null, ' '));
+  } catch (error) {
+    logger.error('Failed to write dashboard data:', {
+      error: (error as Error).message,
+      writePath
+    });
+    throw error;
+  }
+}
+
+/**
+ * Maps good first issues to a simplified format.
+ * @param {GoodFirstIssues[]} issues - The issues to map.
+ * @returns {Promise<MappedIssue[]>} - The mapped issues.
+ */
+async function mapGoodFirstIssues(issues: GoodFirstIssues[]) {
+  /* istanbul ignore next */
+  return issues.map((issue) => ({
+    id: issue.id,
+    title: issue.title,
+    isAssigned: !!issue.assignees.totalCount,
+    resourcePath: issue.resourcePath,
+    repo: `asyncapi/${issue.repository.name}`,
+    author: issue.author.login,
+    area: getLabel(issue, 'area/') || 'Unknown',
+    labels: issue.labels.nodes.filter(
+      (label) => !label.name.startsWith('area/') && !label.name.startsWith('good first issue')
+    )
+  }));
+}
+
+/**
+ * Starts the process of fetching and writing dashboard data.
+ * @param {string} writePath - The path to write the dashboard data to.
+ * @returns {Promise<void>}
+ */
+async function start(writePath: string): Promise<void> {
+  try {
+    const issues = (await getDiscussions(Queries.hotDiscussionsIssues, 20)) as HotDiscussionsIssuesNode[];
+    const PRs = (await getDiscussions(Queries.hotDiscussionsPullRequests, 20)) as HotDiscussionsPullRequestsNode[];
+    const rawGoodFirstIssues: GoodFirstIssues[] = await getDiscussions(Queries.goodFirstIssues, 20);
+    const discussions = issues.concat(PRs);
+    const [hotDiscussions, goodFirstIssues] = await Promise.all([
+      getHotDiscussions(discussions),
+      mapGoodFirstIssues(rawGoodFirstIssues)
+    ]);
+
+    await writeToFile({ hotDiscussions, goodFirstIssues }, writePath);
+  } catch (e) {
+    logger.error('There were some issues parsing data from github.');
+    logger.error(e);
+  }
+}
+
+/* istanbul ignore next */
+if (process.argv[1] === fileURLToPath(import.meta.url)) {
+  start(resolve(currentDirPath, '..', '..', 'dashboard.json'));
+}
+
+export {
+  getDiscussionByID,
+  getDiscussions,
+  getHotDiscussions,
+  getLabel,
+  mapGoodFirstIssues,
+  monthsSince,
+  processHotDiscussions,
+  start,
+  writeToFile
+};
diff --git a/scripts/dashboard/issue-queries.js b/scripts/dashboard/issue-queries.ts
similarity index 99%
rename from scripts/dashboard/issue-queries.js
rename to scripts/dashboard/issue-queries.ts
index 629214a9ea90..7eafae9fbd3a 100644
--- a/scripts/dashboard/issue-queries.js
+++ b/scripts/dashboard/issue-queries.ts
@@ -1,4 +1,4 @@
-exports.Queries = Object.freeze({
+export const Queries = Object.freeze({
   pullRequestById: `
 query IssueByID($id: ID!)
{ node(id: $id) { @@ -274,5 +274,5 @@ query($first: Int!, $after: String) { resetAt } } -`, +` }); diff --git a/scripts/finance/index.js b/scripts/finance/index.js deleted file mode 100644 index 3f4a5edcfb6e..000000000000 --- a/scripts/finance/index.js +++ /dev/null @@ -1,25 +0,0 @@ -const { - promises: { mkdir } -} = require('fs'); -const { resolve } = require('path'); -const writeJSON = require('../utils/readAndWriteJson.js'); - -module.exports = async function buildFinanceInfoList({ currentDir, configDir, financeDir, year, jsonDataDir }) { - try { - const expensesPath = resolve(currentDir, configDir, financeDir, year, 'Expenses.yml'); - const expensesLinkPath = resolve(currentDir, configDir, financeDir, year, 'ExpensesLink.yml'); - - // Ensure the directory exists before writing the files - const jsonDirectory = resolve(currentDir, configDir, financeDir, jsonDataDir); - await mkdir(jsonDirectory, { recursive: true }); - - // Write Expenses and ExpensesLink to JSON files - const expensesJsonPath = resolve(jsonDirectory, 'Expenses.json'); - await writeJSON(expensesPath, expensesJsonPath); - - const expensesLinkJsonPath = resolve(jsonDirectory, 'ExpensesLink.json'); - await writeJSON(expensesLinkPath, expensesLinkJsonPath); - } catch (err) { - throw new Error(err); - } -}; \ No newline at end of file diff --git a/scripts/finance/index.ts b/scripts/finance/index.ts new file mode 100644 index 000000000000..0e9081898730 --- /dev/null +++ b/scripts/finance/index.ts @@ -0,0 +1,51 @@ +import { mkdir } from 'fs/promises'; +import { resolve } from 'path'; + +import { writeJSON } from '../utils/readAndWriteJson'; + +/** + * Interface for the properties required to build the finance info list. + */ +interface BuildFinanceInfoListProps { + currentDir: string; + configDir: string; + financeDir: string; + year: string; + jsonDataDir: string; +} + +/** + * Builds the finance info list by reading YAML files and writing them as JSON files. + * + * @param {BuildFinanceInfoListProps} props - The properties required to build the finance info list. + * @returns {Promise} A promise that resolves when the finance info list has been built. + * @throws {Error} Throws an error if there is an issue building the finance info list. 
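+ * @example
+ * // Sketch of a call; the argument values are illustrative, not taken from this change:
+ * await buildFinanceInfoList({
+ *   currentDir: '.',
+ *   configDir: 'config',
+ *   financeDir: 'finance',
+ *   year: '2024',
+ *   jsonDataDir: 'json-data'
+ * });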
+ */
+export async function buildFinanceInfoList({
+  currentDir,
+  configDir,
+  financeDir,
+  year,
+  jsonDataDir
+}: BuildFinanceInfoListProps): Promise<void> {
+  try {
+    const expensesPath = resolve(currentDir, configDir, financeDir, year, 'Expenses.yml');
+    const expensesLinkPath = resolve(currentDir, configDir, financeDir, year, 'ExpensesLink.yml');
+
+    // Ensure the directory exists before writing the files
+    const jsonDirectory = resolve(currentDir, configDir, financeDir, jsonDataDir);
+
+    await mkdir(jsonDirectory, { recursive: true });
+
+    // Write Expenses and ExpensesLink to JSON files
+    const expensesJsonPath = resolve(jsonDirectory, 'Expenses.json');
+
+    await writeJSON(expensesPath, expensesJsonPath);
+
+    const expensesLinkJsonPath = resolve(jsonDirectory, 'ExpensesLink.json');
+
+    await writeJSON(expensesLinkPath, expensesLinkJsonPath);
+  } catch (err) {
+    throw new Error(`Error in buildFinanceInfoList: ${err}`);
+  }
+}
diff --git a/scripts/index.js b/scripts/index.ts
similarity index 51%
rename from scripts/index.js
rename to scripts/index.ts
index 0fbbe3940851..2abe60ae1ddb 100644
--- a/scripts/index.js
+++ b/scripts/index.ts
@@ -1,33 +1,33 @@
-const { resolve } = require('path');
-const fs = require('fs');
-const rssFeed = require('./build-rss');
-const { buildPostList } = require('./build-post-list');
-const buildCaseStudiesList = require('./casestudies');
-const buildAdoptersList = require('./adopters');
-const buildFinanceInfoList = require('./finance');
-
+import fs from 'fs';
+import { dirname, resolve } from 'path';
+import { fileURLToPath } from 'url';
+
+import { buildAdoptersList } from './adopters/index';
+import { buildPostList } from './build-post-list';
+import { rssFeed } from './build-rss';
+import { buildCaseStudiesList } from './casestudies/index';
+import { buildFinanceInfoList } from './finance/index';
+
+const currentFilePath = fileURLToPath(import.meta.url);
+const currentDirPath = dirname(currentFilePath);
+
+/**
+ * Main function to start the build process for posts, RSS feed, case studies, adopters, and finance info.
+ *
+ * @throws {Error} - Throws an error if no finance data is found in the finance directory.
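+ *
+ * Runs the build steps in sequence: post list, RSS feed, case studies list, adopters list, and finance info.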
+ */
 async function start() {
-
   const postDirectories = [
     ['pages/blog', '/blog'],
     ['pages/docs', '/docs'],
     ['pages/about', '/about']
   ];
   const basePath = 'pages';
-  const writeFilePath = resolve(__dirname, '../config', 'posts.json');
+  const writeFilePath = resolve(currentDirPath, '../config', 'posts.json');
 
   await buildPostList(postDirectories, basePath, writeFilePath);
-
-  rssFeed(
-    'blog',
-    'AsyncAPI Initiative Blog RSS Feed',
-    'AsyncAPI Initiative Blog',
-    'rss.xml'
-  );
-  await buildCaseStudiesList(
-    'config/casestudies',
-    resolve(__dirname, '../config', 'case-studies.json')
-  );
+  await rssFeed('blog', 'AsyncAPI Initiative Blog RSS Feed', 'AsyncAPI Initiative Blog', 'rss.xml');
+  await buildCaseStudiesList('config/casestudies', resolve(currentDirPath, '../config', 'case-studies.json'));
   await buildAdoptersList();
 
   const financeDir = resolve('.', 'config', 'finance');
@@ -58,6 +58,6 @@ async function start() {
   });
 }
 
-module.exports = start;
+export { start };
 
 start();
diff --git a/scripts/markdown/check-edit-links.js b/scripts/markdown/check-edit-links.ts
similarity index 62%
rename from scripts/markdown/check-edit-links.js
rename to scripts/markdown/check-edit-links.ts
index 8d22a23713ba..a8d719d37092 100644
--- a/scripts/markdown/check-edit-links.js
+++ b/scripts/markdown/check-edit-links.ts
@@ -1,8 +1,11 @@
-const fs = require('fs').promises;
-const path = require('path');
-const fetch = require('node-fetch-2');
-const editUrls = require('../../config/edit-page-config.json');
-const { pause } = require('../dashboard/build-dashboard');
+import fs from 'fs/promises';
+import fetch from 'node-fetch-2';
+import path, { dirname } from 'path';
+import { fileURLToPath } from 'url';
+
+import editUrls from '../../config/edit-page-config.json';
+import { pause } from '../utils';
+import { logger } from '../utils/logger';
 
 const ignoreFiles = [
   'reference/specification/v2.x.md',
@@ -10,13 +13,20 @@ const ignoreFiles = [
   'reference/specification/v3.0.0.md'
 ];
 
+interface PathObject {
+  filePath: string;
+  urlPath: string;
+  editLink: string | null;
+}
+
 /**
  * Process a batch of URLs to check for 404s
- * @param {object[]} batch - Array of path objects to check
+ * @param {PathObject[]} batch - Array of path objects to check
  * @returns {Promise} Array of URLs that returned 404
 */
-async function processBatch(batch) {
+async function processBatch(batch: PathObject[]): Promise<(PathObject | null)[]> {
   const TIMEOUT_MS = Number(process.env.DOCS_LINK_CHECK_TIMEOUT) || 5000;
+
   return Promise.all(
     batch.map(async ({ filePath, urlPath, editLink }) => {
       try {
@@ -28,13 +38,15 @@
           method: 'HEAD',
           signal: controller.signal
         });
+
         clearTimeout(timeout);
 
         if (response.status === 404) {
           return { filePath, urlPath, editLink };
         }
+
         return null;
       } catch (error) {
-        return Promise.reject(new Error(`Error checking ${editLink}: ${error.message}`));
+        return Promise.reject(new Error(`Error checking ${editLink}: ${error}`));
       }
     })
   );
 }
@@ -42,29 +54,34 @@
 
 /**
  * Check all URLs in batches
- * @param {object[]} paths - Array of all path objects to check
+ * @param {PathObject[]} paths - Array of all path objects to check
  * @returns {Promise} Array of URLs that returned 404
 */
-async function checkUrls(paths) {
-  const result = [];
+async function checkUrls(paths: PathObject[]): Promise<PathObject[]> {
+  const result: PathObject[] = [];
   const batchSize = Number(process.env.DOCS_LINK_CHECK_BATCH_SIZE) || 5;
-  const batches = [];
+  const batches: PathObject[][] = [];
+
   for (let i = 0; i < paths.length; i += batchSize) {
     const batch = paths.slice(i, i + batchSize);
+
    batches.push(batch);
   }
 
-  console.log(`Processing ${batches.length} batches concurrently...`);
+  logger.info(`Processing ${batches.length} batches concurrently...`);
 
   const batchResultsArray = await Promise.all(
     batches.map(async (batch) => {
       const batchResults = await processBatch(batch);
+
       await pause(1000);
-      return batchResults.filter((url) => url !== null);
+
+      return batchResults.filter((url) => url !== null) as PathObject[];
     })
   );
 
   result.push(...batchResultsArray.flat());
+
   return result;
 }
 
@@ -75,19 +92,24 @@ async function checkUrls(paths) {
  * @param {object[]} editOptions - Array of edit link options
  * @returns {string|null} The generated edit link or null if no match
  */
-function determineEditLink(urlPath, filePath, editOptions) {
+function determineEditLink(
+  urlPath: string,
+  filePath: string,
+  editOptions: { value: string; href: string }[]
+): string | null {
   // Remove leading 'docs/' if present for matching
   const pathForMatching = urlPath.startsWith('docs/') ? urlPath.slice(5) : urlPath;
 
   const target = editOptions.find((edit) => pathForMatching.includes(edit.value));
 
   // Handle the empty value case (fallback)
-  if (target.value === '') {
+  if (target?.value === '') {
     return `${target.href}/docs/${urlPath}.md`;
   }
 
   // For other cases with specific targets
-  return `${target.href}/${path.basename(filePath)}`;
+  /* istanbul ignore next */
+  return target ? `${target.href}/${path.basename(filePath)}` : null;
 }
 
 /**
@@ -95,10 +117,15 @@ function determineEditLink(urlPath, filePath, editOptions) {
  * @param {string} folderPath - The path to the folder to process
  * @param {object[]} editOptions - Array of edit link options
  * @param {string} [relativePath=''] - The relative path for URL generation
- * @param {object[]} [result=[]] - Accumulator for results
- * @returns {Promise} Array of objects containing file paths and edit links
+ * @param {PathObject[]} [result=[]] - Accumulator for results
+ * @returns {Promise<PathObject[]>} Array of objects containing file paths and edit links
 */
-async function generatePaths(folderPath, editOptions, relativePath = '', result = []) {
+async function generatePaths(
+  folderPath: string,
+  editOptions: { value: string; href: string }[],
+  relativePath = '',
+  result: PathObject[] = []
+): Promise<PathObject[]> {
   try {
     const files = await fs.readdir(folderPath);
 
@@ -114,10 +141,13 @@
 
       const stats = await fs.stat(filePath);
 
+      /* istanbul ignore else */
+
       if (stats.isDirectory()) {
         await generatePaths(filePath, editOptions, relativeFilePath, result);
       } else if (stats.isFile() && file.endsWith('.md')) {
         const urlPath = relativeFilePath.split(path.sep).join('/').replace('.md', '');
+
         result.push({
           filePath,
           urlPath,
@@ -129,34 +159,41 @@
     return result;
   } catch (err) {
-    throw new Error(`Error processing directory ${folderPath}: ${err.message}`);
+    throw new Error(`Error processing directory ${folderPath}: ${err}`);
   }
 }
 
+/**
+ * Main function to check edit links in markdown files.
+ * It generates paths for markdown files, checks the URLs, and logs invalid URLs.
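+ * Request timeout and batch size are configurable via the DOCS_LINK_CHECK_TIMEOUT and
+ * DOCS_LINK_CHECK_BATCH_SIZE environment variables (defaults: 5000 ms and 5).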
+ */ async function main() { const editOptions = editUrls; try { - const docsFolderPath = path.resolve(__dirname, '../../markdown/docs'); + const currentFilePath = fileURLToPath(import.meta.url); + const currentDirPath = dirname(currentFilePath); + const docsFolderPath = path.resolve(currentDirPath, '../../markdown/docs'); const paths = await generatePaths(docsFolderPath, editOptions); - console.log('Starting URL checks...'); + + logger.info('Starting URL checks...'); const invalidUrls = await checkUrls(paths); if (invalidUrls.length > 0) { - console.log('\nURLs returning 404:\n'); - invalidUrls.forEach((url) => console.log(`- ${url.editLink} generated from ${url.filePath}\n`)); - console.log(`\nTotal invalid URLs found: ${invalidUrls.length}`); + logger.info('\nURLs returning 404:\n'); + invalidUrls.forEach((url) => logger.info(`- ${url.editLink} generated from ${url.filePath}\n`)); + logger.info(`\nTotal invalid URLs found: ${invalidUrls.length}`); } else { - console.log('All URLs are valid.'); + logger.info('All URLs are valid.'); } } catch (error) { - throw new Error(`Failed to check edit links: ${error.message}`); + throw new Error(`Failed to check edit links: ${error}`); } } /* istanbul ignore next */ -if (require.main === module) { +if (process.argv[1] === fileURLToPath(import.meta.url)) { main(); } -module.exports = { generatePaths, processBatch, checkUrls, determineEditLink, main }; +export { checkUrls, determineEditLink, generatePaths, main, processBatch }; diff --git a/scripts/markdown/check-markdown.js b/scripts/markdown/check-markdown.js deleted file mode 100644 index cd3bd7ddd1c5..000000000000 --- a/scripts/markdown/check-markdown.js +++ /dev/null @@ -1,158 +0,0 @@ -const fs = require('fs').promises; -const matter = require('gray-matter'); -const path = require('path'); - -/** - * Checks if a given string is a valid URL. - * @param {string} str - The string to validate as a URL. - * @returns {boolean} True if the string is a valid URL, false otherwise. - */ -function isValidURL(str) { - try { - new URL(str); - return true; - } catch (err) { - return false; - } -} - -/** - * Validates the frontmatter of a blog post. - * @param {object} frontmatter - The frontmatter object to validate. - * @param {string} filePath - The path to the file being validated. - * @returns {string[]|null} An array of validation error messages, or null if no errors. 
- */ -function validateBlogs(frontmatter) { - const requiredAttributes = ['title', 'date', 'type', 'tags', 'cover', 'authors']; - const errors = []; - - // Check for required attributes - requiredAttributes.forEach(attr => { - if (!frontmatter.hasOwnProperty(attr)) { - errors.push(`${attr} is missing`); - } - }); - - // Validate date format - if (frontmatter.date && Number.isNaN(Date.parse(frontmatter.date))) { - errors.push(`Invalid date format: ${frontmatter.date}`); - } - - // Validate tags format (must be an array) - if (frontmatter.tags && !Array.isArray(frontmatter.tags)) { - errors.push(`Tags should be an array`); - } - - // Validate cover is a string - if (frontmatter.cover && typeof frontmatter.cover !== 'string') { - errors.push(`Cover must be a string`); - } - - // Validate authors (must be an array with valid attributes) - if (frontmatter.authors) { - if (!Array.isArray(frontmatter.authors)) { - errors.push('Authors should be an array'); - } else { - frontmatter.authors.forEach((author, index) => { - if (!author.name) { - errors.push(`Author at index ${index} is missing a name`); - } - if (author.link && !isValidURL(author.link)) { - errors.push(`Invalid URL for author at index ${index}: ${author.link}`); - } - if (!author.photo) { - errors.push(`Author at index ${index} is missing a photo`); - } - }); - } - } - - return errors.length ? errors : null; -} - -/** - * Validates the frontmatter of a documentation file. - * @param {object} frontmatter - The frontmatter object to validate. - * @param {string} filePath - The path to the file being validated. - * @returns {string[]|null} An array of validation error messages, or null if no errors. - */ -function validateDocs(frontmatter) { - const errors = []; - - // Check if title exists and is a string - if (!frontmatter.title || typeof frontmatter.title !== 'string') { - errors.push('Title is missing or not a string'); - } - - // Check if weight exists and is a number - if (frontmatter.weight === undefined || typeof frontmatter.weight !== 'number') { - errors.push('Weight is missing or not a number'); - } - - return errors.length ? errors : null; -} - -/** - * Recursively checks markdown files in a folder and validates their frontmatter. - * @param {string} folderPath - The path to the folder to check. - * @param {Function} validateFunction - The function used to validate the frontmatter. - * @param {string} [relativePath=''] - The relative path of the folder for logging purposes. 
- */ -async function checkMarkdownFiles(folderPath, validateFunction, relativePath = '') { - try { - const files = await fs.readdir(folderPath); - const filePromises = files.map(async (file) => { - const filePath = path.join(folderPath, file); - const relativeFilePath = path.join(relativePath, file); - - // Skip the folder 'docs/reference/specification' - if (relativeFilePath.includes('reference/specification')) { - return; - } - - const stats = await fs.stat(filePath); - - // Recurse if directory, otherwise validate markdown file - if (stats.isDirectory()) { - await checkMarkdownFiles(filePath, validateFunction, relativeFilePath); - } else if (path.extname(file) === '.md') { - const fileContent = await fs.readFile(filePath, 'utf-8'); - const { data: frontmatter } = matter(fileContent); - - const errors = validateFunction(frontmatter); - if (errors) { - console.log(`Errors in file ${relativeFilePath}:`); - errors.forEach(error => console.log(` - ${error}`)); - process.exitCode = 1; - } - } - }); - - await Promise.all(filePromises); - } catch (err) { - console.error(`Error in directory ${folderPath}:`, err); - throw err; - } -} - -const docsFolderPath = path.resolve(__dirname, '../../markdown/docs'); -const blogsFolderPath = path.resolve(__dirname, '../../markdown/blog'); - -async function main() { - try { - await Promise.all([ - checkMarkdownFiles(docsFolderPath, validateDocs), - checkMarkdownFiles(blogsFolderPath, validateBlogs) - ]); - } catch (error) { - console.error('Failed to validate markdown files:', error); - process.exit(1); - } -} - -/* istanbul ignore next */ -if (require.main === module) { - main(); -} - -module.exports = { validateBlogs, validateDocs, checkMarkdownFiles, main, isValidURL }; diff --git a/scripts/markdown/check-markdown.ts b/scripts/markdown/check-markdown.ts new file mode 100644 index 000000000000..1ff34faf88c9 --- /dev/null +++ b/scripts/markdown/check-markdown.ts @@ -0,0 +1,199 @@ +import fs from 'fs/promises'; +import matter from 'gray-matter'; +import path, { dirname } from 'path'; +import { fileURLToPath } from 'url'; + +import { logger } from '../utils/logger'; + +const currentFilePath = fileURLToPath(import.meta.url); +const currentDirPath = dirname(currentFilePath); + +/** + * Checks if a string is a valid URL. + * @param {string} str - The string to check. + * @returns {boolean} - True if the string is a valid URL, false otherwise. + */ +function isValidURL(str: string) { + try { + // eslint-disable-next-line no-new + new URL(str); + + return true; + } catch (err) { + return false; + } +} + +/** + * Interface representing the frontmatter of a markdown file. + */ +interface FrontMatter { + title: string; + date: string; + type: string; + tags: string[]; + cover: string; + weight?: number; + authors: { name: string; link: string; photo: string }[]; +} + +/** + * Validates the frontmatter of blog markdown files. + * @param {FrontMatter} frontmatter - The frontmatter to validate. + * @returns {string[] | null} - An array of error messages, or null if valid. 
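+ * @example
+ * // Illustrative check with made-up frontmatter; each missing required attribute is reported:
+ * validateBlogs({ title: 'My Post' } as FrontMatter);
+ * // => ['date is missing', 'type is missing', 'tags is missing', 'cover is missing', 'authors is missing']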
+ */ +function validateBlogs(frontmatter: FrontMatter) { + const requiredAttributes = ['title', 'date', 'type', 'tags', 'cover', 'authors']; + const errors = []; + + if (!frontmatter) { + errors.push('Frontmatter is missing'); + + return errors; + } + + // Check for required attributes + requiredAttributes.forEach((attr) => { + if (!Object.prototype.hasOwnProperty.call(frontmatter, attr)) { + errors.push(`${attr} is missing`); + } + }); + + // Validate date format + if (frontmatter.date && Number.isNaN(Date.parse(frontmatter.date))) { + errors.push(`Invalid date format: ${frontmatter.date}`); + } + + // Validate tags format (must be an array) + if (frontmatter.tags && !Array.isArray(frontmatter.tags)) { + errors.push('Tags should be an array'); + } + + // Validate cover is a string + if (frontmatter.cover && typeof frontmatter.cover !== 'string') { + errors.push('Cover must be a string'); + } + + // Validate authors (must be an array with valid attributes) + + if (frontmatter.authors) { + if (Array.isArray(frontmatter.authors)) { + frontmatter.authors.forEach((author, index) => { + if (!author.name) { + errors.push(`Author at index ${index} is missing a name`); + } + if (author.link && !isValidURL(author.link)) { + errors.push(`Invalid URL for author at index ${index}: ${author.link}`); + } + if (!author.photo) { + errors.push(`Author at index ${index} is missing a photo`); + } + }); + } else { + errors.push('Authors should be an array'); + } + } + + /* istanbul ignore next */ + return errors.length ? errors : null; +} + +/** + * Validates the frontmatter of documentation markdown files. + * @param {FrontMatter} frontmatter - The frontmatter to validate. + * @returns {string[] | null} - An array of error messages, or null if valid. + */ +function validateDocs(frontmatter: FrontMatter) { + const errors = []; + + // Check if title exists and is a string + /* istanbul ignore else */ + if (!frontmatter.title || typeof frontmatter.title !== 'string') { + errors.push('Title is missing or not a string'); + } + + // Check if weight exists and is a number + /* istanbul ignore else */ + if (frontmatter.weight === undefined || typeof frontmatter.weight !== 'number') { + errors.push('Weight is missing or not a number'); + } + + /* istanbul ignore next */ + return errors.length ? errors : null; +} + +/** + * Recursively checks markdown files in a folder and validates their frontmatter. + * @param {string} folderPath - The path to the folder to check. + * @param {Function} validateFunction - The function used to validate the frontmatter. + * @param {string} [relativePath=''] - The relative path of the folder for logging purposes. 
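+ * @example
+ * // Illustrative invocation (the folder path is an assumption for the example):
+ * await checkMarkdownFiles('markdown/blog', validateBlogs);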
+ */ +async function checkMarkdownFiles( + folderPath: string, + validateFunction: (frontmatter: FrontMatter) => string[] | null, + relativePath = '' +) { + try { + const files = await fs.readdir(folderPath); + const filePromises = files.map(async (file) => { + const filePath = path.join(folderPath, file); + const relativeFilePath = path.join(relativePath, file); + + // Skip the folder 'docs/reference/specification' + + /* istanbul ignore next */ + if (relativeFilePath.includes('reference/specification')) { + return; + } + + const stats = await fs.stat(filePath); + + // Recurse if directory, otherwise validate markdown file + /* istanbul ignore else */ + if (stats.isDirectory()) { + await checkMarkdownFiles(filePath, validateFunction, relativeFilePath); + } else if (path.extname(file) === '.md') { + const fileContent = await fs.readFile(filePath, 'utf-8'); + const { data: frontmatter } = matter(fileContent); + + const errors = validateFunction(frontmatter as FrontMatter); + + if (errors) { + logger.warn(`Errors in file ${relativeFilePath}:`); + errors.forEach((error) => logger.warn(` - ${error}`)); + process.exitCode = 1; + } + } + }); + + await Promise.all(filePromises); + } catch (err) { + logger.error(`Error in directory ${folderPath}:`, err); + throw err; + } +} + +const docsFolderPath = path.resolve(currentDirPath, '../../markdown/docs'); +const blogsFolderPath = path.resolve(currentDirPath, '../../markdown/blog'); + +/** + * Main function to validate markdown files in the docs and blog folders. + */ +async function main() { + try { + await Promise.all([ + checkMarkdownFiles(docsFolderPath, validateDocs), + checkMarkdownFiles(blogsFolderPath, validateBlogs) + ]); + } catch (error) { + logger.error('Failed to validate markdown files:', error); + process.exit(1); + } +} + +/* istanbul ignore next */ +if (process.argv[1] === fileURLToPath(import.meta.url)) { + main(); +} + +export { checkMarkdownFiles, isValidURL, main, validateBlogs, validateDocs }; diff --git a/scripts/tools/categorylist.js b/scripts/tools/categorylist.js deleted file mode 100644 index 11fcc3790e9e..000000000000 --- a/scripts/tools/categorylist.js +++ /dev/null @@ -1,100 +0,0 @@ -// Various categories to define the category in which a tool has to be listed -const categoryList = [ - { - name: "APIs", - tag: "api", - description: "The following is a list of APIs that expose functionality related to AsyncAPI." - }, - { - name: "Code-first tools", - tag: "code-first", - description: "The following is a list of tools that generate AsyncAPI documents from your code." - }, - { - name: "Code Generators", - tag: "code-generator", - description: "The following is a list of tools that generate code from an AsyncAPI document; not the other way around." - }, - { - name: "Converters", - tag: "converter", - description: "The following is a list of tools that do not yet belong to any specific category but are also useful for the community." - }, - { - name: "Directories", - tag: "directory", - description: "The following is a list of directories that index public AsyncAPI documents." - }, - { - name: "Documentation Generators", - tag: "documentation-generator", - description: "The following is a list of tools that generate human-readable documentation from an AsyncAPI document." - }, - { - name: "Editors", - tag: "editor", - description: "The following is a list of editors or related tools that allow editing of AsyncAPI document." 
- }, - { - name: "UI components", - tag: "ui-component", - description: "The following is a list of UI components to view AsyncAPI documents." - }, - { - name: "DSL", - tag: "dsl", - description: "Writing YAML by hand is no fun, and maybe you don't want a GUI, so use a Domain Specific Language to write AsyncAPI in your language of choice." - }, - { - name: "Frameworks", - tag: "framework", - description: "The following is a list of API/application frameworks that make use of AsyncAPI." - }, - { - name: "GitHub Actions", - tag: "github-action", - description: "The following is a list of GitHub Actions that you can use in your workflows" - }, - { - name: "Mocking and Testing", - tag: "mocking-and-testing", - description: "The tools below take specification documents as input, then publish fake messages to broker destinations for simulation purposes. They may also check that publisher messages are compliant with schemas." - }, - { - name: "Validators", - tag: "validator", - description: "The following is a list of tools that validate AsyncAPI documents." - }, - { - name: "Compare tools", - tag: "compare-tool", - description: "The following is a list of tools that compare AsyncAPI documents." - }, - { - name: "CLIs", - tag: "cli", - description: "The following is a list of tools that you can work with in terminal or do some CI/CD automation." - }, - { - name: "Bundlers", - tag: "bundler", - description: "The following is a list of tools that you can work with to bundle AsyncAPI documents." - }, - { - name: "IDE Extensions", - tag: "ide-extension", - description: "The following is a list of extensions for different IDEs like VSCode, IntelliJ IDEA and others" - }, - { - name: "AsyncAPI Generator Templates", - tag: "generator-template", - description: "The following is a list of templates compatible with AsyncAPI Generator. You can use them to generate apps, clients or documentation from your AsyncAPI documents." - }, - { - name: "Others", - tag: "other", - description: "The following is a list of tools that comes under Other category." - } -] - -module.exports = {categoryList} diff --git a/scripts/tools/categorylist.ts b/scripts/tools/categorylist.ts new file mode 100644 index 000000000000..81bc3a4664f3 --- /dev/null +++ b/scripts/tools/categorylist.ts @@ -0,0 +1,108 @@ +import type { CategoryListItem } from '@/types/scripts/tools'; + +// Various categories to define the category in which a tool has to be listed +const categoryList: CategoryListItem[] = [ + { + name: 'APIs', + tag: 'api', + description: 'The following is a list of APIs that expose functionality related to AsyncAPI.' + }, + { + name: 'Code-first tools', + tag: 'code-first', + description: 'The following is a list of tools that generate AsyncAPI documents from your code.' + }, + { + name: 'Code Generators', + tag: 'code-generator', + description: + 'The following is a list of tools that generate code from an AsyncAPI document; not the other way around.' + }, + { + name: 'Converters', + tag: 'converter', + description: + 'The following is a list of tools that do not yet belong to any specific category but are also useful for the community.' + }, + { + name: 'Directories', + tag: 'directory', + description: 'The following is a list of directories that index public AsyncAPI documents.' + }, + { + name: 'Documentation Generators', + tag: 'documentation-generator', + description: + 'The following is a list of tools that generate human-readable documentation from an AsyncAPI document.' 
+ }, + { + name: 'Editors', + tag: 'editor', + description: 'The following is a list of editors or related tools that allow editing of AsyncAPI document.' + }, + { + name: 'UI components', + tag: 'ui-component', + description: 'The following is a list of UI components to view AsyncAPI documents.' + }, + { + name: 'DSL', + tag: 'dsl', + description: + "Writing YAML by hand is no fun, and maybe you don't want a GUI, so use a Domain Specific Language to write AsyncAPI in your language of choice." + }, + { + name: 'Frameworks', + tag: 'framework', + description: 'The following is a list of API/application frameworks that make use of AsyncAPI.' + }, + { + name: 'GitHub Actions', + tag: 'github-action', + description: 'The following is a list of GitHub Actions that you can use in your workflows' + }, + { + name: 'Mocking and Testing', + tag: 'mocking-and-testing', + description: + 'The tools below take specification documents as input, then publish fake messages to broker destinations for simulation purposes. They may also check that publisher messages are compliant with schemas.' + }, + { + name: 'Validators', + tag: 'validator', + description: 'The following is a list of tools that validate AsyncAPI documents.' + }, + { + name: 'Compare tools', + tag: 'compare-tool', + description: 'The following is a list of tools that compare AsyncAPI documents.' + }, + { + name: 'CLIs', + tag: 'cli', + description: 'The following is a list of tools that you can work with in terminal or do some CI/CD automation.' + }, + { + name: 'Bundlers', + tag: 'bundler', + description: 'The following is a list of tools that you can work with to bundle AsyncAPI documents.' + }, + { + name: 'IDE Extensions', + tag: 'ide-extension', + description: 'The following is a list of extensions for different IDEs like VSCode, IntelliJ IDEA and others' + }, + { + name: 'AsyncAPI Generator Templates', + tag: 'generator-template', + description: + 'The following is a list of templates compatible with AsyncAPI Generator. You can use them to generate apps, clients or documentation from your AsyncAPI documents.' + }, + { + name: 'Others', + tag: 'other', + description: 'The following is a list of tools that comes under Other category.' + } +]; + +export { categoryList }; diff --git a/scripts/tools/combine-tools.js b/scripts/tools/combine-tools.js deleted file mode 100644 index 1b1367b15ccb..000000000000 --- a/scripts/tools/combine-tools.js +++ /dev/null @@ -1,149 +0,0 @@ -const { languagesColor, technologiesColor } = require("./tags-color") -const { categoryList } = require("./categorylist.js") -const { createToolObject } = require("./tools-object") -const fs = require('fs') -const schema = require("./tools-schema.json"); -const Ajv = require("ajv") -const addFormats = require("ajv-formats") -const Fuse = require("fuse.js"); -const ajv = new Ajv() -addFormats(ajv, ["uri"]) -const validate = ajv.compile(schema) - -let finalTools = {}; -for (var category of categoryList) { - finalTools[category.name] = { - description: category.description, - toolsList: [] - }; -} - -// Config options set for the Fuse object -const options = { - includeScore: true, - shouldSort: true, - threshold: 0.39, - keys: ['name', 'color', 'borderColor'] -} - -// Two seperate lists and Fuse objects initialised to search languages and technologies tags -// from specified list of same. 
-let languageList = [...languagesColor], technologyList = [...technologiesColor]; -let languageFuse = new Fuse(languageList, options), technologyFuse = new Fuse(technologyList, options) - -// takes individual tool object and inserts borderColor and backgroundColor of the tags of -// languages and technologies, for Tool Card in website. -const getFinalTool = async (toolObject) => { - let finalObject = toolObject; - - //there might be a tool without language - if (toolObject.filters.language) { - let languageArray = [] - if (typeof toolObject.filters.language === 'string') { - const languageSearch = await languageFuse.search(toolObject.filters.language) - if (languageSearch.length) { - languageArray.push(languageSearch[0].item); - } else { - // adds a new language object in the Fuse list as well as in tool object - // so that it isn't missed out in the UI. - let languageObject = { - name: toolObject.filters.language, - color: 'bg-[#57f281]', - borderColor: 'border-[#37f069]' - } - languageList.push(languageObject); - languageArray.push(languageObject) - languageFuse = new Fuse(languageList, options) - } - } else { - for (const language of toolObject?.filters?.language) { - const languageSearch = await languageFuse.search(language) - if (languageSearch.length > 0) { - languageArray.push(languageSearch[0].item); - } - else { - // adds a new language object in the Fuse list as well as in tool object - // so that it isn't missed out in the UI. - let languageObject = { - name: language, - color: 'bg-[#57f281]', - borderColor: 'border-[#37f069]' - } - languageList.push(languageObject); - languageArray.push(languageObject) - languageFuse = new Fuse(languageList, options) - } - } - } - finalObject.filters.language = languageArray - } - let technologyArray = []; - if (toolObject.filters.technology) { - for (const technology of toolObject?.filters?.technology) { - const technologySearch = await technologyFuse.search(technology) - if (technologySearch.length > 0) { - technologyArray.push(technologySearch[0].item); - } - else { - // adds a new technology object in the Fuse list as well as in tool object - // so that it isn't missed out in the UI. - let technologyObject = { - name: technology, - color: 'bg-[#61d0f2]', - borderColor: 'border-[#40ccf7]' - } - technologyList.push(technologyObject); - technologyArray.push(technologyObject); - technologyFuse = new Fuse(technologyList, options) - } - } - } - finalObject.filters.technology = technologyArray; - return finalObject; -} - - -// Combine the automated tools and manual tools list into single JSON object file, and -// lists down all the language and technology tags in one JSON file. 
-const combineTools = async (automatedTools, manualTools, toolsPath, tagsPath) => {
-  try {
-    for (const key in automatedTools) {
-      let finalToolsList = [];
-      if (automatedTools[key].toolsList.length) {
-        for (const tool of automatedTools[key].toolsList) {
-          finalToolsList.push(await getFinalTool(tool))
-        }
-      }
-      if (manualTools[key]?.toolsList?.length) {
-        for (const tool of manualTools[key].toolsList) {
-          let isAsyncAPIrepo;
-          const isValid = await validate(tool)
-          if (isValid) {
-            if (tool?.links?.repoUrl) {
-              const url = new URL(tool.links.repoUrl)
-              isAsyncAPIrepo = url.href.startsWith("https://github.com/asyncapi/")
-            } else isAsyncAPIrepo = false
-            let toolObject = await createToolObject(tool, "", "", isAsyncAPIrepo)
-            finalToolsList.push(await getFinalTool(toolObject))
-          } else {
-            console.error({
-              message: 'Tool validation failed',
-              tool: tool.title,
-              source: 'manual-tools.json',
-              errors: validate.errors,
-              note: 'Script continues execution, error logged for investigation'
-            });
-          }
-        }
-      }
-      finalToolsList.sort((tool, anotherTool) => tool.title.localeCompare(anotherTool.title));
-      finalTools[key].toolsList = finalToolsList
-    }
-    fs.writeFileSync(toolsPath, JSON.stringify(finalTools));
-    fs.writeFileSync(tagsPath, JSON.stringify({ languages: languageList, technologies: technologyList }),)
-  } catch (err) {
-    throw new Error(`Error combining tools: ${err}`);
-  }
-}
-
-module.exports = { combineTools }
diff --git a/scripts/tools/combine-tools.ts b/scripts/tools/combine-tools.ts
new file mode 100644
index 000000000000..4e29a4dc4272
--- /dev/null
+++ b/scripts/tools/combine-tools.ts
@@ -0,0 +1,205 @@
+/* eslint-disable no-await-in-loop */
+
+import Ajv from 'ajv';
+import addFormats from 'ajv-formats';
+import fs from 'fs';
+import Fuse from 'fuse.js';
+
+import type {
+  AsyncAPITool,
+  FinalAsyncAPITool,
+  FinalToolsListObject,
+  LanguageColorItem,
+  ToolsListObject
+} from '@/types/scripts/tools';
+
+import { logger } from '../utils/logger';
+import { categoryList } from './categorylist';
+import { languagesColor, technologiesColor } from './tags-color';
+import { createToolObject } from './tools-object';
+import schema from './tools-schema.json';
+
+const ajv = new Ajv();
+
+addFormats(ajv, ['uri']);
+const validate = ajv.compile(schema);
+
+const finalTools: FinalToolsListObject = {};
+
+for (const category of categoryList) {
+  finalTools[category.name] = {
+    description: category.description,
+    toolsList: []
+  };
+}
+
+// Config options set for the Fuse object
+const options = {
+  includeScore: true,
+  shouldSort: true,
+  threshold: 0.39,
+  keys: ['name', 'color', 'borderColor']
+};
+
+// Two separate lists and Fuse objects initialised to search the language and technology tags
+// from the specified lists.
+const languageList = [...languagesColor];
+const technologyList = [...technologiesColor];
+let languageFuse = new Fuse(languageList, options);
+let technologyFuse = new Fuse(technologyList, options);
+
+/**
+ * Takes an individual tool object and inserts the borderColor and backgroundColor of its
+ * language and technology tags, for the Tool Card on the website.
+ *
+ * @param {AsyncAPITool} toolObject - The tool object to process.
+ * @returns {Promise<FinalAsyncAPITool>} - The processed tool object with additional properties.
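+ * @example
+ * // Sketch with a minimal, made-up tool object (fields trimmed to those used here):
+ * const tool = await getFinalTool({
+ *   title: 'Example Tool',
+ *   filters: { language: 'TypeScript', technology: ['Node.js'], categories: ['api'] }
+ * } as AsyncAPITool);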
+ */
+export async function getFinalTool(toolObject: AsyncAPITool): Promise<FinalAsyncAPITool> {
+  const finalObject: FinalAsyncAPITool = {
+    ...toolObject,
+    filters: {
+      language: [],
+      technology: [],
+      categories: toolObject.filters.categories,
+      hasCommercial: toolObject.filters.hasCommercial
+    }
+  } as FinalAsyncAPITool;
+
+  // there might be a tool without language
+  if (toolObject.filters.language) {
+    const languageArray: LanguageColorItem[] = [];
+
+    if (typeof toolObject.filters.language === 'string') {
+      const languageSearch = await languageFuse.search(toolObject.filters.language);
+
+      if (languageSearch.length) {
+        languageArray.push(languageSearch[0].item);
+      } else {
+        // adds a new language object in the Fuse list as well as in tool object
+        // so that it isn't missed out in the UI.
+        const languageObject = {
+          name: toolObject.filters.language,
+          color: 'bg-[#57f281]',
+          borderColor: 'border-[#37f069]'
+        };
+
+        languageList.push(languageObject);
+        languageArray.push(languageObject);
+        languageFuse = new Fuse(languageList, options);
+      }
+    } else {
+      for (const language of toolObject.filters.language) {
+        const languageSearch = await languageFuse.search(language);
+
+        if (languageSearch.length > 0) {
+          languageArray.push(languageSearch[0].item);
+        } else {
+          // adds a new language object in the Fuse list as well as in tool object
+          // so that it isn't missed out in the UI.
+          const languageObject = {
+            name: language,
+            color: 'bg-[#57f281]',
+            borderColor: 'border-[#37f069]'
+          };
+
+          languageList.push(languageObject);
+          languageArray.push(languageObject);
+          languageFuse = new Fuse(languageList, options);
+        }
+      }
+    }
+    finalObject.filters.language = languageArray;
+  }
+  const technologyArray = [];
+
+  if (toolObject.filters.technology) {
+    for (const technology of toolObject.filters.technology) {
+      const technologySearch = await technologyFuse.search(technology);
+
+      if (technologySearch.length > 0) {
+        technologyArray.push(technologySearch[0].item);
+      } else {
+        // adds a new technology object in the Fuse list as well as in tool object
+        // so that it isn't missed out in the UI.
+        const technologyObject = {
+          name: technology,
+          color: 'bg-[#61d0f2]',
+          borderColor: 'border-[#40ccf7]'
+        };
+
+        technologyList.push(technologyObject);
+        technologyArray.push(technologyObject);
+        technologyFuse = new Fuse(technologyList, options);
+      }
+    }
+  }
+  finalObject.filters.technology = technologyArray;
+
+  return finalObject;
+}
+
+const processManualTool = async (tool: AsyncAPITool) => {
+  const isValid = await validate(tool);
+
+  if (!isValid) {
+    // Pass the spacing arguments to JSON.stringify itself so the error object is pretty-printed
+    logger.error(
+      JSON.stringify(
+        {
+          message: 'Tool validation failed',
+          tool: tool.title,
+          source: 'manual-tools.json',
+          errors: validate.errors,
+          note: 'Script continues execution, error logged for investigation'
+        },
+        null,
+        2
+      )
+    );
+
+    return null;
+  }
+  const isAsyncAPIrepo = tool?.links?.repoUrl
+    ? new URL(tool.links.repoUrl).href.startsWith('https://github.com/asyncapi/')
+    : false;
+  const toolObject = await createToolObject(tool, '', '', isAsyncAPIrepo);
+
+  return getFinalTool(toolObject);
+};
+
+/**
+ * Combine the automated tools and manual tools list into a single JSON object file, and
+ * lists down all the language and technology tags in one JSON file.
+ *
+ * @param {ToolsListObject} automatedTools - The list of automated tools.
+ * @param {ToolsListObject} manualTools - The list of manual tools.
+ * @param {string} toolsPath - The path to save the combined tools JSON file.
+ * @param {string} tagsPath - The path to save the tags JSON file.
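+ *
+ * toolsPath receives the merged per-category tools object; tagsPath receives the language and
+ * technology tag lists, including any new tags discovered while merging.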
+
+/**
+ * Combines the automated and manual tools lists into a single JSON object file and
+ * writes all the language and technology tags into another JSON file.
+ *
+ * @param {ToolsListObject} automatedTools - The list of automated tools.
+ * @param {ToolsListObject} manualTools - The list of manual tools.
+ * @param {string} toolsPath - The path to save the combined tools JSON file.
+ * @param {string} tagsPath - The path to save the tags JSON file.
+ */
+const combineTools = async (
+  automatedTools: ToolsListObject,
+  manualTools: ToolsListObject,
+  toolsPath: string,
+  tagsPath: string
+) => {
+  try {
+    // eslint-disable-next-line no-restricted-syntax
+    for (const key in automatedTools) {
+      /* istanbul ignore next */
+      if (Object.prototype.hasOwnProperty.call(automatedTools, key)) {
+        const automatedResults = await Promise.all(automatedTools[key].toolsList.map(getFinalTool));
+        const manualResults = manualTools[key]?.toolsList?.length
+          ? (await Promise.all(manualTools[key].toolsList.map(processManualTool))).filter(Boolean)
+          : [];
+
+        finalTools[key].toolsList = [...automatedResults, ...manualResults].sort((tool, anotherTool) =>
+          tool!.title.localeCompare(anotherTool!.title)
+        ) as FinalAsyncAPITool[];
+      }
+    }
+    fs.writeFileSync(toolsPath, JSON.stringify(finalTools));
+    fs.writeFileSync(tagsPath, JSON.stringify({ languages: languageList, technologies: technologyList }));
+  } catch (err) {
+    throw new Error(`Error combining tools: ${err}`);
+  }
+};
+
+export { combineTools };
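Assuming this file is the `scripts/tools/combine-tools` module (its own diff header falls outside this excerpt), and assuming `finalTools`, `languageList`, and `technologyList` have been initialised earlier in the file, a caller could exercise `combineTools` like this; the paths and the miniature lists are made up:

```ts
import { combineTools } from './scripts/tools/combine-tools';

// Hypothetical miniature inputs; real runs feed the automated and manual JSON lists.
const category = {
  description: 'Tools generating code out of an AsyncAPI document.',
  toolsList: [{ title: 'Example Generator', links: {}, filters: { categories: ['code-generator'] } }]
};

const automated: any = { 'Code Generators': category };
const manual: any = {
  'Code Generators': { ...category, toolsList: [{ ...category.toolsList[0], title: 'Another Generator' }] }
};

// Merges both lists per category, sorts each by title, and writes the two output files.
await combineTools(automated, manual, './config/tools.json', './config/all-tags.json');
```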
diff --git a/scripts/tools/extract-tools-github.js b/scripts/tools/extract-tools-github.js
deleted file mode 100644
index 55e96124b752..000000000000
--- a/scripts/tools/extract-tools-github.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const axios = require('axios');
-require('dotenv').config()
-
-const getData = async () => {
-  try {
-    const result = await axios.get(
-      'https://api.github.com/search/code?q=filename:.asyncapi-tool',
-      {
-        headers: {
-          accept: 'application/vnd.github.text-match+json',
-          authorization: `token ${process.env.GITHUB_TOKEN}`,
-        },
-      }
-    );
-
-    return result.data;
-  } catch (err) {
-    throw err;
-  }
-};
-
-module.exports = { getData };
\ No newline at end of file
diff --git a/scripts/tools/extract-tools-github.ts b/scripts/tools/extract-tools-github.ts
new file mode 100644
index 000000000000..1e74402d645d
--- /dev/null
+++ b/scripts/tools/extract-tools-github.ts
@@ -0,0 +1,56 @@
+/* eslint-disable no-await-in-loop */
+import axios from 'axios';
+import dotenv from 'dotenv';
+
+import type { ToolsData } from '@/types/scripts/tools';
+
+import { pause } from '../utils';
+import { logger } from '../utils/logger';
+
+dotenv.config();
+
+/**
+ * Fetches tool data from the GitHub API.
+ *
+ * @returns {Promise<ToolsData>} The data from the GitHub API.
+ * @throws {Error} If there is an error fetching the data.
+ */
+export async function getData(): Promise<ToolsData> {
+  const allItems = [];
+  let page = 1;
+
+  const maxPerPage = 50;
+  const getReqUrl = (perPage: number, pageNo: number) =>
+    `https://api.github.com/search/code?q=filename:.asyncapi-tool&per_page=${perPage}&page=${pageNo}`;
+  const headers = {
+    accept: 'application/vnd.github.text-match+json',
+    authorization: `token ${process.env.GITHUB_TOKEN}`
+  };
+  const result = await axios.get(getReqUrl(maxPerPage, page), {
+    headers
+  });
+  const totalResults = result.data.total_count;
+
+  allItems.push(...result.data.items);
+
+  while (allItems.length < totalResults) {
+    page++;
+
+    logger.info(`Fetching page: ${page}`);
+    // pause for 1 second to avoid rate limiting
+    await pause(1000);
+    const nextPageData = await axios.get(getReqUrl(maxPerPage, page), {
+      headers
+    });
+
+    const { data } = nextPageData;
+
+    allItems.push(...data.items);
+  }
+
+  // Replace the first page's items with the accumulated list; appending
+  // allItems here would duplicate the first page.
+  result.data.items = allItems;
+
+  return result.data;
+}
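With a `GITHUB_TOKEN` exported, a small driver is enough to run the crawl end to end; the `main` function, file name, and output path here are illustrative, not part of the change set:

```ts
import fs from 'fs';

import { getData } from './scripts/tools/extract-tools-github';

async function main() {
  const discovered = await getData();

  // total_count comes from GitHub's code-search response; items is the accumulated list.
  console.log(`GitHub reports ${discovered.total_count} .asyncapi-tool files; fetched ${discovered.items.length}`);
  fs.writeFileSync('./tools-raw.json', JSON.stringify(discovered, null, 2));
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```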
"Node-RED", - color: "bg-[#FF7474]", - borderColor: "border-[#8F0101]" - }, - { - name: "Maven", - color: "bg-[#FF6B80]", - borderColor: "border-[#CA1A33]" - }, - { - name: "Saas", - color: "bg-[#6AB8EC]", - borderColor: "border-[#2275AD]" - }, - { - name: "Kubernetes-native", - color: "bg-[#D7C7F2]", - borderColor: "border-[#A387D2]" - }, - { - name: "Scala", - color: "bg-[#D7C7F2]", - borderColor: "border-[#A387D2]" - }, - { - name: "Azure", - color: "bg-[#4B93FF]", - borderColor: "border-[#015ADF]" - }, - { - name: "Jenkins", - color: "bg-[#D7C7F2]", - borderColor: "border-[#A387D2]" - }, - { - name: "Flask", - color: "bg-[#D7C7F2]", - borderColor: "border-[#A387D2]" - }, - { - name: "Nest Js", - color: "bg-[#E1224E]", - borderColor: "border-[#B9012b]" - } -] - -module.exports = {languagesColor, technologiesColor} \ No newline at end of file diff --git a/scripts/tools/tags-color.ts b/scripts/tools/tags-color.ts new file mode 100644 index 000000000000..7bc04e43b60e --- /dev/null +++ b/scripts/tools/tags-color.ts @@ -0,0 +1,175 @@ +import type { LanguageColorItem } from '@/types/scripts/tools'; + +// Language and Technology tags along with their colors in UI are defined here. +const languagesColor: LanguageColorItem[] = [ + { + name: 'Go/Golang', + color: 'bg-[#8ECFDF]', + borderColor: 'border-[#00AFD9]' + }, + { + name: 'Java', + color: 'bg-[#ECA2A4]', + borderColor: 'border-[#EC2125]' + }, + { + name: 'JavaScript', + color: 'bg-[#F2F1C7]', + borderColor: 'border-[#BFBE86]' + }, + { + name: 'HTML', + color: 'bg-[#E2A291]', + borderColor: 'border-[#E44D26]' + }, + { + name: 'C/C++', + color: 'bg-[#93CDEF]', + borderColor: 'border-[#0080CC]' + }, + { + name: 'C#', + color: 'bg-[#E3AFE0]', + borderColor: 'border-[#9B4F96]' + }, + { + name: 'Python', + color: 'bg-[#A8D0EF]', + borderColor: 'border-[#3878AB]' + }, + { + name: 'TypeScript', + color: 'bg-[#7DBCFE]', + borderColor: 'border-[#2C78C7]' + }, + { + name: 'Kotlin', + color: 'bg-[#B1ACDF]', + borderColor: 'border-[#756BD9]' + }, + { + name: 'Markdown', + color: 'bg-[#BABEBF]', + borderColor: 'border-[#445B64]' + }, + { + name: 'YAML', + color: 'bg-[#FFB764]', + borderColor: 'border-[#F1901F]' + }, + { + name: 'R', + color: 'bg-[#84B5ED]', + borderColor: 'border-[#246BBE]' + }, + { + name: 'Ruby', + color: 'bg-[#FF8289]', + borderColor: 'border-[#FF000F]' + }, + { + name: 'Rust', + color: 'bg-[#FFB8AA]', + borderColor: 'border-[#E43716]' + }, + { + name: 'Shell', + color: 'bg-[#87D4FF]', + borderColor: 'border-[#389ED7]' + }, + { + name: 'Groovy', + color: 'bg-[#B6D5E5]', + borderColor: 'border-[#609DBC]' + } +]; + +const technologiesColor: LanguageColorItem[] = [ + { + name: 'Node.js', + color: 'bg-[#BDFF67]', + borderColor: 'border-[#84CE24]' + }, + { + name: 'Hermes', + color: 'bg-[#8AEEBD]', + borderColor: 'border-[#2AB672]' + }, + { + name: 'React JS', + color: 'bg-[#9FECFA]', + borderColor: 'border-[#08D8FE]' + }, + { + name: '.NET', + color: 'bg-[#A184FF]', + borderColor: 'border-[#5026D4]' + }, + { + name: 'ASP.NET', + color: 'bg-[#71C2FB]', + borderColor: 'border-[#1577BC]' + }, + { + name: 'Springboot', + color: 'bg-[#98E279]', + borderColor: 'border-[#68BC44]' + }, + { + name: 'AWS', + color: 'bg-[#FF9F59]', + borderColor: 'border-[#EF6703]' + }, + { + name: 'Docker', + color: 'bg-[#B8E0FF]', + borderColor: 'border-[#2596ED]' + }, + { + name: 'Node-RED', + color: 'bg-[#FF7474]', + borderColor: 'border-[#8F0101]' + }, + { + name: 'Maven', + color: 'bg-[#FF6B80]', + borderColor: 'border-[#CA1A33]' + }, + { + name: 'Saas', + 
color: 'bg-[#6AB8EC]', + borderColor: 'border-[#2275AD]' + }, + { + name: 'Kubernetes-native', + color: 'bg-[#D7C7F2]', + borderColor: 'border-[#A387D2]' + }, + { + name: 'Scala', + color: 'bg-[#D7C7F2]', + borderColor: 'border-[#A387D2]' + }, + { + name: 'Azure', + color: 'bg-[#4B93FF]', + borderColor: 'border-[#015ADF]' + }, + { + name: 'Jenkins', + color: 'bg-[#D7C7F2]', + borderColor: 'border-[#A387D2]' + }, + { + name: 'Flask', + color: 'bg-[#D7C7F2]', + borderColor: 'border-[#A387D2]' + }, + { + name: 'Nest Js', + color: 'bg-[#E1224E]', + borderColor: 'border-[#B9012b]' + } +]; + +export { languagesColor, technologiesColor }; diff --git a/scripts/tools/tools-object.js b/scripts/tools/tools-object.js deleted file mode 100644 index 1d8c73f8074b..000000000000 --- a/scripts/tools/tools-object.js +++ /dev/null @@ -1,116 +0,0 @@ -const schema = require("./tools-schema.json"); -const axios = require('axios') -const Ajv = require("ajv") -const addFormats = require("ajv-formats") -const Fuse = require("fuse.js") -const { categoryList } = require("./categorylist") -const ajv = new Ajv() -addFormats(ajv, ["uri"]) -const validate = ajv.compile(schema) -const { convertToJson } = require('../utils'); - -// Config options set for the Fuse object -const options = { - includeScore: true, - shouldSort: true, - threshold: 0.4, - keys: ["tag"] -} - -const fuse = new Fuse(categoryList, options) - -// using the contents of each toolFile (extracted from Github), along with Github URL -// (repositoryUrl) of the tool, it's repository description (repoDescription) and -// isAsyncAPIrepo boolean variable to define whether the tool repository is under -// AsyncAPI organization or not, to create a JSON tool object as required in the frontend -// side to show ToolCard. -const createToolObject = async (toolFile, repositoryUrl = '', repoDescription = '', isAsyncAPIrepo = '') => { - const resultantObject = { - title: toolFile.title, - description: toolFile?.description ? toolFile.description : repoDescription, - links: { - ...toolFile.links, - repoUrl: toolFile?.links?.repoUrl ? toolFile.links.repoUrl : repositoryUrl - }, - filters: { - ...toolFile.filters, - hasCommercial: toolFile?.filters?.hasCommercial ? toolFile.filters.hasCommercial : false, - isAsyncAPIOwner: isAsyncAPIrepo - } - }; - return resultantObject; -}; - -// Each result obtained from the Github API call will be tested and verified -// using the defined JSON schema, categorising each tool inside their defined categories -// and creating a JSON tool object in which all the tools are listed in defined -// categories order, which is then updated in `automated-tools.json` file. 
-async function convertTools(data) {
-  try {
-    let finalToolsObject = {};
-    const dataArray = data.items;
-
-    // initialising finalToolsObject with all categories inside it with proper elements in each category
-    finalToolsObject = Object.fromEntries(
-      categoryList.map((category) => [
-        category.name,
-        {
-          description: category.description,
-          toolsList: []
-        }
-      ])
-    );
-
-    await Promise.all(dataArray.map(async (tool) => {
-      try {
-        if (tool.name.startsWith('.asyncapi-tool')) {
-          const referenceId = tool.url.split('=')[1];
-          const downloadUrl = `https://raw.githubusercontent.com/${tool.repository.full_name}/${referenceId}/${tool.path}`;
-
-          const { data: toolFileContent } = await axios.get(downloadUrl);
-
-          //some stuff can be YAML
-          const jsonToolFileContent = await convertToJson(toolFileContent)
-
-          //validating against JSON Schema for tools file
-          const isValid = await validate(jsonToolFileContent)
-
-          if (isValid) {
-            const repositoryUrl = tool.repository.html_url;
-            const repoDescription = tool.repository.description;
-            const isAsyncAPIrepo = tool.repository.owner.login === 'asyncapi';
-            const toolObject = await createToolObject(
-              jsonToolFileContent,
-              repositoryUrl,
-              repoDescription,
-              isAsyncAPIrepo
-            );
-
-            // Tool Object is appended to each category array according to Fuse search for categories inside Tool Object
-            await Promise.all(jsonToolFileContent.filters.categories.map(async (category) => {
-              const categorySearch = await fuse.search(category);
-              const targetCategory = categorySearch.length ? categorySearch[0].item.name : 'Others';
-              const { toolsList } = finalToolsObject[targetCategory];
-              if (!toolsList.includes(toolObject)) {
-                toolsList.push(toolObject);
-              }
-            }));
-          } else {
-            console.error('Script is not failing, it is just dropping errors for further investigation');
-            console.error('Invalid .asyncapi-tool file.');
-            console.error(`Located in: ${tool.html_url}`);
-            console.error('Validation errors:', JSON.stringify(validate.errors, null, 2));
-          }
-        }
-      } catch (err) {
-        console.error(err)
-        throw err;
-      }
-    }))
-    return finalToolsObject;
-  } catch (err) {
-    throw new Error(`Error processing tool: ${err.message}`)
-  }
-}
-
-module.exports = { convertTools, createToolObject }
diff --git a/scripts/tools/tools-object.ts b/scripts/tools/tools-object.ts
new file mode 100644
index 000000000000..a477f711d031
--- /dev/null
+++ b/scripts/tools/tools-object.ts
@@ -0,0 +1,153 @@
+import Ajv from 'ajv';
+import addFormats from 'ajv-formats';
+import axios from 'axios';
+import Fuse from 'fuse.js';
+
+import type { AsyncAPITool, ToolsData, ToolsListObject } from '@/types/scripts/tools';
+
+import { convertToJson } from '../utils';
+import { logger } from '../utils/logger';
+import { categoryList } from './categorylist';
+import schema from './tools-schema.json';
+
+const ajv = new Ajv();
+
+addFormats(ajv, ['uri']);
+const validate = ajv.compile(schema);
+
+// Config options set for the Fuse object
+const options = {
+  includeScore: true,
+  shouldSort: true,
+  threshold: 0.4,
+  keys: ['tag']
+};
+
+const fuse = new Fuse(categoryList, options);
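Validation and categorisation in this file rest on two small pieces of setup: an Ajv validator compiled from `tools-schema.json`, and a Fuse index over `categoryList` searched by its `tag` key. A minimal pairing of the two — the inline schema and category entries are stand-ins for the real files:

```ts
import Ajv from 'ajv';
import addFormats from 'ajv-formats';
import Fuse from 'fuse.js';

const ajv = new Ajv();

addFormats(ajv, ['uri']);

// Tiny stand-in for tools-schema.json.
const validate = ajv.compile({
  type: 'object',
  required: ['title', 'filters'],
  properties: {
    title: { type: 'string' },
    filters: { type: 'object', required: ['categories'] }
  }
});

// Stand-in for categoryList; `tag` is the searched key, as in the options above.
const categories = [
  { name: 'Code Generators', tag: 'code-generator', description: '...' },
  { name: 'Others', tag: 'other', description: '...' }
];
const fuse = new Fuse(categories, { includeScore: true, shouldSort: true, threshold: 0.4, keys: ['tag'] });

const candidate = { title: 'Example Tool', filters: { categories: ['code-generatr'] } };

if (validate(candidate)) {
  // The misspelled category still resolves thanks to the 0.4 fuzziness threshold.
  const hit = fuse.search(candidate.filters.categories[0]);

  console.log(hit.length ? hit[0].item.name : 'Others'); // 'Code Generators'
}
```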
+
+/**
+ * Creates a tool object for the frontend ToolCard.
+ * It uses the contents of each toolFile (extracted from GitHub), along with the GitHub URL
+ * (repositoryUrl) of the tool, its repository description (repoDescription) and the
+ * isAsyncAPIrepo boolean, which marks whether the tool repository lives under the
+ * AsyncAPI organization, to create the JSON tool object the frontend
+ * needs to render a ToolCard.
+ *
+ * @param {AsyncAPITool} toolFile - The tool file content.
+ * @param {string} [repositoryUrl=''] - The URL of the tool's repository.
+ * @param {string} [repoDescription=''] - The description of the repository.
+ * @param {boolean | string} [isAsyncAPIrepo=''] - Whether the tool repository is under the AsyncAPI organization.
+ * @returns {Promise<AsyncAPITool>} The tool object.
+ */
+async function createToolObject(
+  toolFile: AsyncAPITool,
+  repositoryUrl = '',
+  repoDescription = '',
+  isAsyncAPIrepo: boolean | string = ''
+) {
+  const resultantObject = {
+    title: toolFile.title,
+    description: toolFile?.description ? toolFile.description : repoDescription,
+    links: {
+      ...toolFile.links,
+      repoUrl: toolFile?.links?.repoUrl ? toolFile.links.repoUrl : repositoryUrl
+    },
+    filters: {
+      ...toolFile.filters,
+      hasCommercial: toolFile?.filters?.hasCommercial ? toolFile.filters.hasCommercial : false,
+      isAsyncAPIOwner: isAsyncAPIrepo
+    }
+  };
+
+  return resultantObject;
+}
+
+// Each result obtained from the GitHub API call will be tested and verified
+// using the defined JSON schema, categorising each tool inside their defined categories
+// and creating a JSON tool object in which all the tools are listed in defined
+// categories order, which is then updated in `automated-tools.json` file.
+
+/**
+ * Converts tools data into a categorized tools list object.
+ *
+ * @param {ToolsData} data - The tools data from the GitHub API.
+ * @returns {Promise<ToolsListObject>} The categorized tools list object.
+ * @throws {Error} If there is an error processing the tools.
+ */
+async function convertTools(data: ToolsData) {
+  try {
+    let finalToolsObject: ToolsListObject = {};
+    const dataArray = data.items;
+
+    // initialising finalToolsObject with all categories inside it with proper elements in each category
+    finalToolsObject = Object.fromEntries(
+      categoryList.map((category) => [
+        category.name,
+        {
+          description: category.description,
+          toolsList: []
+        }
+      ])
+    );
+
+    await Promise.all(
+      dataArray.map(async (tool) => {
+        try {
+          /* istanbul ignore else */
+          if (tool.name.startsWith('.asyncapi-tool')) {
+            const referenceId = tool.url.split('=')[1];
+            const downloadUrl = `https://raw.githubusercontent.com/${tool.repository.full_name}/${referenceId}/${tool.path}`;
+
+            const { data: toolFileContent } = await axios.get(downloadUrl);
+
+            // some stuff can be YAML
+            const jsonToolFileContent = await convertToJson(toolFileContent);
+
+            // validating against JSON Schema for tools file
+            const isValid = await validate(jsonToolFileContent);
+
+            if (isValid) {
+              const repositoryUrl = tool.repository.html_url;
+              const repoDescription = tool.repository.description;
+              const isAsyncAPIrepo = tool.repository.owner.login === 'asyncapi';
+              const toolObject = await createToolObject(
+                jsonToolFileContent,
+                repositoryUrl,
+                repoDescription,
+                isAsyncAPIrepo
+              );
+
+              // Tool Object is appended to each category array according to Fuse search for categories inside Tool Object
+              await Promise.all(
+                jsonToolFileContent.filters.categories.map(async (category: string) => {
+                  const categorySearch = await fuse.search(category);
+                  const targetCategory = categorySearch.length ? categorySearch[0].item.name : 'Others';
+                  const { toolsList } = finalToolsObject[targetCategory];
+
+                  /* istanbul ignore else */
+
+                  if (!toolsList.includes(toolObject)) {
+                    toolsList.push(toolObject);
+                  }
+                })
+              );
+            } else {
+              logger.warn(
+                `Script is not failing, it is just dropping errors for further investigation.\nInvalid .asyncapi-tool file. 
\nLocated in: ${tool.html_url}. \nValidation errors: ${JSON.stringify(validate.errors, null, 2)}` + ); + } + } + } catch (err) { + logger.error(err); + throw err; + } + }) + ); + + return finalToolsObject; + } catch (err) { + throw new Error(`Error processing tool: ${err}`); + } +} + +export { convertTools, createToolObject }; diff --git a/scripts/tools/tools-schema.json b/scripts/tools/tools-schema.json index e11968a1b2e1..74bcb3d783b4 100644 --- a/scripts/tools/tools-schema.json +++ b/scripts/tools/tools-schema.json @@ -1,220 +1,209 @@ { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "JSON Schema for AsyncAPI tool discovery file.", - "type": "object", - "additionalProperties": false, - "required": [ - "title", - "filters" - ], - "properties": { - "title": { - "type": "string", - "description": "Human-readable name of the tool that will be visible to people in the list of tools.", - "examples": [ - "AsyncAPI Generator", - "Cupid" - ] + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "JSON Schema for AsyncAPI tool discovery file.", + "type": "object", + "additionalProperties": false, + "required": ["title", "filters"], + "properties": { + "title": { + "type": "string", + "description": "Human-readable name of the tool that will be visible to people in the list of tools.", + "examples": ["AsyncAPI Generator", "Cupid"] + }, + "description": { + "type": "string", + "description": "By default scripts read description of repository there project is stored. You can override this behaviour by providing custom description." + }, + "links": { + "type": "object", + "additionalProperties": false, + "properties": { + "websiteUrl": { + "type": "string", + "description": "You can provide URL to the website where your project hosts some demo or project landing page.", + "format": "uri" }, - "description": { - "type": "string", - "description": "By default scripts read description of repository there project is stored. You can override this behaviour by providing custom description." + "docsUrl": { + "type": "string", + "description": "You can provide URL to project documentation in case you have more than just a readme file.", + "format": "uri" }, - "links": { - "type": "object", - "additionalProperties": false, - "properties": { - "websiteUrl": { - "type": "string", - "description": "You can provide URL to the website where your project hosts some demo or project landing page.", - "format": "uri" - }, - "docsUrl": { - "type": "string", - "description": "You can provide URL to project documentation in case you have more than just a readme file.", - "format": "uri" + "repoUrl": { + "type": "string", + "description": "You can provide URL to project codebase in case you have more than one tool present inside single repository.", + "format": "uri" + } + } + }, + "filters": { + "type": "object", + "additionalProperties": false, + "required": ["categories"], + "properties": { + "language": { + "description": "The language referred to is the runtime language selected by the user, not the generator or library language. For example, the Generator written in JavaScript generates Python code from the JavaScript template and the result of generation is a Python app, so the language for Generator is specified as Python. But for the Bundler library, users need to know if it can be integrated into their TypeScript codebase, so its language is specified as TypeScript. 
If some language in the schema's enum is omitted, it can be added through a pull request to the AsyncAPI website repository.", + "anyOf": [ + { + "type": "string", + "anyOf": [ + { + "type": "string", + "enum": [ + "Go", + "Java", + "JavaScript", + "HTML", + "C/C++", + "C#", + "Python", + "TypeScript", + "Kotlin", + "Scala", + "Markdown", + "YAML", + "R", + "Ruby", + "Rust", + "Shell", + "Groovy" + ] }, - "repoUrl": { - "type": "string", - "description": "You can provide URL to project codebase in case you have more than one tool present inside single repository.", - "format": "uri" + { + "type": "string" } - } - }, - "filters": { - "type": "object", - "additionalProperties": false, - "required": [ - "categories" - ], - "properties": { - "language": { - "description": "The language referred to is the runtime language selected by the user, not the generator or library language. For example, the Generator written in JavaScript generates Python code from the JavaScript template and the result of generation is a Python app, so the language for Generator is specified as Python. But for the Bundler library, users need to know if it can be integrated into their TypeScript codebase, so its language is specified as TypeScript. If some language in the schema's enum is omitted, it can be added through a pull request to the AsyncAPI website repository.", - "anyOf": [ - { - "type": "string", - "anyOf": [ - { - "type": "string", - "enum": [ - "Go", - "Java", - "JavaScript", - "HTML", - "C/C++", - "C#", - "Python", - "TypeScript", - "Kotlin", - "Scala", - "Markdown", - "YAML", - "R", - "Ruby", - "Rust", - "Shell", - "Groovy" - ] - }, - { - "type": "string" - } - ] - }, - { - "type": "array", - "items": { - "type": "string", - "anyOf": [ - { - "type": "string", - "enum": [ - "Go", - "Java", - "JavaScript", - "HTML", - "C/C++", - "C#", - "Python", - "TypeScript", - "Kotlin", - "Scala", - "Markdown", - "YAML", - "R", - "Ruby", - "Rust", - "Shell", - "Groovy" - ] - }, - { - "type": "string" - } - ] - } - } - ] - }, - "technology": { - "type": "array", - "description": "Provide a list of different technologies used in the tool. Put details useful for tool user and tool contributor.", - "items": { - "type": "string", - "anyOf": [ - { - "type": "string", - "enum": [ - "Node js", - "Hermes", - "React JS", - ".NET", - "ASP.NET", - "Springboot", - "AWS", - "Docker", - "Node-red", - "Maven", - "Saas", - "Kubernetes-native", - "Scala", - "Azure", - "Jenkins", - "Flask" - ] - }, - { - "type": "string" - } - ] - }, - "examples": [ - "Express.js", - "Kafka" - ] - }, - "categories": { - "type": "array", - "description": "Categories are used to group tools by different use case, like documentation or code generation. If have a list of fixed categories. If you use different one that your tool lands under \"other\" category. 
Feel free to add your category through a pull request to AsyncAPI website repository.", - "items": { - "type": "string", - "anyOf": [ - { - "type": "string", - "enum": [ - "api", - "code-first", - "code-generator", - "converter", - "directory", - "documentation-generator", - "editor", - "ui-component", - "dsl", - "framework", - "github-action", - "mocking-and-testing", - "validator", - "compare-tool", - "other", - "cli", - "bundler", - "ide-extension" - ] - }, - { - "type": "string" - } - ] - }, - "minItems": 1, - "examples": [ - "api", - "code-first", - "code-generator", - "converter", - "directory", - "documentation-generator", - "editor", - "ui-component", - "dsl", - "framework", - "github-action", - "mocking-and-testing", - "validator", - "compare-tool", - "other", - "cli", - "bundler", - "ide-extension" + ] + }, + { + "type": "array", + "items": { + "type": "string", + "anyOf": [ + { + "type": "string", + "enum": [ + "Go", + "Java", + "JavaScript", + "HTML", + "C/C++", + "C#", + "Python", + "TypeScript", + "Kotlin", + "Scala", + "Markdown", + "YAML", + "R", + "Ruby", + "Rust", + "Shell", + "Groovy" ] - }, - "hasCommercial": { - "type": "boolean", - "description": "Indicate if your tool is open source or commercial offering, like SAAS for example", - "default": false - } + }, + { + "type": "string" + } + ] + } } + ] + }, + "technology": { + "type": "array", + "description": "Provide a list of different technologies used in the tool. Put details useful for tool user and tool contributor.", + "items": { + "type": "string", + "anyOf": [ + { + "type": "string", + "enum": [ + "Node js", + "Hermes", + "React JS", + ".NET", + "ASP.NET", + "Springboot", + "AWS", + "Docker", + "Node-red", + "Maven", + "Saas", + "Kubernetes-native", + "Scala", + "Azure", + "Jenkins", + "Flask" + ] + }, + { + "type": "string" + } + ] + }, + "examples": ["Express.js", "Kafka"] + }, + "categories": { + "type": "array", + "description": "Categories are used to group tools by different use case, like documentation or code generation. If have a list of fixed categories. If you use different one that your tool lands under \"other\" category. 
Feel free to add your category through a pull request to AsyncAPI website repository.", + "items": { + "type": "string", + "anyOf": [ + { + "type": "string", + "enum": [ + "api", + "code-first", + "code-generator", + "converter", + "directory", + "documentation-generator", + "editor", + "ui-component", + "dsl", + "framework", + "github-action", + "mocking-and-testing", + "validator", + "compare-tool", + "other", + "cli", + "bundler", + "ide-extension" + ] + }, + { + "type": "string" + } + ] + }, + "minItems": 1, + "examples": [ + "api", + "code-first", + "code-generator", + "converter", + "directory", + "documentation-generator", + "editor", + "ui-component", + "dsl", + "framework", + "github-action", + "mocking-and-testing", + "validator", + "compare-tool", + "other", + "cli", + "bundler", + "ide-extension" + ] + }, + "hasCommercial": { + "type": "boolean", + "description": "Indicate if your tool is open source or commercial offering, like SAAS for example", + "default": false } + } } -} \ No newline at end of file + } +} diff --git a/scripts/utils.js b/scripts/utils.js deleted file mode 100644 index c740ae91eaef..000000000000 --- a/scripts/utils.js +++ /dev/null @@ -1,26 +0,0 @@ -const yaml = require('yaml'); - -function convertToJson(contentYAMLorJSON) { - // Axios handles conversion to JSON by default, if data returned from the server allows it - // So if returned content is not a string (not YAML), we just return JSON back - if (typeof contentYAMLorJSON !== "string") { - return contentYAMLorJSON; - } - - // Check if the content is valid JSON before attempting to parse as YAML - try { - const jsonContent = JSON.parse(contentYAMLorJSON); - return jsonContent; - } catch (jsonError) { - // If it's not valid JSON, try parsing it as YAML - try { - const yamlContent = yaml.parse(contentYAMLorJSON); - return yamlContent; - } catch (yamlError) { - // If parsing as YAML also fails, throw an error - throw new Error(`Invalid content format:\nJSON Parse Error: ${jsonError}\nYAML Parse Error: ${yamlError}`); - } - } -} - -module.exports = { convertToJson }; diff --git a/scripts/utils.ts b/scripts/utils.ts new file mode 100644 index 000000000000..54081eb80648 --- /dev/null +++ b/scripts/utils.ts @@ -0,0 +1,47 @@ +import yaml from 'yaml'; + +/** + * Converts a YAML or JSON string to a JSON object. + * If the input is already a JSON object, it is returned as is. + * + * @param {unknown} contentYAMLorJSON - The content to be converted, either as a YAML/JSON string or a JSON object. + * @returns {any} - The converted JSON object. + * @throws {Error} - Throws an error if the content is neither valid JSON nor valid YAML. 
+ */
+function convertToJson(contentYAMLorJSON: unknown): any {
+  // Axios handles conversion to JSON by default, if data returned from the server allows it
+  // So if returned content is not a string (not YAML), we just return JSON back
+  if (typeof contentYAMLorJSON !== 'string') {
+    return contentYAMLorJSON;
+  }
+
+  // Check if the content is valid JSON before attempting to parse as YAML
+  try {
+    const jsonContent = JSON.parse(contentYAMLorJSON);
+
+    return jsonContent;
+  } catch (jsonError) {
+    // If it's not valid JSON, try parsing it as YAML
+    try {
+      const yamlContent = yaml.parse(contentYAMLorJSON);
+
+      return yamlContent;
+    } catch (yamlError) {
+      // If parsing as YAML also fails, throw an error
+      throw new Error(`Invalid content format:\nJSON Parse Error: ${jsonError}\nYAML Parse Error: ${yamlError}`);
+    }
+  }
+}
+
+/**
+ * Pauses execution for a specified number of milliseconds.
+ * @param {number} ms - The number of milliseconds to pause.
+ * @returns {Promise<void>}
+ */
+async function pause(ms: number): Promise<void> {
+  return new Promise((res) => {
+    setTimeout(res, ms);
+  });
+}
+
+export { convertToJson, pause };
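The fallback order matters here: `JSON.parse` runs first because every JSON document is also valid YAML, so trying YAML first would never fail over. A quick illustration with made-up strings (import path assumed relative to the repo root):

```ts
import { convertToJson, pause } from './scripts/utils';

const fromJson = convertToJson('{"title": "Example Tool"}');
const fromYaml = convertToJson('title: Example Tool');

console.log(fromJson.title === fromYaml.title); // true: both parse to { title: 'Example Tool' }

// pause() is a typed sleep, used between paginated API calls.
await pause(500);
```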
diff --git a/scripts/utils/logger.ts b/scripts/utils/logger.ts
new file mode 100644
index 000000000000..753a34c80bbf
--- /dev/null
+++ b/scripts/utils/logger.ts
@@ -0,0 +1,18 @@
+import winston from 'winston';
+
+const { combine, timestamp, printf, colorize, align } = winston.format;
+
+const logger = winston.createLogger({
+  level: process.env.LOG_LEVEL || 'info',
+  format: combine(
+    colorize({ level: true }),
+    timestamp({
+      format: 'YYYY-MM-DD hh:mm:ss.SSS A'
+    }),
+    align(),
+    printf((info) => `[${info.timestamp}] ${info.level}: ${info.message}`)
+  ),
+  transports: [new winston.transports.Console()]
+});
+
+export { logger };
diff --git a/scripts/utils/readAndWriteJson.js b/scripts/utils/readAndWriteJson.js
deleted file mode 100644
index 3c7f05d2308b..000000000000
--- a/scripts/utils/readAndWriteJson.js
+++ /dev/null
@@ -1,28 +0,0 @@
-const { promises: { readFile, writeFile } } = require('fs');
-const { convertToJson } = require("../utils");
-
-module.exports = async function writeJSON(readPath, writePath) {
-  let readContent;
-  let jsonContent;
-
-  // Attempt to read the file
-  try {
-    readContent = await readFile(readPath, 'utf-8');
-  } catch (err) {
-    throw new Error(`Error while reading file\nError: ${err}`);
-  }
-
-  // Attempt to convert content to JSON
-  try {
-    jsonContent = convertToJson(readContent);
-  } catch (err) {
-    throw new Error(`Error while conversion\nError: ${err}`);
-  }
-
-  // Attempt to write the JSON content to file
-  try {
-    await writeFile(writePath, JSON.stringify(jsonContent));
-  } catch (err) {
-    throw new Error(`Error while writing file\nError: ${err}`);
-  }
-};
\ No newline at end of file
diff --git a/scripts/utils/readAndWriteJson.ts b/scripts/utils/readAndWriteJson.ts
new file mode 100644
index 000000000000..e017309571f3
--- /dev/null
+++ b/scripts/utils/readAndWriteJson.ts
@@ -0,0 +1,36 @@
+import { readFile, writeFile } from 'fs/promises';
+
+import { convertToJson } from '../utils';
+
+/**
+ * Reads a file, converts its content to JSON, and writes the JSON content to another file.
+ *
+ * @param {string} readPath - The path of the file to read.
+ * @param {string} writePath - The path of the file to write the JSON content to.
+ * @throws Will throw an error if reading, converting, or writing the file fails.
+ */
+export async function writeJSON(readPath: string, writePath: string) {
+  let readContent;
+  let jsonContent;
+
+  // Attempt to read the file
+  try {
+    readContent = await readFile(readPath, 'utf-8');
+  } catch (err) {
+    throw new Error(`Error while reading file\nError: ${err}`);
+  }
+
+  // Attempt to convert content to JSON
+  try {
+    jsonContent = convertToJson(readContent);
+  } catch (err) {
+    throw new Error(`Error while conversion\nError: ${err}`);
+  }
+
+  // Attempt to write the JSON content to file
+  try {
+    await writeFile(writePath, JSON.stringify(jsonContent));
+  } catch (err) {
+    throw new Error(`Error while writing file\nError: ${err}`);
+  }
+}
diff --git a/tests/adopters/index.test.js b/tests/adopters/index.test.js
index 8218a7f398f4..f2a95087ffd6 100644
--- a/tests/adopters/index.test.js
+++ b/tests/adopters/index.test.js
@@ -1,11 +1,10 @@
 const { resolve } = require('path');
-const writeJSON = require('../../scripts/utils/readAndWriteJson.js');
-const buildAdoptersList = require('../../scripts/adopters/index');
+const { writeJSON } = require('../../scripts/utils/readAndWriteJson.ts');
+const { buildAdoptersList } = require('../../scripts/adopters/index.ts');
 
-jest.mock('../../scripts/utils/readAndWriteJson.js');
+jest.mock('../../scripts/utils/readAndWriteJson.ts');
 
 describe('buildAdoptersList', () => {
-
   test('should call writeJSON with correct arguments', async () => {
     const expectedReadPath = 'config/adopters.yml';
     const expectedWritePath = resolve(__dirname, '../../config', 'adopters.json');
@@ -14,5 +13,4 @@ describe('buildAdoptersList', () => {
 
     expect(writeJSON).toHaveBeenCalledWith(expectedReadPath, expectedWritePath);
   });
-
 });
diff --git a/tests/babel.test.config.cjs b/tests/babel.test.config.cjs
new file mode 100644
index 000000000000..1372e1c5149f
--- /dev/null
+++ b/tests/babel.test.config.cjs
@@ -0,0 +1,6 @@
+// This Babel config transpiles the TypeScript scripts before the tests run.
+ +module.exports = { + presets: [['@babel/preset-env', { targets: { node: 'current' } }], '@babel/preset-typescript'], + plugins: ['babel-plugin-transform-import-meta'] +}; diff --git a/tests/build-docs/addDocButtons.test.js b/tests/build-docs/addDocButtons.test.js index b867d8389549..1dabb71a82cf 100644 --- a/tests/build-docs/addDocButtons.test.js +++ b/tests/build-docs/addDocButtons.test.js @@ -1,5 +1,5 @@ -const { addDocButtons } = require("../../scripts/build-docs"); -const { docPosts, treePosts, mockDocPosts, mockTreePosts, invalidTreePosts } = require("../fixtures/addDocButtonsData"); +const { addDocButtons } = require('../../scripts/build-docs.ts'); +const { docPosts, treePosts, mockDocPosts, mockTreePosts, invalidTreePosts } = require('../fixtures/addDocButtonsData'); describe('addDocButtons', () => { it('should add next and previous page information', () => { @@ -8,12 +8,12 @@ describe('addDocButtons', () => { slug: '/docs', content: 'Welcome content' }; - + const expectedSecondItem = { isRootSection: true, title: 'Section 1' }; - + const expectedThirdItem = { title: 'Page 1', slug: '/docs/section1/page1', @@ -26,7 +26,7 @@ describe('addDocButtons', () => { href: undefined } }; - + const expectedFourthItem = { title: 'Page 2', slug: '/docs/section1/page2', @@ -35,16 +35,16 @@ describe('addDocButtons', () => { href: '/docs/section1/page1' } }; - + const result = addDocButtons(docPosts, treePosts); - + expect(result).toHaveLength(4); expect(result[0]).toEqual(expectedFirstItem); expect(result[1]).toEqual(expectedSecondItem); expect(result[2]).toEqual(expectedThirdItem); expect(result[3]).toEqual(expectedFourthItem); }); - + it('should set nextPage correctly when next item is a root element', () => { const result = addDocButtons(mockDocPosts, mockTreePosts); @@ -59,10 +59,10 @@ describe('addDocButtons', () => { try { addDocButtons(docPosts, undefined); } catch (err) { - error = err - expect(err.message).toContain("An error occurred while adding doc buttons:"); + error = err; + expect(err.message).toContain('An error occurred while adding doc buttons:'); } - expect(error).toBeDefined() + expect(error).toBeDefined(); }); it('should throw an error if docPosts is missing', () => { @@ -71,10 +71,10 @@ describe('addDocButtons', () => { try { addDocButtons(undefined, treePosts); } catch (err) { - error = err - expect(err.message).toContain("An error occurred while adding doc buttons:"); + error = err; + expect(err.message).toContain('An error occurred while adding doc buttons:'); } - expect(error).toBeDefined() + expect(error).toBeDefined(); }); it('should handle invalid data structure in treePosts', () => { @@ -84,8 +84,8 @@ describe('addDocButtons', () => { addDocButtons(docPosts, invalidTreePosts); } catch (err) { error = err; - expect(err.message).toContain("An error occurred while adding doc buttons:"); + expect(err.message).toContain('An error occurred while adding doc buttons:'); } - expect(error).toBeDefined() + expect(error).toBeDefined(); }); }); diff --git a/tests/build-docs/buildNavTree.test.js b/tests/build-docs/buildNavTree.test.js index 992011be949c..22006a8eb322 100644 --- a/tests/build-docs/buildNavTree.test.js +++ b/tests/build-docs/buildNavTree.test.js @@ -1,13 +1,13 @@ -const { buildNavTree } = require('../../scripts/build-docs'); +const { buildNavTree } = require('../../scripts/build-docs.ts'); -const { - basicNavItems, - sectionNavItems, - orphanNavItems, - missingFieldsNavItems, - invalidParentNavItems, - multipleSubsectionsNavItems -} = 
require('../fixtures/buildNavTreeData') +const { + basicNavItems, + sectionNavItems, + orphanNavItems, + missingFieldsNavItems, + invalidParentNavItems, + multipleSubsectionsNavItems +} = require('../fixtures/buildNavTreeData'); describe('buildNavTree', () => { beforeEach(() => { @@ -15,10 +15,9 @@ describe('buildNavTree', () => { }); it('should create a tree structure from nav items', () => { - const result = buildNavTree(basicNavItems); - expect(result['welcome'].item).toEqual( + expect(result.welcome.item).toEqual( expect.objectContaining({ title: 'Welcome', slug: '/docs' @@ -35,38 +34,36 @@ describe('buildNavTree', () => { expect(result['getting-started'].children).toHaveProperty('installation'); expect(result['getting-started'].children).toHaveProperty('configuration'); - expect(result['reference'].item).toEqual( + expect(result.reference.item).toEqual( expect.objectContaining({ title: 'Reference', slug: '/docs/reference' }) ); - expect(result['reference'].children.api.item).toEqual( + expect(result.reference.children.api.item).toEqual( expect.objectContaining({ title: 'API', slug: '/docs/reference/api' }) ); - expect(result['reference'].children.specification.item.slug).toBe('/docs/reference/specification'); - expect(result['reference'].children.specification.children[0].slug).toBe('/docs/reference/specification/v3.0'); - + expect(result.reference.children.specification.item.slug).toBe('/docs/reference/specification'); + expect(result.reference.children.specification.children[0].slug).toBe('/docs/reference/specification/v3.0'); }); it('should handle items without sectionId', () => { - const result = buildNavTree(sectionNavItems); - expect(result['root'].item).toEqual( + expect(result.root.item).toEqual( expect.objectContaining({ title: 'Root', slug: '/docs' }) ); - expect(result['root'].children).toHaveProperty('Item without sectionId'); - expect(result['root'].children['Item without sectionId'].item).toEqual( + expect(result.root.children).toHaveProperty('Item without sectionId'); + expect(result.root.children['Item without sectionId'].item).toEqual( expect.objectContaining({ title: 'Item without sectionId', slug: '/docs/item' @@ -83,7 +80,7 @@ describe('buildNavTree', () => { error = err; expect(err.message).toContain('Parent section non-existent-parent not found for item Orphaned Subsection'); } - expect(error).toBeDefined() + expect(error).toBeDefined(); }); it('should handle items with missing required fields gracefully', () => { @@ -113,12 +110,12 @@ describe('buildNavTree', () => { it('should sort children within subsections based on weight', () => { const result = buildNavTree(multipleSubsectionsNavItems); - const apiChildren = result['reference'].children.api.children; + const apiChildren = result.reference.children.api.children; expect(apiChildren[0].title).toBe('Authentication'); expect(apiChildren[1].title).toBe('Endpoints'); expect(apiChildren[2].title).toBe('Rate Limiting'); - const specChildren = result['reference'].children.specification.children; + const specChildren = result.reference.children.specification.children; expect(specChildren[0].title).toBe('v1.0'); expect(specChildren[1].title).toBe('v2.0'); expect(specChildren[2].title).toBe('v3.0'); @@ -129,5 +126,4 @@ describe('buildNavTree', () => { expect(specChildren[0].weight).toBeLessThan(specChildren[1].weight); expect(specChildren[1].weight).toBeLessThan(specChildren[2].weight); }); - }); diff --git a/tests/build-docs/convertDocPosts.test.js b/tests/build-docs/convertDocPosts.test.js index 
6cc397ed98b2..13159e2ed8de 100644 --- a/tests/build-docs/convertDocPosts.test.js +++ b/tests/build-docs/convertDocPosts.test.js @@ -1,10 +1,10 @@ -const { convertDocPosts } = require('../../scripts/build-docs'); -const { - docObject, - emptyDocObject, - singlePostDocObject, +const { convertDocPosts } = require('../../scripts/build-docs.ts'); +const { + docObject, + emptyDocObject, + singlePostDocObject, nestedChildrenDocObject - } = require('../fixtures/convertDocPostData'); +} = require('../fixtures/convertDocPostData'); describe('convertDocPosts', () => { it('should convert a doc object to an array', () => { @@ -58,5 +58,4 @@ describe('convertDocPosts', () => { } expect(error).toBeDefined(); }); - }); diff --git a/tests/build-meetings.test.js b/tests/build-meetings.test.js index bd4d9db5b1ec..366682a93f27 100644 --- a/tests/build-meetings.test.js +++ b/tests/build-meetings.test.js @@ -1,117 +1,147 @@ const { google } = require('googleapis'); -const path = require("path"); +const path = require('path'); const { readFileSync, mkdirSync, rmSync } = require('fs'); -const { buildMeetings } = require('../scripts/build-meetings'); -const { mockEvents, expectedContent } = require('../tests/fixtures/meetingsData'); +const { buildMeetings } = require('../scripts/build-meetings.ts'); +const { mockEvents, expectedContent } = require('./fixtures/meetingsData'); jest.mock('googleapis', () => { - const events = { - list: jest.fn(), - }; - const calendar = { - events, - }; - const google = { - calendar: jest.fn(() => calendar), - auth: { - GoogleAuth: jest.fn(() => ({ - getClient: jest.fn(), - })), - }, - }; - return { google }; + const events = { + list: jest.fn() + }; + const calendar = { + events + }; + const mockGoogle = { + calendar: jest.fn(() => calendar), + auth: { + GoogleAuth: jest.fn(() => ({ + getClient: jest.fn() + })) + } + }; + return { google: mockGoogle }; }); describe('buildMeetings', () => { - const testDir = path.join(__dirname, 'testCache'); - const outputFilePath = path.join(testDir, 'meetings.json'); + const testDir = path.join(__dirname, 'testCache'); + const outputFilePath = path.join(testDir, 'meetings.json'); - beforeEach(() => { - jest.clearAllMocks(); - process.env.CALENDAR_SERVICE_ACCOUNT = JSON.stringify({ key: 'test_key' }); - process.env.CALENDAR_ID = 'test_calendar_id'; + beforeEach(() => { + jest.clearAllMocks(); + process.env.CALENDAR_SERVICE_ACCOUNT = JSON.stringify({ key: 'test_key' }); + process.env.CALENDAR_ID = 'test_calendar_id'; - mkdirSync(testDir, { recursive: true }); - }); - - afterEach(() => { - rmSync(testDir, { recursive: true, force: true }); - }); + mkdirSync(testDir, { recursive: true }); + }); - it('should fetch events, process them, and write to a file', async () => { - google.calendar().events.list.mockResolvedValue({ data: { items: mockEvents } }); + afterEach(() => { + rmSync(testDir, { recursive: true, force: true }); + }); - await buildMeetings(outputFilePath); + it('should fetch events, process them, and write to a file', async () => { + google.calendar().events.list.mockResolvedValue({ data: { items: mockEvents } }); - expect(google.auth.GoogleAuth).toHaveBeenCalledWith({ - scopes: ['https://www.googleapis.com/auth/calendar'], - credentials: { key: 'test_key' }, - }); - expect(google.calendar).toHaveBeenCalled(); - expect(google.calendar().events.list).toHaveBeenCalledWith({ - calendarId: 'test_calendar_id', - timeMax: expect.any(String), - timeMin: expect.any(String), - }); + await buildMeetings(outputFilePath); - const fileContent = 
readFileSync(outputFilePath, 'utf8'); - const parsedContent = JSON.parse(fileContent); - - expect(parsedContent).toEqual(expectedContent); + expect(google.auth.GoogleAuth).toHaveBeenCalledWith({ + scopes: ['https://www.googleapis.com/auth/calendar'], + credentials: { key: 'test_key' } }); - - it('should throw an error if the Google API call fails', async () => { - google.calendar().events.list.mockRejectedValue(new Error('Google API error')); - - try { - await buildMeetings(outputFilePath) - } catch (err) { - expect(err.message).toContain('Google API error'); - } + expect(google.calendar).toHaveBeenCalled(); + expect(google.calendar().events.list).toHaveBeenCalledWith({ + calendarId: 'test_calendar_id', + timeMax: expect.any(String), + timeMin: expect.any(String) }); - it('should handle undefined CALENDAR_SERVICE_ACCOUNT', async () => { - delete process.env.CALENDAR_SERVICE_ACCOUNT; + const fileContent = readFileSync(outputFilePath, 'utf8'); + const parsedContent = JSON.parse(fileContent); - google.calendar().events.list.mockResolvedValue({ data: { items: [] } }); + expect(parsedContent).toEqual(expectedContent); + }); - await buildMeetings(outputFilePath); + it('should throw an error if the Google API call fails', async () => { + google.calendar().events.list.mockRejectedValue(new Error('Google API error')); - expect(google.auth.GoogleAuth).toHaveBeenCalledWith({ - scopes: ['https://www.googleapis.com/auth/calendar'], - credentials: undefined, - }); + try { + await buildMeetings(outputFilePath); + } catch (err) { + expect(err.message).toContain('Google API error'); + } + }); - const fileContent = readFileSync(outputFilePath, 'utf8'); - expect(fileContent).toBe('[]'); - }); + it('should handle undefined CALENDAR_SERVICE_ACCOUNT', async () => { + delete process.env.CALENDAR_SERVICE_ACCOUNT; + + google.calendar().events.list.mockResolvedValue({ data: { items: [] } }); - it('should throw an error if authentication fails', async () => { - google.auth.GoogleAuth.mockImplementation(() => { - throw new Error('Authentication failed'); - }); + await buildMeetings(outputFilePath); - try { - await buildMeetings(outputFilePath) - } catch (err) { - expect(err.message).toContain('Authentication failed') - } + expect(google.auth.GoogleAuth).toHaveBeenCalledWith({ + scopes: ['https://www.googleapis.com/auth/calendar'], + credentials: undefined }); - it('should handle file write errors', async () => { - google.auth.GoogleAuth.mockImplementation(() => ({ - getClient: jest.fn(), - })); + const fileContent = readFileSync(outputFilePath, 'utf8'); + expect(fileContent).toBe('[]'); + }); - google.calendar().events.list.mockResolvedValue({ data: { items: mockEvents } }); + it('should throw an error if authentication fails', async () => { + google.auth.GoogleAuth.mockImplementation(() => { + throw new Error('Authentication failed'); + }); - const invalidPath = '/root/invalid_dir/meetings.json'; + try { + await buildMeetings(outputFilePath); + } catch (err) { + expect(err.message).toContain('Authentication failed'); + } + }); + + it('should handle file write errors', async () => { + google.auth.GoogleAuth.mockImplementation(() => ({ + getClient: jest.fn() + })); + + google.calendar().events.list.mockResolvedValue({ data: { items: mockEvents } }); + + const invalidPath = '/root/invalid_dir/meetings.json'; + + try { + await buildMeetings(invalidPath); + } catch (err) { + expect(err.message).toMatch(/ENOENT|EACCES/); + } + }); + + it('should throw an error if the data structure received from Google Calendar API is 
invalid', async () => { + const mockCalendar = google.calendar().events.list; + mockCalendar.mockResolvedValueOnce({ + data: { + items: null // or {} or any non-array value to trigger the error + } + }); - try { - await buildMeetings(invalidPath); - } catch (err) { - expect(err.message).toMatch(/ENOENT|EACCES/); - } + await expect(buildMeetings('/path/to/write')).rejects.toThrow( + 'Invalid data structure received from Google Calendar API' + ); + }); + + it('should throw an error if start.dateTime is missing in the event', async () => { + const mockCalendar = google.calendar().events.list; + mockCalendar.mockResolvedValueOnce({ + data: { + items: [ + { + summary: 'Test Event', + htmlLink: 'http://example.com/event', + // start.dateTime is intentionally missing to trigger the error + start: {} + } + ] + } }); + await expect(buildMeetings('/path/to/write')).rejects.toThrow('start.dateTime is missing in the event'); + }); }); diff --git a/tests/build-newsroom-videos.test.js b/tests/build-newsroom-videos.test.js index 188bc2dffc70..2829b2e0cc54 100644 --- a/tests/build-newsroom-videos.test.js +++ b/tests/build-newsroom-videos.test.js @@ -1,102 +1,110 @@ const { readFileSync, removeSync, mkdirpSync, outputFileSync } = require('fs-extra'); const { resolve, join } = require('path'); -const { buildNewsroomVideos } = require('../scripts/build-newsroom-videos'); -const { mockApiResponse, expectedResult } = require('./fixtures/newsroomData'); const fetch = require('node-fetch-2'); const os = require('os'); +const { buildNewsroomVideos } = require('../scripts/build-newsroom-videos.ts'); +const { mockApiResponse, expectedResult } = require('./fixtures/newsroomData'); jest.mock('node-fetch-2', () => jest.fn()); describe('buildNewsroomVideos', () => { - const testDir = join(os.tmpdir(), 'test_config'); - const testFilePath = resolve(testDir, 'newsroom_videos.json'); - - beforeAll(() => { - mkdirpSync(testDir); - outputFileSync(testFilePath, JSON.stringify({})); - process.env.YOUTUBE_TOKEN = 'testkey'; + const testDir = join(os.tmpdir(), 'test_config'); + const testFilePath = resolve(testDir, 'newsroom_videos.json'); + + beforeAll(() => { + mkdirpSync(testDir); + outputFileSync(testFilePath, JSON.stringify({})); + process.env.YOUTUBE_TOKEN = 'testkey'; + }); + + afterAll(() => { + removeSync(testDir); + }); + + beforeEach(() => { + fetch.mockClear(); + }); + + it('should fetch video data and write to file', async () => { + fetch.mockResolvedValue({ + ok: true, + json: jest.fn().mockResolvedValue(mockApiResponse) }); - afterAll(() => { - removeSync(testDir); + const result = await buildNewsroomVideos(testFilePath); + + const expectedUrl = new URL('https://youtube.googleapis.com/youtube/v3/search'); + expectedUrl.searchParams.set('key', 'testkey'); + expectedUrl.searchParams.set('part', 'snippet'); + expectedUrl.searchParams.set('channelId', 'UCIz9zGwDLbrYQcDKVXdOstQ'); + expectedUrl.searchParams.set('eventType', 'completed'); + expectedUrl.searchParams.set('type', 'video'); + expectedUrl.searchParams.set('order', 'Date'); + expectedUrl.searchParams.set('maxResults', '5'); + + expect(fetch).toHaveBeenCalledWith(expectedUrl.toString()); + const response = readFileSync(testFilePath, 'utf8'); + expect(response).toEqual(expectedResult); + expect(result).toEqual(expectedResult); + }); + + it('should handle fetch errors', async () => { + fetch.mockRejectedValue(new Error('Fetch error')); + + try { + await buildNewsroomVideos(testFilePath); + } catch (err) { + expect(err.message).toContain('Fetch error'); + } + 
}); + + it('should handle invalid API response', async () => { + fetch.mockResolvedValue({ + ok: true, + json: jest.fn().mockResolvedValue({}) }); - beforeEach(() => { - fetch.mockClear(); + try { + await buildNewsroomVideos(testFilePath); + } catch (err) { + expect(err.message).toContain('Invalid data structure received from YouTube API'); + } + }); + + it('should handle HTTP status code', async () => { + fetch.mockResolvedValue({ + ok: false, + status: 404, + json: jest.fn().mockResolvedValue({}) }); - it('should fetch video data and write to file', async () => { - fetch.mockResolvedValue({ - ok: true, - json: jest.fn().mockResolvedValue(mockApiResponse), - }); - - const result = await buildNewsroomVideos(testFilePath); - - const expectedUrl = new URL('https://youtube.googleapis.com/youtube/v3/search'); - expectedUrl.searchParams.set('key', 'testkey'); - expectedUrl.searchParams.set('part', 'snippet'); - expectedUrl.searchParams.set('channelId', 'UCIz9zGwDLbrYQcDKVXdOstQ'); - expectedUrl.searchParams.set('eventType', 'completed'); - expectedUrl.searchParams.set('type', 'video'); - expectedUrl.searchParams.set('order', 'Date'); - expectedUrl.searchParams.set('maxResults', '5'); - - expect(fetch).toHaveBeenCalledWith(expectedUrl.toString()); - const response = readFileSync(testFilePath, 'utf8'); - expect(response).toEqual(expectedResult); - expect(result).toEqual(expectedResult); + try { + await buildNewsroomVideos(testFilePath); + } catch (err) { + expect(err.message).toContain('HTTP error! with status code: 404'); + } + }); + + it('should handle file write errors', async () => { + fetch.mockResolvedValue({ + ok: true, + json: jest.fn().mockResolvedValue(mockApiResponse) }); - it('should handle fetch errors', async () => { - fetch.mockRejectedValue(new Error('Fetch error')); - - try { - await buildNewsroomVideos(testFilePath); - } catch (err) { - expect(err.message).toContain('Fetch error'); - } - }); - - it('should handle invalid API response', async () => { - fetch.mockResolvedValue({ - ok: true, - json: jest.fn().mockResolvedValue({}), - }); - - try { - await buildNewsroomVideos(testFilePath); - } catch (err) { - expect(err.message).toContain('Invalid data structure received from YouTube API'); - } - }); - - it('should handle HTTP status code', async () => { - fetch.mockResolvedValue({ - ok: false, - status: 404, - json: jest.fn().mockResolvedValue({}), - }); - - try { - await buildNewsroomVideos(testFilePath); - } catch (err) { - expect(err.message).toContain('HTTP error! 
with status code: 404'); - } - }); - - it('should handle file write errors', async () => { - fetch.mockResolvedValue({ - ok: true, - json: jest.fn().mockResolvedValue(mockApiResponse), - }); - - const invalidPath = resolve(os.tmpdir(), 'invalid_dir', 'newsroom_videos.json'); - - try { - await buildNewsroomVideos(invalidPath); - } catch (err) { - expect(err.message).toMatch(/ENOENT|EACCES/); - } - }); + const invalidPath = resolve(os.tmpdir(), 'invalid_dir', 'newsroom_videos.json'); + + try { + await buildNewsroomVideos(invalidPath); + } catch (err) { + expect(err.message).toMatch(/ENOENT|EACCES/); + } + }); + + it('should throw an error if YOUTUBE_TOKEN environment variable is not set', async () => { + delete process.env.YOUTUBE_TOKEN; + await expect(buildNewsroomVideos('/path/to/write')).rejects.toThrow( + 'YOUTUBE_TOKEN environment variable is required' + ); + process.env.YOUTUBE_TOKEN = 'testkey'; + }); }); diff --git a/tests/build-pages.test.js b/tests/build-pages.test.js index f811b1480158..0a1d797b06f7 100644 --- a/tests/build-pages.test.js +++ b/tests/build-pages.test.js @@ -1,6 +1,6 @@ const fs = require('fs'); const path = require('path'); -const { capitalizeJsxTags, copyAndRenameFiles, ensureDirectoryExists } = require('../scripts/build-pages'); +const { capitalizeJsxTags, copyAndRenameFiles, ensureDirectoryExists } = require('../scripts/build-pages.ts'); describe('capitalizeJsxTags', () => { test('should capitalize JSX tags', () => { @@ -52,6 +52,7 @@ describe('copyAndRenameFiles', () => { expect(fs.existsSync(NEW_TEST_DIR)).toBe(false); ensureDirectoryExists(NEW_TEST_DIR); expect(fs.existsSync(NEW_TEST_DIR)).toBe(true); + // delete the test directory after the test + fs.rmSync(NEW_TEST_DIR, { recursive: true, force: true }); }); - -}); \ No newline at end of file +}); diff --git a/tests/build-post-list.test.js b/tests/build-post-list.test.js index 388364a447ab..a8a2a6d983df 100644 --- a/tests/build-post-list.test.js +++ b/tests/build-post-list.test.js @@ -1,7 +1,7 @@ const fs = require('fs-extra'); const { resolve, join } = require('path'); -const { setupTestDirectories, generateTempDirPath } = require('./helper/buildPostListSetup') -const { buildPostList, slugifyToC, addItem } = require('../scripts/build-post-list'); +const { setupTestDirectories, generateTempDirPath } = require('./helper/buildPostListSetup'); +const { buildPostList, slugifyToC, addItem } = require('../scripts/build-post-list.ts'); describe('buildPostList', () => { let tempDir; @@ -14,11 +14,10 @@ describe('buildPostList', () => { postDirectories = [ [join(tempDir, 'blog'), '/blog'], [join(tempDir, 'docs'), '/docs'], - [join(tempDir, 'about'), '/about'], + [join(tempDir, 'about'), '/about'] ]; await setupTestDirectories(tempDir); - }); afterEach(async () => { @@ -30,7 +29,7 @@ describe('buildPostList', () => { const outputExists = await fs.pathExists(writeFilePath); expect(outputExists).toBe(true); }); - + it('writes valid JSON content', async () => { await buildPostList(postDirectories, tempDir, writeFilePath); const content = await fs.readFile(writeFilePath, 'utf-8'); @@ -45,19 +44,19 @@ describe('buildPostList', () => { expect.arrayContaining([ expect.objectContaining({ title: 'Docs Home', - slug: '/docs', + slug: '/docs' }), expect.objectContaining({ title: 'Reference', slug: '/docs/reference', - isRootSection: true, + isRootSection: true }), expect.objectContaining({ title: 'Specification', slug: '/docs/reference/specification', - isSection: true, - }), - ]), + isSection: true + }) + ]) ); }); @@ -69,25 
diff --git a/tests/build-pages.test.js b/tests/build-pages.test.js
index f811b1480158..0a1d797b06f7 100644
--- a/tests/build-pages.test.js
+++ b/tests/build-pages.test.js
@@ -1,6 +1,6 @@
 const fs = require('fs');
 const path = require('path');
-const { capitalizeJsxTags, copyAndRenameFiles, ensureDirectoryExists } = require('../scripts/build-pages');
+const { capitalizeJsxTags, copyAndRenameFiles, ensureDirectoryExists } = require('../scripts/build-pages.ts');
 
 describe('capitalizeJsxTags', () => {
   test('should capitalize JSX tags', () => {
@@ -52,6 +52,7 @@ describe('copyAndRenameFiles', () => {
     expect(fs.existsSync(NEW_TEST_DIR)).toBe(false);
     ensureDirectoryExists(NEW_TEST_DIR);
     expect(fs.existsSync(NEW_TEST_DIR)).toBe(true);
+    // delete the test directory after the test
+    fs.rmSync(NEW_TEST_DIR, { recursive: true, force: true });
   });
-
-});
\ No newline at end of file
+});
diff --git a/tests/build-post-list.test.js b/tests/build-post-list.test.js
index 388364a447ab..a8a2a6d983df 100644
--- a/tests/build-post-list.test.js
+++ b/tests/build-post-list.test.js
@@ -1,7 +1,7 @@
 const fs = require('fs-extra');
 const { resolve, join } = require('path');
-const { setupTestDirectories, generateTempDirPath } = require('./helper/buildPostListSetup')
-const { buildPostList, slugifyToC, addItem } = require('../scripts/build-post-list');
+const { setupTestDirectories, generateTempDirPath } = require('./helper/buildPostListSetup');
+const { buildPostList, slugifyToC, addItem } = require('../scripts/build-post-list.ts');
 
 describe('buildPostList', () => {
   let tempDir;
@@ -14,11 +14,10 @@ describe('buildPostList', () => {
     postDirectories = [
       [join(tempDir, 'blog'), '/blog'],
       [join(tempDir, 'docs'), '/docs'],
-      [join(tempDir, 'about'), '/about'],
+      [join(tempDir, 'about'), '/about']
     ];
 
     await setupTestDirectories(tempDir);
-
   });
 
   afterEach(async () => {
@@ -30,7 +29,7 @@ describe('buildPostList', () => {
     const outputExists = await fs.pathExists(writeFilePath);
     expect(outputExists).toBe(true);
   });
-
+
   it('writes valid JSON content', async () => {
     await buildPostList(postDirectories, tempDir, writeFilePath);
     const content = await fs.readFile(writeFilePath, 'utf-8');
@@ -45,19 +44,19 @@ describe('buildPostList', () => {
       expect.arrayContaining([
         expect.objectContaining({
           title: 'Docs Home',
-          slug: '/docs',
+          slug: '/docs'
         }),
         expect.objectContaining({
           title: 'Reference',
           slug: '/docs/reference',
-          isRootSection: true,
+          isRootSection: true
         }),
         expect.objectContaining({
           title: 'Specification',
           slug: '/docs/reference/specification',
-          isSection: true,
-        }),
-      ]),
+          isSection: true
+        })
+      ])
     );
   });
 
@@ -69,25 +68,23 @@ describe('buildPostList', () => {
       expect.arrayContaining([
         expect.objectContaining({
           title: 'Release Notes 2.1.0',
-          slug: '/blog/release-notes-2.1.0',
-        }),
-      ]),
+          slug: '/blog/release-notes-2.1.0'
+        })
+      ])
     );
 
     expect(output.about).toEqual(
       expect.arrayContaining([
         expect.objectContaining({
           title: 'About Us',
-          slug: '/about',
-        }),
-      ]),
+          slug: '/about'
+        })
+      ])
     );
 
     expect(output.docsTree).toBeDefined();
 
-    const blogEntry = output.blog.find(
-      (item) => item.slug === '/blog/release-notes-2.1.0',
-    );
+    const blogEntry = output.blog.find((item) => item.slug === '/blog/release-notes-2.1.0');
     expect(blogEntry).toBeDefined();
     expect(blogEntry.title).toBe('Release Notes 2.1.0');
   });
@@ -96,7 +93,7 @@ describe('buildPostList', () => {
     await fs.ensureDir(join(tempDir, 'docs', 'section1'));
     await fs.writeFile(
       join(tempDir, 'docs', 'section1', '_section.mdx'),
-      '---\ntitle: Section 1\n---\nThis is section 1.',
+      '---\ntitle: Section 1\n---\nThis is section 1.'
     );
 
     await buildPostList(postDirectories, tempDir, writeFilePath);
@@ -107,26 +104,22 @@ describe('buildPostList', () => {
     expect(sectionEntry).toMatchObject({
       title: 'Section 1',
       slug: expect.stringContaining('/docs/section1'),
-      isSection: true,
+      isSection: true
     });
   });
 
   it('handles multiple release notes correctly', async () => {
     await fs.writeFile(
       join(tempDir, 'blog', 'release-notes-2.1.1.mdx'),
-      '---\ntitle: Release Notes 2.1.1\n---\nThis is a release note.',
+      '---\ntitle: Release Notes 2.1.1\n---\nThis is a release note.'
     );
 
     await buildPostList(postDirectories, tempDir, writeFilePath);
 
     const output = JSON.parse(await fs.readFile(writeFilePath, 'utf-8'));
 
-    const firstReleaseNote = output.blog.find(
-      (item) => item.slug === '/blog/release-notes-2.1.0',
-    );
-    const secondReleaseNote = output.blog.find(
-      (item) => item.slug === '/blog/release-notes-2.1.1',
-    );
+    const firstReleaseNote = output.blog.find((item) => item.slug === '/blog/release-notes-2.1.0');
+    const secondReleaseNote = output.blog.find((item) => item.slug === '/blog/release-notes-2.1.1');
 
     expect(firstReleaseNote).toBeDefined();
     expect(firstReleaseNote.title).toBe('Release Notes 2.1.0');
@@ -137,24 +130,19 @@ describe('buildPostList', () => {
   it('throws an error when accessing non-existent directory', async () => {
     const invalidDir = [join(tempDir, 'non-existent-dir'), '/invalid'];
 
-    await expect(
-      buildPostList([invalidDir], tempDir, writeFilePath),
-    ).rejects.toThrow(/Error while building post list: ENOENT/);
+    await expect(buildPostList([invalidDir], tempDir, writeFilePath)).rejects.toThrow(
+      /Error while building post list: ENOENT/
+    );
   });
 
   it('does not process specification files without a title', async () => {
     const specDir = join(tempDir, 'docs', 'reference', 'specification');
-    await fs.writeFile(
-      join(specDir, 'v2.1.0-no-title.mdx'),
-      '---\n---\nContent of specification without a title.',
-    );
+    await fs.writeFile(join(specDir, 'v2.1.0-no-title.mdx'), '---\n---\nContent of specification without a title.');
 
     await buildPostList(postDirectories, tempDir, writeFilePath);
 
     const output = JSON.parse(await fs.readFile(writeFilePath, 'utf-8'));
-    const noTitleEntry = output.docs.find((item) =>
-      item.slug.includes('/reference/specification/v2.1.0-no-title'),
-    );
+    const noTitleEntry = output.docs.find((item) => item.slug.includes('/reference/specification/v2.1.0-no-title'));
 
     expect(noTitleEntry).toBeUndefined();
   });
@@ -163,66 +151,50 @@ describe('buildPostList', () => {
     const specDir = join(tempDir, 'docs', 'reference', 'specification');
     await fs.writeFile(
       join(specDir, 'v2.1.0-next-spec.1.mdx'),
-      '---\n---\nContent of pre-release specification v2.1.0-next-spec.1.',
+      '---\n---\nContent of pre-release specification v2.1.0-next-spec.1.'
     );
 
     await buildPostList(postDirectories, tempDir, writeFilePath);
 
     const output = JSON.parse(await fs.readFile(writeFilePath, 'utf-8'));
-    const nextSpecEntry = output.docs.find((item) =>
-      item.slug.includes('/reference/specification/v2.1.0-next-spec.1'),
-    );
+    const nextSpecEntry = output.docs.find((item) => item.slug.includes('/reference/specification/v2.1.0-next-spec.1'));
 
     expect(nextSpecEntry).toBeUndefined();
   });
 
   it('does not process specification files with "explorer" in the filename', async () => {
     const specDir = join(tempDir, 'docs', 'reference', 'specification');
-    await fs.writeFile(
-      join(specDir, 'explorer.mdx'),
-      '---\n---\nContent of explorer specification.',
-    );
+    await fs.writeFile(join(specDir, 'explorer.mdx'), '---\n---\nContent of explorer specification.');
 
     await buildPostList(postDirectories, tempDir, writeFilePath);
 
     const output = JSON.parse(await fs.readFile(writeFilePath, 'utf-8'));
-    const explorerEntry = output.docs.find((item) =>
-      item.slug.includes('/reference/specification/explorer'),
-    );
+    const explorerEntry = output.docs.find((item) => item.slug.includes('/reference/specification/explorer'));
 
     expect(explorerEntry).toBeUndefined();
   });
 
   it('throws "Error while building post list" when front matter is invalid', async () => {
-    await fs.writeFile(
-      join(tempDir, 'docs', 'invalid.mdx'),
-      '---\ninvalid front matter\n---\nContent',
-    );
+    await fs.writeFile(join(tempDir, 'docs', 'invalid.mdx'), '---\ninvalid front matter\n---\nContent');
 
-    await expect(
-      buildPostList(postDirectories, tempDir, writeFilePath),
-    ).rejects.toThrow(/Error while building post list/);
+    await expect(buildPostList(postDirectories, tempDir, writeFilePath)).rejects.toThrow(
+      /Error while building post list/
+    );
   });
 
   it('throws an error if no post directories are provided', async () => {
-    await expect(buildPostList([], tempDir, writeFilePath)).rejects.toThrow(
-      /Error while building post list/,
-    );
+    await expect(buildPostList([], tempDir, writeFilePath)).rejects.toThrow(/Error while building post list/);
   });
 
   it('throws specific error message when basePath parameter is undefined', async () => {
-    await expect(
-      buildPostList(postDirectories, undefined, writeFilePath),
-    ).rejects.toThrow(
-      "Error while building post list: basePath is required",
+    await expect(buildPostList(postDirectories, undefined, writeFilePath)).rejects.toThrow(
+      'Error while building post list: basePath is required'
     );
   });
 
   it('throws specific error message when writeFilePath parameter is undefined', async () => {
-    await expect(
-      buildPostList(postDirectories, tempDir, undefined),
-    ).rejects.toThrow(
-      "Error while building post list: writeFilePath is required",
+    await expect(buildPostList(postDirectories, tempDir, undefined)).rejects.toThrow(
+      'Error while building post list: writeFilePath is required'
     );
   });
 
@@ -247,13 +219,13 @@ describe('buildPostList', () => {
   it('handles empty strings', () => {
     expect(slugifyToC('')).toBe('');
   });
-
+
   it('returns empty string for malformed heading IDs', () => {
     expect(slugifyToC('## Heading {#}')).toBe('');
     expect(slugifyToC('## Heading {# }')).toBe('');
     expect(slugifyToC('## Heading {}')).toBe('');
   });
-
+
   it('handles mixed format heading IDs', () => {
     expect(slugifyToC('## Heading {#id} {}')).toBe('id');
   });
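The `slugifyToC` cases above (`''`, `{#}`, `{# }`, `{}`, `{#id} {}`) all reduce to extracting an explicit `{#...}` heading ID. A minimal sketch that satisfies exactly those cases — the regex is an assumption, and the real script presumably also slugifies headings that carry no explicit ID:

```ts
export function slugifyToC(str: string): string {
  if (typeof str !== 'string' || !str) return '';

  // Accept only a non-empty, whitespace-free ID inside {#...};
  // '{#}', '{# }' and '{}' therefore all yield ''.
  const match = str.match(/\{#([a-zA-Z0-9\-_]+)\}/);
  return match ? match[1] : '';
}
```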
diff --git a/tests/build-rss.test.js b/tests/build-rss.test.js
index 7961740fe5c6..08b88de54efa 100644
--- a/tests/build-rss.test.js
+++ b/tests/build-rss.test.js
@@ -1,7 +1,8 @@
 const fs = require('fs');
 const path = require('path');
-const rssFeed = require('../scripts/build-rss');
 const { XMLParser } = require('fast-xml-parser');
+const { rssFeed } = require('../scripts/build-rss.ts');
+
 const parser = new XMLParser({ ignoreAttributes: false });
 const { mockRssData, title, type, desc, missingDateMockData, incompletePostMockData } = require('./fixtures/rssData');
 
@@ -20,7 +21,7 @@ describe('rssFeed', () => {
   afterAll(async () => {
     try {
       const files = await fs.promises.readdir(testOutputDir);
-      await Promise.all(files.map(file => fs.promises.unlink(path.join(testOutputDir, file))));
+      await Promise.all(files.map((file) => fs.promises.unlink(path.join(testOutputDir, file))));
       await fs.promises.rmdir(testOutputDir);
     } catch (err) {
       throw new Error(`Error while deleting temp dir: ${err.message}`);
@@ -32,10 +33,9 @@ describe('rssFeed', () => {
   });
 
   it('should generate RSS feed and write to file', async () => {
-
     jest.doMock('../config/posts.json', () => mockRssData, { virtual: true });
 
-    await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined()
+    await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined();
 
     const filePath = path.join(__dirname, '..', 'public', outputPath);
     expect(fs.existsSync(filePath)).toBe(true);
@@ -45,37 +45,37 @@ describe('rssFeed', () => {
 
   it('should prioritize featured posts over non-featured ones', async () => {
     jest.doMock('../config/posts.json', () => mockRssData, { virtual: true });
-
+
     await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined();
-
+
     const filePath = path.join(__dirname, '..', 'public', outputPath);
     const fileContent = fs.readFileSync(filePath, 'utf8');
-
+
     const parsedContent = parser.parse(fileContent);
-    const itemTitles = parsedContent.rss.channel.item.map(item => item.title);
-
+    const itemTitles = parsedContent.rss.channel.item.map((item) => item.title);
+
     expect(itemTitles[0]).toBe('Test Post 1');
     expect(itemTitles[1]).toBe('Another Featured Post');
-
+
     expect(itemTitles[2]).toBe('Post with Special Characters: & < > "');
     expect(itemTitles[3]).toBe('Post with UTC Date Format');
     expect(itemTitles[4]).toBe('Non-Featured Post 1');
     expect(itemTitles[5]).toBe('Non-Featured Post 3');
   });
-
+
   it('should sort posts by date in descending order', async () => {
     jest.doMock('../config/posts.json', () => mockRssData, { virtual: true });
-
+
     await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined();
-
+
     const filePath = path.join(__dirname, '..', 'public', outputPath);
     const fileContent = fs.readFileSync(filePath, 'utf8');
-
+
     const parsedContent = parser.parse(fileContent);
-    const itemTitles = parsedContent.rss.channel.item.map(item => item.title);
-
+    const itemTitles = parsedContent.rss.channel.item.map((item) => item.title);
+
     expect(itemTitles[0]).toBe('Test Post 1');
-    expect(itemTitles[1]).toBe('Another Featured Post')
+    expect(itemTitles[1]).toBe('Another Featured Post');
     expect(itemTitles[2]).toBe('Post with Special Characters: & < > "');
     expect(itemTitles[3]).toBe('Post with UTC Date Format');
     expect(itemTitles[4]).toBe('Non-Featured Post 1');
@@ -85,7 +85,7 @@ describe('rssFeed', () => {
   it('should set correct enclosure type based on image extension', async () => {
     jest.doMock('../config/posts.json', () => mockRssData, { virtual: true });
 
-    await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined()
+    await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined();
 
     const filePath = path.join(__dirname, '..', 'public', outputPath);
     const fileContent = fs.readFileSync(filePath, 'utf8');
@@ -101,26 +101,28 @@ describe('rssFeed', () => {
   it('should catch and handle errors when write operation fails', async () => {
     jest.doMock('../config/posts.json', () => mockRssData, { virtual: true });
 
-    const invalidOutputPath = "invalid/path";
+    const invalidOutputPath = 'invalid/path';
 
     await expect(rssFeed(type, title, desc, invalidOutputPath)).rejects.toThrow(/ENOENT|EACCES/);
-
   });
 
   it('should throw an error when posts.json is malformed', async () => {
-    jest.doMock('../config/posts.json', () => {
-      return { invalidKey: [] };
-    }, { virtual: true });
+    jest.doMock(
+      '../config/posts.json',
+      () => {
+        return { invalidKey: [] };
+      },
+      { virtual: true }
+    );
 
     await expect(rssFeed(type, title, desc, outputPath)).rejects.toThrow('Failed to generate RSS feed');
-
   });
 
   it('should handle empty posts array', async () => {
     const emptyMockData = { blog: [] };
     jest.doMock('../config/posts.json', () => emptyMockData, { virtual: true });
 
-    await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined()
+    await expect(rssFeed(type, title, desc, outputPath)).resolves.toBeUndefined();
 
     const filePath = path.join(__dirname, '..', 'public', outputPath);
     const fileContent = fs.readFileSync(filePath, 'utf8');
@@ -129,19 +131,16 @@ describe('rssFeed', () => {
   });
 
   it('should throw an error when post is missing required fields', async () => {
-
     jest.doMock('../config/posts.json', () => incompletePostMockData, { virtual: true });
 
     await expect(rssFeed(type, title, desc, outputPath)).rejects.toThrow('Missing required fields');
-
   });
 
   it('should throw an error when a post is missing a date field during sorting', async () => {
-
     jest.doMock('../config/posts.json', () => missingDateMockData, { virtual: true });
 
-    await expect(rssFeed(type, title, desc, outputPath)).rejects.toThrow('Failed to generate RSS feed: Missing date in posts: Post without Date');
-
+    await expect(rssFeed(type, title, desc, outputPath)).rejects.toThrow(
+      'Failed to generate RSS feed: Missing date in posts: Post without Date'
+    );
   });
-
 });
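The `jgexml__json2xml.d.ts` declaration added at the bottom of this diff suggests `build-rss.ts` serializes the feed through `jgexml/json2xml`. A sketch of that final step under that assumption — the feed object here is illustrative, not the script's real structure; only the `getXml` signature comes from the new type declaration:

```ts
import json2xml from 'jgexml/json2xml';
import { writeFileSync } from 'fs';
import { resolve } from 'path';

// Illustrative shape: the real script assembles this from config/posts.json,
// featured-first and date-descending, as the tests assert.
const feed = {
  rss: {
    '@version': '2.0',
    channel: {
      title: 'Example feed title',
      item: [{ title: 'Test Post 1', pubDate: new Date().toUTCString() }]
    }
  }
};

// Signature from the declaration: getXml(feed, attributePrefix, defaultValue, indentLevel).
const xml = json2xml.getXml(feed, '@', '', 4);
writeFileSync(resolve('public', 'rss.xml'), xml);
```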
diff --git a/tests/build-tools.test.js b/tests/build-tools.test.js
index 5e49682cce28..cef7ca6f27bb 100644
--- a/tests/build-tools.test.js
+++ b/tests/build-tools.test.js
@@ -1,96 +1,96 @@
 const axios = require('axios');
 const { resolve } = require('path');
-const { buildTools } = require('../scripts/build-tools');
-const { tagsData, manualTools, mockConvertedData, mockExtractData } = require('../tests/fixtures/buildToolsData');
 const fs = require('fs-extra');
 const os = require('os');
 const path = require('path');
+const { tagsData, manualTools, mockConvertedData, mockExtractData } = require('./fixtures/buildToolsData');
+const { buildTools } = require('../scripts/build-tools.ts');
 
 jest.mock('axios');
 
 jest.mock('../scripts/tools/categorylist', () => ({
-    categoryList: [
-        { name: 'Category1', description: 'Description for Category1' },
-        { name: 'Category2', description: 'Description for Category2' }
-    ]
+  categoryList: [
+    { name: 'Category1', description: 'Description for Category1' },
+    { name: 'Category2', description: 'Description for Category2' }
+  ]
 }));
 
 jest.mock('../scripts/tools/tags-color', () => ({
-    languagesColor: [
-        { name: 'JavaScript', color: 'bg-[#f1e05a]', borderColor: 'border-[#f1e05a]' },
-        { name: 'Python', color: 'bg-[#3572A5]', borderColor: 'border-[#3572A5]' }
-    ],
-    technologiesColor: [
-        { name: 'React', color: 'bg-[#61dafb]', borderColor: 'border-[#61dafb]' },
-        { name: 'Node.js', color: 'bg-[#68a063]', borderColor: 'border-[#68a063]' }
-    ]
+  languagesColor: [
+    { name: 'JavaScript', color: 'bg-[#f1e05a]', borderColor: 'border-[#f1e05a]' },
+    { name: 'Python', color: 'bg-[#3572A5]', borderColor: 'border-[#3572A5]' }
+  ],
+  technologiesColor: [
+    { name: 'React', color: 'bg-[#61dafb]', borderColor: 'border-[#61dafb]' },
+    { name: 'Node.js', color: 'bg-[#68a063]', borderColor: 'border-[#68a063]' }
+  ]
 }));
 
 describe('buildTools', () => {
-    const testDir = path.join(String(os.tmpdir()), 'test_config');
-    const toolsPath = resolve(testDir, 'tools.json');
-    const tagsPath = resolve(testDir, 'all-tags.json');
-    const automatedToolsPath = resolve(testDir, 'tools-automated.json');
-    const manualToolsPath = resolve(testDir, 'tools-manual.json');
-    let consoleErrorMock;
-
-    beforeAll(() => {
-        consoleErrorMock = jest.spyOn(console, 'error').mockImplementation(() => {});
-        fs.ensureDirSync(testDir);
-        fs.outputFileSync(manualToolsPath, JSON.stringify(manualTools));
-        fs.outputFileSync(automatedToolsPath, JSON.stringify({}));
-        fs.outputFileSync(toolsPath, JSON.stringify({}));
-        fs.outputFileSync(tagsPath, JSON.stringify({}));
-    });
-
-    afterAll(() => {
-        fs.removeSync(testDir);
-        consoleErrorMock.mockRestore();
-    });
-
-    beforeEach(() => {
-        jest.clearAllMocks();
-    });
-
-    it('should extract, convert, combine tools, and write to file', async () => {
-        axios.get.mockResolvedValue({ data: mockExtractData });
-
-        await buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath);
-
-        const automatedToolsContent = JSON.parse(fs.readFileSync(automatedToolsPath, 'utf8'));
-        const combinedToolsContent = JSON.parse(fs.readFileSync(toolsPath, 'utf8'));
-        const tagsContent = JSON.parse(fs.readFileSync(tagsPath, 'utf8'));
-
-        expect(Object.keys(automatedToolsContent)).toEqual(Object.keys(mockConvertedData));
-        expect(automatedToolsContent["Category1"].description).toEqual(mockConvertedData["Category1"].description);
-        expect(automatedToolsContent["Category2"].description).toEqual(mockConvertedData["Category2"].description);
-
-        expect(combinedToolsContent).toHaveProperty('Category1');
-        expect(combinedToolsContent).toHaveProperty('Category2');
-        expect(combinedToolsContent["Category1"].description).toEqual(mockConvertedData["Category1"].description);
-        expect(combinedToolsContent["Category2"].description).toEqual(mockConvertedData["Category2"].description);
-
-        expect(tagsContent).toEqual(tagsData);
-    });
-
-    it('should handle getData error', async () => {
-        axios.get.mockRejectedValue(new Error('Extract error'));
-
-        try {
-            await buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath);
-        } catch (err) {
-            expect(err.message).toContain('Extract error');
-        }
-    });
-
-    it('should handle file write errors', async () => {
-        axios.get.mockResolvedValue({ data: mockExtractData });
-
-        const invalidPath = path.resolve(os.tmpdir(), 'invalid_dir', 'tools.json');
-
-        try {
-            await buildTools(invalidPath, manualToolsPath, toolsPath, tagsPath);
-        } catch (err) {
-            expect(err.message).toMatch(/ENOENT|EACCES/);
-        }
-    });
+  const testDir = path.join(String(os.tmpdir()), 'test_config');
+  const toolsPath = resolve(testDir, 'tools.json');
+  const tagsPath = resolve(testDir, 'all-tags.json');
+  const automatedToolsPath = resolve(testDir, 'tools-automated.json');
+  const manualToolsPath = resolve(testDir, 'tools-manual.json');
+  let consoleErrorMock;
+
+  beforeAll(() => {
+    consoleErrorMock = jest.spyOn(console, 'error').mockImplementation(() => {});
+    fs.ensureDirSync(testDir);
+    fs.outputFileSync(manualToolsPath, JSON.stringify(manualTools));
+    fs.outputFileSync(automatedToolsPath, JSON.stringify({}));
+    fs.outputFileSync(toolsPath, JSON.stringify({}));
+    fs.outputFileSync(tagsPath, JSON.stringify({}));
+  });
+
+  afterAll(() => {
+    fs.removeSync(testDir);
+    consoleErrorMock.mockRestore();
+  });
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should extract, convert, combine tools, and write to file', async () => {
+    axios.get.mockResolvedValue({ data: mockExtractData });
+
+    await buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath);
+
+    const automatedToolsContent = JSON.parse(fs.readFileSync(automatedToolsPath, 'utf8'));
+    const combinedToolsContent = JSON.parse(fs.readFileSync(toolsPath, 'utf8'));
+    const tagsContent = JSON.parse(fs.readFileSync(tagsPath, 'utf8'));
+
+    expect(Object.keys(automatedToolsContent)).toEqual(Object.keys(mockConvertedData));
+    expect(automatedToolsContent.Category1.description).toEqual(mockConvertedData.Category1.description);
+    expect(automatedToolsContent.Category2.description).toEqual(mockConvertedData.Category2.description);
+
+    expect(combinedToolsContent).toHaveProperty('Category1');
+    expect(combinedToolsContent).toHaveProperty('Category2');
+    expect(combinedToolsContent.Category1.description).toEqual(mockConvertedData.Category1.description);
+    expect(combinedToolsContent.Category2.description).toEqual(mockConvertedData.Category2.description);
+
+    expect(tagsContent).toEqual(tagsData);
+  });
+
+  it('should handle getData error', async () => {
+    axios.get.mockRejectedValue(new Error('Extract error'));
+
+    try {
+      await buildTools(automatedToolsPath, manualToolsPath, toolsPath, tagsPath);
+    } catch (err) {
+      expect(err.message).toContain('Extract error');
+    }
+  });
+
+  it('should handle file write errors', async () => {
+    axios.get.mockResolvedValue({ data: mockExtractData });
+
+    const invalidPath = path.resolve(os.tmpdir(), 'invalid_dir', 'tools.json');
+
+    try {
+      await buildTools(invalidPath, manualToolsPath, toolsPath, tagsPath);
+    } catch (err) {
+      expect(err.message).toMatch(/ENOENT|EACCES/);
+    }
+  });
 });
diff --git a/tests/casestudies/index.test.js b/tests/casestudies/index.test.js
index 5e5455e3902e..c25a25bdce62 100644
--- a/tests/casestudies/index.test.js
+++ b/tests/casestudies/index.test.js
@@ -1,7 +1,7 @@
 const fs = require('fs').promises;
 const path = require('path');
-const buildCaseStudiesList = require('../../scripts/casestudies/index');
-const { yaml1,yaml2,json1,json2 } = require("../fixtures/caseStudyData");
+const { buildCaseStudiesList } = require('../../scripts/casestudies/index.ts');
+const { yaml1, yaml2, json1, json2 } = require('../fixtures/caseStudyData');
 
 describe('buildCaseStudiesList', () => {
   const tempDir = path.join(__dirname, 'temp-test-dir');
@@ -21,7 +21,7 @@ describe('buildCaseStudiesList', () => {
   beforeEach(async () => {
     // Clear the config directory before each test
    const files = await fs.readdir(tempConfigDir);
-    await Promise.all(files.map(file => fs.unlink(path.join(tempConfigDir, file))));
+    await Promise.all(files.map((file) => fs.unlink(path.join(tempConfigDir, file))));
   });
 
   it('should read YAML files and create a JSON file with case studies', async () => {
@@ -42,7 +42,6 @@ describe('buildCaseStudiesList', () => {
     expect(outputJson[1]).toEqual(json2);
   });
 
-
   it('should throw an error with incorrect parameters', async () => {
     try {
       await buildCaseStudiesList('invalid-dir', tempOutputFile);
@@ -72,9 +71,7 @@ describe('buildCaseStudiesList', () => {
       await buildCaseStudiesList(tempConfigDir, tempOutputFile);
     } catch (error) {
       expect(error).toBeInstanceOf(Error);
-      expect(error.message).toContain("Invalid content format"); // Error for invalid YAML content
+      expect(error.message).toContain('Invalid content format'); // Error for invalid YAML content
     }
   });
-
-
-});
\ No newline at end of file
+});
diff --git a/tests/dashboard/build-dashboard.test.js b/tests/dashboard/build-dashboard.test.js
index b8f2f0a96ab4..08d8590a6647 100644
--- a/tests/dashboard/build-dashboard.test.js
+++ b/tests/dashboard/build-dashboard.test.js
@@ -11,7 +11,7 @@ const {
   writeToFile,
   getDiscussions,
   start
-} = require('../../scripts/dashboard/build-dashboard');
+} = require('../../scripts/dashboard/build-dashboard.ts');
 
 const {
   issues,
@@ -19,7 +19,12 @@ const {
   discussionWithMoreComments,
   fullDiscussionDetails,
   mockRateLimitResponse
-} = require("../fixtures/dashboardData")
+} = require('../fixtures/dashboardData');
+const { logger } = require('../../scripts/utils/logger.ts');
+
+jest.mock('../../scripts/utils/logger', () => ({
+  logger: { error: jest.fn(), warn: jest.fn() }
+}));
 
 jest.mock('@octokit/graphql');
 
@@ -39,8 +44,8 @@ describe('GitHub Discussions Processing', () => {
 
   beforeEach(() => {
     jest.clearAllMocks();
-    consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation(() => { });
-    consoleLogSpy = jest.spyOn(console, 'log').mockImplementation(() => { });
+    consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation(() => {});
+    consoleLogSpy = jest.spyOn(console, 'log').mockImplementation(() => {});
   });
 
   afterEach(() => {
@@ -76,12 +81,8 @@ describe('GitHub Discussions Processing', () => {
 
     await getDiscussions('test-query', 10);
 
-    expect(consoleLogSpy).toHaveBeenCalledWith(
-      '[WARNING] GitHub GraphQL rateLimit',
-      'cost = 1',
-      'limit = 5000',
-      'remaining = 50',
-      expect.any(String)
+    expect(logger.warn).toHaveBeenCalledWith(
+      expect.stringContaining(`GitHub GraphQL rateLimit \ncost = 1\nlimit = 5000\nremaining = 50`)
     );
   });
 
@@ -102,9 +103,7 @@ describe('GitHub Discussions Processing', () => {
       rateLimit: { remaining: 1000 }
     };
 
-    graphql
-      .mockResolvedValueOnce(mockFirstResponse)
-      .mockResolvedValueOnce(mockSecondResponse);
+    graphql.mockResolvedValueOnce(mockFirstResponse).mockResolvedValueOnce(mockSecondResponse);
 
     const result = await getDiscussions('test-query', 10);
     expect(result).toHaveLength(2);
@@ -115,8 +114,7 @@ describe('GitHub Discussions Processing', () => {
     const filePath = resolve(tempDir, 'error-output.json');
 
     await start(filePath);
-
-    expect(consoleLogSpy).toHaveBeenCalledWith('There were some issues parsing data from github.');
+    expect(logger.error).toHaveBeenCalledWith('There were some issues parsing data from github.');
   });
 
   it('should successfully process and write data', async () => {
@@ -145,7 +143,6 @@ describe('GitHub Discussions Processing', () => {
   });
 
   it('should map good first issues', async () => {
-
     const result = await mapGoodFirstIssues(issues);
     expect(result[0]).toMatchObject({
       id: '1',
@@ -188,16 +185,12 @@ describe('GitHub Discussions Processing', () => {
 
     await expect(getHotDiscussions([undefined])).rejects.toThrow();
 
-    expect(consoleErrorSpy).toHaveBeenCalledWith(
-      'there were some issues while parsing this item: undefined'
-    );
+    expect(logger.error).toHaveBeenCalledWith('there were some issues while parsing this item: undefined');
 
     localConsoleErrorSpy.mockRestore();
   });
 
   it('should handle write failures gracefully', async () => {
-
     await expect(writeToFile()).rejects.toThrow();
-  });
-
+  });
 });
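Several suites in this diff (dashboard, markdown, tools) now mock `scripts/utils/logger` instead of spying on `console`. The util itself is not shown in the excerpt; a minimal sketch of what it plausibly looks like, assuming winston as the backing library:

```ts
import winston from 'winston';

// Shared logger exposing the info/warn/error methods the jest mocks replace.
export const logger = winston.createLogger({
  level: 'info',
  format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
  transports: [new winston.transports.Console()]
});
```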
diff --git a/tests/finance/index.test.js b/tests/finance/index.test.js
index eaea82e86c60..0c22468aa330 100644
--- a/tests/finance/index.test.js
+++ b/tests/finance/index.test.js
@@ -1,6 +1,6 @@
 const fs = require('fs');
 const path = require('path');
-const buildFinanceInfoList = require('../../scripts/finance/index');
+const { buildFinanceInfoList } = require('../../scripts/finance/index.ts');
 const { expensesYaml, expensesLinkYaml, expensesjson, expensesLinkjson } = require('../fixtures/financeData');
 
 describe('buildFinanceInfoList', () => {
@@ -102,4 +102,4 @@ describe('buildFinanceInfoList', () => {
     expect(error.message).toMatch(/YAMLException/); // Expecting a YAML parsing error
   }
 });
-});
\ No newline at end of file
+});
diff --git a/tests/fixtures/combineToolsData.js b/tests/fixtures/combineToolsData.js
index 452764ac9192..e395c3677908 100644
--- a/tests/fixtures/combineToolsData.js
+++ b/tests/fixtures/combineToolsData.js
@@ -3,34 +3,34 @@ const expectedDataT1 = {
     {
       name: 'JavaScript',
       color: 'bg-[#57f281]',
-      borderColor: 'border-[#37f069]',
+      borderColor: 'border-[#37f069]'
     },
     {
       name: 'Python',
       color: 'bg-[#3572A5]',
-      borderColor: 'border-[#3572A5]',
-    },
+      borderColor: 'border-[#3572A5]'
+    }
   ],
   technologies: [
     {
       name: 'Node.js',
       color: 'bg-[#61d0f2]',
-      borderColor: 'border-[#40ccf7]',
+      borderColor: 'border-[#40ccf7]'
     },
     {
       name: 'Flask',
       color: 'bg-[#000000]',
-      borderColor: 'border-[#FFFFFF]',
-    },
-  ],
+      borderColor: 'border-[#FFFFFF]'
+    }
+  ]
 };
 
 const manualToolsWithMissingData = [
   {
     title: 'Tool C',
     filters: {},
-    links: { repoUrl: 'https://github.com/asyncapi/tool-c' },
-  },
+    links: { repoUrl: 'https://github.com/asyncapi/tool-c' }
+  }
 ];
 
 const manualToolsToSort = {
@@ -40,31 +40,31 @@ const manualToolsToSort = {
       {
         title: 'Tool Z',
         filters: { language: 'JavaScript' },
-        links: { repoUrl: 'https://github.com/asyncapi/tool-z' },
+        links: { repoUrl: 'https://github.com/asyncapi/tool-z' }
       },
       {
         title: 'Tool A',
         filters: { language: 'Python' },
-        links: { repoUrl: 'https://github.com/asyncapi/tool-a' },
-      },
-    ],
-  },
+        links: { repoUrl: 'https://github.com/asyncapi/tool-a' }
+      }
+    ]
+  }
 };
 
 const toolWithMultipleLanguages = {
   title: 'Multi-Language Tool',
   filters: {
     language: ['JavaScript', 'Python', 'NewLanguage'],
-    technology: ['Node.js'],
+    technology: ['Node.js']
   },
-  links: { repoUrl: 'https://github.com/example/multi-language-tool' },
+  links: { repoUrl: 'https://github.com/example/multi-language-tool' }
 };
 
 const automatedToolsT5 = {
   category1: {
     description: 'Category 1 Description',
-    toolsList: [toolWithMultipleLanguages],
-  },
+    toolsList: [toolWithMultipleLanguages]
+  }
 };
 
 const invalidToolT4 = { title: 'Invalid Tool' };
@@ -72,96 +72,96 @@ const invalidToolT4 = { title: 'Invalid Tool' };
 const automatedToolsT4 = {
   category1: {
     description: 'Category 1 Description',
-    toolsList: [],
-  },
+    toolsList: []
+  }
 };
 
 const manualToolsT4 = {
   category1: {
-    toolsList: [invalidToolT4],
-  },
+    toolsList: [invalidToolT4]
+  }
 };
 
 const toolWithNewTagsT6 = {
   title: 'New Tags Tool',
   filters: {
     language: 'NewLanguage',
-    technology: ['NewTechnology'],
+    technology: ['NewTechnology']
   },
-  links: { repoUrl: 'https://github.com/example/new-tags-tool' },
+  links: { repoUrl: 'https://github.com/example/new-tags-tool' }
 };
 
 const automatedToolsT6 = {
   category1: {
     description: 'Category 1 Description',
-    toolsList: [toolWithNewTagsT6],
-  },
+    toolsList: [toolWithNewTagsT6]
+  }
 };
 
 const toolWithNewLanguageT7 = {
   title: 'New Language Tool',
   filters: {
     language: 'Go',
-    technology: ['Node.js'],
+    technology: ['Node.js']
   },
-  links: { repoUrl: 'https://github.com/example/new-language-tool' },
+  links: { repoUrl: 'https://github.com/example/new-language-tool' }
 };
 
 const automatedToolsT7 = {
   category1: {
     description: 'Category 1 Description',
-    toolsList: [toolWithNewLanguageT7],
-  },
+    toolsList: [toolWithNewLanguageT7]
+  }
 };
 
 const validToolT8 = {
   title: 'Valid Tool',
   filters: {
     language: 'JavaScript',
-    technology: ['Node.js'],
+    technology: ['Node.js']
   },
-  links: { repoUrl: 'https://github.com/asyncapi/valid-tool' },
+  links: { repoUrl: 'https://github.com/asyncapi/valid-tool' }
 };
 
 const automatedToolsT8 = {
   category1: {
     description: 'Category 1 Description',
-    toolsList: [],
-  },
+    toolsList: []
+  }
 };
 
 const manualToolsT8 = {
   category1: {
-    toolsList: [validToolT8],
-  },
+    toolsList: [validToolT8]
+  }
 };
 
 const toolWithoutRepoUrlT9 = {
   title: 'Tool Without Repo',
   filters: {
     language: 'Python',
-    technology: ['Flask'],
+    technology: ['Flask']
   },
-  links: {},
+  links: {}
 };
 
 const automatedToolsT9 = {
   category1: {
     description: 'Category 1 Description',
-    toolsList: [],
-  },
+    toolsList: []
+  }
 };
 
 const manualToolsT9 = {
   category1: {
-    toolsList: [toolWithoutRepoUrlT9],
-  },
+    toolsList: [toolWithoutRepoUrlT9]
+  }
 };
 
 const invalidAutomatedToolsT10 = {
   invalidCategory: {
     description: 'Invalid Category Description',
-    toolsList: [],
-  },
+    toolsList: []
+  }
 };
 
 const manualToolsWithInvalidURLT11 = {
@@ -170,26 +170,40 @@ const manualToolsWithInvalidURLT11 = {
       {
         title: 'Tool with Invalid URL',
         filters: { language: 'JavaScript' },
-        links: { repoUrl: 'invalid-url' },
-      },
-    ],
-  },
+        links: { repoUrl: 'invalid-url' }
+      }
+    ]
+  }
 };
 
 const circularTool = {
   title: 'Circular Tool',
   filters: {
     language: 'JavaScript',
-    technology: ['Node.js'],
+    technology: ['Node.js']
   },
-  links: { repoUrl: 'https://github.com/asyncapi/circular-tool' },
+  links: { repoUrl: 'https://github.com/asyncapi/circular-tool' }
 };
 
 const automatedToolsT12 = {
   category1: {
     description: 'Category 1',
-    toolsList: [circularTool],
+    toolsList: [circularTool]
+  }
+};
+
+const finalToolWithMissingData = {
+  0: {
+    title: 'Tool C',
+    filters: {},
+    links: { repoUrl: 'https://github.com/asyncapi/tool-c' }
   },
+  filters: {
+    language: [],
+    technology: [],
+    categories: [],
+    hasCommercial: false
+  }
 };
 
 module.exports = {
@@ -209,4 +223,5 @@ module.exports = {
   automatedToolsT12,
   invalidAutomatedToolsT10,
   manualToolsWithInvalidURLT11,
+  finalToolWithMissingData
 };
diff --git a/tests/fixtures/dashboardData.js b/tests/fixtures/dashboardData.js
index fa0618c299a9..60db2e11a575 100644
--- a/tests/fixtures/dashboardData.js
+++ b/tests/fixtures/dashboardData.js
@@ -1,68 +1,65 @@
 const mockDiscussion = {
-    id: 'test-id',
-    __typename: 'Issue',
-    title: 'Test',
-    author: { login: 'author' },
-    resourcePath: '/path',
-    repository: { name: 'repo' },
-    assignees: { totalCount: 0 },
-    reactions: { totalCount: 5 },
-    comments: {
-        totalCount: 2,
-        nodes: [{ reactions: { totalCount: 1 } }],
-        pageInfo: { hasNextPage: false }
-    },
-    labels: { nodes: [] },
-    timelineItems: { updatedAt: new Date().toISOString() }
+  id: 'test-id',
+  __typename: 'Issue',
+  title: 'Test',
+  author: { login: 'author' },
+  resourcePath: '/path',
+  repository: { name: 'repo' },
+  assignees: { totalCount: 0 },
+  reactions: { totalCount: 5 },
+  comments: {
+    totalCount: 2,
+    nodes: [{ reactions: { totalCount: 1 } }],
+    pageInfo: { hasNextPage: false }
+  },
+  labels: { nodes: [] },
+  timelineItems: { updatedAt: new Date().toISOString() }
 };
 
 const discussionWithMoreComments = {
-    id: 'paginated-discussion',
-    __typename: 'Issue',
-    title: 'Test with Pagination',
-    author: { login: 'author' },
-    resourcePath: '/path',
-    repository: { name: 'repo' },
-    assignees: { totalCount: 0 },
-    reactions: { totalCount: 5 },
-    comments: {
-        totalCount: 5,
-        nodes: [{ reactions: { totalCount: 1 } }],
-        pageInfo: { hasNextPage: true }
-    },
-    labels: { nodes: [] },
-    timelineItems: { updatedAt: new Date().toISOString() }
+  id: 'paginated-discussion',
+  __typename: 'Issue',
+  title: 'Test with Pagination',
+  author: { login: 'author' },
+  resourcePath: '/path',
+  repository: { name: 'repo' },
+  assignees: { totalCount: 0 },
+  reactions: { totalCount: 5 },
+  comments: {
+    totalCount: 5,
+    nodes: [{ reactions: { totalCount: 1 } }],
+    pageInfo: { hasNextPage: true }
+  },
+  labels: { nodes: [] },
+  timelineItems: { updatedAt: new Date().toISOString() }
 };
 
 const fullDiscussionDetails = {
-    node: {
-        ...discussionWithMoreComments,
-        comments: {
-            totalCount: 5,
-            nodes: [
-                { reactions: { totalCount: 1 } },
-                { reactions: { totalCount: 2 } },
-                { reactions: { totalCount: 3 } }
-            ],
-            pageInfo: { hasNextPage: false }
-        }
+  node: {
+    ...discussionWithMoreComments,
+    comments: {
+      totalCount: 5,
+      nodes: [{ reactions: { totalCount: 1 } }, { reactions: { totalCount: 2 } }, { reactions: { totalCount: 3 } }],
+      pageInfo: { hasNextPage: false }
     }
+  }
 };
 
 const mockRateLimitResponse = {
-    search: {
-        nodes: [mockDiscussion],
-        pageInfo: { hasNextPage: false }
-    },
-    rateLimit: {
-        cost: 1,
-        limit: 5000,
-        remaining: 50,
-        resetAt: new Date().toISOString()
-    }
+  search: {
+    nodes: [mockDiscussion],
+    pageInfo: { hasNextPage: false }
+  },
+  rateLimit: {
+    cost: 1,
+    limit: 5000,
+    remaining: 50,
+    resetAt: new Date().toISOString()
+  }
 };
 
-const issues = [{
+const issues = [
+  {
     id: '1',
     title: 'Test',
     assignees: { totalCount: 1 },
@@ -70,12 +67,13 @@ const issues = [{
     repository: { name: 'repo' },
     author: { login: 'author' },
     labels: { nodes: [{ name: 'area/docs' }] }
-}];
+  }
+];
 
 module.exports = {
-    issues,
-    mockDiscussion,
-    discussionWithMoreComments,
-    fullDiscussionDetails,
-    mockRateLimitResponse
+  issues,
+  mockDiscussion,
+  discussionWithMoreComments,
+  fullDiscussionDetails,
+  mockRateLimitResponse
 };
diff --git a/tests/index.test.js b/tests/index.test.js
index f1d3850a37c5..d78ba273ffd6 100644
--- a/tests/index.test.js
+++ b/tests/index.test.js
@@ -1,10 +1,10 @@
-const rssFeed = require('../scripts/build-rss');
-const { buildPostList } = require('../scripts/build-post-list');
-const buildCaseStudiesList = require('../scripts/casestudies');
-const buildAdoptersList = require('../scripts/adopters');
-const buildFinanceInfoList = require('../scripts/finance');
-const start = require('../scripts/index');
 const fs = require('fs');
+const { rssFeed } = require('../scripts/build-rss.ts');
+const { buildPostList } = require('../scripts/build-post-list.ts');
+const { buildCaseStudiesList } = require('../scripts/casestudies/index.ts');
+const { buildAdoptersList } = require('../scripts/adopters/index.ts');
+const { buildFinanceInfoList } = require('../scripts/finance/index.ts');
+const { start } = require('../scripts/index.ts');
 
 jest.mock('../scripts/build-rss');
 jest.mock('../scripts/build-post-list');
diff --git a/tests/markdown/check-edit-links.test.js b/tests/markdown/check-edit-links.test.js
index 68b8f313e37d..76da44edf024 100644
--- a/tests/markdown/check-edit-links.test.js
+++ b/tests/markdown/check-edit-links.test.js
@@ -7,9 +7,13 @@ const {
   checkUrls,
   determineEditLink,
   main
-} = require('../../scripts/markdown/check-edit-links');
+} = require('../../scripts/markdown/check-edit-links.ts');
 const { determineEditLinkData, processBatchData, testPaths } = require('../fixtures/markdown/check-edit-links-data');
+const { logger } = require('../../scripts/utils/logger.ts');
+jest.mock('../../scripts/utils/logger', () => ({
+  logger: { info: jest.fn() }
+}));
 
 jest.mock('node-fetch-2', () => jest.fn());
 
 describe('URL Checker Tests', () => {
@@ -140,22 +144,18 @@ describe('URL Checker Tests', () => {
 describe('main', () => {
   it('should run successfully when all URLs are valid', async () => {
     fetch.mockImplementation(() => Promise.resolve({ status: 200 }));
-    const consoleSpy = jest.spyOn(console, 'log');
 
     await main();
 
-    expect(consoleSpy).toHaveBeenCalledWith('All URLs are valid.');
-    consoleSpy.mockRestore();
+    expect(logger.info).toHaveBeenCalledWith('All URLs are valid.');
   });
 
   it('should report invalid URLs when found', async () => {
     fetch.mockImplementation(() => Promise.resolve({ status: 404 }));
-    const consoleSpy = jest.spyOn(console, 'log');
 
     await main();
 
-    expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('URLs returning 404:'));
-    consoleSpy.mockRestore();
+    expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('URLs returning 404:'));
   });
 
   it('should handle errors gracefully', async () => {
diff --git a/tests/markdown/check-markdown.test.js b/tests/markdown/check-markdown.test.js
index 85e06b70383f..e4d053c9c066 100644
--- a/tests/markdown/check-markdown.test.js
+++ b/tests/markdown/check-markdown.test.js
@@ -2,149 +2,166 @@ const fs = require('fs').promises;
 const path = require('path');
 const os = require('os');
 const {
-    isValidURL,
-    main,
-    validateBlogs,
-    validateDocs,
-    checkMarkdownFiles
-} = require('../../scripts/markdown/check-markdown');
+  isValidURL,
+  main,
+  validateBlogs,
+  validateDocs,
+  checkMarkdownFiles
+} = require('../../scripts/markdown/check-markdown.ts');
+const { logger } = require('../../scripts/utils/logger.ts');
+
+jest.mock('../../scripts/utils/logger', () => ({
+  logger: { error: jest.fn(), warn: jest.fn() }
+}));
 
 describe('Frontmatter Validator', () => {
-    let tempDir;
-    let mockConsoleError;
-    let mockProcessExit;
-
-    beforeEach(async () => {
-        mockConsoleError = jest.spyOn(console, 'error').mockImplementation();
-        mockProcessExit = jest.spyOn(process, 'exit').mockImplementation();
-        tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'test-config'));
-    });
-
-    afterEach(async () => {
-        mockConsoleError.mockRestore();
-        mockProcessExit.mockRestore();
-        await fs.rm(tempDir, { recursive: true, force: true });
-    });
-
-    it('validates authors array and returns specific errors', async () => {
-        const frontmatter = {
-            title: 'Test Blog',
-            date: '2024-01-01',
-            type: 'blog',
-            tags: ['test'],
-            cover: 'cover.jpg',
-            authors: [{ name: 'John' }, { photo: 'jane.jpg' }, { name: 'Bob', photo: 'bob.jpg', link: 'not-a-url' }]
-        };
-
-        const errors = validateBlogs(frontmatter);
-        expect(errors).toEqual(expect.arrayContaining([
-            'Author at index 0 is missing a photo',
-            'Author at index 1 is missing a name',
-            'Invalid URL for author at index 2: not-a-url'
-        ]));
-    });
-
-    it('validates docs frontmatter for required fields', async () => {
-        const frontmatter = { title: 123, weight: 'not-a-number' };
-        const errors = validateDocs(frontmatter);
-        expect(errors).toEqual(expect.arrayContaining([
-            'Title is missing or not a string',
-            'Weight is missing or not a number'
-        ]));
-    });
-
-    it('checks for errors in markdown files in a directory', async () => {
-        await fs.writeFile(path.join(tempDir, 'invalid.md'), `---\ntitle: Invalid Blog\n---`);
-        const mockConsoleLog = jest.spyOn(console, 'log').mockImplementation();
-
-        await checkMarkdownFiles(tempDir, validateBlogs);
-
-        expect(mockConsoleLog).toHaveBeenCalledWith(expect.stringContaining('Errors in file invalid.md:'));
-        mockConsoleLog.mockRestore();
-    });
-
-    it('returns multiple validation errors for invalid blog frontmatter', async () => {
-        const frontmatter = {
-            title: 123,
-            date: 'invalid-date',
-            type: 'blog',
-            tags: 'not-an-array',
-            cover: ['not-a-string'],
-            authors: { name: 'John Doe' }
-        };
-        const errors = validateBlogs(frontmatter);
-
-        expect(errors).toEqual([
-            'Invalid date format: invalid-date',
-            'Tags should be an array',
-            'Cover must be a string',
-            'Authors should be an array']);
-    });
-
-    it('logs error to console when an error occurs in checkMarkdownFiles', async () => {
-        const invalidFolderPath = path.join(tempDir, 'non-existent-folder');
-
-        await expect(checkMarkdownFiles(invalidFolderPath, validateBlogs))
-            .rejects.toThrow('ENOENT');
-
-        expect(mockConsoleError.mock.calls[0][0]).toContain('Error in directory');
-    });
-
-    it('skips the "reference/specification" folder during validation', async () => {
-        const referenceSpecDir = path.join(tempDir, 'reference', 'specification');
-        await fs.mkdir(referenceSpecDir, { recursive: true });
-        await fs.writeFile(path.join(referenceSpecDir, 'skipped.md'), `---\ntitle: Skipped File\n---`);
-
-        const mockConsoleLog = jest.spyOn(console, 'log').mockImplementation();
-
-        await checkMarkdownFiles(tempDir, validateDocs);
-
-        expect(mockConsoleLog).not.toHaveBeenCalledWith(expect.stringContaining('Errors in file reference/specification/skipped.md'));
-        mockConsoleLog.mockRestore();
-    });
-
-    it('logs and rejects when an exception occurs while processing a file', async () => {
-        const filePath = path.join(tempDir, 'invalid.md');
-        await fs.writeFile(filePath, `---\ntitle: Valid Title\n---`);
-
-        const mockReadFile = jest.spyOn(fs, 'readFile').mockRejectedValue(new Error('Test readFile error'));
-
-        await expect(checkMarkdownFiles(tempDir, validateBlogs)).rejects.toThrow('Test readFile error');
-        expect(mockConsoleError).toHaveBeenCalledWith(
-            expect.stringContaining(`Error in directory`),
-            expect.any(Error)
-        );
-
-        mockReadFile.mockRestore();
-    });
-
-    it('should handle main function errors and exit with status 1', async () => {
-        jest.spyOn(fs, 'readdir').mockRejectedValue(new Error('Test error'));
-
-        await main();
-
-        expect(mockProcessExit).toHaveBeenCalledWith(1);
-
-        expect(mockConsoleError).toHaveBeenCalledWith(
-            'Failed to validate markdown files:',
-            expect.any(Error)
-        );
-    });
-
-    it('should handle successful main function execution', async () => {
-
-        await main();
-
-        expect(mockConsoleError).not.toHaveBeenCalledWith();
-    });
-
-    it('should return true or false for URLs', () => {
-        expect(isValidURL('http://example.com')).toBe(true);
-        expect(isValidURL('https://www.example.com')).toBe(true);
-        expect(isValidURL('ftp://ftp.example.com')).toBe(true);
-        expect(isValidURL('invalid-url')).toBe(false);
-        expect(isValidURL('/path/to/file')).toBe(false);
-        expect(isValidURL('www.example.com')).toBe(false);
-    });
-
+  let tempDir;
+  let mockConsoleError;
+  let mockProcessExit;
+
+  beforeEach(async () => {
+    mockConsoleError = jest.spyOn(console, 'error').mockImplementation();
+    mockProcessExit = jest.spyOn(process, 'exit').mockImplementation();
+    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'test-config'));
+  });
+
+  afterEach(async () => {
+    mockConsoleError.mockRestore();
+    mockProcessExit.mockRestore();
+    await fs.rm(tempDir, { recursive: true, force: true });
+  });
+
+  it('validates authors array and returns specific errors', async () => {
+    const frontmatter = {
+      title: 'Test Blog',
+      date: '2024-01-01',
+      type: 'blog',
+      tags: ['test'],
+      cover: 'cover.jpg',
+      authors: [{ name: 'John' }, { photo: 'jane.jpg' }, { name: 'Bob', photo: 'bob.jpg', link: 'not-a-url' }]
+    };
+
+    const errors = validateBlogs(frontmatter);
+    expect(errors).toEqual(
+      expect.arrayContaining([
+        'Author at index 0 is missing a photo',
+        'Author at index 1 is missing a name',
+        'Invalid URL for author at index 2: not-a-url'
+      ])
+    );
+  });
+
+  it('validates docs frontmatter for required fields', async () => {
+    const frontmatter = { title: 123, weight: 'not-a-number' };
+    const errors = validateDocs(frontmatter);
+    expect(errors).toEqual(
+      expect.arrayContaining(['Title is missing or not a string', 'Weight is missing or not a number'])
+    );
+  });
+
+  it('checks for errors in markdown files in a directory', async () => {
+    await fs.writeFile(path.join(tempDir, 'invalid.md'), `---\ntitle: Invalid Blog\n---`);
+    const mockConsoleLog = jest.spyOn(console, 'log').mockImplementation();
+
+    await checkMarkdownFiles(tempDir, validateBlogs);
+
+    expect(logger.warn).toHaveBeenCalledWith(expect.stringContaining('Errors in file invalid.md:'));
+    mockConsoleLog.mockRestore();
+  });
+
+  it('returns multiple validation errors for invalid blog frontmatter', async () => {
+    const frontmatter = {
+      title: 123,
+      date: 'invalid-date',
+      type: 'blog',
+      tags: 'not-an-array',
+      cover: ['not-a-string'],
+      authors: { name: 'John Doe' }
+    };
+    const errors = validateBlogs(frontmatter);
+
+    expect(errors).toEqual([
+      'Invalid date format: invalid-date',
+      'Tags should be an array',
+      'Cover must be a string',
+      'Authors should be an array'
+    ]);
+  });
+
+  it('logs error to console when an error occurs in checkMarkdownFiles', async () => {
+    const invalidFolderPath = path.join(tempDir, 'non-existent-folder');
+
+    await expect(checkMarkdownFiles(invalidFolderPath, validateBlogs)).rejects.toThrow('ENOENT');
+
+    expect(logger.error.mock.calls[0][0]).toContain('Error in directory');
+  });
+
+  it('skips the "reference/specification" folder during validation', async () => {
+    const referenceSpecDir = path.join(tempDir, 'reference', 'specification');
+    await fs.mkdir(referenceSpecDir, { recursive: true });
+    await fs.writeFile(path.join(referenceSpecDir, 'skipped.md'), `---\ntitle: Skipped File\n---`);
+
+    const mockLoggerWarn = jest.spyOn(logger, 'warn').mockImplementation();
+
+    await checkMarkdownFiles(tempDir, validateDocs);
+
+    expect(mockLoggerWarn).not.toHaveBeenCalledWith(
+      expect.stringContaining('Errors in file reference/specification/skipped.md')
+    );
+    mockLoggerWarn.mockRestore();
+  });
+
+  it('logs and rejects when an exception occurs while processing a file', async () => {
+    const filePath = path.join(tempDir, 'invalid.md');
+    await fs.writeFile(filePath, `---\ntitle: Valid Title\n---`);
+
+    const mockReadFile = jest.spyOn(fs, 'readFile').mockRejectedValue(new Error('Test readFile error'));
+
+    await expect(checkMarkdownFiles(tempDir, validateBlogs)).rejects.toThrow('Test readFile error');
+    expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(`Error in directory`), expect.any(Error));
+
+    mockReadFile.mockRestore();
+  });
+
+  it('should handle main function errors and exit with status 1', async () => {
+    jest.spyOn(fs, 'readdir').mockRejectedValue(new Error('Test error'));
+
+    await main();
+
+    expect(mockProcessExit).toHaveBeenCalledWith(1);
+
+    expect(logger.error).toHaveBeenCalledWith('Failed to validate markdown files:', expect.any(Error));
+  });
+
+  it('should handle successful main function execution', async () => {
+    await main();
+
+    expect(mockConsoleError).not.toHaveBeenCalled();
+  });
+
+  it('should return true or false for URLs', () => {
+    expect(isValidURL('http://example.com')).toBe(true);
+    expect(isValidURL('https://www.example.com')).toBe(true);
+    expect(isValidURL('ftp://ftp.example.com')).toBe(true);
+    expect(isValidURL('invalid-url')).toBe(false);
+    expect(isValidURL('/path/to/file')).toBe(false);
+    expect(isValidURL('www.example.com')).toBe(false);
+  });
+
+  it('should throw an error if frontmatter is missing', () => {
+    const errors = validateBlogs(undefined);
+    expect(errors).toEqual(['Frontmatter is missing']);
+  });
 });
diff --git a/tests/readAndWriteJson.test.js b/tests/readAndWriteJson.test.js
index f201f7e9a280..bcdae70a2fd5 100644
--- a/tests/readAndWriteJson.test.js
+++ b/tests/readAndWriteJson.test.js
@@ -1,17 +1,15 @@
-const { promises: fs } = require('fs');
-const { convertToJson } = require('../scripts/utils');
-const writeJSON = require("../scripts/utils/readAndWriteJson");
-const { yamlString, jsonObject } = require("./fixtures/utilsData");
-
-jest.mock('fs', () => ({
-  promises: {
-    readFile: jest.fn(),
-    writeFile: jest.fn(),
-  },
+const fs = require('fs/promises');
+const { convertToJson } = require('../scripts/utils.ts');
+const { writeJSON } = require('../scripts/utils/readAndWriteJson.ts');
+const { yamlString, jsonObject } = require('./fixtures/utilsData');
+
+jest.mock('fs/promises', () => ({
+  readFile: jest.fn(),
+  writeFile: jest.fn()
 }));
 
 jest.mock('../scripts/utils', () => ({
-  convertToJson: jest.fn(),
+  convertToJson: jest.fn()
 }));
 
 describe('writeJSON', () => {
@@ -74,5 +72,4 @@ describe('writeJSON', () => {
 
     expect(fs.writeFile).toHaveBeenCalledWith(writePath, JSON.stringify(jsonObject));
   });
-
 });
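The mocks above fully determine `writeJSON`'s happy path: read the source file, convert it, and write the stringified result. A sketch consistent with those assertions — error handling is omitted because the excerpt doesn't pin down its messages:

```ts
import { readFile, writeFile } from 'fs/promises';
import { convertToJson } from '../utils';

export async function writeJSON(readPath: string, writePath: string) {
  // Read the raw YAML/JSON source...
  const content = await readFile(readPath, 'utf-8');
  // ...convert it to a plain object...
  const jsonContent = convertToJson(content);
  // ...and persist it exactly as the test asserts: JSON.stringify(jsonObject).
  await writeFile(writePath, JSON.stringify(jsonContent));
  return jsonContent;
}
```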
diff --git a/tests/tools/combine-tools.test.js b/tests/tools/combine-tools.test.js
index 622067a57462..0b14b4362a2f 100644
--- a/tests/tools/combine-tools.test.js
+++ b/tests/tools/combine-tools.test.js
@@ -1,6 +1,6 @@
 const fs = require('fs');
 const path = require('path');
-const { combineTools } = require('../../scripts/tools/combine-tools');
+const { combineTools, getFinalTool } = require('../../scripts/tools/combine-tools.ts');
 const {
   expectedDataT1,
   manualToolsWithMissingData,
@@ -17,16 +17,21 @@ const {
   automatedToolsT12,
   invalidAutomatedToolsT10,
   manualToolsWithInvalidURLT11,
-  circularTool
+  circularTool,
+  finalToolWithMissingData
 } = require('../fixtures/combineToolsData');
+const { logger } = require('../../scripts/utils/logger.ts');
+
+jest.mock('../../scripts/utils/logger', () => ({
+  logger: { error: jest.fn() }
+}));
 
 jest.mock('ajv', () => {
   return jest.fn().mockImplementation(() => ({
-    compile: jest.fn().mockImplementation(() => (data) => data.title !== 'Invalid Tool'),
+    compile: jest.fn().mockImplementation(() => (data) => data.title !== 'Invalid Tool')
   }));
 });
-
 jest.mock('ajv-formats', () => {
   return jest.fn();
 });
@@ -74,7 +79,7 @@ describe('combineTools function', () => {
   });
 
   beforeEach(() => {
-    consoleErrorMock = jest.spyOn(console, 'error').mockImplementation(() => { });
+    consoleErrorMock = jest.spyOn(console, 'error').mockImplementation(() => {});
   });
 
   afterEach(() => {
@@ -90,7 +95,7 @@ describe('combineTools function', () => {
     const tagsData = readJSON(tagsPath);
     expect(tagsData).toHaveProperty('languages');
     expect(tagsData).toHaveProperty('technologies');
-    expect(tagsData).toEqual(expectedDataT1)
+    expect(tagsData).toEqual(expectedDataT1);
   });
 
   it('should handle tools with missing language or technology', async () => {
@@ -104,14 +109,14 @@ describe('combineTools function', () => {
     await combineTools(manualToolsToSort, {}, toolsPath, tagsPath);
 
     const combinedTools = readJSON(toolsPath);
-    const toolTitles = combinedTools.category1.toolsList.map(tool => tool.title);
+    const toolTitles = combinedTools.category1.toolsList.map((tool) => tool.title);
 
     expect(toolTitles).toEqual(['Tool A', 'Tool Z']);
   });
 
   it('should log validation errors to console.error', async () => {
     await combineTools(automatedToolsT4, manualToolsT4, toolsPath, tagsPath);
 
-    const { message, tool, source, note } = console.error.mock.calls[0][0];
+    const { message, tool, source, note } = JSON.parse(logger.error.mock.calls[0][0]);
 
     expect(message).toBe('Tool validation failed');
     expect(tool).toBe('Invalid Tool');
@@ -206,24 +211,33 @@ describe('combineTools function', () => {
   it('should throw an error when fs.writeFileSync fails', async () => {
     const invalidPath = 'this/is/not/valid';
 
-    await expect(combineTools(automatedTools, manualTools, invalidPath, invalidPath))
-      .rejects.toThrow(/ENOENT|EACCES/);
+    await expect(combineTools(automatedTools, manualTools, invalidPath, invalidPath)).rejects.toThrow(/ENOENT|EACCES/);
   });
 
   it('should throw an error when there is an invalid category', async () => {
-    await expect(combineTools(invalidAutomatedToolsT10, manualTools, toolsPath, tagsPath))
-      .rejects.toThrow('Error combining tools');
+    await expect(combineTools(invalidAutomatedToolsT10, manualTools, toolsPath, tagsPath)).rejects.toThrow(
+      'Error combining tools'
+    );
   });
 
   it('should throw an error when URL parsing fails', async () => {
-    await expect(combineTools(automatedTools, manualToolsWithInvalidURLT11, toolsPath, tagsPath))
-      .rejects.toThrow('Invalid URL');
+    await expect(combineTools(automatedTools, manualToolsWithInvalidURLT11, toolsPath, tagsPath)).rejects.toThrow(
+      'Invalid URL'
+    );
   });
 
   it('should handle errors when processing tools with circular references', async () => {
     circularTool.circular = circularTool;
 
-    await expect(combineTools(automatedToolsT12, {}, toolsPath, tagsPath))
-      .rejects.toThrow('Converting circular structure to JSON');
+    await expect(combineTools(automatedToolsT12, {}, toolsPath, tagsPath)).rejects.toThrow(
+      'Converting circular structure to JSON'
+    );
+  });
+
+  it('should handle tools with missing data and filters', async () => {
+    manualToolsWithMissingData.filters = {
+      categories: [],
+      hasCommercial: false
+    };
+    const result = await getFinalTool(manualToolsWithMissingData);
+    expect(result).toEqual(finalToolWithMissingData);
   });
-
 });
diff --git a/tests/tools/extract-tools-github.test.js b/tests/tools/extract-tools-github.test.js
index 3b779db3de1d..b81824204140 100644
--- a/tests/tools/extract-tools-github.test.js
+++ b/tests/tools/extract-tools-github.test.js
@@ -1,22 +1,31 @@
 const axios = require('axios');
-const { getData } = require('../../scripts/tools/extract-tools-github');
+const { getData } = require('../../scripts/tools/extract-tools-github.ts');
+const { logger } = require('../../scripts/utils/logger.ts');
+
+jest.mock('../../scripts/utils/logger', () => ({
+  logger: { info: jest.fn() }
+}));
 
 jest.mock('axios');
 
 describe('getData', () => {
   it('should return data when API call is successful', async () => {
-
     const mockData = {
       data: {
-        name: '.asyncapi-tool',
-        path: 'asyncapi/.asyncapi-tool',
-      },
+        items: [
+          {
+            name: '.asyncapi-tool',
+            path: 'asyncapi/.asyncapi-tool'
+          }
+        ],
+        total_count: 1
+      }
     };
 
-    const apiBaseUrl = 'https://api.github.com/search/code?q=filename:.asyncapi-tool';
+    const apiBaseUrl = 'https://api.github.com/search/code?q=filename:.asyncapi-tool&per_page=50&page=1';
     const headers = {
       accept: 'application/vnd.github.text-match+json',
-      authorization: `token ${process.env.GITHUB_TOKEN}`,
+      authorization: `token ${process.env.GITHUB_TOKEN}`
     };
 
     axios.get.mockResolvedValue(mockData);
@@ -24,11 +33,49 @@ describe('getData', () => {
     const result = await getData();
 
     expect(result).toEqual(mockData.data);
-    expect(axios.get).toHaveBeenCalledWith(
-      apiBaseUrl, { headers }
-    );
+    expect(axios.get).toHaveBeenCalledWith(apiBaseUrl, { headers });
   });
 
+  it('should return data when API call is successful, when items span more than one page', async () => {
+    const mockInitialResponse = {
+      data: {
+        total_count: 100,
+        items: Array.from({ length: 50 }, (_, index) => ({
+          name: `.asyncapi-tool-${index + 1}`,
+          path: `asyncapi/.asyncapi-tool-${index + 1}`
+        }))
+      }
+    };
+
+    const mockNextPageResponse = {
+      data: {
+        items: Array.from({ length: 50 }, (_, index) => ({
+          name: `.asyncapi-tool-${index + 51}`,
+          path: `asyncapi/.asyncapi-tool-${index + 51}`
+        }))
+      }
+    };
+
+    const apiBaseUrl = 'https://api.github.com/search/code?q=filename:.asyncapi-tool&per_page=50&page=';
+    const headers = {
+      accept: 'application/vnd.github.text-match+json',
+      authorization: `token ${process.env.GITHUB_TOKEN}`
+    };
+
+    axios.get.mockResolvedValueOnce(mockInitialResponse).mockResolvedValueOnce(mockNextPageResponse);
+
+    const result = await getData();
+
+    // Check if the logger was called with the correct page numbers
+    expect(logger.info).toHaveBeenCalledWith('Fetching page: 2');
+
+    // Check if axios.get was called with the correct URLs
+    expect(axios.get).toHaveBeenCalledWith(`${apiBaseUrl}1`, { headers });
+    expect(axios.get).toHaveBeenCalledWith(`${apiBaseUrl}2`, { headers });
+
+    // Check if the result contains all the items from both pages
+    expect(result.items).toHaveLength(150);
+  });
+
   it('should throw an error when API call fails', async () => {
     const mockError = new Error('Error');
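The pagination test above fixes the URL template, the headers, and the `Fetching page: N` log line; the loop itself isn't shown in this excerpt. A sketch of a `getData` that satisfies those assertions — the accumulation strategy is an assumption:

```ts
import axios from 'axios';
import { logger } from '../utils/logger';

export async function getData() {
  const apiBaseUrl = 'https://api.github.com/search/code?q=filename:.asyncapi-tool&per_page=50&page=';
  const headers = {
    accept: 'application/vnd.github.text-match+json',
    authorization: `token ${process.env.GITHUB_TOKEN}`
  };

  // The first page also tells us how many results exist in total.
  const result = (await axios.get(`${apiBaseUrl}1`, { headers })).data;
  const totalPages = Math.ceil(result.total_count / 50);

  for (let page = 2; page <= totalPages; page++) {
    logger.info(`Fetching page: ${page}`);
    const { data } = await axios.get(`${apiBaseUrl}${page}`, { headers });
    result.items.push(...data.items);
  }

  return result;
}
```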
require('../../scripts/tools/tools-object.ts'); const { createToolFileContent, createExpectedToolObject, @@ -7,11 +7,17 @@ const { createMalformedYAML } = require('../helper/toolsObjectData'); +const { logger } = require('../../scripts/utils/logger.ts'); + +jest.mock('../../scripts/utils/logger', () => ({ + logger: { warn: jest.fn(), error: jest.fn() } +})); + jest.mock('axios'); jest.mock('../../scripts/tools/categorylist', () => ({ categoryList: [ { name: 'Category1', tag: 'Category1', description: 'Description for Category1' }, - { name: 'Others', tag: 'Others', description: 'Other tools category' }, + { name: 'Others', tag: 'Others', description: 'Other tools category' } ] })); @@ -42,15 +48,29 @@ describe('Tools Object', () => { additionalLinks: { docsUrl: 'https://docs.example.com' } }); - const result = await createToolObject( - toolFile, - expected.links.repoUrl, - 'Repository Description', - true - ); + const result = await createToolObject(toolFile, expected.links.repoUrl, 'Repository Description', true); expect(result).toEqual(expected); }); + it('should create a tool object one parameters', async () => { + // We will pass only the first parameter in the createToolObject + const toolFile = createToolFileContent({ + title: 'Test Tool', + description: 'Test Description', + hasCommercial: true, + additionalLinks: { docsUrl: 'https://docs.example.com' } + }); + + const expected = createExpectedToolObject({ + title: 'Test Tool', + description: 'Test Description', + hasCommercial: true, + additionalLinks: { docsUrl: 'https://docs.example.com' } + }); + expected.filters.isAsyncAPIOwner = ''; + const result = await createToolObject(toolFile); + expect(result).toEqual(expected); + }); it('should convert tools data correctly', async () => { const toolContent = createToolFileContent({ title: 'Valid Tool', categories: ['Category1'] }); @@ -81,8 +101,11 @@ describe('Tools Object', () => { await convertTools(mockData); - expect(console.error).toHaveBeenCalledWith(expect.stringContaining('Script is not failing')); - expect(console.error).toHaveBeenCalledWith(expect.stringContaining('Invalid .asyncapi-tool file')); + expect(logger.warn).toHaveBeenCalledWith( + expect.stringContaining( + 'Script is not failing, it is just dropping errors for further investigation.\nInvalid .asyncapi-tool file.' 
 
   it('should add duplicate tool objects to the same category', async () => {
@@ -120,10 +143,12 @@
   });
 
   it('should throw an error if axios.get fails', async () => {
-    const mockData = createMockData([{
-      name: '.asyncapi-tool-error',
-      repoName: 'error-tool'
-    }]);
+    const mockData = createMockData([
+      {
+        name: '.asyncapi-tool-error',
+        repoName: 'error-tool'
+      }
+    ]);
 
     axios.get.mockRejectedValue(new Error('Network Error'));
 
@@ -138,24 +163,25 @@ describe('Tools Object', () => {
   it('should use repository description when tool description is missing', async () => {
     const toolFile = createToolFileContent({
       title: 'No Description Tool',
-      description: '',
+      description: ''
     });
-
+
     const repositoryDescription = 'Fallback Repository Description';
-    const mockData = createMockData([{
-      name: '.asyncapi-tool-no-description',
-      repoName: 'no-description',
-      description: repositoryDescription
-    }]);
-
+    const mockData = createMockData([
+      {
+        name: '.asyncapi-tool-no-description',
+        repoName: 'no-description',
+        description: repositoryDescription
+      }
+    ]);
+
     axios.get.mockResolvedValue({ data: toolFile });
-
+
     const result = await convertTools(mockData);
-
+
     const toolObject = result.Category1.toolsList[0];
-
+
     expect(toolObject.description).toBe(repositoryDescription);
     expect(toolObject.title).toBe('No Description Tool');
   });
-
 });
diff --git a/tests/utils.test.js b/tests/utils.test.js
index baedda3da6e0..c7715f84e2a5 100644
--- a/tests/utils.test.js
+++ b/tests/utils.test.js
@@ -1,5 +1,5 @@
-const { convertToJson } = require("../scripts/utils");
-const { jsonString, yamlString, jsonObject, invalidString } = require("./fixtures/utilsData")
+const { convertToJson } = require('../scripts/utils.ts');
+const { jsonString, yamlString, jsonObject, invalidString } = require('./fixtures/utilsData');
 
 describe('convertToJson', () => {
   test('should return JSON object if input is valid JSON string', () => {
@@ -19,7 +19,7 @@ describe('convertToJson', () => {
       convertToJson(invalidString);
       expect(convertToJson(invalidString)).toBeUndefined();
     } catch (error) {
-      expect(error.message.includes("Invalid content format")).toBeTruthy();
+      expect(error.message.includes('Invalid content format')).toBeTruthy();
     }
   });
 });
diff --git a/tsconfig.json b/tsconfig.json
index d7c7683d9403..175047894056 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,21 +1,36 @@
 {
   "compilerOptions": {
-    "lib": ["dom", "dom.iterable", "esnext"],
+    "lib": [
+      "dom",
+      "dom.iterable",
+      "esnext"
+    ],
     "allowJs": true,
     "skipLibCheck": true,
     "strict": true,
     "noEmit": true,
     "esModuleInterop": true,
     "module": "esnext",
+    "target": "ES2022",
     "moduleResolution": "bundler",
     "resolveJsonModule": true,
     "isolatedModules": true,
     "jsx": "preserve",
     "incremental": true,
     "paths": {
-      "@/*": ["./*"]
+      "@/*": [
+        "./*"
+      ]
     }
   },
-  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "types/**/*.d.ts", "**/*.json"],
-  "exclude": ["node_modules", "netlify"]
+  "include": [
+    "next-env.d.ts",
+    "**/*.ts",
+    "**/*.tsx",
+    "**/*.json"
+  ],
+  "exclude": [
+    "node_modules",
+    "netlify"
+  ]
 }
diff --git a/types/packages/jgexml__json2xml.d.ts b/types/packages/jgexml__json2xml.d.ts
new file mode 100644
index 000000000000..7c5bd65dce9c
--- /dev/null
+++ b/types/packages/jgexml__json2xml.d.ts
@@ -0,0 +1,9 @@
+declare module 'jgexml/json2xml' {
+  interface Json2Xml {
+    getXml(feed: unknown, attributePrefix: string, defaultValue: string, indentLevel: number): string;
+  }
+
+  const json2xml: Json2Xml;
+
+  export = json2xml;
+}
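
With this ambient declaration in place (and esModuleInterop, enabled in the tsconfig.json hunk above), the otherwise untyped jgexml/json2xml package can be imported with a default import and type-checked against getXml. A minimal consumption sketch, with an invented feed payload:

import json2xml from 'jgexml/json2xml';

// Keys starting with the attribute prefix ('@') are intended to become XML attributes.
const feed = { rss: { '@version': '2.0', channel: { title: 'Example' } } };
const xml: string = json2xml.getXml(feed, '@', '', 4);
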
diff --git a/types/packages/json.d.ts b/types/packages/json.d.ts
new file mode 100644
index 000000000000..2c290013c994
--- /dev/null
+++ b/types/packages/json.d.ts
@@ -0,0 +1,5 @@
+declare module '*.json' {
+  const value: any;
+
+  export default value;
+}
diff --git a/types/packages/markdown-toc.d.ts b/types/packages/markdown-toc.d.ts
new file mode 100644
index 000000000000..ec969c5c5535
--- /dev/null
+++ b/types/packages/markdown-toc.d.ts
@@ -0,0 +1,46 @@
+// src/types/markdown-toc.d.ts
+
+declare module 'markdown-toc' {
+  interface TocItem {
+    content: string;
+    slug: string;
+    lvl: number;
+    i: number;
+  }
+
+  interface MarkdownToken {
+    type: string;
+    level: number;
+
+    // Optional properties for headings
+    hLevel?: number;
+
+    // Optional properties for inline content
+    content?: string;
+    lines?: [number, number][];
+
+    // Optional properties for nested tokens
+    children?: MarkdownToken[];
+
+    // Optional properties for inline tokens
+    lvl?: number;
+    i?: number;
+    seen?: number;
+  }
+  interface TocResult {
+    json: TocItem[];
+    content: string;
+    highest: number;
+    tokens: MarkdownToken[];
+  }
+
+  interface TocOptions {
+    firsth1?: boolean;
+    maxdepth?: number;
+    slugify?: (str: string) => string;
+  }
+
+  function toc(markdown: string, options?: TocOptions): TocResult;
+
+  export = toc;
+}
diff --git a/types/packages/node-fetch-2.d.ts b/types/packages/node-fetch-2.d.ts
new file mode 100644
index 000000000000..b0222239a633
--- /dev/null
+++ b/types/packages/node-fetch-2.d.ts
@@ -0,0 +1,3 @@
+declare module 'node-fetch-2' {
+  export default function fetch(url: string, options?: RequestInit): Promise<Response>;
+}
diff --git a/types/scripts/build-docs.ts b/types/scripts/build-docs.ts
new file mode 100644
index 000000000000..46f035524ae2
--- /dev/null
+++ b/types/scripts/build-docs.ts
@@ -0,0 +1,12 @@
+import type { Details } from './build-posts-list';
+
+export type NavTreeItem = {
+  item: Details;
+  // eslint-disable-next-line no-use-before-define
+  children?: RecursiveChildren | Array<NavTreeItem>;
+};
+export type RecursiveChildren = { [key: string]: NavTreeItem };
+
+export type NavTree = {
+  [key: string]: NavTreeItem | Details;
+};
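
A small literal makes the recursive NavTree shape above easier to read; the import path and all values here are invented for illustration:

import type { NavTree } from './types/scripts/build-docs';

// 'welcome' is a leaf entry; 'concepts' nests further items through RecursiveChildren.
const tree: NavTree = {
  welcome: { item: { title: 'Welcome', isRootSection: true } },
  concepts: {
    item: { title: 'Concepts', isSection: true },
    children: {
      producer: { item: { title: 'Producer' } },
      consumer: { item: { title: 'Consumer' } }
    }
  }
};
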
diff --git a/types/scripts/build-posts-list.ts b/types/scripts/build-posts-list.ts
new file mode 100644
index 000000000000..1da2a8b11e94
--- /dev/null
+++ b/types/scripts/build-posts-list.ts
@@ -0,0 +1,42 @@
+import type { NavTree } from './build-docs';
+
+export interface TableOfContentsItem {
+  content: string;
+  slug: string;
+  lvl: number;
+  i: number;
+}
+export type NavigationPage = {
+  title: string;
+  href?: string;
+};
+export interface Details {
+  title: string;
+  isSection?: boolean;
+  parent?: string;
+  sectionId?: string;
+  isRootSection?: boolean;
+  rootSectionId?: string;
+  sectionWeight?: number;
+  slug?: string;
+  toc?: TableOfContentsItem[];
+  readingTime?: number;
+  excerpt?: string;
+  sectionSlug?: string;
+  sectionTitle?: string;
+  id?: string;
+  isIndex?: boolean;
+  weight?: number;
+  releaseNoteLink?: string;
+  isPrerelease?: boolean;
+  nextPage?: NavigationPage;
+  prevPage?: NavigationPage;
+  [key: string]: any; // For any additional properties
+}
+
+export interface Result {
+  docs: Details[];
+  blog: Details[];
+  about: Details[];
+  docsTree: NavTree;
+}
diff --git a/types/scripts/build-rss.ts b/types/scripts/build-rss.ts
new file mode 100644
index 000000000000..ea2b8a2632ad
--- /dev/null
+++ b/types/scripts/build-rss.ts
@@ -0,0 +1,40 @@
+export type BlogPostTypes = 'docs' | 'blog' | 'about' | 'docsTree';
+export type Enclosure = {
+  '@url': string;
+  '@length': number;
+  '@type': string;
+  enclosure?: Enclosure;
+};
+
+export type RSSItemType = {
+  title: string;
+  description: string;
+  link: string;
+  category: BlogPostTypes;
+  guid: {
+    '@isPermaLink': boolean;
+    '': string;
+  };
+  pubDate: string;
+  enclosure: Enclosure;
+};
+export type RSS = {
+  '@version': string;
+  '@xmlns:atom': string;
+  channel: {
+    title: string;
+    link: string;
+    'atom:link': {
+      '@rel': string;
+      '@href': string;
+      '@type': string;
+    };
+    description: string;
+    language: string;
+    copyright: string;
+    webMaster: string;
+    pubDate: string; // UTC string format
+    generator: string;
+    item: RSSItemType[];
+  };
+};
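
The '@'-prefixed keys above line up with the attribute prefix passed to json2xml's getXml earlier: such fields are serialized as XML attributes rather than child elements. An illustrative, fully typed item (import path and all values invented):

import type { RSSItemType } from './types/scripts/build-rss';

const item: RSSItemType = {
  title: 'Example post',
  description: 'Short description',
  link: 'https://www.asyncapi.com/blog/example',
  category: 'blog',
  guid: { '@isPermaLink': true, '': 'https://www.asyncapi.com/blog/example' },
  pubDate: new Date().toUTCString(),
  enclosure: {
    '@url': 'https://www.asyncapi.com/img/posts/cover.webp',
    '@length': 15026,
    '@type': 'image/webp'
  }
};
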
diff --git a/types/scripts/dashboard.ts b/types/scripts/dashboard.ts
new file mode 100644
index 000000000000..d6e8545d34a6
--- /dev/null
+++ b/types/scripts/dashboard.ts
@@ -0,0 +1,131 @@
+interface RateLimit {
+  limit: number;
+  cost: number;
+  remaining: number;
+  resetAt: string;
+}
+
+interface PageInfo {
+  hasNextPage: boolean;
+  endCursor: string | null;
+}
+
+interface Reactions {
+  totalCount: number;
+}
+
+interface Author {
+  login: string;
+}
+
+interface Repository {
+  name: string;
+}
+
+interface Label {
+  name: string;
+  color: string;
+}
+
+interface Assignees {
+  totalCount: number;
+}
+
+interface TimelineItems {
+  updatedAt: string;
+}
+
+interface Comments {
+  totalCount: number;
+  pageInfo?: PageInfo;
+  nodes: {
+    reactions: Reactions;
+  }[];
+}
+
+interface Reviews {
+  totalCount: number;
+  nodes?: {
+    lastEditedAt: string;
+    comments: {
+      totalCount: number;
+    };
+  }[];
+}
+
+interface BasicIssueOrPR {
+  __typename: string;
+  id: string;
+  title: string;
+  author: Author;
+  assignees: Assignees;
+  resourcePath: string;
+  repository: Repository;
+  labels: {
+    nodes: Label[];
+  };
+}
+
+export interface PullRequestById {
+  node: {
+    reactions: Reactions;
+    reviews: Reviews;
+    timelineItems: TimelineItems;
+    comments: Comments;
+  } & BasicIssueOrPR;
+}
+
+export interface IssueById {
+  node: {
+    timelineItems: TimelineItems;
+    reactions: Reactions;
+    comments: Comments;
+    reviews: Reviews;
+  } & BasicIssueOrPR;
+}
+
+export interface GoodFirstIssues extends BasicIssueOrPR {}
+
+export interface HotDiscussionsIssuesNode extends BasicIssueOrPR {
+  timelineItems: TimelineItems;
+  reactions: Reactions;
+  comments: Comments;
+  reviews: Reviews;
+}
+
+export interface HotDiscussionsPullRequestsNode extends BasicIssueOrPR {
+  timelineItems: TimelineItems;
+  reactions: Reactions;
+  reviews: Reviews;
+  comments: Comments;
+}
+export interface Discussion {
+  search: {
+    pageInfo: PageInfo;
+    nodes: HotDiscussionsPullRequestsNode[];
+  };
+  rateLimit: RateLimit;
+}
+
+export interface ProcessedDiscussion {
+  id: string;
+  title: string;
+  author: string;
+  isPR: boolean;
+  isAssigned: boolean;
+  resourcePath: string;
+  repo: string;
+  labels: Label[];
+  score: number;
+}
+
+export interface MappedIssue {
+  id: string;
+  title: string;
+  isAssigned: boolean;
+  resourcePath: string;
+  repo: string;
+  author: string;
+  area: string;
+  labels: Label[];
+}
diff --git a/types/scripts/tools.ts b/types/scripts/tools.ts
new file mode 100644
index 000000000000..2985070d5ab5
--- /dev/null
+++ b/types/scripts/tools.ts
@@ -0,0 +1,103 @@
+interface Links {
+  websiteUrl?: string; // URL to the website where your project hosts some demo or project landing page.
+  docsUrl?: string; // URL to project documentation.
+  repoUrl?: string; // URL to project codebase.
+}
+
+type Category =
+  | 'api'
+  | 'code-first'
+  | 'code-generator'
+  | 'converter'
+  | 'directory'
+  | 'documentation-generator'
+  | 'editor'
+  | 'ui-component'
+  | 'dsl'
+  | 'framework'
+  | 'github-action'
+  | 'mocking-and-testing'
+  | 'validator'
+  | 'compare-tool'
+  | 'other'
+  | 'cli'
+  | 'bundler'
+  | 'ide-extension';
+
+// Base types
+export type CategoryListItem = {
+  name: string;
+  tag: string;
+  description: string;
+};
+
+export type LanguageColorItem = {
+  name: string;
+  color: string;
+  borderColor: string;
+};
+
+// Filter types
+export interface Filters {
+  language?: Array<string>;
+  technology?: Array<string>;
+  categories: Array<Category>;
+  hasCommercial?: boolean;
+}
+
+// Instead of extending BaseFilters, create a separate interface
+export interface FinalFilters {
+  language: LanguageColorItem[];
+  technology: LanguageColorItem[];
+  categories: Array<Category>;
+  hasCommercial: boolean;
+}
+
+// Tool types
+type BaseAsyncAPITool = {
+  title: string;
+  description?: string;
+  links?: Links;
+};
+
+export interface AsyncAPITool extends BaseAsyncAPITool {
+  filters: Filters;
+}
+
+export interface FinalAsyncAPITool extends BaseAsyncAPITool {
+  description: string; // Make required in final
+  filters: FinalFilters;
+}
+
+// Repository and tools data types
+type Repository = {
+  full_name: string;
+  html_url: string;
+  owner: {
+    login: string;
+  };
+  description: string;
+};
+
+type ToolItem = {
+  name: string;
+  url: string;
+  path: string;
+  html_url: string;
+  repository: Repository;
+};
+
+export type ToolsData = {
+  items: ToolItem[];
+};
+
+// Tools list types
+type ToolsList<T> = {
+  [key: string]: {
+    description: string;
+    toolsList: T[];
+  };
+};
+
+export type ToolsListObject = ToolsList<AsyncAPITool>;
+export type FinalToolsListObject = ToolsList<FinalAsyncAPITool>;
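
ToolsList<T> keys tools by category name, which is exactly the shape the convertTools tests above read back (result.Category1.toolsList[0]). An illustrative value (import path and contents invented):

import type { ToolsListObject } from './types/scripts/tools';

const tools: ToolsListObject = {
  Category1: {
    description: 'Description for Category1',
    toolsList: [
      {
        title: 'Valid Tool',
        description: 'A tool discovered via a .asyncapi-tool file',
        filters: { categories: ['api'] }
      }
    ]
  }
};
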
diff --git a/utils/getStatic.ts b/utils/getStatic.ts
index 974ff691920f..6ae14a76fddb 100644
--- a/utils/getStatic.ts
+++ b/utils/getStatic.ts
@@ -1,6 +1,6 @@
 import { serverSideTranslations } from 'next-i18next/serverSideTranslations';
 
-import i18nextConfig from '../next-i18next.config';
+import i18nextConfig from '../next-i18next.config.cjs';
 
 /**
  * Retrieves the internationalization paths for the supported locales.
diff --git a/utils/getUniqueCategories.ts b/utils/getUniqueCategories.ts
index d239ee12c478..0c75626ce980 100644
--- a/utils/getUniqueCategories.ts
+++ b/utils/getUniqueCategories.ts
@@ -7,13 +7,18 @@ import Expenses from '../config/finance/json-data/Expenses.json';
  * @returns {string[]} An array of unique expense categories.
  */
 export const getUniqueCategories = (): string[] => {
-  const allCategories: string[] = [];
-  for (const month in Expenses) {
-    Expenses[month as keyof typeof Expenses].forEach((entry: { Category: string }) => {
-      if (!allCategories.includes(entry.Category)) {
-        allCategories.push(entry.Category);
-      }
-    });
+  const allCategories: string[] = [];
+
+  // eslint-disable-next-line no-restricted-syntax
+  for (const month in Expenses) {
+    if (Object.prototype.hasOwnProperty.call(Expenses, month)) {
+      Expenses[month as keyof typeof Expenses].forEach((entry: { Category: string }) => {
+        if (!allCategories.includes(entry.Category)) {
+          allCategories.push(entry.Category);
+        }
+      });
+    }
   }
-  return allCategories;
+  }
+
+  return allCategories;
 };
diff --git a/utils/languageDetector.ts b/utils/languageDetector.ts
index e3db95e0f17d..425ffe880a4f 100644
--- a/utils/languageDetector.ts
+++ b/utils/languageDetector.ts
@@ -1,6 +1,6 @@
 import languageDetector from 'next-language-detector';
 
-import i18nextConfig from '../next-i18next.config';
+import i18nextConfig from '../next-i18next.config.cjs';
 
 export default languageDetector({
   supportedLngs: i18nextConfig.i18n.locales,
diff --git a/utils/redirect.ts b/utils/redirect.ts
index d4419d0f6e22..06472bda8563 100644
--- a/utils/redirect.ts
+++ b/utils/redirect.ts
@@ -9,35 +9,36 @@ import languageDetector from './languageDetector';
  * @returns null
  */
 export function useRedirect(to: string | undefined): any {
-    const router = useRouter();
+  const router = useRouter();
 
-    const toUrl = to || router.asPath;
+  const toUrl = to || router.asPath;
 
-    // language detection
-    useEffect(() => {
-        const detectedLng = languageDetector.detect();
+  // language detection
+  useEffect(() => {
+    const detectedLng = languageDetector.detect();
 
-        if (toUrl.startsWith(`/${detectedLng}`) && router.route === '/404') { // prevent endless loop
-            router.replace(`/${detectedLng}${router.route}`);
+    if (toUrl.startsWith(`/${detectedLng}`) && router.route === '/404') {
+      // prevent endless loop
+      router.replace(`/${detectedLng}${router.route}`);
 
-            return;
-        }
+      return;
+    }
 
-        languageDetector.cache!(detectedLng!);
-        router.replace(`/${detectedLng}${toUrl}`);
-    });
+    languageDetector.cache!(detectedLng!);
+    router.replace(`/${detectedLng}${toUrl}`);
+  });
 
-    return null;
-};
+  return null;
+}
 
 /**
  * Component that redirects the user to the current URL with a language prefix.
  * @returns null
  */
 export const Redirect = () => {
-    useRedirect(undefined);
+  useRedirect(undefined);
 
-    return null;
+  return null;
 };
 
 /**
@@ -46,7 +47,7 @@ export const Redirect = () => {
 * @returns A component that redirects the user to the specified URL.
*/ export const getRedirect = (to: string) => () => { - useRedirect(to); + useRedirect(to); - return null; + return null; }; diff --git a/utils/staticHelpers.ts b/utils/staticHelpers.ts index ecc22f628472..8dc1c6196c39 100644 --- a/utils/staticHelpers.ts +++ b/utils/staticHelpers.ts @@ -169,8 +169,8 @@ export const generateCaseStudyContent = (data: any) => { ] }, { - title: "Production-use AsyncAPI document", - content: fullExample, + title: 'Production-use AsyncAPI document', + content: fullExample } ]; };
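
As a closing note on the redirect.ts reformatting above: getRedirect is a factory whose result is typically exported as a page component. A hypothetical consumer (file name and target invented, not part of this diff):

// pages/example.tsx — hypothetical usage of utils/redirect.ts
import { getRedirect } from '../utils/redirect';

// Renders nothing; the useRedirect hook inside replaces the URL with a locale-prefixed one.
export default getRedirect('/docs');
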