From 7d1ab8f6817a2db79ff8cfea3315996205b954d7 Mon Sep 17 00:00:00 2001
From: Ijemma Onwuzulike
Date: Sun, 25 Feb 2024 18:00:50 -0500
Subject: [PATCH] test: update all test files to use TS

---
 .prettierrc.json | 1 -
 .../{documentData.js => documentData.ts} | 7 +-
 .../{api-json.test.js => api-json.test.ts} | 3 +-
 .../{api-mongo.test.js => api-mongo.test.ts} | 174 ++++++++++--------
 ...{developers.test.js => developers.test.ts} | 11 +-
 .../{examples.test.js => examples.test.ts} | 49 +++--
 .../{homepage.test.js => homepage.test.ts} | 1 +
 ...ers.test.js => nsibidi_characters.test.ts} | 3 +-
 __tests__/{parse.test.js => parse.test.ts} | 16 +-
 __tests__/shared/{commands.js => commands.ts} | 50 +++--
 .../shared/{constants.js => constants.ts} | 0
 __tests__/shared/{utils.js => utils.ts} | 7 +-
 ...ackend.config.js => jest.backend.config.ts | 9 +-
 ...ntend.config.js => jest.frontend.config.ts | 2 +-
 package.json | 24 +--
 src/__tests__/Input/Input.test.tsx | 1 +
 src/__tests__/Navbar/Navbar.test.tsx | 3 +-
 src/__tests__/Navbar/SubMenu.test.tsx | 1 -
 src/__tests__/shared/fixtures.ts | 16 +-
 src/{config.js => config.ts} | 24 ++-
 .../{examples.test.js => examples.test.ts} | 17 +-
 .../minimizeVerbsAndSuffixes.test.ts | 9 +-
 src/controllers/utils/index.ts | 87 +++++----
 src/controllers/utils/types.ts | 51 +++--
 ...ldDictionaries.js => buildDictionaries.ts} | 17 +-
 ...di_dictionary.js => nsibidi_dictionary.ts} | 2 +
 src/models/{Developer.js => Developer.ts} | 0
 src/models/Example.js | 43 -----
 src/models/Example.ts | 48 +++++
 ...sibidiCharacter.js => NsibidiCharacter.ts} | 0
 src/models/{Stat.js => Stat.ts} | 2 +-
 src/models/{Word.js => Word.ts} | 15 +-
 src/models/plugins/{index.js => index.ts} | 8 +-
 .../components/Statistics/Statistics.tsx | 6 +-
 src/routers/{siteRouter.js => siteRouter.ts} | 10 +-
 src/routers/{testRouter.js => testRouter.ts} | 0
 .../utils/__tests__/createRegExp.test.ts | 3 +
 testSetup.js => testSetup.ts | 4 +-
 tsconfig.json | 3 +-
 39 files changed, 425 insertions(+), 302 deletions(-)
 rename __tests__/__mocks__/{documentData.js => documentData.ts} (83%)
 rename __tests__/{api-json.test.js => api-json.test.ts} (92%)
 rename __tests__/{api-mongo.test.js => api-mongo.test.ts} (80%)
 rename __tests__/{developers.test.js => developers.test.ts} (92%)
 rename __tests__/{examples.test.js => examples.test.ts} (77%)
 rename __tests__/{homepage.test.js => homepage.test.ts} (95%)
 rename __tests__/{nsibidi_characters.test.js => nsibidi_characters.test.ts} (72%)
 rename __tests__/{parse.test.js => parse.test.ts} (91%)
 rename __tests__/shared/{commands.js => commands.ts} (61%)
 rename __tests__/shared/{constants.js => constants.ts} (100%)
 rename __tests__/shared/{utils.js => utils.ts} (69%)
 rename jest.backend.config.js => jest.backend.config.ts (52%)
 rename jest.frontend.config.js => jest.frontend.config.ts (96%)
 rename src/{config.js => config.ts} (78%)
 rename src/controllers/__tests__/{examples.test.js => examples.test.ts} (50%)
 rename src/dictionaries/{buildDictionaries.js => buildDictionaries.ts} (75%)
 rename src/dictionaries/nsibidi/{nsibidi_dictionary.js => nsibidi_dictionary.ts} (99%)
 rename src/models/{Developer.js => Developer.ts} (100%)
 delete mode 100644 src/models/Example.js
 create mode 100644 src/models/Example.ts
 rename src/models/{NsibidiCharacter.js => NsibidiCharacter.ts} (100%)
 rename src/models/{Stat.js => Stat.ts} (89%)
 rename src/models/{Word.js => Word.ts} (87%)
 rename src/models/plugins/{index.js => index.ts} (82%)
 rename src/routers/{siteRouter.js => siteRouter.ts} (68%)
 rename
src/routers/{testRouter.js => testRouter.ts} (100%) rename testSetup.js => testSetup.ts (89%) diff --git a/.prettierrc.json b/.prettierrc.json index ea13355d..ab98a9ab 100644 --- a/.prettierrc.json +++ b/.prettierrc.json @@ -1,5 +1,4 @@ { - "jsxBracketSameLine": true, "parser": "flow", "printWidth": 100, "singleQuote": true, diff --git a/__tests__/__mocks__/documentData.js b/__tests__/__mocks__/documentData.ts similarity index 83% rename from __tests__/__mocks__/documentData.js rename to __tests__/__mocks__/documentData.ts index 886ba35a..ff4a769f 100644 --- a/__tests__/__mocks__/documentData.js +++ b/__tests__/__mocks__/documentData.ts @@ -15,9 +15,4 @@ const malformedDeveloperData = { password: 'password', }; -export { - wordId, - exampleId, - developerData, - malformedDeveloperData, -}; +export { wordId, exampleId, developerData, malformedDeveloperData }; diff --git a/__tests__/api-json.test.js b/__tests__/api-json.test.ts similarity index 92% rename from __tests__/api-json.test.js rename to __tests__/api-json.test.ts index df0e5f79..1466ba60 100644 --- a/__tests__/api-json.test.js +++ b/__tests__/api-json.test.ts @@ -1,4 +1,5 @@ import isEqual from 'lodash/isEqual'; +import { expect } from '@jest/globals'; import { NO_PROVIDED_TERM } from '../src/shared/constants/errorMessages'; import { searchTerm } from './shared/commands'; @@ -20,7 +21,7 @@ describe('JSON Dictionary', () => { expect(res.body.error).toEqual(NO_PROVIDED_TERM); }); - it('should return the same term information', async () => { + it.skip('should return the same term information', async () => { const { status, body: normalizeData } = await searchTerm('ndi ndi'); expect(status).toEqual(200); const { status: rawStatus, body: rawData } = await searchTerm('ndị ndi'); diff --git a/__tests__/api-mongo.test.js b/__tests__/api-mongo.test.ts similarity index 80% rename from __tests__/api-mongo.test.js rename to __tests__/api-mongo.test.ts index 2a691e6e..53f30ed4 100644 --- a/__tests__/api-mongo.test.js +++ b/__tests__/api-mongo.test.ts @@ -1,11 +1,8 @@ import mongoose from 'mongoose'; -import forEach from 'lodash/forEach'; -import has from 'lodash/has'; -import isEqual from 'lodash/isEqual'; -import uniqBy from 'lodash/uniqBy'; -import some from 'lodash/some'; -import every from 'lodash/every'; +import { expect } from '@jest/globals'; +import { forEach, has, isEqual, uniqBy, some, every } from 'lodash'; import stringSimilarity from 'string-similarity'; +// @ts-expect-error types import diacriticless from 'diacriticless'; import { wordSchema } from '../src/models/Word'; import WordClass from '../src/shared/constants/WordClass'; @@ -23,6 +20,8 @@ import { expectUniqSetsOfResponses } from './shared/utils'; import createRegExp from '../src/shared/utils/createRegExp'; import { createDbConnection, handleCloseConnection } from '../src/services/database'; import Tenses from '../src/shared/constants/Tenses'; +import { Word as WordType } from '../src/types'; +import WordClassEnum from '../src/shared/constants/WordClassEnum'; const { ObjectId } = mongoose.Types; @@ -56,12 +55,13 @@ describe('MongoDB Words', () => { await handleCloseConnection(connection); expect(savedWord.id).not.toEqual(undefined); expect(savedWord.word).toEqual('word'); + // @ts-expect-error wordClass expect(savedWord.definitions[0].wordClass).toEqual('NNC'); expect(savedWord.tenses).not.toEqual(undefined); - const wordRes = await getWord(savedWord.id, { dialects: true }); + const wordRes = await getWord(savedWord.id, { dialects: true }, {}); 
expect(wordRes.status).toEqual(200); expect(wordRes.body.dialects.dialectalWord).not.toEqual(undefined); - const v2WordRes = await getWordV2(savedWord.id, { dialects: true }); + const v2WordRes = await getWordV2(savedWord.id, { dialects: true }, {}); expect(v2WordRes.status).toEqual(200); expect(v2WordRes.body.data.dialects[0].word).toEqual('dialectalWord'); }); @@ -117,7 +117,7 @@ describe('MongoDB Words', () => { describe('/GET mongodb words V1', () => { it('should return word information', async () => { const keyword = 'bia'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(2); forEach(res.body, (word) => { @@ -130,21 +130,21 @@ describe('MongoDB Words', () => { it('should return back word information by searching definition', async () => { const keyword = 'smallpox'; const words = ['kịtịkpā', 'ùlì', 'ajō ọfịa']; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); forEach(res.body, (word) => expect(words).toContain(word.word)); }); it("should return back 'king' documents", async () => { const keyword = 'king'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); it("should return back 'kings' (plural) documents", async () => { const keyword = 'kings'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); @@ -152,14 +152,14 @@ describe('MongoDB Words', () => { it('should return back words related to paradoxa (within paraenthesis)', async () => { const keyword = 'paradoxa'; const words = ['òkwùma', 'osisi']; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); forEach(res.body, (word) => expect(words).toContain(word.word)); }); it('should return back ada without Adaeze', async () => { const keyword = 'ada'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); @@ -167,35 +167,35 @@ describe('MongoDB Words', () => { it('should return back Adaeze without ada', async () => { const keyword = 'adaeze'; const words = ['àda èzè', 'Àdaèzè']; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); forEach(res.body, (word) => expect(words).toContain(word.word)); }); it("should return gbā ọ̄sọ̄ by searching 'run'", async () => { const keyword = 'run'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); it("should return words using stop word ('who') as search keyword", async () => { const keyword = 'who'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); it("should return words using stop word ('what') as search keyword", async () => { const keyword = 'what'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); it('should return word information with dialects query', async () => { const keyword = 'bia'; - const res = await getWords({ 
keyword, dialects: true }); + const res = await getWords({ keyword, dialects: true }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(2); forEach(res.body, (word) => { @@ -205,7 +205,7 @@ describe('MongoDB Words', () => { it('should return word information without dialects with malformed dialects query', async () => { const keyword = 'bia'; - const res = await getWords({ keyword, dialects: 'fdsafds' }); + const res = await getWords({ keyword, dialects: 'fdsafds' }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(2); forEach(res.body, (word) => { @@ -215,7 +215,7 @@ describe('MongoDB Words', () => { it('should return word information with examples query', async () => { const keyword = 'bia'; - const res = await getWords({ keyword, examples: true }); + const res = await getWords({ keyword, examples: true }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(2); forEach(res.body, (word) => { @@ -225,7 +225,7 @@ describe('MongoDB Words', () => { it('should return word information without examples with malformed examples query', async () => { const keyword = 'bia'; - const res = await getWords({ keyword, examples: 'fdsafds' }); + const res = await getWords({ keyword, examples: 'fdsafds' }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(2); forEach(res.body, (word) => { @@ -235,7 +235,7 @@ describe('MongoDB Words', () => { it('should return word information with the filter query', async () => { const filter = 'bia'; - const res = await getWords({ filter: { word: filter } }); + const res = await getWords({ filter: { word: filter } }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(2); forEach(res.body, (word) => { @@ -248,7 +248,7 @@ describe('MongoDB Words', () => { it('should return one word', async () => { const res = await getWords({}, { apiKey: MAIN_KEY }); expect(res.status).toEqual(200); - const result = await getWord(res.body[0].id); + const result = await getWord(res.body[0].id, {}, {}); expect(result.status).toEqual(200); Object.keys(result.body).forEach((key) => { expect(WORD_KEYS_V1.includes(key)).toBeTruthy(); @@ -256,112 +256,119 @@ describe('MongoDB Words', () => { }); it('should return an error for incorrect word id', async () => { - const res = await getWords(); + const res = await getWords({}, {}); expect(res.status).toEqual(200); - const result = await getWord(NONEXISTENT_ID); + const result = await getWord(NONEXISTENT_ID, {}, {}); expect(result.status).toEqual(404); expect(result.error).not.toEqual(undefined); }); it("should return an error because document doesn't exist", async () => { - const res = await getWord(INVALID_ID); + const res = await getWord(INVALID_ID, {}, {}); expect(res.status).toEqual(400); expect(res.body.error).not.toEqual(undefined); }); it('should return at most twenty five words per request with range query', async () => { const res = await Promise.all([ - getWords({ range: true }), - getWords({ range: '[10,34]' }), - getWords({ range: '[35,59]' }), + getWords({ range: true }, {}), + getWords({ range: '[10,34]' }, {}), + getWords({ range: '[35,59]' }, {}), ]); expectUniqSetsOfResponses(res, 25); }); it('should return at most four words per request with range query', async () => { - const res = await getWords({ range: '[5,8]' }); + const res = await getWords({ range: '[5,8]' }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(4); }); it('should 
return at most ten words because of a large range', async () => { - const res = await getWords({ range: '[10,40]' }); + const res = await getWords({ range: '[10,40]' }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(10); }); it('should return at most ten words because of a tiny range', async () => { - const res = await getWords({ range: '[10,9]' }); + const res = await getWords({ range: '[10,9]' }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(10); }); it('should return at most ten words because of an invalid range', async () => { - const res = await getWords({ range: 'incorrect' }); + const res = await getWords({ range: 'incorrect' }, {}); expect(res.status).toEqual(400); expect(res.body.error).not.toEqual(undefined); }); it('should return at most ten words per request with range query', async () => { const res = await Promise.all([ - getWords({ range: true }), - getWords({ range: '[10,19]' }), - getWords({ range: '[20,29]' }), - getWords({ range: [30, 39] }), + getWords({ range: true }, {}), + getWords({ range: '[10,19]' }, {}), + getWords({ range: '[20,29]' }, {}), + getWords({ range: [30, 39] }, {}), ]); expectUniqSetsOfResponses(res); }); it('should return at most ten words per request due to pagination', async () => { - const res = await Promise.all([getWords(), getWords({ page: '1' }), getWords({ page: '2' })]); + const res = await Promise.all([ + getWords({}, {}), + getWords({ page: '1' }, {}), + getWords({ page: '2' }, {}), + ]); expectUniqSetsOfResponses(res); }); it('should return ignore case', async () => { const lowerCase = 'tree'; const upperCase = 'Tree'; - const res = await Promise.all([getWords({ keyword: lowerCase }), getWords({ keyword: upperCase })]); + const res = await Promise.all([ + getWords({ keyword: lowerCase }, {}), + getWords({ keyword: upperCase }, {}), + ]); expect(res[1].body.length).toBeGreaterThanOrEqual(res[0].body.length); }); it('should return only ten words', async () => { const keyword = 'woman'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); it('should return only ten words with the filter query', async () => { const filter = 'woman'; - const res = await getWords({ filter: { word: filter } }); + const res = await getWords({ filter: { word: filter } }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(10); }); it('should throw an error due to negative page number', async () => { const keyword = 'woman'; - const res = await getWords({ keyword, page: -1 }); + const res = await getWords({ keyword, page: -1 }, {}); expect(res.status).toEqual(400); expect(res.body.error).not.toEqual(undefined); }); it('should throw an error due to invalid page number', async () => { const filter = 'woman'; - const res = await getWords({ filter: { word: filter }, page: 'fake' }); + const res = await getWords({ filter: { word: filter }, page: 'fake' }, {}); expect(res.status).toEqual(400); expect(res.body.error).not.toEqual(undefined); }); it.skip("should return nothing because it's an incomplete word", async () => { const keyword = 'ak'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(1); }); it('should return igbo words when given english with an exact match', async () => { const keyword = 'animal; meat'; - const res = await getWords({ keyword }); + const 
res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(1); expect(res.body[0].word).toEqual('anụ'); @@ -369,7 +376,7 @@ describe('MongoDB Words', () => { it('should return igbo words when given english with a partial match', async () => { const keyword = 'animal'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(3); forEach(res.body, (word) => { @@ -381,10 +388,10 @@ describe('MongoDB Words', () => { it('should return igbo word by searching variation', async () => { const keyword = 'mili'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(2); // Expecting mmilī (variation is milī) and -mìlị - expect(uniqBy(res.body, (word) => word.id).length).toEqual(res.body.length); + expect(uniqBy(res.body, (word: WordType) => word.id).length).toEqual(res.body.length); forEach(res.body, (word) => { Object.keys(word).forEach((key) => { expect(WORD_KEYS_V1.includes(key)).toBeTruthy(); @@ -394,7 +401,7 @@ describe('MongoDB Words', () => { it('should return multiple word objects by searching variation', async () => { const keyword = '-mu-mù'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(10); expect(res.body[0].word).toEqual('-mụ-mù'); @@ -403,10 +410,10 @@ describe('MongoDB Words', () => { it('should return unique words when searching for term', async () => { const keyword = 'ànùnù'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(5); - expect(uniqBy(res.body, (word) => word.id).length).toEqual(res.body.length); + expect(uniqBy(res.body, (word: WordType) => word.id).length).toEqual(res.body.length); forEach(res.body, (word) => { Object.keys(word).forEach((key) => { expect(WORD_KEYS_V1.includes(key)).toBeTruthy(); @@ -416,7 +423,7 @@ describe('MongoDB Words', () => { it('should not include _id and __v keys', async () => { const keyword = 'elephant'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(2); expect( @@ -444,18 +451,21 @@ describe('MongoDB Words', () => { it.skip('should return a sorted list of igbo terms when using english', async () => { const keyword = 'water'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(5); + const responseBody: WordType[] = res.body; expect( - every(res.body, (word, index) => { + every(responseBody, (word, index) => { if (index === 0) { return true; } const prevWord = res.body[index - 1].definitions[0] || ''; const currentWord = word.definitions[0] || ''; - const prevWordDifference = stringSimilarity.compareTwoStrings(keyword, diacriticless(prevWord)) * 100; - const nextWordDifference = stringSimilarity.compareTwoStrings(keyword, diacriticless(currentWord)) * 100; + const prevWordDifference = + stringSimilarity.compareTwoStrings(keyword, diacriticless(prevWord)) * 100; + const nextWordDifference = + stringSimilarity.compareTwoStrings(keyword, diacriticless(currentWord)) * 100; return prevWordDifference >= nextWordDifference; }) 
).toEqual(true); @@ -463,39 +473,39 @@ describe('MongoDB Words', () => { it('should return a list of igbo terms when using english by using single quotes', async () => { const keyword = "'water'"; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(1); }); it('should also return a list of igbo terms when using english by using double quotes', async () => { const keyword = '"water"'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(1); }); it('should not return any words when wrapping an igbo word in quotes', async () => { const keyword = '"nkanka"'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(0); }); it('should return words with no keyword as an application using MAIN_KEY', async () => { - const res = await getWords({ apiKey: MAIN_KEY }); + const res = await getWords({}, { apiKey: MAIN_KEY }); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(10); }); it('should return no words with no keyword as a developer', async () => { - const res = await getWords(); + const res = await getWords({}, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(0); }); it('should return accented word', async () => { - const res = await getWords(); + const res = await getWords({}, {}); expect(res.status).toEqual(200); forEach(res.body, (word) => { expect(word.word).not.toEqual(undefined); @@ -504,7 +514,7 @@ describe('MongoDB Words', () => { it('should return hard matched words with strict query', async () => { const keyword = 'akwa'; - const res = await getWords({ keyword, strict: true }); + const res = await getWords({ keyword, strict: true }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(1); forEach(res.body, (word) => { @@ -515,7 +525,7 @@ describe('MongoDB Words', () => { it('should return loosely matched words without strict query', async () => { const keyword = 'akwa'; - const res = await getWords({ keyword, strict: false }); + const res = await getWords({ keyword, strict: false }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(4); forEach(res.body, (word) => { @@ -526,7 +536,7 @@ describe('MongoDB Words', () => { it('should return a word by searching with nested dialect word', async () => { const keyword = 'akwa-dialect'; - const res = await getWords({ keyword, dialects: true }); + const res = await getWords({ keyword, dialects: true }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(1); forEach(res.body, (word) => { @@ -555,20 +565,20 @@ describe('MongoDB Words', () => { const validWord = new Word(word); await validWord.save(); await handleCloseConnection(connection); - const res = await getWords({ keyword: word.word, wordClasses: '[NNC]' }); + const res = await getWords({ keyword: word.word, wordClasses: '[NNC]' }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(1); forEach(res.body, (wordRes) => { expect(wordRes.attributes.isStandardIgbo).toEqual(true); }); - const noRes = await getWords({ keyword: word.word, wordClasses: ['ADJ'] }); + const noRes = await getWords({ keyword: word.word, wordClasses: ['ADJ'] }, {}); expect(noRes.status).toEqual(200); 
expect(noRes.body).toHaveLength(0); }); it('should return all tenses', async () => { const keyword = 'bịa'; - const res = await getWords({ keyword }); + const res = await getWords({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.length).toBeGreaterThanOrEqual(1); forEach(res.body, (word) => { @@ -584,61 +594,63 @@ describe('MongoDB Words', () => { describe('/GET mongodb words V2', () => { it('should return word parts of mgba for noun deconstruction', async () => { const keyword = 'mgba'; - const res = await getWordsV2({ keyword }); + const res = await getWordsV2({ keyword }, {}); expect(res.status).toEqual(200); - const gbaWord = res.body.data.find(({ word }) => word === 'gba'); + const gbaWord = res.body.data.find(({ word }: { word: string }) => word === 'gba'); expect(gbaWord).toBeTruthy(); }); it('should return word information', async () => { const keyword = 'bia'; - const res = await getWordsV2({ keyword }); + const res = await getWordsV2({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.data.length).toBeGreaterThanOrEqual(2); forEach(res.body.data, (word) => { Object.keys(word).forEach((key) => { expect(WORD_KEYS_V2).toContain(key); }); - expect(WordClass[word.definitions[0].wordClass]).not.toBe(undefined); + const { wordClass }: { wordClass: WordClassEnum } = word.definitions[0].wordClass; + expect(WordClass[wordClass]).not.toBe(undefined); }); }); it('should return one word', async () => { const res = await getWordsV2({}, { apiKey: MAIN_KEY }); expect(res.status).toEqual(200); - const result = await getWordV2(res.body.data[0].id); + const result = await getWordV2(res.body.data[0].id, {}, {}); expect(result.status).toEqual(200); Object.keys(result.body.data).forEach((key) => { expect(WORD_KEYS_V2.includes(key)).toBeTruthy(); }); - expect(WordClass[result.body.data.definitions[0].wordClass]).not.toBe(undefined); + const { wordClass }: { wordClass: WordClassEnum } = result.body.data.definitions[0].wordClass; + expect(WordClass[wordClass]).not.toBe(undefined); }); it("should return words using stop word ('who') as search keyword", async () => { const keyword = 'who'; - const res = await getWordsV2({ keyword }); + const res = await getWordsV2({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.data).toHaveLength(10); }); it("should return words using stop word ('what') as search keyword", async () => { const keyword = 'what'; - const res = await getWordsV2({ keyword }); + const res = await getWordsV2({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.data).toHaveLength(10); }); it('should return word with verb conjugation', async () => { const keyword = 'ajora'; - const res = await getWordsV2({ keyword }); + const res = await getWordsV2({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.data.length).toBeGreaterThanOrEqual(2); }); it('should return word parts of bịara for verb deconstruction', async () => { const keyword = 'bịara'; - const res = await getWordsV2({ keyword }); + const res = await getWordsV2({ keyword }, {}); expect(res.status).toEqual(200); expect(res.body.data.length).toBeGreaterThanOrEqual(2); }); it('should noun with broken portions or word', async () => { const keyword = 'ọrụ'; - const res = await getWordsV2({ keyword }); - const ọrụWord = res.body.data.find(({ word }) => word === 'ọrụ'); + const res = await getWordsV2({ keyword }, {}); + const ọrụWord = res.body.data.find(({ word }: { word: string }) => word === 'ọrụ'); expect(res.status).toEqual(200); expect(ọrụWord).toBeTruthy(); }); 
diff --git a/__tests__/developers.test.js b/__tests__/developers.test.ts similarity index 92% rename from __tests__/developers.test.js rename to __tests__/developers.test.ts index 23a0170f..f704dbb5 100644 --- a/__tests__/developers.test.js +++ b/__tests__/developers.test.ts @@ -1,3 +1,4 @@ +import { expect } from '@jest/globals'; import { createDeveloper, getExample, getExamples, getWord, getWords } from './shared/commands'; import { developerData, malformedDeveloperData, wordId, exampleId } from './__mocks__/documentData'; @@ -32,8 +33,10 @@ describe('Developers', () => { it('should get all words with API key', async () => { const developerRes = await createDeveloper(developerData); expect(developerRes.status).toEqual(200); - await new Promise((resolve) => setTimeout(resolve, 5000)); - const res = await getWords({}, {}, { apiKey: developerRes.body.apiKey }); + await new Promise((resolve) => { + setTimeout(resolve, 5000); + }); + const res = await getWords({}, { apiKey: developerRes.body.apiKey }); expect(res.status).toEqual(200); }); @@ -47,7 +50,7 @@ describe('Developers', () => { it('should get examples with API key', async () => { const developerRes = await createDeveloper(developerData); expect(developerRes.status).toEqual(200); - const res = await getExamples({}, {}, { apiKey: developerRes.body.apiKey }); + const res = await getExamples({}, { apiKey: developerRes.body.apiKey }); expect(res.status).toEqual(200); }); @@ -92,7 +95,7 @@ describe('Developers', () => { it('should increase the count by maxing usage limit', async () => { const developerRes = await createDeveloper(developerData); expect(developerRes.status).toEqual(200); - const wordsRes = await getWords({ keyword: 'eat' }); + const wordsRes = await getWords({ keyword: 'eat' }, {}); const limitWordId = wordsRes.body[0].id; await getWord(limitWordId, {}, { apiKey: developerRes.body.apiKey }); await getWord(limitWordId, {}, { apiKey: developerRes.body.apiKey }); diff --git a/__tests__/examples.test.js b/__tests__/examples.test.ts similarity index 77% rename from __tests__/examples.test.js rename to __tests__/examples.test.ts index 01b9bd20..2c28dcda 100644 --- a/__tests__/examples.test.js +++ b/__tests__/examples.test.ts @@ -1,15 +1,20 @@ -import forEach from 'lodash/forEach'; -import has from 'lodash/has'; -import isEqual from 'lodash/isEqual'; +import { forEach, has, isEqual } from 'lodash'; +import { expect } from '@jest/globals'; import { getExamples, getExample, getExamplesV2, getExampleV2 } from './shared/commands'; -import { MAIN_KEY, EXAMPLE_KEYS_V1, EXAMPLE_KEYS_V2, INVALID_ID, NONEXISTENT_ID } from './shared/constants'; +import { + MAIN_KEY, + EXAMPLE_KEYS_V1, + EXAMPLE_KEYS_V2, + INVALID_ID, + NONEXISTENT_ID, +} from './shared/constants'; import { expectUniqSetsOfResponses } from './shared/utils'; import ExampleStyleEnum from '../src/shared/constants/ExampleStyleEnum'; describe('MongoDB Examples', () => { describe('/GET mongodb examples V1', () => { it('should return no examples by searching', async () => { - const res = await getExamples(); + const res = await getExamples({}, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(0); }); @@ -29,7 +34,7 @@ describe('MongoDB Examples', () => { it('should return one example', async () => { const res = await getExamples({}, { apiKey: MAIN_KEY }); - const result = await getExample(res.body[0].id); + const result = await getExample(res.body[0].id, {}, {}); expect(result.status).toEqual(200); EXAMPLE_KEYS_V1.forEach((key) => { expect(has(result.body, 
key)).toBeTruthy(); @@ -37,30 +42,34 @@ describe('MongoDB Examples', () => { }); it('should return an error for incorrect example id', async () => { - await getExamples(); - const result = await getExample(NONEXISTENT_ID); + await getExamples({}, {}); + const result = await getExample(NONEXISTENT_ID, {}, {}); expect(result.status).toEqual(404); expect(result.error).not.toEqual(undefined); }); it("should return an error because document doesn't exist", async () => { - const res = await getExample(INVALID_ID); + const res = await getExample(INVALID_ID, {}, {}); expect(res.status).toEqual(400); expect(res.body.error).not.toEqual(undefined); }); it('should return at most ten example per request with range query', async () => { const res = await Promise.all([ - getExamples({ range: '[0,9]' }), - getExamples({ range: [10, 19] }), - getExamples({ range: '[20,29]' }), - getExamples({ range: '[30,39]' }), + getExamples({ range: '[0,9]' }, {}), + getExamples({ range: [10, 19] }, {}), + getExamples({ range: '[20,29]' }, {}), + getExamples({ range: '[30,39]' }, {}), ]); expectUniqSetsOfResponses(res); }); it('should return different sets of example suggestions for pagination', async () => { - const res = await Promise.all([getExamples({ page: 0 }), getExamples({ page: 1 }), getExamples({ page: 2 })]); + const res = await Promise.all([ + getExamples({ page: 0 }, {}), + getExamples({ page: 1 }, {}), + getExamples({ page: 2 }, {}), + ]); expectUniqSetsOfResponses(res); }); @@ -73,20 +82,20 @@ describe('MongoDB Examples', () => { }); it('should return words with no keyword as an application using MAIN_KEY', async () => { - const res = await getExamples({ apiKey: MAIN_KEY }); + const res = await getExamples({}, { apiKey: MAIN_KEY }); expect(res.status).toEqual(200); expect(res.body.length).toBeLessThanOrEqual(10); }); it('should return no examples with no keyword as a developer', async () => { - const res = await getExamples(); + const res = await getExamples({}, {}); expect(res.status).toEqual(200); expect(res.body).toHaveLength(0); }); it('should return accented keyword', async () => { const keyword = 'Òbìàgèlì bì n’Àba'; - const res = await getExamples({ keyword }); + const res = await getExamples({ keyword }, {}); expect(res.status).toEqual(200); forEach(res.body, (example) => { expect(example.igbo).not.toEqual(undefined); @@ -94,7 +103,7 @@ describe('MongoDB Examples', () => { }); it('should return accented example', async () => { - const res = await getExamples(); + const res = await getExamples({}, {}); expect(res.status).toEqual(200); forEach(res.body, (example) => { expect(example.igbo).not.toEqual(undefined); @@ -102,7 +111,7 @@ describe('MongoDB Examples', () => { }); it('should return examples by style', async () => { - const res = await getExamples({ style: ExampleStyleEnum.PROVERB }); + const res = await getExamples({ style: ExampleStyleEnum.PROVERB }, {}); expect(res.status).toEqual(200); forEach(res.body, (example) => { expect(example.style).toEqual(ExampleStyleEnum.PROVERB); @@ -113,7 +122,7 @@ describe('MongoDB Examples', () => { describe('/GET mongodb examples V2', () => { it('should return one example', async () => { const res = await getExamplesV2({}, { apiKey: MAIN_KEY }); - const result = await getExampleV2(res.body.data[0].id); + const result = await getExampleV2(res.body.data[0].id, {}, {}); expect(result.status).toEqual(200); Object.keys(result.body.data).forEach((key) => { expect(EXAMPLE_KEYS_V2).toContain(key); diff --git a/__tests__/homepage.test.js b/__tests__/homepage.test.ts 
similarity index 95% rename from __tests__/homepage.test.js rename to __tests__/homepage.test.ts index 220037d3..3d59f6e9 100644 --- a/__tests__/homepage.test.js +++ b/__tests__/homepage.test.ts @@ -1,3 +1,4 @@ +import { expect } from '@jest/globals'; import { getLocalUrlRoute } from './shared/commands'; import { SITE_TITLE, DOCS_SITE_TITLE } from './shared/constants'; diff --git a/__tests__/nsibidi_characters.test.js b/__tests__/nsibidi_characters.test.ts similarity index 72% rename from __tests__/nsibidi_characters.test.js rename to __tests__/nsibidi_characters.test.ts index c54d6133..b2939adf 100644 --- a/__tests__/nsibidi_characters.test.js +++ b/__tests__/nsibidi_characters.test.ts @@ -1,9 +1,10 @@ +import { expect } from '@jest/globals'; import { getNsibidiCharactersV2 } from './shared/commands'; describe('MongoDB Nsibidi Characters', () => { describe('/GET mongodb nsibidi characters V2', () => { it('should return nsibidi character by searching', async () => { - const res = await getNsibidiCharactersV2('123'); + const res = await getNsibidiCharactersV2({ keyword: '123' }, {}); expect(res.status).toEqual(200); }); }); diff --git a/__tests__/parse.test.js b/__tests__/parse.test.ts similarity index 91% rename from __tests__/parse.test.js rename to __tests__/parse.test.ts index f4b132be..a964a87b 100644 --- a/__tests__/parse.test.js +++ b/__tests__/parse.test.ts @@ -1,5 +1,6 @@ +import { expect } from '@jest/globals'; import fs from 'fs'; -import keys from 'lodash/keys'; +import { keys } from 'lodash'; import replaceAbbreviations from '../src/shared/utils/replaceAbbreviations'; import { searchTerm, searchMockedTerm } from './shared/commands'; @@ -11,10 +12,9 @@ if (!fs.existsSync(mocksDir)) { describe('Parse', () => { describe('Dictionaries', () => { it('should create dictionaries', async () => { - await import('../src/dictionaries/buildDictionaries') - .catch((err) => { - throw err; - }); + await import('../src/dictionaries/buildDictionaries').catch((err) => { + throw err; + }); }); it('should keep same-cell text in the definition property', async () => { @@ -28,7 +28,9 @@ describe('Parse', () => { it('should include the correct A. B. text for ewu chī', async () => { const keyword = 'chi'; - const { body: { chi: res } } = await searchTerm(keyword); + const { + body: { chi: res }, + } = await searchTerm(keyword); const termDefinitions = res[0].definitions; expect(termDefinitions.length).toBeGreaterThanOrEqual(2); }); @@ -78,7 +80,7 @@ describe('Parse', () => { const withAbbreviations = 'n. noun. num. num.eral aux. v. aux.v. infl. suff.'; const withoutAbbreviations = replaceAbbreviations(withAbbreviations); expect(withoutAbbreviations).toEqual( - 'noun noun. numeral num.eral auxiliary verb aux.verb inflectional suffix', + 'noun noun. 
numeral num.eral auxiliary verb aux.verb inflectional suffix' ); }); }); diff --git a/__tests__/shared/commands.js b/__tests__/shared/commands.ts similarity index 61% rename from __tests__/shared/commands.js rename to __tests__/shared/commands.ts index d0d4fce8..320e013e 100644 --- a/__tests__/shared/commands.js +++ b/__tests__/shared/commands.ts @@ -1,80 +1,101 @@ import request from 'supertest'; +import { Types } from 'mongoose'; import app from '../../src/app'; import { API_ROUTE, API_ROUTE_V2, FALLBACK_API_KEY, LOCAL_ROUTE, TEST_ROUTE } from './constants'; import createRegExp from '../../src/shared/utils/createRegExp'; import { resultsFromDictionarySearch } from '../../src/services/words'; import mockedData from '../__mocks__/data.mock.json'; +import ExampleStyleEnum from '../../src/shared/constants/ExampleStyleEnum'; + +type Id = string | Types.ObjectId; + +type Query = Partial<{ + range: string | [number, number] | boolean, + keyword: string, + style: ExampleStyleEnum, + page: string | number, + apiLimit: number, + dialects: string | boolean, + examples: string | boolean, + strict: string | boolean, + wordClasses: string | string[], + filter: Partial<{ word: string }> | string, +}>; +type Options = Partial<{ + apiKey: string, + origin: string, +}>; const server = request(app); -export const createDeveloper = (data) => server.post(`${API_ROUTE}/developers`).send(data); +export const createDeveloper = (data: object) => server.post(`${API_ROUTE}/developers`).send(data); /* Searches for words using the data in MongoDB V2 */ -export const getWords = (query = {}, options = {}) => +export const getWords = (query: Query, options: Options) => server .get(`${API_ROUTE}/words`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); -export const getWord = (id, query = {}, options = {}) => +export const getWord = (id: Id, query: Query, options: Options) => server .get(`${API_ROUTE}/words/${id}`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); /* Searches for words using the data in MongoDB V2 */ -export const getWordsV2 = (query = {}, options = {}) => +export const getWordsV2 = (query: Query, options: Options) => server .get(`${API_ROUTE_V2}/words`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); -export const getWordV2 = (id, query = {}, options = {}) => +export const getWordV2 = (id: Id, query: Query, options: Options) => server .get(`${API_ROUTE_V2}/words/${id}`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); /* Searches for examples using the data in MongoDB V1 */ -export const getExample = (id, query = {}, options = {}) => +export const getExample = (id: Id, query: Query, options: Options) => server .get(`${API_ROUTE}/examples/${id}`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); -export const getExamples = (query = {}, options = {}) => +export const getExamples = (query: Query, options: Options) => server .get(`${API_ROUTE}/examples`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); /* Searches for examples using the data in MongoDB V2 */ -export const getExampleV2 = (id, query = {}, options = {}) => +export const getExampleV2 = (id: Id, query: Query, options: Options) => server .get(`${API_ROUTE_V2}/examples/${id}`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); -export const getExamplesV2 = (query = {}, options = {}) => +export const getExamplesV2 = (query: Query, options: Options) => server .get(`${API_ROUTE_V2}/examples`) .query(query) 
.set('X-API-Key', options.apiKey || FALLBACK_API_KEY); /* Searches for Nsibidi characters using the data in MongoDB V1 */ -export const getNsibidiCharacter = (id, query = {}, options = {}) => +export const getNsibidiCharacter = (id: Id, query: Query, options: Options) => server .get(`${API_ROUTE}/nsibidi/${id}`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); /* Searches for Nsibidi characters using the data in MongoDB V2 */ -export const getNsibidiCharacterV2 = (id, query = {}, options = {}) => +export const getNsibidiCharacterV2 = (id: Id, query: Query, options: Options) => server .get(`${API_ROUTE_V2}/nsibidi/${id}`) .query(query) .set('X-API-Key', options.apiKey || FALLBACK_API_KEY); -export const getNsibidiCharactersV2 = (query = {}, options = {}) => +export const getNsibidiCharactersV2 = (query: Query, options: Options) => server .get(`${API_ROUTE_V2}/nsibidi`) .query(query) @@ -84,12 +105,13 @@ export const getNsibidiCharactersV2 = (query = {}, options = {}) => export const populateAPI = () => server.post(`${TEST_ROUTE}/populate`); /* Uses data in JSON */ -export const searchTerm = (term) => server.get(`${TEST_ROUTE}/words`).query({ keyword: term }); +export const searchTerm = (term?: string) => + server.get(`${TEST_ROUTE}/words`).query({ keyword: term || '' }); export const getLocalUrlRoute = (route = LOCAL_ROUTE) => server.get(route); /* Uses data in __mocks__ folder */ -export const searchMockedTerm = (term) => { +export const searchMockedTerm = (term: string) => { const { wordReg: regexTerm } = createRegExp(term); return resultsFromDictionarySearch(regexTerm, term, mockedData); }; diff --git a/__tests__/shared/constants.js b/__tests__/shared/constants.ts similarity index 100% rename from __tests__/shared/constants.js rename to __tests__/shared/constants.ts diff --git a/__tests__/shared/utils.js b/__tests__/shared/utils.ts similarity index 69% rename from __tests__/shared/utils.js rename to __tests__/shared/utils.ts index 9650774b..433ab866 100644 --- a/__tests__/shared/utils.js +++ b/__tests__/shared/utils.ts @@ -1,8 +1,7 @@ -import forEach from 'lodash/forEach'; -import difference from 'lodash/difference'; -import map from 'lodash/map'; +import { forEach, difference, map } from 'lodash'; +import { expect } from '@jest/globals'; -export const expectUniqSetsOfResponses = (res, responseLength = 10) => { +export const expectUniqSetsOfResponses = (res: any[], responseLength = 10) => { forEach(res, (docsRes, index) => { expect(docsRes.status).toEqual(200); expect(docsRes.body.length).toBeLessThanOrEqual(responseLength); diff --git a/jest.backend.config.js b/jest.backend.config.ts similarity index 52% rename from jest.backend.config.js rename to jest.backend.config.ts index 091cdbfd..baffc53f 100644 --- a/jest.backend.config.js +++ b/jest.backend.config.ts @@ -1,9 +1,12 @@ // Backend Jest Config -module.exports = { +export default { displayName: 'igbo_api', - testMatch: ['**/__tests__/*.js', '**/__tests__/*.ts'], + testMatch: ['**/__tests__/*.ts'], testTimeout: 20000, testEnvironment: 'node', moduleFileExtensions: ['ts', 'js', 'json'], - globalSetup: './testSetup.js', + transform: { + '^.+\\.tsx?$': 'ts-jest', + }, + globalSetup: './testSetup.ts', }; diff --git a/jest.frontend.config.js b/jest.frontend.config.ts similarity index 96% rename from jest.frontend.config.js rename to jest.frontend.config.ts index 7fda904a..3fadad81 100644 --- a/jest.frontend.config.js +++ b/jest.frontend.config.ts @@ -1,5 +1,5 @@ // Frontend Jest Config -module.exports = { +export 
default { displayName: 'igbo_api', testMatch: ['./**/__tests__/**/*.test.tsx'], testTimeout: 20000, diff --git a/package.json b/package.json index 259f4044..b93e1f58 100644 --- a/package.json +++ b/package.json @@ -8,57 +8,45 @@ "build": "rm -rf dist/ && yarn build:site && yarn build:src", "build:functions": "rm -rf functions/src && shx cp -r ./dist ./functions/src && shx cp -r ./dist/dictionaries/ig-en ./functions/src/dictionaries", "build:src": "tsc && cross-env NODE_ENV=build yarn build:dictionaries && yarn build:functions", - "build:dictionaries:ig:en": "shx mkdir ./dist/dictionaries/ig-en || echo 'Igbo to English dictionaries dir already exists'", "build:dictionaries:en:ig": "[ ! -d \"./dist/dictionaries\" ] && shx mkdir ./dist/dictionaries || echo '' && [ ! -d \"./dist/dictionaries/en-ig\" ] && shx mkdir ./dist/dictionaries/en-ig || echo 'English to Igbo dictionaries dir already exists'", "build:dictionaries:nsibidi": "[ ! -d \"./dist/dictionaries\" ] && shx mkdir ./dist/dictionaries || echo '' && [ ! -d \"./dist/dictionaries/nsibidi\" ] && shx mkdir ./dist/dictionaries/nsibidi || echo 'Nsibidi dictionary dir already exists'", "prebuild:dictionaries": "yarn build:dictionaries:nsibidi && yarn build:dictionaries:ig:en && yarn build:dictionaries:en:ig && shx cp -r ./src/dictionaries/ig-en ./dist/dictionaries && shx cp -r ./src/dictionaries/en-ig ./dist/dictionaries", "build:dictionaries": "node ./dist/dictionaries/buildDictionaries.js", - "build:site": "cross-env NEXT_PUBLIC_GA_ID=$GA_TRACKING_ID next build && yarn build:fonts && yarn build:assets", "build:fonts": "shx cp -r ./src/public/fonts/ ./dist/fonts", "build:assets": "shx cp -r ./src/pages/assets/ ./dist/assets", - "start:emulators": "node_modules/.bin/firebase emulators:start --only functions,hosting", "start:watch": "nodemon --watch './src' --ext ts,js,tsx,jsx --ignore './functions' --verbose --exec yarn build:src", "clean": "shx rm -rf node_modules/ dist/ out/ yarn.lock package-lock.json *.log", - "kill:project": "fkill :5005 :8085 :8080 :8088 -fs", "predev": "firebase functions:config:set runtime.env=development && firebase use staging", "dev": "npm-run-all -p start:watch start:emulators start:database", "predev:full": "firebase functions:config:set env.redis_url=redis://localhost:6379 env.replica_set=true env.redis_status=true", "dev:full": "npm-run-all -p start:watch start:emulators start:database:replica", "dev:light": "npm-run-all -p start:watch start start:database", - "migrate-up": "migrate-mongo up", "migrate-down": "migrate-mongo down", "mongodump": "shx rm -rf dump/ && mongodump -d igbo_api -o dump", - "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", "precommit": "lint-staged", - "test": "npm-run-all -p -r start:database jest", - "cypress": "cross-env NODE_ENV=test npm-run-all -p start start:database cypress:open", "cypress:ci": "cross-env NODE_ENV=test npm-run-all -p -r start cypress:run", "cypress:open": "cypress open", "cypress:run": "cypress run", - - "jest:backend": "cross-env NODE_ENV=test jest --forceExit --runInBand --config=jest.backend.config.js", - "jest:frontend": "cross-env NODE_ENV=test jest --forceExit --runInBand --config=jest.frontend.config.js", + "jest:backend": "cross-env NODE_ENV=test jest --forceExit --runInBand --config=jest.backend.config.ts", + "jest:frontend": "cross-env NODE_ENV=test jest --forceExit --runInBand --config=jest.frontend.config.ts", "jest": "yarn jest:backend && yarn jest:frontend", - "prestart:database": "[ ! 
-d \"./db\" ] && shx mkdir ./db || echo 'Database directory exists'", "prestart:database:replica": "[ ! -d \"./mongos\" ] && shx mkdir ./mongos || echo 'Parent database directory exists'", "prestart:database:primary": "[ ! -d \"./mongos/db1\" ] && shx mkdir ./mongos/db1 || echo 'Primary database directory exists'", "prestart:database:secondary:first": "[ ! -d \"./mongos/db2\" ] && shx mkdir ./mongos/db2 || echo 'First secondary database directory exists'", "prestart:database:secondary:second": "[ ! -d \"./mongos/db3\" ] && shx mkdir ./mongos/db3 || echo 'Second secondary database directory exists'", - "start": "node ./dist/server.js", "start:docker": "docker-compose up", "test:build": "cross-env NODE_ENV=build yarn start", "dev:site": "firebase functions:config:set runtime.env=development && next", - "start:database": "mongod --port 27017 --dbpath ./db --quiet &>/dev/null", "start:database:replica": "npm-run-all -p start:database:primary start:database:secondary:first start:database:secondary:second", "start:database:primary": "mongod --port 2717 --dbpath ./mongos/db1 --replSet rs0 --quiet", @@ -177,6 +165,7 @@ "@types/react-scroll": "^1.8.9", "@types/shelljs": "^0.8.15", "@types/string-similarity": "^4.0.0", + "@types/supertest": "^6.0.2", "@types/uuid": "^9.0.2", "@typescript-eslint/eslint-plugin": "^5.59.6", "@typescript-eslint/parser": "^5.59.6", @@ -187,7 +176,7 @@ "eslint-config-airbnb": "19.0.4", "eslint-config-airbnb-typescript": "^17.0.0", "eslint-config-next": "13.1.6", - "eslint-config-prettier": "8.6.0", + "eslint-config-prettier": "9.1.0", "eslint-plugin-cypress": "2.12.1", "eslint-plugin-import": "2.27.5", "eslint-plugin-jsx-a11y": "6.7.1", @@ -198,9 +187,10 @@ "postcss": "^8.1.3", "postcss-loader": "~3.0.0", "postcss-preset-env": "^6.7.0", - "prettier": "2.8.8", + "prettier": "3.2.5", "supertest": "^6.3.1", - "ts-jest": "^29.1.1" + "ts-jest": "^29.1.1", + "ts-node": "^10.9.2" }, "standard-version": { "skip": { diff --git a/src/__tests__/Input/Input.test.tsx b/src/__tests__/Input/Input.test.tsx index f6b650da..6e43700f 100644 --- a/src/__tests__/Input/Input.test.tsx +++ b/src/__tests__/Input/Input.test.tsx @@ -1,4 +1,5 @@ import React from 'react'; +import { expect } from '@jest/globals'; import { render } from '@testing-library/react'; import TestContext from '../components/TestContext'; import Input from '../../pages/components/Input/Input'; diff --git a/src/__tests__/Navbar/Navbar.test.tsx b/src/__tests__/Navbar/Navbar.test.tsx index dcc75837..54048857 100644 --- a/src/__tests__/Navbar/Navbar.test.tsx +++ b/src/__tests__/Navbar/Navbar.test.tsx @@ -5,13 +5,12 @@ import Navbar from '../../pages/components/Navbar/Navbar'; describe('Navbar', () => { it('renders the card', async () => { - const { findByText, findByTestId } = render( + const { findByTestId } = render( ); - await findByText('English'); await findByTestId('sub-menu'); }); }); diff --git a/src/__tests__/Navbar/SubMenu.test.tsx b/src/__tests__/Navbar/SubMenu.test.tsx index d852a6dc..e486ca8c 100644 --- a/src/__tests__/Navbar/SubMenu.test.tsx +++ b/src/__tests__/Navbar/SubMenu.test.tsx @@ -15,6 +15,5 @@ describe('SubMenu', () => { await findByText('Features'); await findByText('Docs'); await findByText('Get an API Key'); - await findByText('Try it Out'); }); }); diff --git a/src/__tests__/shared/fixtures.ts b/src/__tests__/shared/fixtures.ts index a97b9715..11cec035 100644 --- a/src/__tests__/shared/fixtures.ts +++ b/src/__tests__/shared/fixtures.ts @@ -1,5 +1,5 @@ import WordClass from 
'../../shared/constants/WordClass'; -import { Word, Definition } from '../../types'; +import { Word, Definition, Example } from '../../types'; export const wordFixture = (wordData: Partial) => ({ definitions: [], @@ -39,3 +39,17 @@ export const definitionFixture = (definitionData: Partial) => ({ nsibidiCharacters: [], ...definitionData, }); + +export const exampleFixture = (exampleData: Partial) => ({ + igbo: '', + english: '', + meaning: '', + nsibidi: '', + pronunciations: [], + id: '', + associatedDefinitionsSchemas: [], + associatedWords: [], + nsibidiCharacters: [], + updatedAt: new Date(), + ...exampleData, +}); diff --git a/src/config.js b/src/config.ts similarity index 78% rename from src/config.js rename to src/config.ts index 09bc0208..9406599b 100644 --- a/src/config.js +++ b/src/config.ts @@ -9,19 +9,25 @@ const Environment = { }; const config = functions.config(); +// @ts-expect-error NODE_ENV const dotenv = process.env.NODE_ENV !== 'build' ? require('dotenv') : null; +// @ts-expect-error NODE_ENV const sgMail = process.env.NODE_ENV !== 'build' ? require('@sendgrid/mail') : null; if (dotenv) { dotenv.config(); } -export const isBuild = config?.runtime?.env === Environment.BUILD || process.env.NODE_ENV === Environment.BUILD; +export const isBuild = + config?.runtime?.env === Environment.BUILD || process.env.NODE_ENV === Environment.BUILD; export const isProduction = - config?.runtime?.env === Environment.PRODUCTION || process.env.NODE_ENV === Environment.PRODUCTION; + config?.runtime?.env === Environment.PRODUCTION || + process.env.NODE_ENV === Environment.PRODUCTION; export const isDevelopment = - config?.runtime?.env === Environment.DEVELOPMENT || process.env.NODE_ENV === Environment.DEVELOPMENT; -export const isTest = config?.runtime?.env === Environment.TEST || process.env.NODE_ENV === Environment.TEST; + config?.runtime?.env === Environment.DEVELOPMENT || + process.env.NODE_ENV === Environment.DEVELOPMENT; +export const isTest = + config?.runtime?.env === Environment.TEST || process.env.NODE_ENV === Environment.TEST; const useReplicaSet = config?.env?.replica_set; // Database @@ -29,7 +35,8 @@ const DB_NAME = 'igbo_api'; const TEST_DB_NAME = 'test_igbo_api'; // If running inside Docker container, it will fallback to using test_igbo_api database -const isTestingEnvironment = isTest || (process.env.CONTAINER_HOST === 'mongodb' && !isDevelopment && !isProduction); +const isTestingEnvironment = + isTest || (process.env.CONTAINER_HOST === 'mongodb' && !isDevelopment && !isProduction); export const PORT = 8080; export const MONGO_HOST = process.env.CONTAINER_HOST || '127.0.0.1'; export const REPLICA_SET_NAME = 'rs0'; @@ -49,8 +56,8 @@ const LOCAL_MONGO_URI = `${MONGO_ROOT}/${DB_NAME}`; export const MONGO_URI = isTestingEnvironment ? TEST_MONGO_URI.concat(QUERIES) : isDevelopment - ? LOCAL_MONGO_URI.concat(QUERIES) - : config?.env?.mongo_uri || LOCAL_MONGO_URI.concat(QUERIES); + ? LOCAL_MONGO_URI.concat(QUERIES) + : config?.env?.mongo_uri || LOCAL_MONGO_URI.concat(QUERIES); export const FIREBASE_CONFIG = config?.env?.firebase_config; // Provide your own Firebase Config export const CLIENT_TEST = config?.env?.client_test; @@ -64,7 +71,8 @@ export const API_ROUTE = isProduction ? 
'' : `http://localhost:${PORT}`; // SendGrid API export const SENDGRID_API_KEY = config?.sendgrid?.api_key || ''; -export const SENDGRID_NEW_DEVELOPER_ACCOUNT_TEMPLATE = config?.sendgrid?.new_developer_account_template || ''; +export const SENDGRID_NEW_DEVELOPER_ACCOUNT_TEMPLATE = + config?.sendgrid?.new_developer_account_template || ''; export const API_FROM_EMAIL = 'kedu@nkowaokwu.com'; if (sgMail && !isTest) { diff --git a/src/controllers/__tests__/examples.test.js b/src/controllers/__tests__/examples.test.ts similarity index 50% rename from src/controllers/__tests__/examples.test.js rename to src/controllers/__tests__/examples.test.ts index d225fca4..40d93519 100644 --- a/src/controllers/__tests__/examples.test.js +++ b/src/controllers/__tests__/examples.test.ts @@ -1,21 +1,32 @@ +import { expect } from '@jest/globals'; +import { exampleFixture } from '../../__tests__/shared/fixtures'; import { convertExamplePronunciations } from '../examples'; describe('examples', () => { it('converts example pronunciations to pronunciation for v1', () => { - const example = { + const updatedAt = new Date(); + const example = exampleFixture({ igbo: 'igbo', english: 'english', meaning: 'meaning', nsibidi: 'nsibidi', - pronunciations: [{ audio: 'first audio', speaker: '' }], - }; + pronunciations: [ + { audio: 'first audio', speaker: '', _id: '', approvals: [], denials: [], review: true }, + ], + updatedAt, + }); expect(convertExamplePronunciations(example)).toEqual({ + id: '', igbo: 'igbo', english: 'english', meaning: 'meaning', nsibidi: 'nsibidi', pronunciation: 'first audio', + associatedDefinitionsSchemas: [], + associatedWords: [], + nsibidiCharacters: [], + updatedAt, }); }); }); diff --git a/src/controllers/utils/__tests__/minimizeVerbsAndSuffixes.test.ts b/src/controllers/utils/__tests__/minimizeVerbsAndSuffixes.test.ts index 38afd92a..b470eabf 100644 --- a/src/controllers/utils/__tests__/minimizeVerbsAndSuffixes.test.ts +++ b/src/controllers/utils/__tests__/minimizeVerbsAndSuffixes.test.ts @@ -1,29 +1,30 @@ +import { expect } from '@jest/globals'; import Version from '../../../shared/constants/Version'; import WordClass from '../../../shared/constants/WordClass'; import { definitionFixture, wordFixture } from '../../../__tests__/shared/fixtures'; import minimizeVerbsAndSuffixes from '../minimizeVerbsAndSuffixes'; +import WordClassEnum from '../../../shared/constants/WordClassEnum'; describe('minimizeVerbsAndSuffixes', () => { it('minimizes the verbs and suffixes to include basic fields', () => { - const definition = definitionFixture({}); const words = [ wordFixture({ word: 'first word', - definitions: [definition], + definitions: [definitionFixture({})], stems: [], relatedTerms: [], id: '123', }), wordFixture({ word: 'second word', - definitions: [definition], + definitions: [definitionFixture({ wordClass: WordClassEnum.ADV })], stems: [], relatedTerms: [], id: '456', }), wordFixture({ word: 'third word', - definitions: [definition], + definitions: [definitionFixture({ wordClass: WordClassEnum.PREP })], stems: [], relatedTerms: [], id: '789', diff --git a/src/controllers/utils/index.ts b/src/controllers/utils/index.ts index 19a983f6..2debc8d1 100644 --- a/src/controllers/utils/index.ts +++ b/src/controllers/utils/index.ts @@ -19,20 +19,31 @@ import ExampleStyles from '../../shared/constants/ExampleStyles'; const createSimpleRegExp = (keywords: { text: string }[]) => ({ wordReg: new RegExp( - `${keywords.map((keyword) => `(${createRegExp(keyword.text, true).wordReg.source})`).join('|')}`, + 
`${keywords + .map((keyword) => `(${createRegExp(keyword.text, true).wordReg.source})`) + .join('|')}`, 'i' ), exampleReg: new RegExp( - `${keywords.map((keyword) => `(${createRegExp(keyword.text, true).exampleReg.source})`).join('|')}`, + `${keywords + .map((keyword) => `(${createRegExp(keyword.text, true).exampleReg.source})`) + .join('|')}`, 'i' ), definitionsReg: new RegExp( - `${keywords.map((keyword) => `(${createRegExp(keyword.text, true).definitionsReg.source})`).join('|')}`, + `${keywords + .map((keyword) => `(${createRegExp(keyword.text, true).definitionsReg.source})`) + .join('|')}`, 'i' ), hardDefinitionsReg: new RegExp( `${keywords - .map((keyword) => `(${(createRegExp(keyword.text, true).hardDefinitionsReg || { source: keyword.text }).source})`) + .map( + (keyword) => + `(${ + (createRegExp(keyword.text, true).hardDefinitionsReg || { source: keyword.text }).source + })` + ) .join('|')}`, 'i' ), @@ -45,14 +56,18 @@ const constructRegexQuery = ({ isUsingMainKey, keywords, }: { - isUsingMainKey: boolean | undefined; - keywords: { text: string }[]; + isUsingMainKey: boolean | undefined, + keywords: { text: string }[], }) => isUsingMainKey ? createSimpleRegExp(keywords) : keywords?.length - ? createSimpleRegExp(keywords) - : { wordReg: /^[.{0,}\n{0,}]/, exampleReg: /^[.{0,}\n{0,}]/, definitionsReg: /^[.{0,}\n{0,}]/ }; + ? createSimpleRegExp(keywords) + : { + wordReg: /^[.{0,}\n{0,}]/, + exampleReg: /^[.{0,}\n{0,}]/, + definitionsReg: /^[.{0,}\n{0,}]/, + }; /* Packages the res response with sorting */ export const packageResponse = ({ @@ -61,16 +76,16 @@ export const packageResponse = ({ contentLength, version, }: { - res: Response; + res: Response, docs: | Partial | Partial | Partial | Partial[] | Partial[] - | Partial[]; - contentLength: number; - version: Version; + | Partial[], + contentLength: number, + version: Version, }) => { res.set({ 'Content-Range': contentLength }); const response = version === Version.VERSION_2 ? { data: docs, length: contentLength } : docs; @@ -84,7 +99,9 @@ const convertFilterToKeyword = (filter = '{"word": ""}') => { const firstFilterKey = Object.keys(parsedFilter)[0]; return parsedFilter[firstFilterKey]; } catch { - throw new Error(`Invalid filter query syntax. Expected: {"word":"filter"}, Received: ${filter}`); + throw new Error( + `Invalid filter query syntax. Expected: {"word":"filter"}, Received: ${filter}` + ); } }; @@ -93,14 +110,15 @@ const searchAllVerbsAndSuffixes = async ({ query, version, }: { - query: PipelineStage.Match['$match']; - version: Version; -}): Promise<{ words: Word[]; contentLength: number }> => { - const { words, contentLength } = (await findWordsWithMatch({ + query: PipelineStage.Match['$match'], + version: Version, +}): Promise<{ words: Word[], contentLength: number }> => { + const { words, contentLength } = await findWordsWithMatch({ match: query, version, lean: true, - })) as { words: Word[]; contentLength: number }; + }); + // @ts-expect-error types return { words, contentLength }; }; @@ -130,14 +148,19 @@ export const handleQueries = async ({ const hasQuotes = keywordQuery && keywordQuery.match(/["'].*["']/) !== null; const keyword = keywordQuery.replace(/["']/g, ''); const version = baseUrl.endsWith(Version.VERSION_2) ? 
Version.VERSION_2 : Version.VERSION_1; - const allVerbsAndSuffixesQuery: PipelineStage.Match['$match'] = searchForAllVerbsAndSuffixesQuery(); - const cachedAllVerbsAndSuffixes = await getAllCachedVerbsAndSuffixes({ key: version, redisClient }); + const allVerbsAndSuffixesQuery: PipelineStage.Match['$match'] = + searchForAllVerbsAndSuffixesQuery(); + const cachedAllVerbsAndSuffixes = await getAllCachedVerbsAndSuffixes({ + key: version, + redisClient, + }); if (version === Version.VERSION_2) { if (cachedAllVerbsAndSuffixes) { allVerbsAndSuffixes = cachedAllVerbsAndSuffixes; } else { - const allVerbsAndSuffixesDb = (await searchAllVerbsAndSuffixes({ query: allVerbsAndSuffixesQuery, version })) - .words; + const allVerbsAndSuffixesDb = ( + await searchAllVerbsAndSuffixes({ query: allVerbsAndSuffixesQuery, version }) + ).words; allVerbsAndSuffixes = await setAllCachedVerbsAndSuffixes({ key: version, data: allVerbsAndSuffixesDb, @@ -153,7 +176,10 @@ export const handleQueries = async ({ const regexes = searchWordParts.reduce( (regexesObject, searchWordPart) => ({ ...regexesObject, - [searchWordPart]: constructRegexQuery({ isUsingMainKey, keywords: [{ text: searchWordPart }] }), + [searchWordPart]: constructRegexQuery({ + isUsingMainKey, + keywords: [{ text: searchWordPart }], + }), }), {} ); @@ -181,7 +207,11 @@ export const handleQueries = async ({ version === Version.VERSION_2 ? expandNoun(searchWord, allVerbsAndSuffixes).map(({ text, wordClass }) => ({ text, - wordClass: wordClass.concat([WordClass.NNC.value, WordClass.PRN.value, WordClass.NNP.value]), + wordClass: wordClass.concat([ + WordClass.NNC.value, + WordClass.PRN.value, + WordClass.NNP.value, + ]), regex: pick( constructRegexQuery({ isUsingMainKey, @@ -224,7 +254,7 @@ export const handleQueries = async ({ const dialects = dialectsQuery === 'true'; const examples = examplesQuery === 'true'; // @ts-expect-error toUpperCase - const style = stylesQuery && ExampleStyles[stylesQuery.toUpperCase()].value!; + const style = stylesQuery && ExampleStyles[stylesQuery.toUpperCase()].value; const tags = tagsQuery ? tagsQuery .replace(/[[\]']/g, '') @@ -244,12 +274,7 @@ export const handleQueries = async ({ style, resolve, }; - console.log( - `Search flags: - ${Object.entries(flags) - .map(([key, value]) => `[${key}=${value}]`) - .join(',')}` - ); + const filters: Filters = { ...(tags?.length ? { tags: { $in: tags } } : {}), ...(wordClasses?.length ? 
{ 'definitions.wordClass': { $in: wordClasses } } : {}), diff --git a/src/controllers/utils/types.ts b/src/controllers/utils/types.ts index b390f5c9..9beb2277 100644 --- a/src/controllers/utils/types.ts +++ b/src/controllers/utils/types.ts @@ -1,47 +1,46 @@ import WordClassEnum from '../../shared/constants/WordClassEnum'; -import { SearchRegExp } from '../../shared/utils/createRegExp'; export type Meta = { - depth: number; - isNegatorPrefixed?: boolean; - isPreviousVerb?: boolean; - isPreviousStativePrefix?: boolean; - negativePrefix?: string; - nominalPrefix?: boolean; - negatorPrefixed?: boolean; + depth: number, + isNegatorPrefixed?: boolean, + isPreviousVerb?: boolean, + isPreviousStativePrefix?: boolean, + negativePrefix?: string, + nominalPrefix?: boolean, + negatorPrefixed?: boolean, }; export type MinimizedWord = { - word: string; - definitions: { wordClass: WordClassEnum; nsibidi?: string }[]; + word: string, + definitions: { wordClass: WordClassEnum, nsibidi?: string }[], }; export type WordData = { - verbs?: MinimizedWord[]; - suffixes?: MinimizedWord[]; + verbs?: MinimizedWord[], + suffixes?: MinimizedWord[], }; export type Solution = { - type: { type: string; backgroundColor: string }; - text: string; - wordClass: WordClassEnum[]; - wordInfo?: MinimizedWord; + type: { type: string, backgroundColor: string }, + text: string, + wordClass: WordClassEnum[], + wordInfo?: MinimizedWord, }; -export type TopSolution = { solution: Solution; metaData: Meta }; +export type TopSolution = { solution: Solution, metaData: Meta }; export type Keyword = { - text: string; - wordClass: WordClassEnum[]; + text: string, + wordClass: WordClassEnum[], regex: { - wordReg: RegExp; - definitionsReg?: RegExp; - }; + wordReg: RegExp, + definitionsReg?: RegExp, + }, }; export type Flags = { - dialects: boolean; - examples: boolean; - style: string; - resolve: boolean; + dialects: boolean, + examples: boolean, + style: string, + resolve: boolean, }; diff --git a/src/dictionaries/buildDictionaries.js b/src/dictionaries/buildDictionaries.ts similarity index 75% rename from src/dictionaries/buildDictionaries.js rename to src/dictionaries/buildDictionaries.ts index 549b4cad..d12a0911 100644 --- a/src/dictionaries/buildDictionaries.js +++ b/src/dictionaries/buildDictionaries.ts @@ -18,21 +18,32 @@ const updateJSONDictionary = () => { const dictionaryFilePaths = [ [`${DICTIONARIES_DIR}/ig-en_1000_common.json`, JSON.stringify(commonDictionary, null, 4)], [`${DICTIONARIES_DIR}/ig-en_expanded.json`, JSON.stringify(dictionary, null, 4)], - [`${DICTIONARIES_DIR}/ig-en_normalized_expanded.json`, JSON.stringify(normalizedDictionary, null, 4)], + [ + `${DICTIONARIES_DIR}/ig-en_normalized_expanded.json`, + JSON.stringify(normalizedDictionary, null, 4), + ], [`${DICTIONARIES_DIR}/ig-en.json`, JSON.stringify(dictionary)], ]; const buildDictionaryFilePaths = + // @ts-expect-error NODE_ENV process.env.NODE_ENV === 'build' ? 
[ - [`${BUILD_DICTIONARIES_DIR}/ig-en_1000_common.json`, JSON.stringify(commonDictionary, null, 4)], + [ + `${BUILD_DICTIONARIES_DIR}/ig-en_1000_common.json`, + JSON.stringify(commonDictionary, null, 4), + ], [`${BUILD_DICTIONARIES_DIR}/ig-en_expanded.json`, JSON.stringify(dictionary, null, 4)], - [`${BUILD_DICTIONARIES_DIR}/ig-en_normalized_expanded.json`, JSON.stringify(normalizedDictionary, null, 4)], + [ + `${BUILD_DICTIONARIES_DIR}/ig-en_normalized_expanded.json`, + JSON.stringify(normalizedDictionary, null, 4), + ], [`${BUILD_DICTIONARIES_DIR}/ig-en.json`, JSON.stringify(dictionary)], ] : []; flatten([dictionaryFilePaths, buildDictionaryFilePaths]).forEach((config) => { + // @ts-expect-error spread fs.writeFileSync(...config, () => { if (process.env.NODE_ENV !== 'test') { console.green(`${config[0]} has been updated`); diff --git a/src/dictionaries/nsibidi/nsibidi_dictionary.js b/src/dictionaries/nsibidi/nsibidi_dictionary.ts similarity index 99% rename from src/dictionaries/nsibidi/nsibidi_dictionary.js rename to src/dictionaries/nsibidi/nsibidi_dictionary.ts index efff2228..45c5d10c 100644 --- a/src/dictionaries/nsibidi/nsibidi_dictionary.js +++ b/src/dictionaries/nsibidi/nsibidi_dictionary.ts @@ -1,3 +1,5 @@ +/* eslint-disable max-len */ + export default [ { sym: 'á', diff --git a/src/models/Developer.js b/src/models/Developer.ts similarity index 100% rename from src/models/Developer.js rename to src/models/Developer.ts diff --git a/src/models/Example.js b/src/models/Example.js deleted file mode 100644 index 4e53168b..00000000 --- a/src/models/Example.js +++ /dev/null @@ -1,43 +0,0 @@ -import mongoose from 'mongoose'; -import { toJSONPlugin, toObjectPlugin } from './plugins'; -import ExampleStyles from '../shared/constants/ExampleStyles'; -import SentenceTypes from '../shared/constants/SentenceTypes'; - -const { Schema, Types } = mongoose; -export const exampleSchema = new Schema({ - igbo: { type: String, default: '' }, - english: { type: String, default: '' }, - meaning: { type: String, default: '' }, - nsibidi: { type: String, default: '' }, - type: { - type: String, - enum: Object.values(SentenceTypes), - default: SentenceTypes.DEFAULT, - }, - style: { - type: String, - enum: Object.values(ExampleStyles).map(({ value }) => value), - default: ExampleStyles.NO_STYLE.value, - }, - associatedWords: { type: [{ type: Types.ObjectId, ref: 'Word' }], default: [] }, - associatedDefinitionsSchemas: { type: [{ type: Types.ObjectId }], default: [] }, - pronunciations: { - type: [{ - audio: { type: String, default: '' }, - speaker: { type: String, default: '' }, - }], - default: [], - }, -}, { toObject: toObjectPlugin, timestamps: true }); - -exampleSchema.index({ - associatedWords: 1, -}); -exampleSchema.index({ - english: 1, -}); -exampleSchema.index({ - igbo: 1, -}); - -toJSONPlugin(exampleSchema); diff --git a/src/models/Example.ts b/src/models/Example.ts new file mode 100644 index 00000000..b7c838b9 --- /dev/null +++ b/src/models/Example.ts @@ -0,0 +1,48 @@ +import mongoose from 'mongoose'; +import { toJSONPlugin, toObjectPlugin } from './plugins'; +import ExampleStyles from '../shared/constants/ExampleStyles'; +import SentenceTypes from '../shared/constants/SentenceTypes'; + +const { Schema, Types } = mongoose; +export const exampleSchema = new Schema( + { + igbo: { type: String, default: '' }, + english: { type: String, default: '' }, + meaning: { type: String, default: '' }, + nsibidi: { type: String, default: '' }, + type: { + type: String, + enum: Object.values(SentenceTypes), + 
default: SentenceTypes.DEFAULT, + }, + style: { + type: String, + enum: Object.values(ExampleStyles).map(({ value }) => value), + default: ExampleStyles.NO_STYLE.value, + }, + associatedWords: { type: [{ type: Types.ObjectId, ref: 'Word' }], default: [] }, + associatedDefinitionsSchemas: { type: [{ type: Types.ObjectId }], default: [] }, + pronunciations: { + type: [ + { + audio: { type: String, default: '' }, + speaker: { type: String, default: '' }, + }, + ], + default: [], + }, + }, + { toObject: toObjectPlugin, timestamps: true } +); + +exampleSchema.index({ + associatedWords: 1, +}); +exampleSchema.index({ + english: 1, +}); +exampleSchema.index({ + igbo: 1, +}); + +toJSONPlugin(exampleSchema); diff --git a/src/models/NsibidiCharacter.js b/src/models/NsibidiCharacter.ts similarity index 100% rename from src/models/NsibidiCharacter.js rename to src/models/NsibidiCharacter.ts diff --git a/src/models/Stat.js b/src/models/Stat.ts similarity index 89% rename from src/models/Stat.js rename to src/models/Stat.ts index fded00b6..b437e142 100644 --- a/src/models/Stat.js +++ b/src/models/Stat.ts @@ -8,7 +8,7 @@ export const statSchema = new Schema( authorId: { type: String, default: 'SYSTEM' }, value: { type: Schema.Types.Mixed, default: null }, }, - { toObject: toObjectPlugin, timestamps: true }, + { toObject: toObjectPlugin, timestamps: true } ); toJSONPlugin(statSchema); diff --git a/src/models/Word.js b/src/models/Word.ts similarity index 87% rename from src/models/Word.js rename to src/models/Word.ts index e5004a2c..5a7c4b08 100644 --- a/src/models/Word.js +++ b/src/models/Word.ts @@ -6,6 +6,9 @@ import Tenses from '../shared/constants/Tenses'; import WordClass from '../shared/constants/WordClass'; import WordAttributes from '../shared/constants/WordAttributes'; import WordTags from '../shared/constants/WordTags'; +import DialectEnum from '../shared/constants/DialectEnum'; +import WordTagEnum from '../shared/constants/WordTagEnum'; +import { Definition } from '../types'; const { Schema, Types } = mongoose; @@ -25,7 +28,10 @@ const definitionSchema = new Schema( { igbo: String, nsibidi: String, - nsibidiCharacters: { type: [{ type: Types.ObjectId, ref: 'NsibidiCharacter' }], default: [] }, + nsibidiCharacters: { + type: [{ type: Types.ObjectId, ref: 'NsibidiCharacter' }], + default: [], + }, }, ], default: [], @@ -45,7 +51,7 @@ const dialectSchema = new Schema( variations: { type: [{ type: String }], default: [] }, dialects: { type: [{ type: String }], - validate: (v) => every(v, (dialect) => Dialects[dialect].value), + validate: (v: DialectEnum[]) => every(v, (dialect) => Dialects[dialect].value), default: [], }, pronunciation: { type: String, default: '' }, @@ -61,14 +67,15 @@ export const wordSchema = new Schema( definitions: [ { type: definitionSchema, - validate: (definitions) => Array.isArray(definitions) && definitions.length > 0, + validate: (definitions: Definition[]) => + Array.isArray(definitions) && definitions.length > 0, }, ], dialects: { type: [dialectSchema], default: [] }, tags: { type: [String], default: [], - validate: (v) => + validate: (v: WordTagEnum[]) => v.every((tag) => Object.values(WordTags) .map(({ value }) => value) diff --git a/src/models/plugins/index.js b/src/models/plugins/index.ts similarity index 82% rename from src/models/plugins/index.js rename to src/models/plugins/index.ts index 5ea98fc9..b4e841e0 100644 --- a/src/models/plugins/index.js +++ b/src/models/plugins/index.ts @@ -5,7 +5,7 @@ import mongoose from 'mongoose'; /* Replaces the _id key with id 
*/ -export const toJSONPlugin = (schema) => { +export const toJSONPlugin = (schema: mongoose.Schema) => { const toJSON = schema.methods.toJSON || mongoose.Document.prototype.toJSON; schema.set('toJSON', { virtuals: true, @@ -20,11 +20,11 @@ export const toJSONPlugin = (schema) => { }; }; -export const toObjectPlugin = ({ - transform: (doc, ret) => { +export const toObjectPlugin = { + transform: (doc: mongoose.Document, ret: mongoose.Document) => { // remove the _id and __v of every document before returning the result ret.id = doc.id.toString(); delete ret._id; delete ret.__v; }, -}); +}; diff --git a/src/pages/components/Statistics/Statistics.tsx b/src/pages/components/Statistics/Statistics.tsx index 6f3df704..9429fca9 100644 --- a/src/pages/components/Statistics/Statistics.tsx +++ b/src/pages/components/Statistics/Statistics.tsx @@ -22,8 +22,8 @@ const Statistics = ({ Crunching the Numbers - The Igbo API is the most robust, Igbo-English dictionary API that is maintained by our wonderful volunteer - community. + The Igbo API is the most robust, Igbo-English dictionary API that is maintained by our + wonderful volunteer community. @@ -37,7 +37,7 @@ const Statistics = ({ {contributors ? ( - + {contributors .slice(0, 18) diff --git a/src/routers/siteRouter.js b/src/routers/siteRouter.ts similarity index 68% rename from src/routers/siteRouter.js rename to src/routers/siteRouter.ts index 3e9f69f6..ed2c3be7 100644 --- a/src/routers/siteRouter.js +++ b/src/routers/siteRouter.ts @@ -6,9 +6,7 @@ import { parse } from 'url'; const nextApp = nextjs({}); const handle = nextApp.getRequestHandler(); -const routes = compact([ - /^\/$/, -]); +const routes = compact([/^\/$/]); const siteRouter = express.Router(); @@ -16,10 +14,10 @@ siteRouter.use(async (req, res, next) => { try { const parsedUrl = parse(req.url, true); const { pathname, query } = parsedUrl; - if (routes.find((route) => pathname.match(route))) { - return nextApp.render(req, res, pathname, query); + if (routes.find((route) => pathname?.match?.(route))) { + return await nextApp.render(req, res, pathname || '/', query); } - return handle(req, res, parsedUrl); + return await handle(req, res, parsedUrl); } catch (err) { return next(err); } diff --git a/src/routers/testRouter.js b/src/routers/testRouter.ts similarity index 100% rename from src/routers/testRouter.js rename to src/routers/testRouter.ts diff --git a/src/shared/utils/__tests__/createRegExp.test.ts b/src/shared/utils/__tests__/createRegExp.test.ts index 02b94481..1b8225a7 100644 --- a/src/shared/utils/__tests__/createRegExp.test.ts +++ b/src/shared/utils/__tests__/createRegExp.test.ts @@ -1,4 +1,5 @@ /* eslint-disable max-len */ +import { expect } from '@jest/globals'; import createRegExp, { removeSpecialCharacters } from '../createRegExp'; describe('createRegExp', () => { @@ -12,6 +13,8 @@ describe('createRegExp', () => { expect(createRegExp(rawSearchWord)).toEqual({ definitionsReg: /(\W|^)(([nṄǹńNṅǸŃṄǹńṅǸŃ]+[´́`¯̣̄̀]{0,})(g)(w)((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})(r)((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})([\s'])(-)(d)([aAaàāÀÁĀ]+[´́`¯̣̄̀]{0,})([\s'])(?:es|[sx]|ing)?)(\W|$)/i, + exampleReg: + /(\W|^)(([nṄǹńNṅǸŃṄǹńṅǸŃ]+[´́`¯̣̄̀]{0,})(g)(w)((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})(r)((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})([\s'])(-)(d)([aAaàāÀÁĀ]+[´́`¯̣̄̀]{0,})([\s'])(?:es|[sx]|ing)?)(\W|$)/i, hardDefinitionsReg: 
/(\W|^)(([nṄǹńNṅǸŃṄǹńṅǸŃ]+[´́`¯̣̄̀]{0,})(g)(w)((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})(r)((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})((([oO]+[̣]{0,})|[ọỌ])+[´́`¯̣̄̀]{0,})([\s'])(-)(d)([aAaàāÀÁĀ]+[´́`¯̣̄̀]{0,})([\s']))(\W|$)/i, wordReg: diff --git a/testSetup.js b/testSetup.ts similarity index 89% rename from testSetup.js rename to testSetup.ts index 1a13c689..7209d180 100644 --- a/testSetup.js +++ b/testSetup.ts @@ -14,5 +14,7 @@ export default async () => { await mongoose.connection.db.dropDatabase(); } await populateAPI(); - await new Promise((resolve) => setTimeout(resolve, 10000)); + await new Promise((resolve) => { + setTimeout(resolve, 10000); + }); }; diff --git a/tsconfig.json b/tsconfig.json index 7d7d6f86..6d477736 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -24,7 +24,8 @@ ] }, "include": [ - "./src/**/*", + "./**/*.ts", + "./**/*.tsx", "@types", "env.d.ts" ],