From 12eca608015634628a512ad13ee5a34029f2886f Mon Sep 17 00:00:00 2001
From: "sweep-ai[bot]" <128439645+sweep-ai[bot]@users.noreply.github.com>
Date: Mon, 29 Apr 2024 21:54:06 +0000
Subject: [PATCH] feat: Updated 2 files

---
 .../lib/conversation2measurements.test.ts | 60 ++++++++++++++
 apps/nextjs/lib/llm.test.ts               | 67 +++++++++++++++++
 2 files changed, 127 insertions(+)
 create mode 100644 apps/nextjs/lib/conversation2measurements.test.ts
 create mode 100644 apps/nextjs/lib/llm.test.ts

diff --git a/apps/nextjs/lib/conversation2measurements.test.ts b/apps/nextjs/lib/conversation2measurements.test.ts
new file mode 100644
index 000000000..e3a5b098a
--- /dev/null
+++ b/apps/nextjs/lib/conversation2measurements.test.ts
@@ -0,0 +1,60 @@
+import { conversation2MeasurementsPrompt, conversation2measurements } from './conversation2measurements';
+import { textCompletion } from './llm';
+
+// Mock the LLM layer so the tests are deterministic and make no network calls.
+// jest.mock calls are hoisted above the imports, so the module under test sees the mock.
+jest.mock('./llm', () => ({
+  textCompletion: jest.fn(),
+}));
+
+describe('conversation2measurements', () => {
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should generate the correct prompt', () => {
+    const statement = 'I ate an apple';
+    const localDateTime = '2023-06-01T10:00:00';
+    const previousStatements = 'I had coffee for breakfast';
+
+    const prompt = conversation2MeasurementsPrompt(statement, localDateTime, previousStatements);
+
+    expect(prompt).toContain(statement);
+    expect(prompt).toContain(localDateTime);
+    expect(prompt).toContain(previousStatements);
+  });
+
+  it('should convert conversation to measurements', async () => {
+    const statement = 'I ate an apple';
+    const localDateTime = '2023-06-01T10:00:00';
+    const previousStatements = 'I had coffee for breakfast';
+
+    (textCompletion as jest.Mock).mockResolvedValue(
+      JSON.stringify({
+        measurements: [
+          {
+            variableName: 'Apple',
+            value: 1,
+            unitName: 'Count',
+            startAt: localDateTime,
+            combinationOperation: 'SUM',
+            variableCategoryName: 'Foods',
+          },
+        ],
+      }),
+    );
+
+    const measurements = await conversation2measurements(statement, localDateTime, previousStatements);
+
+    expect(textCompletion).toHaveBeenCalledWith(expect.any(String), 'json_object');
+    expect(measurements).toHaveLength(1);
+    expect(measurements[0]).toMatchObject({
+      variableName: 'Apple',
+      value: 1,
+      unitName: 'Count',
+      startAt: localDateTime,
+      combinationOperation: 'SUM',
+      variableCategoryName: 'Foods',
+    });
+  });
+});
diff --git a/apps/nextjs/lib/llm.test.ts b/apps/nextjs/lib/llm.test.ts
new file mode 100644
index 000000000..e834d2c8a
--- /dev/null
+++ b/apps/nextjs/lib/llm.test.ts
@@ -0,0 +1,67 @@
+import { textCompletion } from './llm';
+
+// `chat` is a plain property on the OpenAI client instance, not a prototype
+// method, so jest.spyOn(OpenAI.prototype, 'chat') cannot intercept it. Mock
+// the module instead: the mocked constructor returns a client whose
+// chat.completions.create is a jest.fn we control. (Out-of-scope variables
+// referenced in a jest.mock factory must be prefixed with "mock".)
+const mockCreate = jest.fn();
+jest.mock('openai', () => ({
+  __esModule: true,
+  default: jest.fn().mockImplementation(() => ({
+    chat: { completions: { create: mockCreate } },
+  })),
+}));
+
+describe('llm', () => {
+  beforeEach(() => {
+    mockCreate.mockReset();
+  });
+
+  it('should return text completion', async () => {
+    const promptText = 'What is the capital of France?';
+    mockCreate.mockResolvedValue({
+      choices: [{ message: { content: 'Paris' } }],
+    });
+
+    const completion = await textCompletion(promptText, 'text');
+
+    expect(mockCreate).toHaveBeenCalledWith({
+      model: 'gpt-4-turbo',
+      stream: false,
+      messages: [
+        { role: 'system', content: 'You are a helpful assistant that translates user requests into JSON objects' },
+        { role: 'user', content: promptText },
+      ],
+      response_format: { type: 'text' },
+    });
+    expect(completion).toBe('Paris');
+  });
+
+  it('should return JSON object completion', async () => {
+    const promptText = 'Generate a JSON object with a "message" property';
+    mockCreate.mockResolvedValue({
+      choices: [{ message: { content: '{"message":"Hello, world!"}' } }],
+    });
+
+    const completion = await textCompletion(promptText, 'json_object');
+
+    expect(mockCreate).toHaveBeenCalledWith({
+      model: 'gpt-4-turbo',
+      stream: false,
+      messages: [
+        { role: 'system', content: 'You are a helpful assistant that translates user requests into JSON objects' },
+        { role: 'user', content: promptText },
+      ],
+      response_format: { type: 'json_object' },
+    });
+    expect(completion).toBe('{"message":"Hello, world!"}');
+  });
+
+  it('should throw an error if no content is returned', async () => {
+    mockCreate.mockResolvedValue({
+      choices: [{ message: { content: undefined } }],
+    });
+
+    await expect(textCompletion('What is the capital of France?', 'text')).rejects.toThrow('No content in response');
+  });
+});