Merge pull request #88 from tak-bro/feature/fix-deepseek
Feature/fix deepseek
tak-bro authored Oct 11, 2024
2 parents 337b4a1 + e7357dd commit a314b69
Showing 3 changed files with 48 additions and 72 deletions.
package.json (2 changes: 1 addition & 1 deletion)
@@ -81,6 +81,7 @@
"inquirer": "9.2.8",
"inquirer-reactive-list-prompt": "^1.0.10",
"ollama": "^0.5.6",
"openai": "^4.67.3",
"ora": "^8.0.1",
"readline": "^1.3.0",
"rxjs": "^7.8.1",
@@ -120,7 +121,6 @@
"ini": "^3.0.1",
"lint-staged": "^13.1.2",
"manten": "^0.7.0",
"openai": "^3.2.1",
"pkgroll": "^1.9.0",
"prettier": "^3.0.0",
"semantic-release": "^23.0.2",
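For context: this hunk promotes openai from a devDependency pinned at ^3.2.1 to a runtime dependency at ^4.67.3. The v4 major of that SDK exposes a single default-export client whose baseURL can be pointed at an OpenAI-compatible endpoint, which is what the DeepSeek service change further down relies on. A minimal sketch of that client setup, assuming openai v4 is installed (the environment-variable name is illustrative only; in the tool itself the key comes from its config):

    import OpenAI from 'openai';

    // baseURL and the v4 client shape are taken from the service diff below;
    // DEEPSEEK_API_KEY is a hypothetical env var used here purely for illustration.
    const deepSeek = new OpenAI({
        baseURL: 'https://api.deepseek.com',
        apiKey: process.env.DEEPSEEK_API_KEY ?? '',
    });

With the old v3 package this would have gone through the Configuration/OpenAIApi wrapper instead, which is why the dependency had to be bumped rather than merely moved out of devDependencies.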
pnpm-lock.yaml (44 changes: 27 additions & 17 deletions)

Some generated files are not rendered by default.

src/services/ai/deep-seek.service.ts (74 changes: 20 additions & 54 deletions)
@@ -1,39 +1,19 @@
import { AxiosResponse } from 'axios';
import chalk from 'chalk';
import { ReactiveListChoice } from 'inquirer-reactive-list-prompt';
import OpenAI from 'openai';
import { Observable, catchError, concatMap, from, map, of } from 'rxjs';
import { fromPromise } from 'rxjs/internal/observable/innerFrom';

import { AIResponse, AIService, AIServiceError, AIServiceParams } from './ai.service.js';
import { CreateChatCompletionsResponse } from './mistral.service.js';
import { KnownError } from '../../utils/error.js';
import { RequestType, createLogResponse } from '../../utils/log.js';
import { DEFAULT_PROMPT_OPTIONS, PromptOptions, codeReviewPrompt, generatePrompt } from '../../utils/prompt.js';
import { HttpRequestBuilder } from '../http/http-request.builder.js';

export interface DeepSeekServiceError extends AIServiceError {}
export interface DeepSeekChatCompletionResponse {
id: string;
object: string;
created: number;
model: string;
choices: {
index: number;
message: {
role: string;
content: string;
};
finish_reason: string;
}[];
usage: {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
};
}

export class DeepSeekService extends AIService {
private host = 'https://api.deepseek.com';
private apiKey = '';
private deepSeek: OpenAI;

constructor(private readonly params: AIServiceParams) {
super(params);
@@ -43,7 +23,11 @@ export class DeepSeekService extends AIService {
};
this.serviceName = chalk.bgHex(this.colors.primary).hex(this.colors.secondary).bold(`[DeepSeek]`);
this.errorPrefix = chalk.red.bold(`[DeepSeek]`);
this.apiKey = this.params.config.key;

this.deepSeek = new OpenAI({
baseURL: this.host,
apiKey: this.params.config.key,
});
}

generateCommitMessage$(): Observable<ReactiveListChoice> {
@@ -124,17 +108,8 @@
}

private async createChatCompletions(systemPrompt: string, requestType: RequestType) {
const requestBuilder = new HttpRequestBuilder({
method: 'POST',
baseURL: `${this.host}/chat/completions`,
timeout: this.params.config.timeout,
})
.setHeaders({
Authorization: `Bearer ${this.apiKey}`,
'content-type': 'application/json',
})
.setBody({
model: this.params.config.model,
const chatCompletion = await this.deepSeek.chat.completions.create(
{
messages: [
{
role: 'system',
@@ -145,26 +120,17 @@ export class DeepSeekService extends AIService {
content: `Here is the diff: ${this.params.stagedDiff.diff}`,
},
],
temperature: this.params.config.temperature,
top_p: this.params.config.topP,
model: this.params.config.model,
max_tokens: this.params.config.maxTokens,
stream: false,
});

if (requestType === 'commit') {
requestBuilder.addBody({
response_format: {
type: 'json_object',
},
});
}
top_p: this.params.config.topP,
temperature: this.params.config.temperature,
},
{
timeout: this.params.config.timeout,
}
);

const response: AxiosResponse<CreateChatCompletionsResponse> = await requestBuilder.execute();
const result: DeepSeekChatCompletionResponse = response.data;
const hasNoChoices = !result.choices || result.choices.length === 0;
if (hasNoChoices || !result.choices[0].message?.content) {
throw new Error(`No Content on response. Please open a Bug report`);
}
return result.choices[0].message.content;
const response = chatCompletion.choices[0].message.content || '';
return response;
}
}
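Pieced together from the added lines above (with the few unchanged context lines that GitHub keeps collapsed between hunks filled in from the surrounding message shape), the rewritten request path reads roughly as follows; treat it as a sketch rather than the verbatim file contents. The HttpRequestBuilder call, the manual Authorization header, the DeepSeekChatCompletionResponse typing, and the commit-only response_format: 'json_object' branch are all gone; the OpenAI v4 client created in the constructor handles the request, and requestType appears unused inside the method in the lines shown.

    private async createChatCompletions(systemPrompt: string, requestType: RequestType) {
        // The request goes through the OpenAI SDK client pointed at DeepSeek's baseURL.
        const chatCompletion = await this.deepSeek.chat.completions.create(
            {
                messages: [
                    {
                        role: 'system',
                        content: systemPrompt,
                    },
                    {
                        role: 'user',
                        content: `Here is the diff: ${this.params.stagedDiff.diff}`,
                    },
                ],
                model: this.params.config.model,
                max_tokens: this.params.config.maxTokens,
                top_p: this.params.config.topP,
                temperature: this.params.config.temperature,
            },
            {
                // Per-request options; the timeout comes from the user's config.
                timeout: this.params.config.timeout,
            }
        );
        // Note: the old "no choices" guard is gone; a missing message now yields ''.
        const response = chatCompletion.choices[0].message.content || '';
        return response;
    }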
