Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Revert "Exclude generationConfig field"
Browse files Browse the repository at this point in the history
This reverts commit 0be8786.
natebosch committed May 22, 2024
1 parent ce70d71 commit 8acc0f2
Showing 3 changed files with 12 additions and 21 deletions.
8 changes: 5 additions & 3 deletions pkgs/google_generative_ai/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -4,9 +4,11 @@
replies with more than one part.
- Fix handling of `format` argument to `Schema.number` and `Schema.integer`.
- Export `UsageMetadata`.
- Include more fields from `GenerateContentRequest` (previously omitted
`safetySettings`, `tools`, `toolConfig`, and `systemInstruction`) in
`countTokens` requests.
- Include the full `GenerateContentRequest` (previously omitted
`safetySettings`, `generationConfig`, `tools`, `toolConfig`, and
`systemInstruction`) in `countTokens` requests. This aligns the token count
with the token count the backend will see in practice for an equivalent
`generateContent` request.

## 0.4.0

6 changes: 3 additions & 3 deletions pkgs/google_generative_ai/lib/src/model.dart
Original file line number Diff line number Diff line change
@@ -241,6 +241,7 @@ final class GenerativeModel {
Future<CountTokensResponse> countTokens(
Iterable<Content> contents, {
List<SafetySetting>? safetySettings,
GenerationConfig? generationConfig,
List<Tool>? tools,
ToolConfig? toolConfig,
}) async {
@@ -249,9 +250,9 @@ final class GenerativeModel {
_generateContentRequest(
contents,
safetySettings: safetySettings,
generationConfig: generationConfig,
tools: tools,
toolConfig: toolConfig,
includeGenerationConfig: false,
));
return parseCountTokensResponse(response);
}
@@ -312,7 +313,6 @@ final class GenerativeModel {
GenerationConfig? generationConfig,
List<Tool>? tools,
ToolConfig? toolConfig,
bool includeGenerationConfig = true,
}) {
safetySettings ??= _safetySettings;
generationConfig ??= _generationConfig;
@@ -322,7 +322,7 @@ final class GenerativeModel {
'contents': contents.map((c) => c.toJson()).toList(),
if (safetySettings.isNotEmpty)
'safetySettings': safetySettings.map((s) => s.toJson()).toList(),
if (includeGenerationConfig && generationConfig != null)
if (generationConfig != null)
'generationConfig': generationConfig.toJson(),
if (tools != null) 'tools': tools.map((t) => t.toJson()).toList(),
if (toolConfig != null) 'toolConfig': toolConfig.toJson(),
19 changes: 4 additions & 15 deletions pkgs/google_generative_ai/test/generative_model_test.dart
Original file line number Diff line number Diff line change
@@ -26,7 +26,6 @@ void main() {
String modelName = defaultModelName,
RequestOptions? requestOptions,
Content? systemInstruction,
GenerationConfig? generationConfig,
List<Tool>? tools,
ToolConfig? toolConfig,
}) {
@@ -36,7 +35,6 @@ void main() {
client: client.client,
requestOptions: requestOptions,
systemInstruction: systemInstruction,
generationConfig: generationConfig,
tools: tools,
toolConfig: toolConfig,
);
@@ -445,6 +443,7 @@ void main() {
HarmBlockThreshold.high,
),
],
generationConfig: GenerationConfig(stopSequences: ['a']),
tools: [
Tool(functionDeclarations: [
FunctionDeclaration(
@@ -468,6 +467,9 @@ void main() {
'threshold': 'BLOCK_ONLY_HIGH',
},
]);
expect(request['generationConfig'], {
'stopSequences': ['a'],
});
expect(request['tools'], [
{
'functionDeclarations': [
@@ -491,19 +493,6 @@ void main() {
},
);
});

test('excludes generationConfig', () async {
final (client, model) = createModel(
generationConfig: GenerationConfig(maxOutputTokens: 1000));
final prompt = 'Some prompt';
await client.checkRequest(
response: {'totalTokens': 100},
() => model.countTokens([Content.text(prompt)]),
verifyRequest: (_, request) {
expect(request, isNot(contains('generationConfig')));
},
);
});
});

group('embed content', () {

0 comments on commit 8acc0f2

Please sign in to comment.