Add sample TOC and import hints (#214)
Add a table of contents to the samples directory.
Add comments with the required import to the individual samples.
natebosch authored Oct 7, 2024
1 parent ec5a820 commit 76ae8f8
Showing 9 changed files with 57 additions and 0 deletions.
13 changes: 13 additions & 0 deletions samples/dart/README.md
@@ -10,3 +10,16 @@ To try these samples out, follow these steps:
the Gemini generative models, or run the below commands with an environment
containing this variable.
- Run any sample from the `bin` directory (e.g., `dart bin/simple_text.dart`).

## Contents

| File | Description |
|----------------------------------------------------------------| ----------- |
| [chat.dart](bin/chat.dart) | Multi-turn chat conversations |
| [code_execution.dart](bin/code_execution.dart) | Executing code |
| [controlled_generation.dart](bin/controlled_generation.dart) | Generating content with output constraints (e.g. JSON mode) |
| [count_tokens.dart](bin/count_tokens.dart) | Counting input and output tokens |
| [function_calling.dart](bin/function_calling.dart) | Using function calling |
| [safety_settings.dart](bin/safety_settings.dart) | Setting and using safety controls |
| [system_instructions.dart](bin/system_instructions.dart) | Setting system instructions |
| [text_generation.dart](bin/text_generation.dart) | Generating text |
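
For readers skimming the table above, the sketch below shows roughly what one of these `bin/` samples looks like end to end, with the import hint this commit adds. It is a minimal illustration rather than a file from the repository: the prompt text and model choice are invented, and it assumes `GOOGLE_API_KEY` is set in the environment as the README instructs.

```dart
// Minimal, self-contained sketch of a sample in the style of this directory.
// Assumes the GOOGLE_API_KEY environment variable is set, per the README.
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY'];
  if (apiKey == null) {
    stderr.writeln('No GOOGLE_API_KEY environment variable set.');
    exit(1);
  }
  final model = GenerativeModel(model: 'gemini-1.5-flash', apiKey: apiKey);
  // Send a single text prompt and print the generated text.
  final response =
      await model.generateContent([Content.text('Explain recursion in one sentence.')]);
  print(response.text);
}
```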
6 changes: 6 additions & 0 deletions samples/dart/bin/chat.dart
@@ -27,6 +27,8 @@ final apiKey = () {

Future<void> chat() async {
// [START chat]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -46,6 +48,8 @@ Future<void> chat() async {

Future<void> chatStreaming() async {
// [START chat_streaming]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -71,6 +75,8 @@ Future<void> chatStreaming() async {

Future<void> chatStreamingWithImages() async {
// [START chat_streaming_with_images]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
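
A rough, self-contained sketch of the multi-turn pattern `chat.dart` demonstrates, using `startChat` and `sendMessage`. The seeded history and prompt are illustrative, and the API key is assumed to come from `GOOGLE_API_KEY`.

```dart
// Hypothetical multi-turn chat sketch (not the repository's chat.dart verbatim).
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY']!;
  final model = GenerativeModel(model: 'gemini-1.5-flash', apiKey: apiKey);
  // Seed the session with prior user/model turns, then continue the conversation.
  final chat = model.startChat(history: [
    Content.text('Hello, I have 2 dogs in my house.'),
    Content.model([TextPart('Great to meet you. What would you like to know?')]),
  ]);
  final response =
      await chat.sendMessage(Content.text('How many paws are in my house?'));
  print(response.text);
}
```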
4 changes: 4 additions & 0 deletions samples/dart/bin/code_execution.dart
@@ -33,6 +33,8 @@ final apiKey = () {

Future<void> codeExecutionBasic() async {
// [START code_execution_basic]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
// Specify a Gemini model appropriate for your use case
model: 'gemini-1.5-flash',
@@ -50,6 +52,8 @@ Future<void> codeExecutionBasic() async {

Future<void> codeExecutionChat() async {
// [START code_execution_chat]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
// Specify a Gemini model appropriate for your use case
model: 'gemini-1.5-flash',
4 changes: 4 additions & 0 deletions samples/dart/bin/controlled_generation.dart
@@ -27,6 +27,8 @@ final apiKey = () {

Future<void> jsonControlledGeneration() async {
// [START json_controlled_generation]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final schema = Schema.array(
description: 'List of recipes',
items: Schema.object(properties: {
@@ -50,6 +52,8 @@ Future<void> jsonControlledGeneration() async {

Future<void> jsonNoSchema() async {
// [START json_no_schema]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-pro',
apiKey: apiKey,
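
The sketch below illustrates the constrained-output pattern `controlled_generation.dart` covers: a `Schema` plus `GenerationConfig(responseMimeType: 'application/json', responseSchema: ...)`. The schema fields and prompt are invented for illustration.

```dart
// Hedged sketch of constrained (JSON) output; the recipe schema is an example only.
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY']!;
  // Describe the expected response shape: a list of objects with one string field.
  final schema = Schema.array(
    description: 'List of recipes',
    items: Schema.object(properties: {
      'recipeName': Schema.string(description: 'Name of the recipe', nullable: false),
    }, requiredProperties: ['recipeName']),
  );
  final model = GenerativeModel(
    model: 'gemini-1.5-pro',
    apiKey: apiKey,
    // Ask for JSON that conforms to the schema above.
    generationConfig: GenerationConfig(
      responseMimeType: 'application/json',
      responseSchema: schema,
    ),
  );
  final response =
      await model.generateContent([Content.text('List a few popular cookie recipes.')]);
  print(response.text);
}
```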
10 changes: 10 additions & 0 deletions samples/dart/bin/count_tokens.dart
@@ -27,6 +27,8 @@ final apiKey = () {

Future<void> tokensTextOnly() async {
// [START tokens_text_only]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -39,6 +41,8 @@ Future<void> tokensTextOnly() async {

Future<void> tokensChat() async {
// [START tokens_chat]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -66,6 +70,8 @@ Future<void> tokensChat() async {

Future<void> tokensMultimodalImageInline() async {
// [START tokens_multimodal_image_inline]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -95,6 +101,8 @@ Future<void> tokensMultimodalImageInline() async {

Future<void> tokensSystemInstructions() async {
// [START tokens_system_instruction]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
var model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -117,6 +125,8 @@ Future<void> tokensSystemInstructions() async {

Future<void> tokensTools() async {
// [START tokens_tools]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
var model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
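
A minimal sketch of the token-counting pattern in `count_tokens.dart`: `countTokens` before the request and `usageMetadata` after it. The prompt is illustrative, and the `usageMetadata` field names reflect my reading of the package rather than code copied from this commit.

```dart
// Rough sketch of token counting; the prompt text is a placeholder.
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY']!;
  final model = GenerativeModel(model: 'gemini-1.5-flash', apiKey: apiKey);
  final prompt = 'The quick brown fox jumps over the lazy dog.';
  // Count input tokens before sending the request.
  final tokenCount = await model.countTokens([Content.text(prompt)]);
  print('Total tokens: ${tokenCount.totalTokens}');
  // After generation, usageMetadata reports prompt and candidate token counts.
  final response = await model.generateContent([Content.text(prompt)]);
  print('Prompt tokens: ${response.usageMetadata?.promptTokenCount}, '
      'candidate tokens: ${response.usageMetadata?.candidatesTokenCount}');
}
```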
2 changes: 2 additions & 0 deletions samples/dart/bin/function_calling.dart
@@ -27,6 +27,8 @@ final apiKey = () {

Future<void> functionCalling() async {
// [START function_calling]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
Map<String, Object?> setLightValues(Map<String, Object?> args) {
return args;
}
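
A hedged sketch of the function-calling pattern in `function_calling.dart`. The function name, its parameter schema, and the positional `FunctionDeclaration(name, description, parameters)` signature are assumptions based on my reading of `package:google_generative_ai`, not code taken from this commit.

```dart
// Hypothetical function-calling sketch; the declared function and its schema are invented.
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY']!;
  // Declare a function the model may ask the application to call.
  final controlLight = FunctionDeclaration(
    'setLightValues',
    'Set the brightness and color temperature of a room light.',
    Schema.object(properties: {
      'brightness': Schema.number(description: 'Light level from 0 to 100'),
      'colorTemperature': Schema.string(description: 'e.g. daylight, cool, warm'),
    }),
  );
  final model = GenerativeModel(
    model: 'gemini-1.5-flash',
    apiKey: apiKey,
    tools: [Tool(functionDeclarations: [controlLight])],
  );
  final chat = model.startChat();
  final response =
      await chat.sendMessage(Content.text('Dim the lights so the room feels cozy.'));
  // Inspect any function calls the model requested instead of (or alongside) text.
  for (final call in response.functionCalls) {
    print('Model requested ${call.name} with args ${call.args}');
  }
}
```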
4 changes: 4 additions & 0 deletions samples/dart/bin/safety_settings.dart
@@ -27,6 +27,8 @@ final apiKey = () {

Future<void> safetySettings() async {
// [START safety_settings]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -55,6 +57,8 @@ Future<void> safetySettings() async {

Future<void> safetySettingsMulti() async {
// [START safety_settings_multi]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
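
An illustrative sketch of the safety-settings pattern in `safety_settings.dart`; the category and threshold choices are examples, not recommendations from this commit.

```dart
// Illustrative sketch of attaching safety settings to a model instance.
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY']!;
  final model = GenerativeModel(
    model: 'gemini-1.5-flash',
    apiKey: apiKey,
    // These settings apply to every request made through this model instance.
    safetySettings: [
      SafetySetting(HarmCategory.harassment, HarmBlockThreshold.high),
      SafetySetting(HarmCategory.hateSpeech, HarmBlockThreshold.high),
    ],
  );
  final response =
      await model.generateContent([Content.text('Write a friendly greeting.')]);
  print(response.text);
}
```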
2 changes: 2 additions & 0 deletions samples/dart/bin/system_instructions.dart
@@ -27,6 +27,8 @@ final apiKey = () {

Future<void> systemInstructions() async {
// [START system_instructions]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
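
A minimal sketch of the system-instruction pattern in `system_instructions.dart`, using `Content.system`; the persona text is invented for illustration.

```dart
// Minimal system-instruction sketch; the persona is a placeholder.
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY']!;
  final model = GenerativeModel(
    model: 'gemini-1.5-flash',
    apiKey: apiKey,
    // The system instruction steers every response from this model instance.
    systemInstruction: Content.system('You are a cat. Your name is Neko.'),
  );
  final response =
      await model.generateContent([Content.text('Good morning! How are you?')]);
  print(response.text);
}
```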
12 changes: 12 additions & 0 deletions samples/dart/bin/text_generation.dart
@@ -27,6 +27,8 @@ final apiKey = () {

Future<void> textGenTextOnlyPrompt() async {
// [START text_gen_text_only_prompt]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -40,6 +42,8 @@ Future<void> textGenTextOnlyPrompt() async {

Future<void> textGenTextOnlyPromptStreaming() async {
// [START text_gen_text_only_prompt_streaming]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -55,6 +59,8 @@ Future<void> textGenTextOnlyPromptStreaming() async {

Future<void> textGenMultimodalOneImagePrompt() async {
// [START text_gen_multimodal_one_image_prompt]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -76,6 +82,8 @@ Future<void> textGenMultimodalOneImagePrompt() async {

Future<void> textGenMultimodalOneImagePromptStreaming() async {
// [START text_gen_multimodal_one_image_prompt_streaming]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -99,6 +107,8 @@ Future<void> textGenMultimodalOneImagePromptStreaming() async {

Future<void> textGenMultimodalMultiImagePrompt() async {
// [START text_gen_multimodal_multi_image_prompt]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
@@ -125,6 +135,8 @@ Future<void> textGenMultimodalMultiImagePrompt() async {

Future<void> textGenMultimodalMultiImagePromptStreaming() async {
// [START text_gen_multimodal_multi_image_prompt_streaming]
// Make sure to include this import:
// import 'package:google_generative_ai/google_generative_ai.dart';
final model = GenerativeModel(
model: 'gemini-1.5-flash',
apiKey: apiKey,
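
Finally, a combined sketch of the two patterns `text_generation.dart` exercises most: streaming text generation and a single-image multimodal prompt. The prompt strings and the `cat.jpg` path are placeholders.

```dart
// Hedged sketch of streaming generation plus an inline-image prompt.
// Assumes a JPEG named cat.jpg exists in the working directory.
import 'dart:io';

import 'package:google_generative_ai/google_generative_ai.dart';

Future<void> main() async {
  final apiKey = Platform.environment['GOOGLE_API_KEY']!;
  final model = GenerativeModel(model: 'gemini-1.5-flash', apiKey: apiKey);

  // Stream partial responses as they arrive instead of waiting for the full answer.
  final responses = model
      .generateContentStream([Content.text('Write a short story about a magic backpack.')]);
  await for (final chunk in responses) {
    stdout.write(chunk.text ?? '');
  }
  stdout.writeln();

  // Combine text and inline image bytes in a single multimodal prompt.
  final image = await File('cat.jpg').readAsBytes();
  final response = await model.generateContent([
    Content.multi([
      TextPart('What is in this picture?'),
      DataPart('image/jpeg', image),
    ]),
  ]);
  print(response.text);
}
```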
