From a8ac99e911697a5f3d03fe4c2947abf675ffab1e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ros=C3=A1rio=20P=2E=20Fernandes?=
Date: Thu, 15 Feb 2024 16:35:17 +0000
Subject: [PATCH 1/2] update model names to use generations (#62)

Googlers see [b/325447428](http://b/325447428)
---
 README.md                                                | 2 +-
 .../com/google/ai/sample/GenerativeAiViewModelFactory.kt | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 00444dba..622b823a 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ For example, with just a few lines of code, you can access Gemini's multimodal c
 
 ```kotlin
 val generativeModel = GenerativeModel(
-    modelName = "gemini-pro-vision",
+    modelName = "gemini-1.0-pro-vision-latest",
     apiKey = BuildConfig.apiKey
 )
 
diff --git a/generativeai-android-sample/app/src/main/kotlin/com/google/ai/sample/GenerativeAiViewModelFactory.kt b/generativeai-android-sample/app/src/main/kotlin/com/google/ai/sample/GenerativeAiViewModelFactory.kt
index 007606bc..ff0b98d4 100644
--- a/generativeai-android-sample/app/src/main/kotlin/com/google/ai/sample/GenerativeAiViewModelFactory.kt
+++ b/generativeai-android-sample/app/src/main/kotlin/com/google/ai/sample/GenerativeAiViewModelFactory.kt
@@ -40,7 +40,7 @@ val GenerativeViewModelFactory = object : ViewModelProvider.Factory {
         // Initialize a GenerativeModel with the `gemini-pro` AI model
         // for text generation
         val generativeModel = GenerativeModel(
-          modelName = "gemini-pro",
+          modelName = "gemini-1.0-pro",
           apiKey = BuildConfig.apiKey,
           generationConfig = config
         )
@@ -51,7 +51,7 @@ val GenerativeViewModelFactory = object : ViewModelProvider.Factory {
         // Initialize a GenerativeModel with the `gemini-pro-vision` AI model
         // for multimodal text generation
         val generativeModel = GenerativeModel(
-          modelName = "gemini-pro-vision",
+          modelName = "gemini-1.0-pro-vision-latest",
           apiKey = BuildConfig.apiKey,
           generationConfig = config
         )
@@ -61,7 +61,7 @@ val GenerativeViewModelFactory = object : ViewModelProvider.Factory {
       isAssignableFrom(ChatViewModel::class.java) -> {
         // Initialize a GenerativeModel with the `gemini-pro` AI model for chat
         val generativeModel = GenerativeModel(
-          modelName = "gemini-pro",
+          modelName = "gemini-1.0-pro",
          apiKey = BuildConfig.apiKey,
           generationConfig = config
         )

From 774d2333c1e16878e1118ebfaaa3a47aad1d2dd3 Mon Sep 17 00:00:00 2001
From: Rodrigo Lazo Paz
Date: Thu, 22 Feb 2024 11:03:35 -0500
Subject: [PATCH 2/2] Make "user" the default role.

Instead of leaving it empty, we should default to "user".
---
 .../main/java/com/google/ai/client/generativeai/Chat.kt | 8 ++++----
 .../ai/client/generativeai/internal/api/shared/Types.kt | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/generativeai/src/main/java/com/google/ai/client/generativeai/Chat.kt b/generativeai/src/main/java/com/google/ai/client/generativeai/Chat.kt
index 2abe4b72..b536c244 100644
--- a/generativeai/src/main/java/com/google/ai/client/generativeai/Chat.kt
+++ b/generativeai/src/main/java/com/google/ai/client/generativeai/Chat.kt
@@ -73,7 +73,7 @@ class Chat(private val model: GenerativeModel, val history: MutableList<Content>
    * @throws InvalidStateException if the [Chat] instance has an active request.
    */
   suspend fun sendMessage(prompt: String): GenerateContentResponse {
-    val content = content("user") { text(prompt) }
+    val content = content { text(prompt) }
     return sendMessage(content)
   }
 
@@ -84,7 +84,7 @@ class Chat(private val model: GenerativeModel, val history: MutableList<Content>
    * @throws InvalidStateException if the [Chat] instance has an active request.
    */
   suspend fun sendMessage(prompt: Bitmap): GenerateContentResponse {
-    val content = content("user") { image(prompt) }
+    val content = content { image(prompt) }
     return sendMessage(content)
   }
 
@@ -150,7 +150,7 @@ class Chat(private val model: GenerativeModel, val history: MutableList<Content>
    * @throws InvalidStateException if the [Chat] instance has an active request.
    */
   fun sendMessageStream(prompt: String): Flow<GenerateContentResponse> {
-    val content = content("user") { text(prompt) }
+    val content = content { text(prompt) }
     return sendMessageStream(content)
   }
 
@@ -162,7 +162,7 @@ class Chat(private val model: GenerativeModel, val history: MutableList<Content>
    * @throws InvalidStateException if the [Chat] instance has an active request.
    */
   fun sendMessageStream(prompt: Bitmap): Flow<GenerateContentResponse> {
-    val content = content("user") { image(prompt) }
+    val content = content { image(prompt) }
     return sendMessageStream(content)
   }
 
diff --git a/generativeai/src/main/java/com/google/ai/client/generativeai/internal/api/shared/Types.kt b/generativeai/src/main/java/com/google/ai/client/generativeai/internal/api/shared/Types.kt
index 3f15f807..8f7e2242 100644
--- a/generativeai/src/main/java/com/google/ai/client/generativeai/internal/api/shared/Types.kt
+++ b/generativeai/src/main/java/com/google/ai/client/generativeai/internal/api/shared/Types.kt
@@ -40,7 +40,7 @@ internal enum class HarmCategory {
 
 typealias Base64 = String
 
-@Serializable internal data class Content(val role: String? = null, val parts: List<Part>)
+@Serializable internal data class Content(val role: String? = "user", val parts: List<Part>)
 
 @Serializable(PartSerializer::class) internal sealed interface Part
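
For reference, a minimal usage sketch of the public API with both patches applied. It assumes an Android project depending on the generativeai SDK and an API key supplied by the caller; the `demo` wrapper function and its `apiKey` parameter are illustrative and not part of this change.

```kotlin
import com.google.ai.client.generativeai.GenerativeModel

// Illustrative sketch only: `demo` and `apiKey` are placeholders.
suspend fun demo(apiKey: String) {
    // Generation-qualified model name introduced in PATCH 1/2.
    val generativeModel = GenerativeModel(
        modelName = "gemini-1.0-pro",
        apiKey = apiKey,
    )

    val chat = generativeModel.startChat()

    // After PATCH 2/2, the SDK builds the prompt with `content { text(prompt) }`
    // and the omitted role defaults to "user", so callers no longer rely on an
    // explicit content("user") { ... } being passed internally.
    val response = chat.sendMessage("Write a haiku about Kotlin.")
    println(response.text)
}
```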