Commit 283f429

Add sample app for bidirectional streaming (#2716)
Adds a sample app for bidirectional streaming. The sample also includes built-in function calling, so users can exercise function calling alongside bidirectional (bidi) streaming.
1 parent c8767a2 commit 283f429

6 files changed (+275, -5 lines)


firebase-ai/app/src/main/AndroidManifest.xml

Lines changed: 4 additions & 0 deletions
@@ -2,6 +2,10 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     xmlns:tools="http://schemas.android.com/tools">

+    <uses-permission android:name="android.permission.INTERNET" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
     <application
         android:allowBackup="true"
         android:dataExtractionRules="@xml/data_extraction_rules"

firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/FirebaseAISamples.kt

Lines changed: 28 additions & 0 deletions
@@ -270,6 +270,34 @@ val FIREBASE_AI_SAMPLES = listOf(
             )
         }
     ),
+    Sample(
+        title = "ForecastTalk",
+        description = "Use bidirectional streaming to get information about" +
+            " weather conditions for a specific US city on a specific date",
+        navRoute = "stream",
+        categories = listOf(Category.LIVE_API, Category.AUDIO, Category.FUNCTION_CALLING),
+        tools = listOf(
+            Tool.functionDeclarations(
+                listOf(
+                    FunctionDeclaration(
+                        "fetchWeather",
+                        "Get the weather conditions for a specific US city on a specific date.",
+                        mapOf(
+                            "city" to Schema.string("The US city of the location."),
+                            "state" to Schema.string("The US state of the location."),
+                            "date" to Schema.string(
+                                "The date for which to get the weather." +
+                                    " Date must be in the format: YYYY-MM-DD."
+                            ),
+                        ),
+                    )
+                )
+            )
+        ),
+        initialPrompt = content {
+            text("What was the weather in Boston, MA on October 17, 2024?")
+        }
+    ),
     Sample(
         title = "Weather Chat",
         description = "Use function calling to get the weather conditions" +

firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/MainActivity.kt

Lines changed: 8 additions & 0 deletions
@@ -1,5 +1,7 @@
 package com.google.firebase.quickstart.ai

+import android.Manifest
+import android.content.pm.PackageManager
 import android.graphics.Bitmap
 import android.graphics.BitmapFactory
 import android.os.Bundle
@@ -19,6 +21,8 @@ import androidx.compose.runtime.mutableStateOf
 import androidx.compose.runtime.saveable.rememberSaveable
 import androidx.compose.runtime.setValue
 import androidx.compose.ui.Modifier
+import androidx.core.app.ActivityCompat
+import androidx.core.content.ContextCompat
 import androidx.navigation.NavController
 import androidx.navigation.NavDestination
 import androidx.navigation.compose.NavHost
@@ -38,6 +42,10 @@ class MainActivity : ComponentActivity() {
     @OptIn(ExperimentalMaterial3Api::class)
     override fun onCreate(savedInstanceState: Bundle?) {
         super.onCreate(savedInstanceState)
+        if (ContextCompat.checkSelfPermission(this,
+                Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
+            ActivityCompat.requestPermissions(this, arrayOf(Manifest.permission.RECORD_AUDIO), 1)
+        }
         enableEdgeToEdge()
         catImage = BitmapFactory.decodeResource(applicationContext.resources, R.drawable.cat)
         setContent {
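
The check above fires a one-shot requestPermissions call with request code 1 and never inspects the result, so a denial only becomes visible later, when there is nothing to record. A minimal sketch of an alternative using the Activity Result API, in case you want to react to the user's choice (the class and launcher names are illustrative, not part of this commit):

import android.Manifest
import android.content.pm.PackageManager
import android.os.Bundle
import android.widget.Toast
import androidx.activity.ComponentActivity
import androidx.activity.result.contract.ActivityResultContracts
import androidx.core.content.ContextCompat

// Hypothetical variant of MainActivity's permission check.
class PermissionAwareActivity : ComponentActivity() {

    // Receives the user's grant/deny decision.
    private val audioPermissionLauncher =
        registerForActivityResult(ActivityResultContracts.RequestPermission()) { granted ->
            if (!granted) {
                Toast.makeText(this, "Microphone access is needed for the live audio sample", Toast.LENGTH_LONG).show()
            }
        }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED
        ) {
            audioPermissionLauncher.launch(Manifest.permission.RECORD_AUDIO)
        }
    }
}
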
firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/feature/media/imagen/BidiViewModel.kt

Lines changed: 103 additions & 0 deletions

@@ -0,0 +1,103 @@
+package com.google.firebase.quickstart.ai.feature.media.imagen
+
+import android.Manifest
+import android.graphics.Bitmap
+import androidx.annotation.RequiresPermission
+import androidx.lifecycle.SavedStateHandle
+import androidx.lifecycle.ViewModel
+import androidx.lifecycle.viewModelScope
+import androidx.navigation.toRoute
+import com.google.firebase.Firebase
+import com.google.firebase.ai.FirebaseAI
+import com.google.firebase.ai.ImagenModel
+import com.google.firebase.ai.LiveGenerativeModel
+import com.google.firebase.ai.ai
+import com.google.firebase.ai.type.FunctionCallPart
+import com.google.firebase.ai.type.FunctionResponsePart
+import com.google.firebase.ai.type.GenerativeBackend
+import com.google.firebase.ai.type.ImagenAspectRatio
+import com.google.firebase.ai.type.ImagenImageFormat
+import com.google.firebase.ai.type.ImagenPersonFilterLevel
+import com.google.firebase.ai.type.ImagenSafetyFilterLevel
+import com.google.firebase.ai.type.ImagenSafetySettings
+import com.google.firebase.ai.type.InlineDataPart
+import com.google.firebase.ai.type.LiveServerContent
+import com.google.firebase.ai.type.LiveServerMessage
+import com.google.firebase.ai.type.LiveSession
+import com.google.firebase.ai.type.PublicPreviewAPI
+import com.google.firebase.ai.type.ResponseModality
+import com.google.firebase.ai.type.SpeechConfig
+import com.google.firebase.ai.type.TextPart
+import com.google.firebase.ai.type.Tool
+import com.google.firebase.ai.type.Voice
+import com.google.firebase.ai.type.asTextOrNull
+import com.google.firebase.ai.type.imagenGenerationConfig
+import com.google.firebase.ai.type.liveGenerationConfig
+import com.google.firebase.app
+import com.google.firebase.quickstart.ai.FIREBASE_AI_SAMPLES
+import com.google.firebase.quickstart.ai.feature.live.StreamRealtimeRoute
+import com.google.firebase.quickstart.ai.feature.text.functioncalling.WeatherRepository
+import com.google.firebase.quickstart.ai.feature.text.functioncalling.WeatherRepository.Companion.fetchWeather
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.flow.MutableStateFlow
+import kotlinx.coroutines.flow.StateFlow
+import kotlinx.coroutines.launch
+import kotlinx.coroutines.runBlocking
+import kotlinx.serialization.json.JsonObject
+import kotlinx.serialization.json.jsonPrimitive
+
+@OptIn(PublicPreviewAPI::class)
+class BidiViewModel(
+    savedStateHandle: SavedStateHandle
+) : ViewModel() {
+    private val sampleId = savedStateHandle.toRoute<StreamRealtimeRoute>().sampleId
+    private val sample = FIREBASE_AI_SAMPLES.first { it.id == sampleId }
+
+    // Firebase AI Logic
+    private var liveSession: LiveSession
+
+    init {
+        val liveGenerationConfig = liveGenerationConfig {
+            speechConfig = SpeechConfig(voice = Voice("CHARON"))
+            // Change this to ResponseModality.TEXT if you want text output.
+            responseModality = ResponseModality.AUDIO
+        }
+        @OptIn(PublicPreviewAPI::class)
+        val liveModel = FirebaseAI.getInstance(Firebase.app, sample.backend).liveModel(
+            "gemini-live-2.5-flash",
+            generationConfig = liveGenerationConfig,
+            tools = sample.tools
+        )
+        runBlocking {
+            liveSession = liveModel.connect()
+        }
+    }
+
+    fun handler(fetchWeatherCall: FunctionCallPart): FunctionResponsePart {
+        val response: JsonObject
+        fetchWeatherCall.let {
+            val city = it.args["city"]?.jsonPrimitive?.content
+            val state = it.args["state"]?.jsonPrimitive?.content
+            val date = it.args["date"]?.jsonPrimitive?.content
+            runBlocking {
+                response = if (!city.isNullOrEmpty() && !state.isNullOrEmpty() && !date.isNullOrEmpty()) {
+                    fetchWeather(city!!, state!!, date!!)
+                } else {
+                    JsonObject(emptyMap())
+                }
+            }
+        }
+        return FunctionResponsePart("fetchWeather", response, fetchWeatherCall.id)
+    }
+
+    @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+    suspend fun startConversation() {
+        liveSession.startAudioConversation(::handler)
+    }
+
+    fun endConversation() {
+        liveSession.stopAudioConversation()
+    }
+}
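
Several imports above (LiveServerMessage, LiveServerContent, TextPart, asTextOrNull) are unused on the audio path; they become relevant if responseModality is switched to TEXT, in which case the app reads the session's message flow itself instead of calling startAudioConversation. The sketch below assumes that text configuration and that the live session exposes send()/receive() as in the Firebase AI Logic Live API documentation; treat the exact method and property names as unverified against your SDK version:

// Sketch only: assumes responseModality = ResponseModality.TEXT in liveGenerationConfig above.
// This would sit inside BidiViewModel, reusing its liveSession and viewModelScope.
fun askInText(prompt: String) {
    viewModelScope.launch {
        liveSession.send(prompt)
        liveSession.receive().collect { message ->
            if (message is LiveServerContent) {
                // Concatenate any text parts in this server turn.
                val text = message.content?.parts
                    ?.mapNotNull { it.asTextOrNull() }
                    ?.joinToString("")
                if (!text.isNullOrEmpty()) println(text)
            }
        }
    }
}
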
Lines changed: 131 additions & 5 deletions

@@ -1,20 +1,146 @@
 package com.google.firebase.quickstart.ai.feature.live

-import androidx.compose.foundation.layout.Box
+import android.Manifest
+import androidx.annotation.RequiresPermission
+import androidx.compose.animation.animateContentSize
+import androidx.compose.foundation.layout.Arrangement
+import androidx.compose.foundation.layout.Column
+import androidx.compose.foundation.layout.Spacer
 import androidx.compose.foundation.layout.fillMaxSize
+import androidx.compose.foundation.layout.height
+import androidx.compose.foundation.layout.padding
+import androidx.compose.foundation.layout.size
+import androidx.compose.foundation.shape.CircleShape
+import androidx.compose.material.icons.Icons
+import androidx.compose.material.icons.filled.CallEnd
+import androidx.compose.material.icons.filled.Mic
+import androidx.compose.material3.Icon
+import androidx.compose.material3.IconButton
+import androidx.compose.material3.IconButtonDefaults
+import androidx.compose.material3.MaterialTheme
+import androidx.compose.material3.Surface
 import androidx.compose.material3.Text
 import androidx.compose.runtime.Composable
+import androidx.compose.ui.Alignment
 import androidx.compose.ui.Modifier
+import androidx.compose.ui.draw.clip
+import androidx.compose.ui.graphics.Color
+import androidx.compose.ui.text.font.FontWeight
+import androidx.compose.ui.unit.dp
+import androidx.compose.ui.unit.sp
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.mutableStateOf
+
+import androidx.lifecycle.viewmodel.compose.viewModel
+import com.google.firebase.quickstart.ai.feature.media.imagen.BidiViewModel
+import com.google.firebase.quickstart.ai.feature.media.imagen.ImagenViewModel
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.launch
 import kotlinx.serialization.Serializable

 @Serializable
 class StreamRealtimeRoute(val sampleId: String)

+@RequiresPermission(Manifest.permission.RECORD_AUDIO)
 @Composable
-fun StreamRealtimeScreen() {
-    Box(
-        modifier = Modifier.fillMaxSize()
+fun StreamRealtimeScreen(bidiView: BidiViewModel = viewModel<BidiViewModel>()) {
+    val isConversationActive = remember { mutableStateOf(false) }
+    val backgroundColor = MaterialTheme.colorScheme.background
+    Surface(
+        modifier = Modifier.fillMaxSize(),
+        color = backgroundColor
     ) {
-        Text("Coming soon")
+        Column(
+            modifier = Modifier
+                .fillMaxSize()
+                .padding(16.dp),
+            horizontalAlignment = Alignment.CenterHorizontally,
+            verticalArrangement = Arrangement.Center
+        ) {
+            // The content will animate its size when it changes
+            Column(
+                horizontalAlignment = Alignment.CenterHorizontally,
+                modifier = Modifier.animateContentSize()
+            ) {
+                if (isConversationActive.value) {
+                    // Active state UI
+                    Text(
+                        text = "Conversation Active",
+                        fontSize = 22.sp,
+                        fontWeight = FontWeight.Bold,
+                        color = MaterialTheme.colorScheme.onSurface
+                    )
+                    Spacer(modifier = Modifier.height(8.dp))
+                    Text(
+                        text = "Tap the end button to stop",
+                        fontSize = 18.sp,
+                        color = MaterialTheme.colorScheme.onSurfaceVariant
+                    )
+                } else {
+                    // Idle state UI
+                    Text(
+                        text = "Start Conversation",
+                        fontSize = 22.sp,
+                        fontWeight = FontWeight.Bold,
+                        color = MaterialTheme.colorScheme.onSurface
+                    )
+                    Spacer(modifier = Modifier.height(8.dp))
+                    Text(
+                        text = "Tap the microphone to begin",
+                        fontSize = 18.sp,
+                        color = MaterialTheme.colorScheme.onSurfaceVariant
+                    )
+                }
+            }
+
+            Spacer(modifier = Modifier.height(80.dp))
+
+            // The main call button: mic to start, red button to hang up
+            if (isConversationActive.value) {
+                // Button to end the conversation
+                IconButton(
+                    onClick = {
+                        bidiView.endConversation()
+                        isConversationActive.value = false
+                    },
+                    modifier = Modifier
+                        .size(90.dp)
+                        .clip(CircleShape),
+                    colors = IconButtonDefaults.iconButtonColors(
+                        containerColor = Color(0xFFE63946), // A nice red color
+                        contentColor = Color.White
+                    )
+                ) {
+                    Icon(
+                        imageVector = Icons.Default.CallEnd,
+                        contentDescription = "End Conversation",
+                        modifier = Modifier.size(48.dp)
+                    )
+                }
+            } else {
+                // Button to start the conversation
+                IconButton(
+                    onClick = {
+                        CoroutineScope(Dispatchers.IO).launch {
+                            bidiView.startConversation()
+                        }
+                        isConversationActive.value = true
+                    },
+                    modifier = Modifier
+                        .size(90.dp)
+                        .clip(CircleShape),
+                    colors = IconButtonDefaults.iconButtonColors(
+                        containerColor = MaterialTheme.colorScheme.primary,
+                        contentColor = Color.White
+                    )
+                ) {
+                    Icon(
+                        imageVector = Icons.Default.Mic,
+                        contentDescription = "Start Conversation",
+                        modifier = Modifier.size(48.dp)
+                    )
+                }
+            }
+        }
     }
 }
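
One design note on the mic button above: startConversation() is launched in an ad-hoc CoroutineScope(Dispatchers.IO) that nothing ever cancels, so the work outlives the composable. A small sketch of the more common Compose pattern, using rememberCoroutineScope() so the launch is tied to the composition (the composable name is invented; this variant is not part of the commit):

import android.Manifest
import androidx.annotation.RequiresPermission
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Mic
import androidx.compose.material3.Icon
import androidx.compose.material3.IconButton
import androidx.compose.runtime.Composable
import androidx.compose.runtime.rememberCoroutineScope
import com.google.firebase.quickstart.ai.feature.media.imagen.BidiViewModel
import kotlinx.coroutines.launch

// Hypothetical replacement for the "start" branch of the button.
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@Composable
fun StartConversationButton(bidiView: BidiViewModel, onStarted: () -> Unit) {
    // Cancelled automatically when this composable leaves the composition.
    val scope = rememberCoroutineScope()
    IconButton(onClick = {
        scope.launch {
            bidiView.startConversation()
            onStarted()
        }
    }) {
        Icon(imageVector = Icons.Default.Mic, contentDescription = "Start Conversation")
    }
}

Alternatively, the launch could move into BidiViewModel on viewModelScope, keeping coroutine plumbing out of the UI layer entirely.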

firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/ui/navigation/Sample.kt

Lines changed: 1 addition & 0 deletions
@@ -21,6 +21,7 @@ enum class Category(
     AUDIO("Audio"),
     DOCUMENT("Document"),
     FUNCTION_CALLING("Function calling"),
+    LIVE_API("LiveAPI Streaming")
 }

 @OptIn(PublicPreviewAPI::class)
