diff --git a/apps/android/app/src/main/java/ai/openclaw/app/AssistantLaunch.kt b/apps/android/app/src/main/java/ai/openclaw/app/AssistantLaunch.kt new file mode 100644 index 00000000000..59dd5c3ef57 --- /dev/null +++ b/apps/android/app/src/main/java/ai/openclaw/app/AssistantLaunch.kt @@ -0,0 +1,40 @@ +package ai.openclaw.app + +import android.content.Intent + +const val actionAskOpenClaw = "ai.openclaw.app.action.ASK_OPENCLAW" +const val extraAssistantPrompt = "prompt" + +enum class HomeDestination { + Connect, + Chat, + Voice, + Screen, + Settings, +} + +data class AssistantLaunchRequest( + val source: String, + val prompt: String?, +) + +fun parseAssistantLaunchIntent(intent: Intent?): AssistantLaunchRequest? { + val action = intent?.action ?: return null + return when (action) { + Intent.ACTION_ASSIST -> + AssistantLaunchRequest( + source = "assist", + prompt = null, + ) + + actionAskOpenClaw -> { + val prompt = intent.getStringExtra(extraAssistantPrompt)?.trim()?.ifEmpty { null } + AssistantLaunchRequest( + source = "app_action", + prompt = prompt, + ) + } + + else -> null + } +} diff --git a/apps/android/app/src/main/java/ai/openclaw/app/MainActivity.kt b/apps/android/app/src/main/java/ai/openclaw/app/MainActivity.kt index d9ad83175b4..e9fa6d61853 100644 --- a/apps/android/app/src/main/java/ai/openclaw/app/MainActivity.kt +++ b/apps/android/app/src/main/java/ai/openclaw/app/MainActivity.kt @@ -23,6 +23,7 @@ class MainActivity : ComponentActivity() { override fun onCreate(savedInstanceState: Bundle?) 
{ super.onCreate(savedInstanceState) + handleAssistantIntent(intent) WindowCompat.setDecorFitsSystemWindows(window, false) permissionRequester = PermissionRequester(this) @@ -70,4 +71,15 @@ class MainActivity : ComponentActivity() { viewModel.setForeground(false) super.onStop() } + + override fun onNewIntent(intent: android.content.Intent) { + super.onNewIntent(intent) + setIntent(intent) + handleAssistantIntent(intent) + } + + private fun handleAssistantIntent(intent: android.content.Intent?) { + val request = parseAssistantLaunchIntent(intent) ?: return + viewModel.handleAssistantLaunch(request) + } } diff --git a/apps/android/app/src/main/java/ai/openclaw/app/MainViewModel.kt b/apps/android/app/src/main/java/ai/openclaw/app/MainViewModel.kt index 0e27b801a49..2fa4e2783c2 100644 --- a/apps/android/app/src/main/java/ai/openclaw/app/MainViewModel.kt +++ b/apps/android/app/src/main/java/ai/openclaw/app/MainViewModel.kt @@ -27,6 +27,10 @@ class MainViewModel(app: Application) : AndroidViewModel(app) { private val prefs = nodeApp.prefs private val runtimeRef = MutableStateFlow<NodeRuntime?>(null) private var foreground = true + private val _requestedHomeDestination = MutableStateFlow<HomeDestination?>(null) + val requestedHomeDestination: StateFlow<HomeDestination?> = _requestedHomeDestination + private val _chatDraft = MutableStateFlow<String?>(null) + val chatDraft: StateFlow<String?> = _chatDraft private fun ensureRuntime(): NodeRuntime { runtimeRef.value?.let { return it } @@ -246,6 +250,19 @@ class MainViewModel(app: Application) : AndroidViewModel(app) { ensureRuntime().setVoiceScreenActive(active) } + fun handleAssistantLaunch(request: AssistantLaunchRequest) { + _requestedHomeDestination.value = HomeDestination.Chat + _chatDraft.value = request.prompt + } + + fun clearRequestedHomeDestination() { + _requestedHomeDestination.value = null + } + + fun clearChatDraft() { + _chatDraft.value = null + } + fun setMicEnabled(enabled: Boolean) { ensureRuntime().setMicEnabled(enabled) } diff --git 
a/apps/android/app/src/main/java/ai/openclaw/app/ui/PostOnboardingTabs.kt b/apps/android/app/src/main/java/ai/openclaw/app/ui/PostOnboardingTabs.kt index 133252c6f8e..87c179ffe1b 100644 --- a/apps/android/app/src/main/java/ai/openclaw/app/ui/PostOnboardingTabs.kt +++ b/apps/android/app/src/main/java/ai/openclaw/app/ui/PostOnboardingTabs.kt @@ -46,6 +46,7 @@ import androidx.compose.ui.graphics.vector.ImageVector import androidx.compose.ui.platform.LocalDensity import androidx.compose.ui.text.font.FontWeight import androidx.compose.ui.unit.dp +import ai.openclaw.app.HomeDestination import ai.openclaw.app.MainViewModel private enum class HomeTab( @@ -72,6 +73,20 @@ fun PostOnboardingTabs(viewModel: MainViewModel, modifier: Modifier = Modifier) var activeTab by rememberSaveable { mutableStateOf(HomeTab.Connect) } var chatTabStarted by rememberSaveable { mutableStateOf(false) } var screenTabStarted by rememberSaveable { mutableStateOf(false) } + val requestedHomeDestination by viewModel.requestedHomeDestination.collectAsState() + + LaunchedEffect(requestedHomeDestination) { + val destination = requestedHomeDestination ?: return@LaunchedEffect + activeTab = + when (destination) { + HomeDestination.Connect -> HomeTab.Connect + HomeDestination.Chat -> HomeTab.Chat + HomeDestination.Voice -> HomeTab.Voice + HomeDestination.Screen -> HomeTab.Screen + HomeDestination.Settings -> HomeTab.Settings + } + viewModel.clearRequestedHomeDestination() + } // Stop TTS when user navigates away from voice tab, and lazily keep the Chat/Screen tabs // alive after the first visit so repeated tab switches do not rebuild their UI trees. 
diff --git a/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatComposer.kt b/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatComposer.kt index 1adcc34c2d6..b453c38d898 100644 --- a/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatComposer.kt +++ b/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatComposer.kt @@ -33,6 +33,7 @@ import androidx.compose.material3.OutlinedTextFieldDefaults import androidx.compose.material3.Surface import androidx.compose.material3.Text import androidx.compose.runtime.Composable +import androidx.compose.runtime.LaunchedEffect import androidx.compose.runtime.getValue import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.remember @@ -61,10 +62,12 @@ import ai.openclaw.app.ui.mobileTextTertiary @Composable fun ChatComposer( + draftText: String?, healthOk: Boolean, thinkingLevel: String, pendingRunCount: Int, attachments: List, + onDraftApplied: () -> Unit, onPickImages: () -> Unit, onRemoveAttachment: (id: String) -> Unit, onSetThinkingLevel: (level: String) -> Unit, @@ -73,8 +76,17 @@ fun ChatComposer( onSend: (text: String) -> Unit, ) { var input by rememberSaveable { mutableStateOf("") } + var lastAppliedDraft by rememberSaveable { mutableStateOf<String?>(null) } var showThinkingMenu by remember { mutableStateOf(false) } + LaunchedEffect(draftText) { + val draft = draftText?.trim()?.ifEmpty { null } ?: return@LaunchedEffect + onDraftApplied() + if (draft == lastAppliedDraft) return@LaunchedEffect + input = draft + lastAppliedDraft = draft + } + val canSend = pendingRunCount == 0 && (input.trim().isNotEmpty() || attachments.isNotEmpty()) && healthOk val sendBusy = pendingRunCount > 0 diff --git a/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatSheetContent.kt b/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatSheetContent.kt index 491d07dd98e..9367de647e8 100644 --- a/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatSheetContent.kt +++ 
b/apps/android/app/src/main/java/ai/openclaw/app/ui/chat/ChatSheetContent.kt @@ -60,6 +60,7 @@ fun ChatSheetContent(viewModel: MainViewModel) { val streamingAssistantText by viewModel.chatStreamingAssistantText.collectAsState() val pendingToolCalls by viewModel.chatPendingToolCalls.collectAsState() val sessions by viewModel.chatSessions.collectAsState() + val chatDraft by viewModel.chatDraft.collectAsState() LaunchedEffect(Unit) { viewModel.loadChat(mainSessionKey) @@ -118,10 +119,12 @@ fun ChatSheetContent(viewModel: MainViewModel) { Row(modifier = Modifier.fillMaxWidth().imePadding()) { ChatComposer( + draftText = chatDraft, healthOk = healthOk, thinkingLevel = thinkingLevel, pendingRunCount = pendingRunCount, attachments = attachments, + onDraftApplied = viewModel::clearChatDraft, onPickImages = { pickImages.launch("image/*") }, onRemoveAttachment = { id -> attachments.removeAll { it.id == id } }, onSetThinkingLevel = { level -> viewModel.setChatThinkingLevel(level) },