├── .gitignore ├── .google └── packaging.yaml ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── app ├── build.gradle └── src │ └── main │ ├── AndroidManifest.xml │ ├── java │ └── com │ │ └── example │ │ └── androidthings │ │ └── assistant │ │ ├── AssistantActivity.java │ │ ├── BoardDefaults.java │ │ ├── Credentials.java │ │ └── EmbeddedAssistant.java │ └── res │ ├── layout │ └── activity_main.xml │ └── values │ └── strings.xml ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── grpc ├── .gitignore ├── build.gradle ├── proguard-rules.pro └── src │ └── main │ ├── AndroidManifest.xml │ └── proto │ └── google │ ├── api │ ├── annotations.proto │ └── http.proto │ ├── assistant │ └── embedded │ │ ├── README.md │ │ └── v1alpha2 │ │ └── embedded_assistant.proto │ ├── protobuf │ ├── any.proto │ └── descriptor.proto │ ├── rpc │ └── status.proto │ └── type │ └── latlng.proto └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | ### Android ### 2 | # Built application files 3 | *.apk 4 | *.ap_ 5 | 6 | # Files for the ART/Dalvik VM 7 | *.dex 8 | 9 | # Java class files 10 | *.class 11 | 12 | # Generated files 13 | bin/ 14 | gen/ 15 | out/ 16 | 17 | # Gradle files 18 | .gradle/ 19 | build/ 20 | 21 | # Local configuration file (sdk path, etc) 22 | local.properties 23 | gradle.properties 24 | 25 | # Proguard folder generated by Eclipse 26 | proguard/ 27 | 28 | # Log Files 29 | *.log 30 | 31 | # Android Studio Navigation editor temp files 32 | .navigation/ 33 | 34 | # Android Studio captures folder 35 | captures/ 36 | 37 | # Intellij 38 | *.iml 39 | *.iws 40 | .idea/ 41 | 42 | # External native build folder generated in Android Studio 2.2 and later 43 | .externalNativeBuild 44 | 45 | .DS_Store 46 | 47 | app/src/main/res/raw/credentials.json 48 | -------------------------------------------------------------------------------- /.google/packaging.yaml: -------------------------------------------------------------------------------- 1 | status: PUBLISHED 2 | technologies: [Android, Android Things] 3 | categories: [Android Things] 4 | languages: [Java] 5 | solutions: [IoT] 6 | github: androidthings/sample-googleassistant 7 | license: apache2 8 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to become a contributor and submit your own code 2 | 3 | ## Contributor License Agreements 4 | 5 | We'd love to accept your sample apps and patches! Before we can take them, we 6 | have to jump a couple of legal hurdles. 7 | 8 | Please fill out either the individual or corporate Contributor License Agreement (CLA). 9 | 10 | * If you are an individual writing original source code and you're sure you 11 | own the intellectual property, then you'll need to sign an 12 | [individual CLA](https://developers.google.com/open-source/cla/individual). 13 | * If you work for a company that wants to allow you to contribute your work, 14 | then you'll need to sign a 15 | [corporate CLA](https://developers.google.com/open-source/cla/corporate). 16 | 17 | Follow either of the two links above to access the appropriate CLA and 18 | instructions for how to sign and return it. Once we receive it, we'll be able to 19 | accept your pull requests. 20 | 21 | ## Contributing A Patch 22 | 23 | 1. Submit an issue describing your proposed change to the repo in question. 
24 | 1. The repo owner will respond to your issue promptly. 25 | 1. If your proposed change is accepted, and you haven't already done so, sign a 26 | Contributor License Agreement (see details above). 27 | 1. Fork the desired repo, develop and test your code changes. 28 | 1. Ensure that your code adheres to the existing style in the sample to which 29 | you are contributing. Refer to the 30 | [Android Code Style Guide](https://source.android.com/source/code-style.html) 31 | for the recommended coding standards for this organization. 32 | 1. Ensure that your code has an appropriate set of unit tests which all pass. 33 | 1. Submit a pull request. 34 | 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2014 The Android Open Source Project 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Google Assistant SDK for devices - Android Things 2 | 3 | This sample shows how to call the [Google Assistant Service](https://developers.google.com/assistant/sdk/guides/service/python/) 4 | from Android Things using gRPC. 
It records a spoken request from the 5 | connected microphones, sends it to the Google Assistant API and plays 6 | back the Assistant's spoken response on the connected speaker. 7 | 8 | > **Note:** The Android Things Console will be turned down for non-commercial 9 | > use on January 5, 2022. For more details, see the 10 | > [FAQ page](https://developer.android.com/things/faq). 11 | 12 | ## Pre-requisites 13 | 14 | - Android Studio 2.2+. 15 | - Android Things compatible board. 16 | - [AIY Projects Voice Kit][voice-kit] or supported [microphone][mic] and [speaker][speaker] (see [audio configuration](#audio-configuration)). 17 | - [Google API Console Project][console]. 18 | 19 | ## Run the sample 20 | 21 | 1. Create or open a project in the [Actions Console](http://console.actions.google.com) 22 | 1. Follow the instructions to [register a device model](https://developers.google.com/assistant/sdk/guides/service/python/embed/register-device) 23 | 1. Download `client_secret_XXXX.json` 24 | 1. Configure the [OAuth consent screen](https://console.developers.google.com/apis/credentials/consent) for your project 25 | 1. Install the [`google-oauthlib-tool`](https://github.com/GoogleCloudPlatform/google-auth-library-python-oauthlib) in a [Python 3](https://www.python.org/downloads/) virtual environment: 26 | 27 | ``` 28 | python3 -m venv env 29 | env/bin/python -m pip install --upgrade pip setuptools 30 | env/bin/pip install --upgrade google-auth-oauthlib[tool] 31 | source env/bin/activate 32 | ``` 33 | 34 | - Use the [`google-oauthlib-tool`][google-oauthlib-tool] to generate user credentials: 35 | 36 | ```bash 37 | google-oauthlib-tool --client-secrets client_secret_XXXX.json \ 38 | --credentials app/src/main/res/raw/credentials.json \ 39 | --scope https://www.googleapis.com/auth/assistant-sdk-prototype \ 40 | --save 41 | ``` 42 | - Make sure to set the [Activity Controls][set-activity-controls] for the Google Account using the application. 43 | - On the first install, grant the sample the required permissions for audio and internet access: 44 | 45 | ```bash 46 | ./gradlew assembleDebug 47 | adb install -g app/build/outputs/apk/debug/app-debug.apk 48 | ``` 49 | 50 | - In Android Studio, click the "Run" button, or type on the command line: 51 | 52 | ```bash 53 | ./gradlew installDebug 54 | adb shell am start com.example.androidthings.assistant/.AssistantActivity 55 | ``` 56 | - Try the assistant demo: 57 | 58 | - Press the button: recording starts. 59 | - Ask a question into the microphone. After your question is finished, recording will end. 60 | - The Google Assistant's answer should play back on the speaker. 61 | 62 | ## Audio Configuration 63 | 64 | By default, the sample routes audio to the I2S Voice Hat on Raspberry Pi 3, and to the default audio device on other boards (on-board Line out, or HDMI/USB if connected).
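For reference, that default is selected by the `USE_VOICEHAT_I2S_DAC` constant in `AssistantActivity`. The sketch below is a simplified version of that selection logic; it assumes you are inside the activity's `onCreate` method, where the `findAudioDevice` helper is defined, and that leaving both devices `null` keeps the platform's default routing:

```Java
// Simplified sketch of the default routing decision made in AssistantActivity.onCreate().
// BoardDefaults.DEVICE_RPI3 is "rpi3", so the Voice Hat's I2S bus is only preferred on Raspberry Pi 3.
boolean useVoiceHatDac = Build.DEVICE.equals(BoardDefaults.DEVICE_RPI3);

AudioDeviceInfo audioInputDevice = null;   // null = keep the platform's default audio route
AudioDeviceInfo audioOutputDevice = null;
if (useVoiceHatDac) {
    audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_BUS);
    audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_BUS);
    // If either lookup returns null, the activity logs an error and falls back to the default route.
}
```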
65 | 66 | You can change those mappings by changing the `USE_VOICEHAT_I2S_DAC` 67 | constant or replacing the audio configuration in the `onCreate` method of [AssistantActivity](https://github.com/androidthings/sample-googleassistant/blob/master/app/src/main/java/com/example/androidthings/assistant/AssistantActivity.java) with one of the following: 68 | 69 | ```Java 70 | // Force using on-board Line out: 71 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_BUILTIN_MIC); 72 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_BUILTIN_SPEAKER); 73 | 74 | // Force using USB: 75 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_USB_DEVICE); 76 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_USB_DEVICE); 77 | 78 | // Force using I2S: 79 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_BUS); 80 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_BUS); 81 | ``` 82 | 83 | ## Device Actions 84 | With Device Actions, you can control hardware connected to your device. 85 | In this sample, you can turn on and off the LED attached to your Android 86 | Things board. 87 | 88 | Follow the guide [here](https://developers.google.com/assistant/sdk/guides/service/python/embed/register-device) 89 | to learn how to register your device. 90 | 91 | - After you register your device model and id, replace the device model and instance 92 | `PLACEHOLDER` values in `AssistantActivity`: 93 | 94 | ```Java 95 | private static final String DEVICE_MODEL_ID = "my-device-model-id"; 96 | private static final String DEVICE_INSTANCE_ID = "my-device-instance-id"; 97 | ``` 98 | 99 | - Handle a Device Actions response if you get one. 100 | 101 | ```Java 102 | mEmbeddedAssistant = new EmbeddedAssistant.Builder() 103 | ... 104 | .setConversationCallback(new ConversationCallback() { 105 | ... 106 | @Override 107 | public void onDeviceAction(String intentName, JSONObject parameters) { 108 | // Check the type of command 109 | if (intentName.equals("action.devices.commands.OnOff")) { 110 | try { 111 | boolean turnOn = parameters.getBoolean("on"); 112 | mLed.setValue(turnOn); 113 | } catch (JSONException e) { 114 | Log.e(TAG, "Cannot get value of command", e); 115 | } catch (IOException e) { 116 | Log.e(TAG, "Cannot set value of LED", e); 117 | } 118 | } 119 | } 120 | } 121 | ... 122 | ... 123 | ``` 124 | 125 | Try it: 126 | 127 | - "Turn on" 128 | - "Turn off" 129 | 130 | The LED should change states based on your command. 131 | 132 | ## Enable auto-launch behavior 133 | 134 | This sample app is currently configured to launch only when deployed from your 135 | development machine. To enable the main activity to launch automatically on boot, 136 | add the following `intent-filter` to the app's manifest file: 137 | 138 | ```xml 139 | <activity android:name=".AssistantActivity"> 140 |     <intent-filter> 141 |         <action android:name="android.intent.action.MAIN"/> 142 |         <!-- Launch AssistantActivity automatically on boot --> 143 |         <category android:name="android.intent.category.HOME"/> 144 |         <category android:name="android.intent.category.DEFAULT"/> 145 |     </intent-filter> 146 | </activity> 147 | 148 | ``` 149 | 150 | ## License 151 | 152 | Copyright 2017 The Android Open Source Project, Inc. 153 | 154 | Licensed to the Apache Software Foundation (ASF) under one or more contributor 155 | license agreements. See the NOTICE file distributed with this work for 156 | additional information regarding copyright ownership. The ASF licenses this 157 | file to you under the Apache License, Version 2.0 (the "License"); you may not 158 | use this file except in compliance with the License.
You may obtain a copy of 159 | the License at 160 | 161 | http://www.apache.org/licenses/LICENSE-2.0 162 | 163 | Unless required by applicable law or agreed to in writing, software 164 | distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 165 | WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 166 | License for the specific language governing permissions and limitations under 167 | the License. 168 | 169 | [voice-kit]: https://aiyprojects.withgoogle.com/voice/ 170 | [console]: https://console.developers.google.com 171 | [google-assistant-api-config]: https://developers.google.com/assistant/sdk/prototype/getting-started-other-platforms/config-dev-project-and-account 172 | [console-credentials]: https://console.developers.google.com/apis/credentials 173 | [google-oauthlib-tool]: https://github.com/GoogleCloudPlatform/google-auth-library-python-oauthlib 174 | [dev-preview-download]: https://partner.android.com/things/console/ 175 | [set-activity-controls]: https://developers.google.com/assistant/sdk/prototype/getting-started-other-platforms/config-dev-project-and-account#set-activity-controls 176 | [mic]: https://www.adafruit.com/product/3367 177 | [speaker]: https://www.adafruit.com/product/3369 178 | [python3]: https://www.python.org/downloads/ 179 | -------------------------------------------------------------------------------- /app/build.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017, The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | apply plugin: 'com.android.application' 18 | 19 | android { 20 | compileSdkVersion 27 21 | 22 | defaultConfig { 23 | applicationId "com.example.androidthings.assistant" 24 | minSdkVersion 27 25 | targetSdkVersion 27 26 | versionCode 1 27 | versionName "1.0" 28 | } 29 | buildTypes { 30 | release { 31 | minifyEnabled false 32 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 33 | } 34 | } 35 | } 36 | 37 | dependencies { 38 | implementation project(':grpc') 39 | compileOnly 'com.google.android.things:androidthings:1.0' 40 | 41 | implementation('com.google.auth:google-auth-library-oauth2-http:0.6.0') { 42 | exclude group: 'org.apache.httpcomponents', module: 'httpclient' 43 | } 44 | 45 | implementation 'com.google.android.things.contrib:driver-voicehat:1.0' 46 | implementation 'com.android.support:support-annotations:27.1.0' 47 | } 48 | -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 19 | 20 | 21 | 22 | 23 | 24 | 26 | 27 | 29 | 30 | 31 | 32 | 33 | 34 | 38 | 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/androidthings/assistant/AssistantActivity.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017, The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.example.androidthings.assistant; 18 | 19 | import android.app.Activity; 20 | import android.content.Context; 21 | import android.content.SharedPreferences; 22 | import android.content.SharedPreferences.Editor; 23 | import android.media.AudioDeviceInfo; 24 | import android.media.AudioManager; 25 | import android.os.Build; 26 | import android.os.Bundle; 27 | import android.os.Handler; 28 | import android.preference.PreferenceManager; 29 | import android.util.Base64; 30 | import android.util.Log; 31 | import android.view.View; 32 | import android.view.View.OnClickListener; 33 | import android.webkit.WebView; 34 | import android.widget.ArrayAdapter; 35 | import android.widget.CheckBox; 36 | import android.widget.CompoundButton; 37 | import android.widget.CompoundButton.OnCheckedChangeListener; 38 | import android.widget.ListView; 39 | import com.example.androidthings.assistant.EmbeddedAssistant.ConversationCallback; 40 | import com.example.androidthings.assistant.EmbeddedAssistant.RequestCallback; 41 | import com.google.android.things.contrib.driver.button.Button; 42 | import com.google.android.things.contrib.driver.voicehat.Max98357A; 43 | import com.google.android.things.contrib.driver.voicehat.VoiceHat; 44 | import com.google.android.things.pio.Gpio; 45 | import com.google.android.things.pio.PeripheralManager; 46 | import com.google.assistant.embedded.v1alpha2.SpeechRecognitionResult; 47 | import com.google.auth.oauth2.UserCredentials; 48 | import java.io.IOException; 49 | import java.io.UnsupportedEncodingException; 50 | import java.util.ArrayList; 51 | import java.util.List; 52 | import org.json.JSONException; 53 | import org.json.JSONObject; 54 | 55 | public class AssistantActivity extends Activity implements Button.OnButtonEventListener { 56 | private static final String TAG = AssistantActivity.class.getSimpleName(); 57 | 58 | // Peripheral and drivers constants. 59 | private static final int BUTTON_DEBOUNCE_DELAY_MS = 20; 60 | // Default on using the Voice Hat on Raspberry Pi 3. 61 | private static final boolean USE_VOICEHAT_I2S_DAC = Build.DEVICE.equals(BoardDefaults.DEVICE_RPI3); 62 | 63 | // Audio constants. 64 | private static final String PREF_CURRENT_VOLUME = "current_volume"; 65 | private static final int SAMPLE_RATE = 16000; 66 | private static final int DEFAULT_VOLUME = 100; 67 | 68 | // Assistant SDK constants. 69 | private static final String DEVICE_MODEL_ID = "PLACEHOLDER"; 70 | private static final String DEVICE_INSTANCE_ID = "PLACEHOLDER"; 71 | private static final String LANGUAGE_CODE = "en-US"; 72 | 73 | // Hardware peripherals. 74 | private Button mButton; 75 | private android.widget.Button mButtonWidget; 76 | private Gpio mLed; 77 | private Max98357A mDac; 78 | 79 | private Handler mMainHandler; 80 | 81 | // List & adapter to store and display the history of Assistant Requests. 
82 | private EmbeddedAssistant mEmbeddedAssistant; 83 | private ArrayList mAssistantRequests = new ArrayList<>(); 84 | private ArrayAdapter mAssistantRequestsAdapter; 85 | private CheckBox mHtmlOutputCheckbox; 86 | private WebView mWebView; 87 | 88 | @Override 89 | protected void onCreate(Bundle savedInstanceState) { 90 | super.onCreate(savedInstanceState); 91 | Log.i(TAG, "starting assistant demo"); 92 | 93 | setContentView(R.layout.activity_main); 94 | 95 | final ListView assistantRequestsListView = findViewById(R.id.assistantRequestsListView); 96 | mAssistantRequestsAdapter = 97 | new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, 98 | mAssistantRequests); 99 | assistantRequestsListView.setAdapter(mAssistantRequestsAdapter); 100 | mHtmlOutputCheckbox = findViewById(R.id.htmlOutput); 101 | mHtmlOutputCheckbox.setOnCheckedChangeListener(new OnCheckedChangeListener() { 102 | @Override 103 | public void onCheckedChanged(CompoundButton compoundButton, boolean useHtml) { 104 | mWebView.setVisibility(useHtml ? View.VISIBLE : View.GONE); 105 | assistantRequestsListView.setVisibility(useHtml ? View.GONE : View.VISIBLE); 106 | mEmbeddedAssistant.setResponseFormat(useHtml 107 | ? EmbeddedAssistant.HTML : EmbeddedAssistant.TEXT); 108 | } 109 | }); 110 | mWebView = findViewById(R.id.webview); 111 | mWebView.getSettings().setJavaScriptEnabled(true); 112 | 113 | mMainHandler = new Handler(getMainLooper()); 114 | mButtonWidget = findViewById(R.id.assistantQueryButton); 115 | mButtonWidget.setOnClickListener(new OnClickListener() { 116 | @Override 117 | public void onClick(View view) { 118 | mEmbeddedAssistant.startConversation(); 119 | } 120 | }); 121 | 122 | 123 | // Audio routing configuration: use default routing. 124 | AudioDeviceInfo audioInputDevice = null; 125 | AudioDeviceInfo audioOutputDevice = null; 126 | if (USE_VOICEHAT_I2S_DAC) { 127 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_BUS); 128 | if (audioInputDevice == null) { 129 | Log.e(TAG, "failed to find I2S audio input device, using default"); 130 | } 131 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_BUS); 132 | if (audioOutputDevice == null) { 133 | Log.e(TAG, "failed to found I2S audio output device, using default"); 134 | } 135 | } 136 | 137 | try { 138 | if (USE_VOICEHAT_I2S_DAC) { 139 | Log.i(TAG, "initializing DAC trigger"); 140 | mDac = VoiceHat.openDac(); 141 | mDac.setSdMode(Max98357A.SD_MODE_SHUTDOWN); 142 | 143 | mButton = VoiceHat.openButton(); 144 | mLed = VoiceHat.openLed(); 145 | } else { 146 | PeripheralManager pioManager = PeripheralManager.getInstance(); 147 | mButton = new Button(BoardDefaults.getGPIOForButton(), 148 | Button.LogicState.PRESSED_WHEN_LOW); 149 | mLed = pioManager.openGpio(BoardDefaults.getGPIOForLED()); 150 | } 151 | 152 | mButton.setDebounceDelay(BUTTON_DEBOUNCE_DELAY_MS); 153 | mButton.setOnButtonEventListener(this); 154 | 155 | mLed.setDirection(Gpio.DIRECTION_OUT_INITIALLY_LOW); 156 | mLed.setActiveType(Gpio.ACTIVE_HIGH); 157 | } catch (IOException e) { 158 | Log.e(TAG, "error configuring peripherals:", e); 159 | return; 160 | } 161 | 162 | // Set volume from preferences 163 | SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this); 164 | int initVolume = preferences.getInt(PREF_CURRENT_VOLUME, DEFAULT_VOLUME); 165 | Log.i(TAG, "setting audio track volume to: " + initVolume); 166 | 167 | UserCredentials userCredentials = null; 168 | try { 169 | userCredentials = 170 | 
EmbeddedAssistant.generateCredentials(this, R.raw.credentials); 171 | } catch (IOException | JSONException e) { 172 | Log.e(TAG, "error getting user credentials", e); 173 | } 174 | mEmbeddedAssistant = new EmbeddedAssistant.Builder() 175 | .setCredentials(userCredentials) 176 | .setDeviceInstanceId(DEVICE_INSTANCE_ID) 177 | .setDeviceModelId(DEVICE_MODEL_ID) 178 | .setLanguageCode(LANGUAGE_CODE) 179 | .setAudioInputDevice(audioInputDevice) 180 | .setAudioOutputDevice(audioOutputDevice) 181 | .setAudioSampleRate(SAMPLE_RATE) 182 | .setAudioVolume(initVolume) 183 | .setRequestCallback(new RequestCallback() { 184 | @Override 185 | public void onRequestStart() { 186 | Log.i(TAG, "starting assistant request, enable microphones"); 187 | mButtonWidget.setText(R.string.button_listening); 188 | mButtonWidget.setEnabled(false); 189 | } 190 | 191 | @Override 192 | public void onSpeechRecognition(List results) { 193 | for (final SpeechRecognitionResult result : results) { 194 | Log.i(TAG, "assistant request text: " + result.getTranscript() + 195 | " stability: " + Float.toString(result.getStability())); 196 | mAssistantRequestsAdapter.add(result.getTranscript()); 197 | } 198 | } 199 | }) 200 | .setConversationCallback(new ConversationCallback() { 201 | @Override 202 | public void onResponseStarted() { 203 | super.onResponseStarted(); 204 | // When bus type is switched, the AudioManager needs to reset the stream volume 205 | if (mDac != null) { 206 | try { 207 | mDac.setSdMode(Max98357A.SD_MODE_LEFT); 208 | } catch (IOException e) { 209 | Log.e(TAG, "error enabling DAC", e); 210 | } 211 | } 212 | } 213 | 214 | @Override 215 | public void onResponseFinished() { 216 | super.onResponseFinished(); 217 | if (mDac != null) { 218 | try { 219 | mDac.setSdMode(Max98357A.SD_MODE_SHUTDOWN); 220 | } catch (IOException e) { 221 | Log.e(TAG, "error disabling DAC", e); 222 | } 223 | } 224 | if (mLed != null) { 225 | try { 226 | mLed.setValue(false); 227 | } catch (IOException e) { 228 | Log.e(TAG, "cannot turn off LED", e); 229 | } 230 | } 231 | } 232 | 233 | @Override 234 | public void onError(Throwable throwable) { 235 | Log.e(TAG, "assist error: " + throwable.getMessage(), throwable); 236 | } 237 | 238 | @Override 239 | public void onVolumeChanged(int percentage) { 240 | Log.i(TAG, "assistant volume changed: " + percentage); 241 | // Update our shared preferences 242 | Editor editor = PreferenceManager 243 | .getDefaultSharedPreferences(AssistantActivity.this) 244 | .edit(); 245 | editor.putInt(PREF_CURRENT_VOLUME, percentage); 246 | editor.apply(); 247 | } 248 | 249 | @Override 250 | public void onConversationFinished() { 251 | Log.i(TAG, "assistant conversation finished"); 252 | mButtonWidget.setText(R.string.button_new_request); 253 | mButtonWidget.setEnabled(true); 254 | } 255 | 256 | @Override 257 | public void onAssistantResponse(final String response) { 258 | if(!response.isEmpty()) { 259 | mMainHandler.post(new Runnable() { 260 | @Override 261 | public void run() { 262 | mAssistantRequestsAdapter.add("Google Assistant: " + response); 263 | } 264 | }); 265 | } 266 | } 267 | 268 | @Override 269 | public void onAssistantDisplayOut(final String html) { 270 | mMainHandler.post(new Runnable() { 271 | @Override 272 | public void run() { 273 | // Need to convert to base64 274 | try { 275 | final byte[] data = html.getBytes("UTF-8"); 276 | final String base64String = 277 | Base64.encodeToString(data, Base64.DEFAULT); 278 | mWebView.loadData(base64String, "text/html; charset=utf-8", 279 | "base64"); 280 | } catch 
(UnsupportedEncodingException e) { 281 | e.printStackTrace(); 282 | } 283 | } 284 | }); 285 | } 286 | 287 | public void onDeviceAction(String intentName, JSONObject parameters) { 288 | if (parameters != null) { 289 | Log.d(TAG, "Get device action " + intentName + " with parameters: " + 290 | parameters.toString()); 291 | } else { 292 | Log.d(TAG, "Get device action " + intentName + " with no paramete" 293 | + "rs"); 294 | } 295 | if (intentName.equals("action.devices.commands.OnOff")) { 296 | try { 297 | boolean turnOn = parameters.getBoolean("on"); 298 | mLed.setValue(turnOn); 299 | } catch (JSONException e) { 300 | Log.e(TAG, "Cannot get value of command", e); 301 | } catch (IOException e) { 302 | Log.e(TAG, "Cannot set value of LED", e); 303 | } 304 | } 305 | } 306 | }) 307 | .build(); 308 | mEmbeddedAssistant.connect(); 309 | } 310 | 311 | private AudioDeviceInfo findAudioDevice(int deviceFlag, int deviceType) { 312 | AudioManager manager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE); 313 | AudioDeviceInfo[] adis = manager.getDevices(deviceFlag); 314 | for (AudioDeviceInfo adi : adis) { 315 | if (adi.getType() == deviceType) { 316 | return adi; 317 | } 318 | } 319 | return null; 320 | } 321 | 322 | @Override 323 | public void onButtonEvent(Button button, boolean pressed) { 324 | try { 325 | if (mLed != null) { 326 | mLed.setValue(pressed); 327 | } 328 | } catch (IOException e) { 329 | Log.d(TAG, "error toggling LED:", e); 330 | } 331 | if (pressed) { 332 | mEmbeddedAssistant.startConversation(); 333 | } 334 | } 335 | 336 | @Override 337 | protected void onDestroy() { 338 | super.onDestroy(); 339 | Log.i(TAG, "destroying assistant demo"); 340 | if (mLed != null) { 341 | try { 342 | mLed.close(); 343 | } catch (IOException e) { 344 | Log.w(TAG, "error closing LED", e); 345 | } 346 | mLed = null; 347 | } 348 | if (mButton != null) { 349 | try { 350 | mButton.close(); 351 | } catch (IOException e) { 352 | Log.w(TAG, "error closing button", e); 353 | } 354 | mButton = null; 355 | } 356 | if (mDac != null) { 357 | try { 358 | mDac.close(); 359 | } catch (IOException e) { 360 | Log.w(TAG, "error closing voice hat trigger", e); 361 | } 362 | mDac = null; 363 | } 364 | mEmbeddedAssistant.destroy(); 365 | } 366 | } 367 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/androidthings/assistant/BoardDefaults.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016, The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.example.androidthings.assistant; 18 | 19 | import android.media.AudioDeviceInfo; 20 | import android.os.Build; 21 | 22 | @SuppressWarnings("WeakerAccess") 23 | public class BoardDefaults { 24 | public static final String DEVICE_RPI3 = "rpi3"; 25 | public static final String DEVICE_RPI3BP = "rpi3bp"; 26 | public static final String DEVICE_IMX7D_PICO = "imx7d_pico"; 27 | 28 | /** 29 | * Return the GPIO pin that the LED is connected on. 30 | * For example, on Intel Edison Arduino breakout, pin "IO13" is connected to an onboard LED 31 | * that turns on when the GPIO pin is HIGH, and off when low. 32 | */ 33 | public static String getGPIOForLED() { 34 | switch (Build.DEVICE) { 35 | case DEVICE_RPI3: 36 | case DEVICE_RPI3BP: 37 | return "BCM25"; 38 | case DEVICE_IMX7D_PICO: 39 | return "GPIO2_IO02"; 40 | default: 41 | throw new IllegalStateException("Unknown Build.DEVICE " + Build.DEVICE); 42 | } 43 | } 44 | 45 | /** 46 | * Return the GPIO pin that the Button is connected on. 47 | */ 48 | public static String getGPIOForButton() { 49 | switch (Build.DEVICE) { 50 | case DEVICE_RPI3: 51 | case DEVICE_RPI3BP: 52 | return "BCM23"; 53 | case DEVICE_IMX7D_PICO: 54 | return "GPIO6_IO14"; 55 | default: 56 | throw new IllegalStateException("Unknown Build.DEVICE " + Build.DEVICE); 57 | } 58 | } 59 | 60 | /** 61 | * Return the GPIO pin for the Voice Hat DAC trigger. 62 | */ 63 | public static String getGPIOForDacTrigger() { 64 | switch (Build.DEVICE) { 65 | case DEVICE_RPI3: 66 | case DEVICE_RPI3BP: 67 | return "BCM16"; 68 | default: 69 | throw new IllegalStateException("Unknown Build.DEVICE " + Build.DEVICE); 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/androidthings/assistant/Credentials.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017, The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.example.androidthings.assistant; 18 | 19 | import android.content.Context; 20 | 21 | import com.google.auth.oauth2.UserCredentials; 22 | 23 | import org.json.JSONException; 24 | import org.json.JSONObject; 25 | 26 | import java.io.IOException; 27 | import java.io.InputStream; 28 | 29 | class Credentials { 30 | static UserCredentials fromResource(Context context, int resourceId) 31 | throws IOException, JSONException { 32 | InputStream is = context.getResources().openRawResource(resourceId); 33 | byte[] bytes = new byte[is.available()]; 34 | is.read(bytes); 35 | JSONObject json = new JSONObject(new String(bytes, "UTF-8")); 36 | return new UserCredentials( 37 | json.getString("client_id"), 38 | json.getString("client_secret"), 39 | json.getString("refresh_token") 40 | ); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/androidthings/assistant/EmbeddedAssistant.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017, The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.androidthings.assistant; 18 | 19 | import android.content.Context; 20 | import android.media.AudioDeviceInfo; 21 | import android.media.AudioFormat; 22 | import android.media.AudioRecord; 23 | import android.media.AudioTrack; 24 | import android.media.MediaRecorder.AudioSource; 25 | import android.os.Handler; 26 | import android.os.HandlerThread; 27 | import android.support.annotation.IntDef; 28 | import android.support.annotation.Nullable; 29 | import android.util.Log; 30 | import com.google.assistant.embedded.v1alpha2.AssistConfig; 31 | import com.google.assistant.embedded.v1alpha2.AssistRequest; 32 | import com.google.assistant.embedded.v1alpha2.AssistResponse; 33 | import com.google.assistant.embedded.v1alpha2.AssistResponse.EventType; 34 | import com.google.assistant.embedded.v1alpha2.AudioInConfig; 35 | import com.google.assistant.embedded.v1alpha2.AudioOutConfig; 36 | import com.google.assistant.embedded.v1alpha2.DeviceConfig; 37 | import com.google.assistant.embedded.v1alpha2.DeviceLocation; 38 | import com.google.assistant.embedded.v1alpha2.DialogStateIn; 39 | import com.google.assistant.embedded.v1alpha2.DialogStateOut.MicrophoneMode; 40 | import com.google.assistant.embedded.v1alpha2.EmbeddedAssistantGrpc; 41 | import com.google.assistant.embedded.v1alpha2.ScreenOutConfig; 42 | import com.google.assistant.embedded.v1alpha2.SpeechRecognitionResult; 43 | import com.google.auth.oauth2.UserCredentials; 44 | import com.google.protobuf.ByteString; 45 | import io.grpc.ManagedChannel; 46 | import io.grpc.ManagedChannelBuilder; 47 | import io.grpc.auth.MoreCallCredentials; 48 | import io.grpc.stub.StreamObserver; 49 | import java.io.IOException; 50 | import java.lang.annotation.Retention; 51 | import java.lang.annotation.RetentionPolicy; 52 | import 
java.nio.ByteBuffer; 53 | import java.util.ArrayList; 54 | import java.util.List; 55 | import org.json.JSONArray; 56 | import org.json.JSONException; 57 | import org.json.JSONObject; 58 | 59 | public class EmbeddedAssistant { 60 | private static final String TAG = EmbeddedAssistant.class.getSimpleName(); 61 | private static final boolean DEBUG = false; 62 | 63 | private static final String ASSISTANT_API_ENDPOINT = "embeddedassistant.googleapis.com"; 64 | private static final int AUDIO_RECORD_BLOCK_SIZE = 1024; 65 | 66 | // Device Actions 67 | private DeviceConfig mDeviceConfig; 68 | 69 | // Callbacks 70 | private Handler mRequestHandler; 71 | private RequestCallback mRequestCallback; 72 | private Handler mConversationHandler; 73 | private ConversationCallback mConversationCallback; 74 | 75 | // Assistant Thread and Runnables implementing the push-to-talk functionality. 76 | private ByteString mConversationState; 77 | private String mLanguageCode = "en-US"; 78 | private AudioRecord mAudioRecord; 79 | private DeviceLocation mDeviceLocation; 80 | private AudioInConfig mAudioInConfig; 81 | private AudioOutConfig mAudioOutConfig; 82 | private AudioDeviceInfo mAudioInputDevice; 83 | private AudioDeviceInfo mAudioOutputDevice; 84 | private AudioFormat mAudioInputFormat; 85 | private AudioFormat mAudioOutputFormat; 86 | private int mAudioInputBufferSize; 87 | private int mAudioOutputBufferSize; 88 | private int mVolume = 100; // Default to maximum volume. 89 | private ScreenOutConfig mScreenOutConfig; 90 | 91 | private MicrophoneMode mMicrophoneMode; 92 | private HandlerThread mAssistantThread; 93 | private Handler mAssistantHandler; 94 | private ArrayList mAssistantResponses = new ArrayList<>(); 95 | 96 | // gRPC client and stream observers. 97 | private int mAudioOutSize; // Tracks the size of audio responses to determine when it ends. 
98 | private EmbeddedAssistantGrpc.EmbeddedAssistantStub mAssistantService; 99 | private StreamObserver mAssistantRequestObserver; 100 | private StreamObserver mAssistantResponseObserver = 101 | new StreamObserver() { 102 | @Override 103 | public void onNext(final AssistResponse value) { 104 | if (DEBUG) { 105 | Log.d(TAG, "Received response: " + value.toString()); 106 | } 107 | if (value.getDeviceAction() != null && 108 | !value.getDeviceAction().getDeviceRequestJson().isEmpty()) { 109 | // Iterate through JSON object 110 | try { 111 | JSONObject deviceAction = new JSONObject(value.getDeviceAction() 112 | .getDeviceRequestJson()); 113 | JSONArray inputs = deviceAction.getJSONArray("inputs"); 114 | for (int i = 0; i < inputs.length(); i++) { 115 | if (inputs.getJSONObject(i).getString("intent").equals( 116 | "action.devices.EXECUTE")) { 117 | JSONArray commands = inputs.getJSONObject(i) 118 | .getJSONObject("payload") 119 | .getJSONArray("commands"); 120 | for (int j = 0; j < commands.length(); j++) { 121 | final JSONArray execution = commands.getJSONObject(j) 122 | .getJSONArray("execution"); 123 | for (int k = 0; k < execution.length(); k++) { 124 | final int finalK = k; 125 | mConversationHandler.post(new Runnable() { 126 | @Override 127 | public void run() { 128 | try { 129 | mConversationCallback.onDeviceAction( 130 | execution 131 | .getJSONObject(finalK) 132 | .getString("command"), 133 | execution.getJSONObject(finalK) 134 | .optJSONObject("params")); 135 | } catch (JSONException e) { 136 | e.printStackTrace(); 137 | } 138 | } 139 | }); 140 | 141 | } 142 | } 143 | } 144 | } 145 | } catch (JSONException e) { 146 | e.printStackTrace(); 147 | } 148 | } 149 | if (value.getEventType() == EventType.END_OF_UTTERANCE) { 150 | mRequestHandler.post(new Runnable() { 151 | @Override 152 | public void run() { 153 | mRequestCallback.onRequestFinish(); 154 | } 155 | }); 156 | mConversationHandler.post(new Runnable() { 157 | @Override 158 | public void run() { 159 | mConversationCallback.onResponseStarted(); 160 | } 161 | }); 162 | } 163 | if (value.getDialogStateOut() != null) { 164 | mConversationState = value.getDialogStateOut().getConversationState(); 165 | if (value.getDialogStateOut().getVolumePercentage() != 0) { 166 | final int volumePercentage = value.getDialogStateOut().getVolumePercentage(); 167 | mVolume = volumePercentage; 168 | mConversationHandler.post(new Runnable() { 169 | @Override 170 | public void run() { 171 | mConversationCallback.onVolumeChanged(volumePercentage); 172 | } 173 | }); 174 | } 175 | mRequestHandler.post(new Runnable() { 176 | @Override 177 | public void run() { 178 | mRequestCallback.onSpeechRecognition(value.getSpeechResultsList()); 179 | } 180 | }); 181 | mMicrophoneMode = value.getDialogStateOut().getMicrophoneMode(); 182 | mConversationCallback.onAssistantResponse(value.getDialogStateOut() 183 | .getSupplementalDisplayText()); 184 | } 185 | if (value.getAudioOut() != null) { 186 | if (mAudioOutSize <= value.getAudioOut().getSerializedSize()) { 187 | mAudioOutSize = value.getAudioOut().getSerializedSize(); 188 | } else { 189 | mAudioOutSize = 0; 190 | onCompleted(); 191 | } 192 | final ByteBuffer audioData = 193 | ByteBuffer.wrap(value.getAudioOut().getAudioData().toByteArray()); 194 | mAssistantResponses.add(audioData); 195 | mConversationHandler.post(new Runnable() { 196 | @Override 197 | public void run() { 198 | mConversationCallback.onAudioSample(audioData); 199 | } 200 | }); 201 | } 202 | if (value.hasScreenOut()) { 203 | mConversationHandler.post(new 
Runnable() { 204 | @Override 205 | public void run() { 206 | mConversationCallback.onAssistantDisplayOut( 207 | value.getScreenOut().getData().toStringUtf8()); 208 | } 209 | }); 210 | } 211 | } 212 | 213 | @Override 214 | public void onError(final Throwable t) { 215 | mConversationHandler.post(new Runnable() { 216 | @Override 217 | public void run() { 218 | mConversationCallback.onError(t); 219 | } 220 | }); 221 | } 222 | 223 | @Override 224 | public void onCompleted() { 225 | // create a new AudioTrack to workaround audio routing issues. 226 | AudioTrack audioTrack = new AudioTrack.Builder() 227 | .setAudioFormat(mAudioOutputFormat) 228 | .setBufferSizeInBytes(mAudioOutputBufferSize) 229 | .setTransferMode(AudioTrack.MODE_STREAM) 230 | .build(); 231 | if (mAudioOutputDevice != null) { 232 | audioTrack.setPreferredDevice(mAudioOutputDevice); 233 | } 234 | audioTrack.setVolume(AudioTrack.getMaxVolume() * mVolume / 100.0f); 235 | audioTrack.play(); 236 | mConversationHandler.post(new Runnable() { 237 | @Override 238 | public void run() { 239 | mConversationCallback.onResponseStarted(); 240 | } 241 | }); 242 | for (ByteBuffer audioData : mAssistantResponses) { 243 | final ByteBuffer buf = audioData; 244 | mConversationHandler.post(new Runnable() { 245 | @Override 246 | public void run() { 247 | mConversationCallback.onAudioSample(buf); 248 | } 249 | }); 250 | audioTrack.write(buf, buf.remaining(), 251 | AudioTrack.WRITE_BLOCKING); 252 | } 253 | mAssistantResponses.clear(); 254 | audioTrack.stop(); 255 | audioTrack.release(); 256 | 257 | mConversationHandler.post(new Runnable() { 258 | @Override 259 | public void run() { 260 | mConversationCallback.onResponseFinished(); 261 | } 262 | }); 263 | if (mMicrophoneMode == MicrophoneMode.DIALOG_FOLLOW_ON) { 264 | // Automatically start a new request 265 | startConversation(); 266 | } else { 267 | // The conversation is done 268 | mConversationHandler.post(new Runnable() { 269 | @Override 270 | public void run() { 271 | mConversationCallback.onConversationFinished(); 272 | } 273 | }); 274 | } 275 | } 276 | }; 277 | 278 | private Runnable mStreamAssistantRequest = new Runnable() { 279 | @Override 280 | public void run() { 281 | ByteBuffer audioData = ByteBuffer.allocateDirect(AUDIO_RECORD_BLOCK_SIZE); 282 | int result = mAudioRecord.read(audioData, audioData.capacity(), 283 | AudioRecord.READ_BLOCKING); 284 | if (result < 0) { 285 | return; 286 | } 287 | mRequestHandler.post(new Runnable() { 288 | @Override 289 | public void run() { 290 | mRequestCallback.onAudioRecording(); 291 | } 292 | }); 293 | mAssistantRequestObserver.onNext(AssistRequest.newBuilder() 294 | .setAudioIn(ByteString.copyFrom(audioData)) 295 | .build()); 296 | mAssistantHandler.post(mStreamAssistantRequest); 297 | } 298 | }; 299 | 300 | private UserCredentials mUserCredentials; 301 | 302 | private EmbeddedAssistant() {} 303 | 304 | /** 305 | * Initializes the Assistant. 306 | */ 307 | public void connect() { 308 | mAssistantThread = new HandlerThread("assistantThread"); 309 | mAssistantThread.start(); 310 | mAssistantHandler = new Handler(mAssistantThread.getLooper()); 311 | 312 | ManagedChannel channel = ManagedChannelBuilder.forTarget(ASSISTANT_API_ENDPOINT).build(); 313 | mAssistantService = EmbeddedAssistantGrpc.newStub(channel) 314 | .withCallCredentials(MoreCallCredentials.from(mUserCredentials)); 315 | } 316 | 317 | /** 318 | * Starts a request to the Assistant. 
319 | */ 320 | public void startConversation() { 321 | mAudioRecord.startRecording(); 322 | mRequestHandler.post(new Runnable() { 323 | @Override 324 | public void run() { 325 | mRequestCallback.onRequestStart(); 326 | } 327 | }); 328 | mAssistantHandler.post(new Runnable() { 329 | @Override 330 | public void run() { 331 | mAssistantRequestObserver = mAssistantService.assist(mAssistantResponseObserver); 332 | AssistConfig.Builder assistConfigBuilder = AssistConfig.newBuilder() 333 | .setAudioInConfig(mAudioInConfig) 334 | .setAudioOutConfig(mAudioOutConfig) 335 | .setDeviceConfig(mDeviceConfig); 336 | if (mScreenOutConfig != null) { 337 | assistConfigBuilder.setScreenOutConfig(mScreenOutConfig); 338 | } 339 | DialogStateIn.Builder dialogStateInBuilder = DialogStateIn.newBuilder(); 340 | if (mConversationState != null) { 341 | dialogStateInBuilder.setConversationState(mConversationState); 342 | } 343 | if (mDeviceLocation != null) { 344 | dialogStateInBuilder.setDeviceLocation(mDeviceLocation); 345 | } 346 | dialogStateInBuilder.setLanguageCode(mLanguageCode); 347 | assistConfigBuilder.setDialogStateIn(dialogStateInBuilder.build()); 348 | mAssistantRequestObserver.onNext( 349 | AssistRequest.newBuilder() 350 | .setConfig(assistConfigBuilder.build()) 351 | .build()); 352 | } 353 | }); 354 | mAssistantHandler.post(mStreamAssistantRequest); 355 | } 356 | 357 | public void startConversation(final String inputQuery) { 358 | mRequestCallback.onRequestStart(); 359 | mAssistantHandler.post(new Runnable() { 360 | @Override 361 | public void run() { 362 | mAssistantRequestObserver = mAssistantService.assist(mAssistantResponseObserver); 363 | AssistConfig.Builder assistConfigBuilder = AssistConfig.newBuilder() 364 | .setTextQuery(inputQuery) 365 | .setAudioOutConfig(mAudioOutConfig) 366 | .setDeviceConfig(mDeviceConfig) 367 | .setScreenOutConfig(mScreenOutConfig); 368 | DialogStateIn.Builder dialogStateInBuilder = DialogStateIn.newBuilder(); 369 | if (mConversationState != null) { 370 | dialogStateInBuilder.setConversationState(mConversationState); 371 | } 372 | if (mDeviceLocation != null) { 373 | dialogStateInBuilder.setDeviceLocation(mDeviceLocation); 374 | } 375 | dialogStateInBuilder.setLanguageCode(mLanguageCode); 376 | assistConfigBuilder.setDialogStateIn(dialogStateInBuilder.build()); 377 | mAssistantRequestObserver.onNext( 378 | AssistRequest.newBuilder() 379 | .setConfig(assistConfigBuilder.build()) 380 | .build()); 381 | } 382 | }); 383 | } 384 | 385 | /** 386 | * Manually ends a conversation with the Assistant. 387 | */ 388 | public void stopConversation() { 389 | mAssistantHandler.post(new Runnable() { 390 | @Override 391 | public void run() { 392 | mAssistantHandler.removeCallbacks(mStreamAssistantRequest); 393 | if (mAssistantRequestObserver != null) { 394 | mAssistantRequestObserver.onCompleted(); 395 | mAssistantRequestObserver = null; 396 | } 397 | } 398 | }); 399 | 400 | mAudioRecord.stop(); 401 | mConversationHandler.post(new Runnable() { 402 | @Override 403 | public void run() { 404 | mConversationCallback.onConversationFinished(); 405 | } 406 | }); 407 | } 408 | 409 | @Retention(RetentionPolicy.SOURCE) 410 | @IntDef({TEXT, HTML}) 411 | public @interface ResponseFormat {} 412 | public static final int TEXT = 0; 413 | public static final int HTML = 1; 414 | 415 | /** 416 | * Set desired assistant response format. 
417 | */
418 | public void setResponseFormat(@ResponseFormat int format) {
419 | mScreenOutConfig = ScreenOutConfig.newBuilder()
420 | .setScreenMode(format == HTML
421 | ? ScreenOutConfig.ScreenMode.PLAYING
422 | : ScreenOutConfig.ScreenMode.SCREEN_MODE_UNSPECIFIED)
423 | .build();
424 | }
425 |
426 | /**
427 | * Removes callbacks and exits the Assistant service. This should be called when an activity is
428 | * closing to safely quit the Assistant service.
429 | */
430 | public void destroy() {
431 | mAssistantHandler.post(new Runnable() {
432 | @Override
433 | public void run() {
434 | mAssistantHandler.removeCallbacks(mStreamAssistantRequest);
435 | }
436 | });
437 | mAssistantThread.quitSafely();
438 | if (mAudioRecord != null) {
439 | mAudioRecord.stop();
440 | mAudioRecord = null;
441 | }
442 | }
443 |
444 | /**
445 | * Generates access tokens for the Assistant based on a credentials JSON file.
446 | *
447 | * @param context Application context
448 | * @param resourceId The resource that contains the project credentials
449 | *
450 | * @return A {@link UserCredentials} object which can be used by the Assistant.
451 | * @throws IOException If the resource does not exist.
452 | * @throws JSONException If the resource is incorrectly formatted.
453 | */
454 | public static UserCredentials generateCredentials(Context context, int resourceId)
455 | throws IOException, JSONException {
456 | return Credentials.fromResource(context, resourceId);
457 | }
458 |
459 | /**
460 | * Used to build an EmbeddedAssistant object.
461 | */
462 | public static class Builder {
463 | private EmbeddedAssistant mEmbeddedAssistant;
464 | private int mSampleRate;
465 | private String mDeviceModelId;
466 | private String mDeviceInstanceId;
467 |
468 | /**
469 | * Creates a Builder.
470 | */
471 | public Builder() {
472 | mEmbeddedAssistant = new EmbeddedAssistant();
473 | }
474 |
475 | /**
476 | * Sets a preferred {@link AudioDeviceInfo} device for input.
477 | *
478 | * @param device The preferred audio device to acquire audio from.
479 | * @return Returns this builder to allow for chaining.
480 | */
481 | public Builder setAudioInputDevice(AudioDeviceInfo device) {
482 | mEmbeddedAssistant.mAudioInputDevice = device;
483 | return this;
484 | }
485 |
486 | /**
487 | * Sets a preferred {@link AudioDeviceInfo} device for output.
488 | *
489 | * @param device The preferred audio device to route audio to.
490 | * @return Returns this builder to allow for chaining.
491 | */
492 | public Builder setAudioOutputDevice(AudioDeviceInfo device) {
493 | mEmbeddedAssistant.mAudioOutputDevice = device;
494 | return this;
495 | }
496 |
497 | /**
498 | * Sets a {@link RequestCallback}, which is called when a request is being made to the Assistant.
499 | *
500 | * @param requestCallback The methods that will run during a request.
501 | * @return Returns this builder to allow for chaining.
502 | */
503 | public Builder setRequestCallback(RequestCallback requestCallback) {
504 | setRequestCallback(requestCallback, null);
505 | return this;
506 | }
507 |
508 | /**
509 | * Sets a {@link RequestCallback}, which is called when a request is being made to the Assistant.
510 | *
511 | * @param requestCallback The methods that will run during a request.
512 | * @param requestHandler Handler used to dispatch the callback.
513 | * @return Returns this builder to allow for chaining.
514 | */
515 | public Builder setRequestCallback(RequestCallback requestCallback,
516 | @Nullable Handler requestHandler) {
517 | if (requestHandler == null) {
518 | requestHandler = new Handler();
519 | }
520 | mEmbeddedAssistant.mRequestCallback = requestCallback;
521 | mEmbeddedAssistant.mRequestHandler = requestHandler;
522 | return this;
523 | }
524 |
525 | /**
526 | * Sets a {@link ConversationCallback}, which is called when a response is being given from the
527 | * Assistant.
528 | *
529 | * @param responseCallback The methods that will run during a response.
530 | * @return Returns this builder to allow for chaining.
531 | */
532 | public Builder setConversationCallback(ConversationCallback responseCallback) {
533 | setConversationCallback(responseCallback, null);
534 | return this;
535 | }
536 |
537 | /**
538 | * Sets a {@link ConversationCallback}, which is called when a response is being given from the
539 | * Assistant.
540 | *
541 | * @param responseCallback The methods that will run during a response.
542 | * @param responseHandler Handler used to dispatch the callback.
543 | * @return Returns this builder to allow for chaining.
544 | */
545 | public Builder setConversationCallback(ConversationCallback responseCallback,
546 | @Nullable Handler responseHandler) {
547 | if (responseHandler == null) {
548 | responseHandler = new Handler();
549 | }
550 | mEmbeddedAssistant.mConversationCallback = responseCallback;
551 | mEmbeddedAssistant.mConversationHandler = responseHandler;
552 | return this;
553 | }
554 |
555 | /**
556 | * Sets the credentials for the user.
557 | *
558 | * @param userCredentials Credentials generated by
559 | * {@link EmbeddedAssistant#generateCredentials(Context, int)}.
560 | * @return Returns this builder to allow for chaining.
561 | */
562 | public Builder setCredentials(UserCredentials userCredentials) {
563 | mEmbeddedAssistant.mUserCredentials = userCredentials;
564 | return this;
565 | }
566 |
567 | /**
568 | * Sets the audio sampling rate for input and output streams.
569 | *
570 | * @param sampleRate The audio sample rate
571 | * @return Returns this builder to allow for chaining.
572 | */
573 | public Builder setAudioSampleRate(int sampleRate) {
574 | mSampleRate = sampleRate;
575 | return this;
576 | }
577 |
578 | /**
579 | * Sets the volume for the Assistant response.
580 | *
581 | * @param volume The audio volume in the range 0 - 100.
582 | * @return Returns this builder to allow for chaining.
583 | */
584 | public Builder setAudioVolume(int volume) {
585 | mEmbeddedAssistant.mVolume = volume;
586 | return this;
587 | }
588 |
589 | /**
590 | * Sets the model id for each Assistant request.
591 | *
592 | * @param deviceModelId The device model id.
593 | * @return Returns this builder to allow for chaining.
594 | */
595 | public Builder setDeviceModelId(String deviceModelId) {
596 | mDeviceModelId = deviceModelId;
597 | return this;
598 | }
599 |
600 | /**
601 | * Sets the instance id for each Assistant request.
602 | *
603 | * @param deviceInstanceId The device instance id.
604 | * @return Returns this builder to allow for chaining.
605 | */
606 | public Builder setDeviceInstanceId(String deviceInstanceId) {
607 | mDeviceInstanceId = deviceInstanceId;
608 | return this;
609 | }
610 |
611 | /**
612 | * Sets the language code of the request using IETF BCP 47 syntax.
613 | * See the IETF BCP 47 documentation.
614 | * For example: "en-US".
615 | *
616 | * @param languageCode Code for the language. Only Assistant-supported languages are valid.
617 | * @return Returns this builder to allow for chaining.
618 | */
619 | public Builder setLanguageCode(String languageCode) {
620 | mEmbeddedAssistant.mLanguageCode = languageCode;
621 | return this;
622 | }
623 |
624 | public Builder setDeviceLocation(DeviceLocation deviceLocation) {
625 | mEmbeddedAssistant.mDeviceLocation = deviceLocation;
626 | return this;
627 | }
628 |
629 | /**
630 | * Returns an EmbeddedAssistant if all required parameters have been supplied.
631 | *
632 | * @return An inactive EmbeddedAssistant. Call {@link EmbeddedAssistant#connect()} to start
633 | * it.
634 | */
635 | public EmbeddedAssistant build() {
636 | if (mEmbeddedAssistant.mRequestCallback == null) {
637 | throw new NullPointerException("There must be a defined RequestCallback");
638 | }
639 | if (mEmbeddedAssistant.mConversationCallback == null) {
640 | throw new NullPointerException("There must be a defined ConversationCallback");
641 | }
642 | if (mEmbeddedAssistant.mUserCredentials == null) {
643 | throw new NullPointerException("There must be provided credentials");
644 | }
645 | if (mSampleRate == 0) {
646 | throw new NullPointerException("There must be a defined sample rate");
647 | }
648 | final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
649 |
650 | // Construct audio configurations.
651 | mEmbeddedAssistant.mAudioInConfig = AudioInConfig.newBuilder()
652 | .setEncoding(AudioInConfig.Encoding.LINEAR16)
653 | .setSampleRateHertz(mSampleRate)
654 | .build();
655 | mEmbeddedAssistant.mAudioOutConfig = AudioOutConfig.newBuilder()
656 | .setEncoding(AudioOutConfig.Encoding.LINEAR16)
657 | .setSampleRateHertz(mSampleRate)
658 | .setVolumePercentage(mEmbeddedAssistant.mVolume)
659 | .build();
660 |
661 | // Initialize Audio framework parameters.
662 | mEmbeddedAssistant.mAudioInputFormat = new AudioFormat.Builder()
663 | .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
664 | .setEncoding(audioEncoding)
665 | .setSampleRate(mSampleRate)
666 | .build();
667 | mEmbeddedAssistant.mAudioInputBufferSize = AudioRecord.getMinBufferSize(
668 | mEmbeddedAssistant.mAudioInputFormat.getSampleRate(),
669 | mEmbeddedAssistant.mAudioInputFormat.getChannelMask(),
670 | mEmbeddedAssistant.mAudioInputFormat.getEncoding());
671 | mEmbeddedAssistant.mAudioOutputFormat = new AudioFormat.Builder()
672 | .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
673 | .setEncoding(audioEncoding)
674 | .setSampleRate(mSampleRate)
675 | .build();
676 | mEmbeddedAssistant.mAudioOutputBufferSize = AudioTrack.getMinBufferSize(
677 | mEmbeddedAssistant.mAudioOutputFormat.getSampleRate(),
678 | mEmbeddedAssistant.mAudioOutputFormat.getChannelMask(),
679 | mEmbeddedAssistant.mAudioOutputFormat.getEncoding());
680 |
681 | // Create a new AudioRecord to work around audio routing issues.
682 | mEmbeddedAssistant.mAudioRecord = new AudioRecord.Builder()
683 | .setAudioSource(AudioSource.VOICE_RECOGNITION)
684 | .setAudioFormat(mEmbeddedAssistant.mAudioInputFormat)
685 | .setBufferSizeInBytes(mEmbeddedAssistant.mAudioInputBufferSize)
686 | .build();
687 | if (mEmbeddedAssistant.mAudioInputDevice != null) {
688 | boolean result = mEmbeddedAssistant.mAudioRecord.setPreferredDevice(
689 | mEmbeddedAssistant.mAudioInputDevice);
690 | if (!result) {
691 | Log.e(TAG, "failed to set preferred input device");
692 | }
693 | }
694 |
695 | // Construct DeviceConfig
696 | mEmbeddedAssistant.mDeviceConfig = DeviceConfig.newBuilder()
697 | .setDeviceId(mDeviceInstanceId)
698 | .setDeviceModelId(mDeviceModelId)
699 | .build();
700 |
701 | // Construct default ScreenOutConfig
702 | mEmbeddedAssistant.mScreenOutConfig = ScreenOutConfig.newBuilder()
703 | .setScreenMode(ScreenOutConfig.ScreenMode.SCREEN_MODE_UNSPECIFIED)
704 | .build();
705 |
706 | return mEmbeddedAssistant;
707 | }
708 | }
709 |
710 | /**
711 | * Callback for methods during a request to the Assistant.
712 | */
713 | public static abstract class RequestCallback {
714 |
715 | /**
716 | * Called when a request is first made.
717 | */
718 | public void onRequestStart() {}
719 |
720 | /**
721 | * Called when a request has completed.
722 | */
723 | public void onRequestFinish() {}
724 |
725 | /**
726 | * Called when audio is being recorded. This may be called multiple times during a single
727 | * request.
728 | */
729 | public void onAudioRecording() {}
730 |
731 | /**
732 | * Called when the request is complete and the Assistant returns the user's speech-to-text.
733 | */
734 | public void onSpeechRecognition(List<SpeechRecognitionResult> results) {}
735 | }
736 |
737 | /**
738 | * Callback for methods during a conversation from the Assistant.
739 | */
740 | public static abstract class ConversationCallback {
741 |
742 | /**
743 | * Called when the user's voice query ends and the Assistant is about to start a
744 | * response.
745 | */
746 | public void onResponseStarted() {}
747 |
748 | /**
749 | * Called when the Assistant's response is complete.
750 | */
751 | public void onResponseFinished() {}
752 |
753 | /**
754 | * Called when audio is being played. This may be called multiple times during a single
755 | * response. The audio will play using the AudioTrack, although this method may be used
756 | * to provide auxiliary effects.
757 | *
758 | * @param audioSample The raw audio sample from the Assistant
759 | */
760 | public void onAudioSample(ByteBuffer audioSample) {}
761 |
762 | /**
763 | * Called when an error occurs during the response.
764 | *
765 | * @param throwable A {@link Throwable} which contains information about the response error.
766 | */
767 | public void onError(Throwable throwable) {}
768 |
769 | /**
770 | * Called when the user requests to change the Assistant's volume.
771 | *
772 | * @param percentage The desired volume as a percentage of intensity, in the range 0 - 100.
773 | */
774 | public void onVolumeChanged(int percentage) {}
775 |
776 | /**
777 | * Called when the response contains a DeviceAction.
778 | *
779 | * @param intentName The name of the intent to execute.
780 | * @param parameters A JSONObject containing parameters related to this intent.
781 | */
782 | public void onDeviceAction(String intentName, JSONObject parameters) {}
783 |
784 | /**
785 | * Called when the response contains supplemental display text from the Assistant.
786 | *
787 | * @param response Supplemental display text.
788 | */
789 | public void onAssistantResponse(String response) {}
790 |
791 | /**
792 | * Called when the response contains HTML output from the Assistant.
793 | *
794 | * @param html HTML data showing a rich response
795 | */
796 | public void onAssistantDisplayOut(String html) {}
797 |
798 | /**
799 | * Called when the entire conversation is finished.
800 | */
801 | public void onConversationFinished() {}
802 | }
803 | }
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
[The XML markup of this roughly 35-line layout file did not survive extraction; only stray line-number stubs remained.]
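
A note on the EmbeddedAssistant class shown above: it is driven entirely through its Builder, its two callback classes, and the connect()/startConversation()/destroy() lifecycle. The sketch below shows one plausible way an activity could wire it up. It is illustrative only and is not a copy of this sample's AssistantActivity: the credentials resource name, the device model and instance ids, and the 16000 Hz sample rate are placeholder assumptions.

```java
// Hedged usage sketch, assumed to run on the main thread of an Activity that defines
// a TAG constant and bundles an R.raw.credentials resource (both placeholders here).
UserCredentials userCredentials = null;
try {
    userCredentials = EmbeddedAssistant.generateCredentials(this, R.raw.credentials);
} catch (IOException | JSONException e) {
    Log.e(TAG, "error loading credentials", e);
}

EmbeddedAssistant assistant = new EmbeddedAssistant.Builder()
        .setCredentials(userCredentials)
        .setDeviceModelId("placeholder-device-model")        // placeholder id
        .setDeviceInstanceId("placeholder-device-instance")  // placeholder id
        .setLanguageCode("en-US")
        .setAudioSampleRate(16000)                           // assumed sample rate
        .setAudioVolume(100)
        .setRequestCallback(new EmbeddedAssistant.RequestCallback() {
            @Override
            public void onRequestStart() {
                Log.i(TAG, "assistant request started");
            }
        })
        .setConversationCallback(new EmbeddedAssistant.ConversationCallback() {
            @Override
            public void onAssistantResponse(String response) {
                Log.i(TAG, "assistant response text: " + response);
            }

            @Override
            public void onConversationFinished() {
                Log.i(TAG, "conversation finished");
            }
        })
        .build();

assistant.connect();            // opens the gRPC channel and starts the assistant thread
assistant.startConversation();  // e.g. from a button press; streams microphone audio
// ...
assistant.destroy();            // when the activity is being torn down
```

Note that build() throws a NullPointerException if the callbacks, credentials, or sample rate are missing, and connect() must run before the first startConversation() because it creates the gRPC stub and the assistant handler thread.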
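
The same instance can also be driven by a typed query instead of microphone audio, and setResponseFormat(HTML) switches the ScreenOutConfig to ScreenMode.PLAYING so that responses arrive as HTML through ConversationCallback.onAssistantDisplayOut(). A minimal sketch, assuming a hypothetical WebView field named webView and the assistant instance from the previous snippet:

```java
// Sketch: a ConversationCallback that additionally renders HTML output.
// `webView` is a hypothetical WebView field from the activity layout.
EmbeddedAssistant.ConversationCallback htmlCallback = new EmbeddedAssistant.ConversationCallback() {
    @Override
    public void onAssistantDisplayOut(String html) {
        // Delivered when the response carries screen_out data (see onNext above).
        webView.loadDataWithBaseURL(null, html, "text/html", "UTF-8", null);
    }
};

// After building the assistant with htmlCallback and calling connect():
assistant.setResponseFormat(EmbeddedAssistant.HTML);  // request rich screen output
assistant.startConversation("what time is it");       // text query instead of audio
```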
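
Device actions and volume changes also surface through ConversationCallback. The overrides below are illustrative only: "action.devices.commands.OnOff" is used as an example intent name, and the actual intents and parameters depend on the traits registered for the device model.

```java
// Illustrative overrides inside a ConversationCallback subclass.
@Override
public void onDeviceAction(String intentName, JSONObject parameters) {
    // Example intent name; real handling depends on the registered device traits.
    if ("action.devices.commands.OnOff".equals(intentName) && parameters != null) {
        boolean turnOn = parameters.optBoolean("on", false);
        Log.i(TAG, "device action OnOff -> " + turnOn);
        // e.g. toggle a GPIO-driven LED here.
    }
}

@Override
public void onVolumeChanged(int percentage) {
    // Apply or persist the new volume as needed.
    Log.i(TAG, "assistant volume changed to " + percentage + "%");
}
```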