├── .gitignore
├── .google
└── packaging.yaml
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── app
├── build.gradle
└── src
│ └── main
│ ├── AndroidManifest.xml
│ ├── java
│ └── com
│ │ └── example
│ │ └── androidthings
│ │ └── assistant
│ │ ├── AssistantActivity.java
│ │ ├── BoardDefaults.java
│ │ ├── Credentials.java
│ │ └── EmbeddedAssistant.java
│ └── res
│ ├── layout
│ └── activity_main.xml
│ └── values
│ └── strings.xml
├── build.gradle
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── grpc
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ └── main
│ ├── AndroidManifest.xml
│ └── proto
│ └── google
│ ├── api
│ ├── annotations.proto
│ └── http.proto
│ ├── assistant
│ └── embedded
│ │ ├── README.md
│ │ └── v1alpha2
│ │ └── embedded_assistant.proto
│ ├── protobuf
│ ├── any.proto
│ └── descriptor.proto
│ ├── rpc
│ └── status.proto
│ └── type
│ └── latlng.proto
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | ### Android ###
2 | # Built application files
3 | *.apk
4 | *.ap_
5 |
6 | # Files for the ART/Dalvik VM
7 | *.dex
8 |
9 | # Java class files
10 | *.class
11 |
12 | # Generated files
13 | bin/
14 | gen/
15 | out/
16 |
17 | # Gradle files
18 | .gradle/
19 | build/
20 |
21 | # Local configuration file (sdk path, etc)
22 | local.properties
23 | gradle.properties
24 |
25 | # Proguard folder generated by Eclipse
26 | proguard/
27 |
28 | # Log Files
29 | *.log
30 |
31 | # Android Studio Navigation editor temp files
32 | .navigation/
33 |
34 | # Android Studio captures folder
35 | captures/
36 |
37 | # Intellij
38 | *.iml
39 | *.iws
40 | .idea/
41 |
42 | # External native build folder generated in Android Studio 2.2 and later
43 | .externalNativeBuild
44 |
45 | .DS_Store
46 |
47 | app/src/main/res/raw/credentials.json
48 |
--------------------------------------------------------------------------------
/.google/packaging.yaml:
--------------------------------------------------------------------------------
1 | status: PUBLISHED
2 | technologies: [Android, Android Things]
3 | categories: [Android Things]
4 | languages: [Java]
5 | solutions: [IoT]
6 | github: androidthings/sample-googleassistant
7 | license: apache2
8 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to become a contributor and submit your own code
2 |
3 | ## Contributor License Agreements
4 |
5 | We'd love to accept your sample apps and patches! Before we can take them, we
6 | have to jump a couple of legal hurdles.
7 |
8 | Please fill out either the individual or corporate Contributor License Agreement (CLA).
9 |
10 | * If you are an individual writing original source code and you're sure you
11 | own the intellectual property, then you'll need to sign an
12 | [individual CLA](https://developers.google.com/open-source/cla/individual).
13 | * If you work for a company that wants to allow you to contribute your work,
14 | then you'll need to sign a
15 | [corporate CLA](https://developers.google.com/open-source/cla/corporate).
16 |
17 | Follow either of the two links above to access the appropriate CLA and
18 | instructions for how to sign and return it. Once we receive it, we'll be able to
19 | accept your pull requests.
20 |
21 | ## Contributing A Patch
22 |
23 | 1. Submit an issue describing your proposed change to the repo in question.
24 | 1. The repo owner will respond to your issue promptly.
25 | 1. If your proposed change is accepted, and you haven't already done so, sign a
26 | Contributor License Agreement (see details above).
27 | 1. Fork the desired repo, develop and test your code changes.
28 | 1. Ensure that your code adheres to the existing style in the sample to which
29 | you are contributing. Refer to the
30 | [Android Code Style Guide](https://source.android.com/source/code-style.html)
31 | for the recommended coding standards for this organization.
32 | 1. Ensure that your code has an appropriate set of unit tests which all pass.
33 | 1. Submit a pull request.
34 |
35 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2014 The Android Open Source Project
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Google Assistant SDK for devices - Android Things
2 |
3 | This sample shows how to call the [Google Assistant Service](https://developers.google.com/assistant/sdk/guides/service/python/)
4 | from Android Things using gRPC. It records a spoken request from the
5 | connected microphones, sends it to the Google Assistant API and plays
6 | back the Assistant's spoken response on the connected speaker.
7 |
8 | > **Note:** The Android Things Console will be turned down for non-commercial
9 | > use on January 5, 2022. For more details, see the
10 | > [FAQ page](https://developer.android.com/things/faq).
11 |
12 | ## Pre-requisites
13 |
14 | - Android Studio 2.2+.
15 | - Android Things compatible board.
16 | - [AIY Projects Voice Kit][voice-kit] or supported [microphone][mic] and [speaker][speaker] (See [audio configuration](#audio-configuration)).
17 | - [Google API Console Project][console].
18 |
19 | ## Run the sample
20 |
21 | 1. Create or open a project in the [Actions Console](https://console.actions.google.com)
22 | 1. Follow the instructions to [register a device model](https://developers.google.com/assistant/sdk/guides/service/python/embed/register-device)
23 | 1. Download `client_secret_XXXX.json`
24 | 1. Configure the [OAuth consent screen](https://console.developers.google.com/apis/credentials/consent) for your project
25 | 1. Install the [`google-oauthlib-tool`](https://github.com/GoogleCloudPlatform/google-auth-library-python-oauthlib) in a [Python 3](https://www.python.org/downloads/) virtual environment:
26 |
27 | ```
28 | python3 -m venv env
29 | env/bin/python -m pip install --upgrade pip setuptools
30 | env/bin/pip install --upgrade google-auth-oauthlib[tool]
31 | source env/bin/activate
32 | ```
33 |
34 | - Use the [`google-oauthlib-tool`][google-oauthlib-tool] to generate user credentials:
35 |
36 | ```bash
37 | google-oauthlib-tool --client-secrets client_secret_XXXX.json \
38 | --credentials app/src/main/res/raw/credentials.json \
39 | --scope https://www.googleapis.com/auth/assistant-sdk-prototype \
40 | --save
41 | ```
42 | - Make sure to set the [Activity Controls][set-activity-controls] for the Google Account using the application.
43 | - On the first install, grant the sample required permissions for audio and internet access:
44 |
45 | ```bash
46 | ./gradlew assembleDebug
47 | adb install -g app/build/outputs/apk/debug/app-debug.apk
48 | ```
49 |
50 | - On Android Studio, click on the "Run" button or on the command line, type:
51 |
52 | ```bash
53 | ./gradlew installDebug
54 | adb shell am start com.example.androidthings.assistant/.AssistantActivity
55 | ```
56 | - Try the assistant demo:
57 |
58 | - Press the button: recording starts.
59 | - Ask a question in the microphone. After your question is finished, recording will end.
60 |     - The Google Assistant answer should play back on the speaker.
61 |
62 | ## Audio Configuration
63 |
64 | By default the sample routes audio to the I2S Voice Hat on Raspberry Pi 3 and default audio on other boards (on-board Line out or HDMI/USB if connected).
65 |
66 | You can change those mappings by changing the `USE_VOICEHAT_I2S_DAC`
67 | constant or replacing the audio configuration in the `onCreate` method of [AssistantActivity](https://github.com/androidthings/sample-googleassistant/blob/master/app/src/main/java/com/example/androidthings/assistant/AssistantActivity.java) with one of the following:
68 |
69 | ```Java
70 | // Force using on-board Line out:
71 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_BUILTIN_MIC);
72 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_BUILTIN_SPEAKER);
73 |
74 | // Force using USB:
75 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_USB_DEVICE);
76 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_USB_DEVICE);
77 |
78 | // Force using I2S:
79 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_BUS);
80 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_BUS);
81 | ```
82 |
83 | ## Device Actions
84 | With Device Actions, you can control hardware connected to your device.
85 | In this sample, you can turn on and off the LED attached to your Android
86 | Things board.
87 |
88 | Follow the guide [here](https://developers.google.com/assistant/sdk/guides/service/python/embed/register-device)
89 | to learn how to register your device.
90 |
91 | - After you register your device model and id, replace the device model and instance
92 | `PLACEHOLDER` values in `AssistantActivity`:
93 |
94 | ```Java
95 | private static final String DEVICE_MODEL_ID = "my-device-model-id";
96 | private static final String DEVICE_INSTANCE_ID = "my-device-instance-id";
97 | ```
98 |
99 | - Handle a Device Actions response if you get one.
100 |
101 | ```Java
102 | mEmbeddedAssistant = new EmbeddedAssistant.Builder()
103 | ...
104 | .setConversationCallback(new ConversationCallback() {
105 | ...
106 | @Override
107 | public void onDeviceAction(String intentName, JSONObject parameters) {
108 | // Check the type of command
109 | if (intentName.equals("action.devices.commands.OnOff")) {
110 | try {
111 | boolean turnOn = parameters.getBoolean("on");
112 | mLed.setValue(turnOn);
113 | } catch (JSONException e) {
114 | Log.e(TAG, "Cannot get value of command", e);
115 | } catch (IOException e) {
116 | Log.e(TAG, "Cannot set value of LED", e);
117 | }
118 | }
119 | }
120 | }
121 | ...
122 | ...
123 | ```
124 |
125 | Try it:
126 |
127 | - "Turn on"
128 | - "Turn off"
129 |
130 | The LED should change states based on your command.
131 |
132 | ## Enable auto-launch behavior
133 |
134 | This sample app is currently configured to launch only when deployed from your
135 | development machine. To enable the main activity to launch automatically on boot,
136 | add the following `intent-filter` to the app's manifest file:
137 |
138 | ```xml
139 | <activity android:name=".AssistantActivity">
140 |     <intent-filter>
141 |         <action android:name="android.intent.action.MAIN"/>
142 |         <category android:name="android.intent.category.HOME"/>
143 |         <category android:name="android.intent.category.DEFAULT"/>
144 |     </intent-filter>
145 | </activity>
146 |
147 |
148 | ```
149 |
150 | ## License
151 |
152 | Copyright 2017 The Android Open Source Project, Inc.
153 |
154 | Licensed to the Apache Software Foundation (ASF) under one or more contributor
155 | license agreements. See the NOTICE file distributed with this work for
156 | additional information regarding copyright ownership. The ASF licenses this
157 | file to you under the Apache License, Version 2.0 (the "License"); you may not
158 | use this file except in compliance with the License. You may obtain a copy of
159 | the License at
160 |
161 | http://www.apache.org/licenses/LICENSE-2.0
162 |
163 | Unless required by applicable law or agreed to in writing, software
164 | distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
165 | WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
166 | License for the specific language governing permissions and limitations under
167 | the License.
168 |
169 | [voice-kit]: https://aiyprojects.withgoogle.com/voice/
170 | [console]: https://console.developers.google.com
171 | [google-assistant-api-config]: https://developers.google.com/assistant/sdk/prototype/getting-started-other-platforms/config-dev-project-and-account
172 | [console-credentials]: https://console.developers.google.com/apis/credentials
173 | [google-oauthlib-tool]: https://github.com/GoogleCloudPlatform/google-auth-library-python-oauthlib
174 | [dev-preview-download]: https://partner.android.com/things/console/
175 | [set-activity-controls]: https://developers.google.com/assistant/sdk/prototype/getting-started-other-platforms/config-dev-project-and-account#set-activity-controls
176 | [mic]: https://www.adafruit.com/product/3367
177 | [speaker]: https://www.adafruit.com/product/3369
178 | [python3]: https://www.python.org/downloads/
179 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | apply plugin: 'com.android.application'
18 |
19 | android {
20 | compileSdkVersion 27
21 |
22 | defaultConfig {
23 | applicationId "com.example.androidthings.assistant"
24 | minSdkVersion 27
25 | targetSdkVersion 27
26 | versionCode 1
27 | versionName "1.0"
28 | }
29 | buildTypes {
30 | release {
31 | minifyEnabled false
32 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
33 | }
34 | }
35 | }
36 |
37 | dependencies {
38 | implementation project(':grpc')
39 | compileOnly 'com.google.android.things:androidthings:1.0'
40 |
41 | implementation('com.google.auth:google-auth-library-oauth2-http:0.6.0') {
42 | exclude group: 'org.apache.httpcomponents', module: 'httpclient'
43 | }
44 |
45 | implementation 'com.google.android.things.contrib:driver-voicehat:1.0'
46 | implementation 'com.android.support:support-annotations:27.1.0'
47 | }
48 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
19 |
20 |
21 |
22 |
23 |
24 |
26 |
27 |
29 |
30 |
31 |
32 |
33 |
34 |
38 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/androidthings/assistant/AssistantActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.androidthings.assistant;
18 |
19 | import android.app.Activity;
20 | import android.content.Context;
21 | import android.content.SharedPreferences;
22 | import android.content.SharedPreferences.Editor;
23 | import android.media.AudioDeviceInfo;
24 | import android.media.AudioManager;
25 | import android.os.Build;
26 | import android.os.Bundle;
27 | import android.os.Handler;
28 | import android.preference.PreferenceManager;
29 | import android.util.Base64;
30 | import android.util.Log;
31 | import android.view.View;
32 | import android.view.View.OnClickListener;
33 | import android.webkit.WebView;
34 | import android.widget.ArrayAdapter;
35 | import android.widget.CheckBox;
36 | import android.widget.CompoundButton;
37 | import android.widget.CompoundButton.OnCheckedChangeListener;
38 | import android.widget.ListView;
39 | import com.example.androidthings.assistant.EmbeddedAssistant.ConversationCallback;
40 | import com.example.androidthings.assistant.EmbeddedAssistant.RequestCallback;
41 | import com.google.android.things.contrib.driver.button.Button;
42 | import com.google.android.things.contrib.driver.voicehat.Max98357A;
43 | import com.google.android.things.contrib.driver.voicehat.VoiceHat;
44 | import com.google.android.things.pio.Gpio;
45 | import com.google.android.things.pio.PeripheralManager;
46 | import com.google.assistant.embedded.v1alpha2.SpeechRecognitionResult;
47 | import com.google.auth.oauth2.UserCredentials;
48 | import java.io.IOException;
49 | import java.io.UnsupportedEncodingException;
50 | import java.util.ArrayList;
51 | import java.util.List;
52 | import org.json.JSONException;
53 | import org.json.JSONObject;
54 |
55 | public class AssistantActivity extends Activity implements Button.OnButtonEventListener {
56 | private static final String TAG = AssistantActivity.class.getSimpleName();
57 |
58 | // Peripheral and drivers constants.
59 | private static final int BUTTON_DEBOUNCE_DELAY_MS = 20;
60 | // Default on using the Voice Hat on Raspberry Pi 3.
61 | private static final boolean USE_VOICEHAT_I2S_DAC = Build.DEVICE.equals(BoardDefaults.DEVICE_RPI3);
62 |
63 | // Audio constants.
64 | private static final String PREF_CURRENT_VOLUME = "current_volume";
65 | private static final int SAMPLE_RATE = 16000;
66 | private static final int DEFAULT_VOLUME = 100;
67 |
68 | // Assistant SDK constants.
69 | private static final String DEVICE_MODEL_ID = "PLACEHOLDER";
70 | private static final String DEVICE_INSTANCE_ID = "PLACEHOLDER";
71 | private static final String LANGUAGE_CODE = "en-US";
72 |
73 | // Hardware peripherals.
74 | private Button mButton;
75 | private android.widget.Button mButtonWidget;
76 | private Gpio mLed;
77 | private Max98357A mDac;
78 |
79 | private Handler mMainHandler;
80 |
81 | // List & adapter to store and display the history of Assistant Requests.
82 | private EmbeddedAssistant mEmbeddedAssistant;
83 | private ArrayList mAssistantRequests = new ArrayList<>();
84 | private ArrayAdapter mAssistantRequestsAdapter;
85 | private CheckBox mHtmlOutputCheckbox;
86 | private WebView mWebView;
87 |
88 | @Override
89 | protected void onCreate(Bundle savedInstanceState) {
90 | super.onCreate(savedInstanceState);
91 | Log.i(TAG, "starting assistant demo");
92 |
93 | setContentView(R.layout.activity_main);
94 |
95 | final ListView assistantRequestsListView = findViewById(R.id.assistantRequestsListView);
96 | mAssistantRequestsAdapter =
97 | new ArrayAdapter<>(this, android.R.layout.simple_list_item_1,
98 | mAssistantRequests);
99 | assistantRequestsListView.setAdapter(mAssistantRequestsAdapter);
100 | mHtmlOutputCheckbox = findViewById(R.id.htmlOutput);
101 | mHtmlOutputCheckbox.setOnCheckedChangeListener(new OnCheckedChangeListener() {
102 | @Override
103 | public void onCheckedChanged(CompoundButton compoundButton, boolean useHtml) {
104 | mWebView.setVisibility(useHtml ? View.VISIBLE : View.GONE);
105 | assistantRequestsListView.setVisibility(useHtml ? View.GONE : View.VISIBLE);
106 | mEmbeddedAssistant.setResponseFormat(useHtml
107 | ? EmbeddedAssistant.HTML : EmbeddedAssistant.TEXT);
108 | }
109 | });
110 | mWebView = findViewById(R.id.webview);
111 | mWebView.getSettings().setJavaScriptEnabled(true);
112 |
113 | mMainHandler = new Handler(getMainLooper());
114 | mButtonWidget = findViewById(R.id.assistantQueryButton);
115 | mButtonWidget.setOnClickListener(new OnClickListener() {
116 | @Override
117 | public void onClick(View view) {
118 | mEmbeddedAssistant.startConversation();
119 | }
120 | });
121 |
122 |
123 | // Audio routing configuration: use default routing.
124 | AudioDeviceInfo audioInputDevice = null;
125 | AudioDeviceInfo audioOutputDevice = null;
126 | if (USE_VOICEHAT_I2S_DAC) {
127 | audioInputDevice = findAudioDevice(AudioManager.GET_DEVICES_INPUTS, AudioDeviceInfo.TYPE_BUS);
128 | if (audioInputDevice == null) {
129 | Log.e(TAG, "failed to find I2S audio input device, using default");
130 | }
131 | audioOutputDevice = findAudioDevice(AudioManager.GET_DEVICES_OUTPUTS, AudioDeviceInfo.TYPE_BUS);
132 | if (audioOutputDevice == null) {
133 | Log.e(TAG, "failed to find I2S audio output device, using default");
134 | }
135 | }
136 |
137 | try {
138 | if (USE_VOICEHAT_I2S_DAC) {
139 | Log.i(TAG, "initializing DAC trigger");
140 | mDac = VoiceHat.openDac();
141 | mDac.setSdMode(Max98357A.SD_MODE_SHUTDOWN);
142 |
143 | mButton = VoiceHat.openButton();
144 | mLed = VoiceHat.openLed();
145 | } else {
146 | PeripheralManager pioManager = PeripheralManager.getInstance();
147 | mButton = new Button(BoardDefaults.getGPIOForButton(),
148 | Button.LogicState.PRESSED_WHEN_LOW);
149 | mLed = pioManager.openGpio(BoardDefaults.getGPIOForLED());
150 | }
151 |
152 | mButton.setDebounceDelay(BUTTON_DEBOUNCE_DELAY_MS);
153 | mButton.setOnButtonEventListener(this);
154 |
155 | mLed.setDirection(Gpio.DIRECTION_OUT_INITIALLY_LOW);
156 | mLed.setActiveType(Gpio.ACTIVE_HIGH);
157 | } catch (IOException e) {
158 | Log.e(TAG, "error configuring peripherals:", e);
159 | return;
160 | }
161 |
162 | // Set volume from preferences
163 | SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this);
164 | int initVolume = preferences.getInt(PREF_CURRENT_VOLUME, DEFAULT_VOLUME);
165 | Log.i(TAG, "setting audio track volume to: " + initVolume);
166 |
167 | UserCredentials userCredentials = null;
168 | try {
169 | userCredentials =
170 | EmbeddedAssistant.generateCredentials(this, R.raw.credentials);
171 | } catch (IOException | JSONException e) {
172 | Log.e(TAG, "error getting user credentials", e);
173 | }
174 | mEmbeddedAssistant = new EmbeddedAssistant.Builder()
175 | .setCredentials(userCredentials)
176 | .setDeviceInstanceId(DEVICE_INSTANCE_ID)
177 | .setDeviceModelId(DEVICE_MODEL_ID)
178 | .setLanguageCode(LANGUAGE_CODE)
179 | .setAudioInputDevice(audioInputDevice)
180 | .setAudioOutputDevice(audioOutputDevice)
181 | .setAudioSampleRate(SAMPLE_RATE)
182 | .setAudioVolume(initVolume)
183 | .setRequestCallback(new RequestCallback() {
184 | @Override
185 | public void onRequestStart() {
186 | Log.i(TAG, "starting assistant request, enable microphones");
187 | mButtonWidget.setText(R.string.button_listening);
188 | mButtonWidget.setEnabled(false);
189 | }
190 |
191 | @Override
192 | public void onSpeechRecognition(List results) {
193 | for (final SpeechRecognitionResult result : results) {
194 | Log.i(TAG, "assistant request text: " + result.getTranscript() +
195 | " stability: " + Float.toString(result.getStability()));
196 | mAssistantRequestsAdapter.add(result.getTranscript());
197 | }
198 | }
199 | })
200 | .setConversationCallback(new ConversationCallback() {
201 | @Override
202 | public void onResponseStarted() {
203 | super.onResponseStarted();
204 | // When bus type is switched, the AudioManager needs to reset the stream volume
205 | if (mDac != null) {
206 | try {
207 | mDac.setSdMode(Max98357A.SD_MODE_LEFT);
208 | } catch (IOException e) {
209 | Log.e(TAG, "error enabling DAC", e);
210 | }
211 | }
212 | }
213 |
214 | @Override
215 | public void onResponseFinished() {
216 | super.onResponseFinished();
217 | if (mDac != null) {
218 | try {
219 | mDac.setSdMode(Max98357A.SD_MODE_SHUTDOWN);
220 | } catch (IOException e) {
221 | Log.e(TAG, "error disabling DAC", e);
222 | }
223 | }
224 | if (mLed != null) {
225 | try {
226 | mLed.setValue(false);
227 | } catch (IOException e) {
228 | Log.e(TAG, "cannot turn off LED", e);
229 | }
230 | }
231 | }
232 |
233 | @Override
234 | public void onError(Throwable throwable) {
235 | Log.e(TAG, "assist error: " + throwable.getMessage(), throwable);
236 | }
237 |
238 | @Override
239 | public void onVolumeChanged(int percentage) {
240 | Log.i(TAG, "assistant volume changed: " + percentage);
241 | // Update our shared preferences
242 | Editor editor = PreferenceManager
243 | .getDefaultSharedPreferences(AssistantActivity.this)
244 | .edit();
245 | editor.putInt(PREF_CURRENT_VOLUME, percentage);
246 | editor.apply();
247 | }
248 |
249 | @Override
250 | public void onConversationFinished() {
251 | Log.i(TAG, "assistant conversation finished");
252 | mButtonWidget.setText(R.string.button_new_request);
253 | mButtonWidget.setEnabled(true);
254 | }
255 |
256 | @Override
257 | public void onAssistantResponse(final String response) {
258 | if(!response.isEmpty()) {
259 | mMainHandler.post(new Runnable() {
260 | @Override
261 | public void run() {
262 | mAssistantRequestsAdapter.add("Google Assistant: " + response);
263 | }
264 | });
265 | }
266 | }
267 |
268 | @Override
269 | public void onAssistantDisplayOut(final String html) {
270 | mMainHandler.post(new Runnable() {
271 | @Override
272 | public void run() {
273 | // Need to convert to base64
274 | try {
275 | final byte[] data = html.getBytes("UTF-8");
276 | final String base64String =
277 | Base64.encodeToString(data, Base64.DEFAULT);
278 | mWebView.loadData(base64String, "text/html; charset=utf-8",
279 | "base64");
280 | } catch (UnsupportedEncodingException e) {
281 | e.printStackTrace();
282 | }
283 | }
284 | });
285 | }
286 |
287 | public void onDeviceAction(String intentName, JSONObject parameters) {
288 | if (parameters != null) {
289 | Log.d(TAG, "Get device action " + intentName + " with parameters: " +
290 | parameters.toString());
291 | } else {
292 | Log.d(TAG, "Get device action " + intentName + " with no paramete"
293 | + "rs");
294 | }
295 | if (intentName.equals("action.devices.commands.OnOff")) {
296 | try {
297 | boolean turnOn = parameters.getBoolean("on");
298 | mLed.setValue(turnOn);
299 | } catch (JSONException e) {
300 | Log.e(TAG, "Cannot get value of command", e);
301 | } catch (IOException e) {
302 | Log.e(TAG, "Cannot set value of LED", e);
303 | }
304 | }
305 | }
306 | })
307 | .build();
308 | mEmbeddedAssistant.connect();
309 | }
310 |
311 | private AudioDeviceInfo findAudioDevice(int deviceFlag, int deviceType) {
312 | AudioManager manager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
313 | AudioDeviceInfo[] adis = manager.getDevices(deviceFlag);
314 | for (AudioDeviceInfo adi : adis) {
315 | if (adi.getType() == deviceType) {
316 | return adi;
317 | }
318 | }
319 | return null;
320 | }
321 |
322 | @Override
323 | public void onButtonEvent(Button button, boolean pressed) {
324 | try {
325 | if (mLed != null) {
326 | mLed.setValue(pressed);
327 | }
328 | } catch (IOException e) {
329 | Log.d(TAG, "error toggling LED:", e);
330 | }
331 | if (pressed) {
332 | mEmbeddedAssistant.startConversation();
333 | }
334 | }
335 |
336 | @Override
337 | protected void onDestroy() {
338 | super.onDestroy();
339 | Log.i(TAG, "destroying assistant demo");
340 | if (mLed != null) {
341 | try {
342 | mLed.close();
343 | } catch (IOException e) {
344 | Log.w(TAG, "error closing LED", e);
345 | }
346 | mLed = null;
347 | }
348 | if (mButton != null) {
349 | try {
350 | mButton.close();
351 | } catch (IOException e) {
352 | Log.w(TAG, "error closing button", e);
353 | }
354 | mButton = null;
355 | }
356 | if (mDac != null) {
357 | try {
358 | mDac.close();
359 | } catch (IOException e) {
360 | Log.w(TAG, "error closing voice hat trigger", e);
361 | }
362 | mDac = null;
363 | }
364 | mEmbeddedAssistant.destroy();
365 | }
366 | }
367 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/androidthings/assistant/BoardDefaults.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.androidthings.assistant;
18 |
19 | import android.media.AudioDeviceInfo;
20 | import android.os.Build;
21 |
22 | @SuppressWarnings("WeakerAccess")
23 | public class BoardDefaults {
24 | public static final String DEVICE_RPI3 = "rpi3";
25 | public static final String DEVICE_RPI3BP = "rpi3bp";
26 | public static final String DEVICE_IMX7D_PICO = "imx7d_pico";
27 |
28 | /**
29 | * Return the GPIO pin that the LED is connected on.
30 | * For example, on Intel Edison Arduino breakout, pin "IO13" is connected to an onboard LED
31 | * that turns on when the GPIO pin is HIGH, and off when low.
32 | */
33 | public static String getGPIOForLED() {
34 | switch (Build.DEVICE) {
35 | case DEVICE_RPI3:
36 | case DEVICE_RPI3BP:
37 | return "BCM25";
38 | case DEVICE_IMX7D_PICO:
39 | return "GPIO2_IO02";
40 | default:
41 | throw new IllegalStateException("Unknown Build.DEVICE " + Build.DEVICE);
42 | }
43 | }
44 |
45 | /**
46 | * Return the GPIO pin that the Button is connected on.
47 | */
48 | public static String getGPIOForButton() {
49 | switch (Build.DEVICE) {
50 | case DEVICE_RPI3:
51 | case DEVICE_RPI3BP:
52 | return "BCM23";
53 | case DEVICE_IMX7D_PICO:
54 | return "GPIO6_IO14";
55 | default:
56 | throw new IllegalStateException("Unknown Build.DEVICE " + Build.DEVICE);
57 | }
58 | }
59 |
60 | /**
61 | * Return the GPIO pin for the Voice Hat DAC trigger.
62 | */
63 | public static String getGPIOForDacTrigger() {
64 | switch (Build.DEVICE) {
65 | case DEVICE_RPI3:
66 | case DEVICE_RPI3BP:
67 | return "BCM16";
68 | default:
69 | throw new IllegalStateException("Unknown Build.DEVICE " + Build.DEVICE);
70 | }
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/androidthings/assistant/Credentials.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.androidthings.assistant;
18 |
19 | import android.content.Context;
20 |
21 | import com.google.auth.oauth2.UserCredentials;
22 |
23 | import org.json.JSONException;
24 | import org.json.JSONObject;
25 |
26 | import java.io.IOException;
27 | import java.io.InputStream;
28 |
29 | class Credentials {
30 | static UserCredentials fromResource(Context context, int resourceId)
31 | throws IOException, JSONException {
32 | InputStream is = context.getResources().openRawResource(resourceId);
33 | byte[] bytes = new byte[is.available()];
34 | is.read(bytes);
35 | JSONObject json = new JSONObject(new String(bytes, "UTF-8"));
36 | return new UserCredentials(
37 | json.getString("client_id"),
38 | json.getString("client_secret"),
39 | json.getString("refresh_token")
40 | );
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/androidthings/assistant/EmbeddedAssistant.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.androidthings.assistant;
18 |
19 | import android.content.Context;
20 | import android.media.AudioDeviceInfo;
21 | import android.media.AudioFormat;
22 | import android.media.AudioRecord;
23 | import android.media.AudioTrack;
24 | import android.media.MediaRecorder.AudioSource;
25 | import android.os.Handler;
26 | import android.os.HandlerThread;
27 | import android.support.annotation.IntDef;
28 | import android.support.annotation.Nullable;
29 | import android.util.Log;
30 | import com.google.assistant.embedded.v1alpha2.AssistConfig;
31 | import com.google.assistant.embedded.v1alpha2.AssistRequest;
32 | import com.google.assistant.embedded.v1alpha2.AssistResponse;
33 | import com.google.assistant.embedded.v1alpha2.AssistResponse.EventType;
34 | import com.google.assistant.embedded.v1alpha2.AudioInConfig;
35 | import com.google.assistant.embedded.v1alpha2.AudioOutConfig;
36 | import com.google.assistant.embedded.v1alpha2.DeviceConfig;
37 | import com.google.assistant.embedded.v1alpha2.DeviceLocation;
38 | import com.google.assistant.embedded.v1alpha2.DialogStateIn;
39 | import com.google.assistant.embedded.v1alpha2.DialogStateOut.MicrophoneMode;
40 | import com.google.assistant.embedded.v1alpha2.EmbeddedAssistantGrpc;
41 | import com.google.assistant.embedded.v1alpha2.ScreenOutConfig;
42 | import com.google.assistant.embedded.v1alpha2.SpeechRecognitionResult;
43 | import com.google.auth.oauth2.UserCredentials;
44 | import com.google.protobuf.ByteString;
45 | import io.grpc.ManagedChannel;
46 | import io.grpc.ManagedChannelBuilder;
47 | import io.grpc.auth.MoreCallCredentials;
48 | import io.grpc.stub.StreamObserver;
49 | import java.io.IOException;
50 | import java.lang.annotation.Retention;
51 | import java.lang.annotation.RetentionPolicy;
52 | import java.nio.ByteBuffer;
53 | import java.util.ArrayList;
54 | import java.util.List;
55 | import org.json.JSONArray;
56 | import org.json.JSONException;
57 | import org.json.JSONObject;
58 |
59 | public class EmbeddedAssistant {
60 | private static final String TAG = EmbeddedAssistant.class.getSimpleName();
61 | private static final boolean DEBUG = false;
62 |
63 | private static final String ASSISTANT_API_ENDPOINT = "embeddedassistant.googleapis.com";
64 | private static final int AUDIO_RECORD_BLOCK_SIZE = 1024;
65 |
66 | // Device Actions
67 | private DeviceConfig mDeviceConfig;
68 |
69 | // Callbacks
70 | private Handler mRequestHandler;
71 | private RequestCallback mRequestCallback;
72 | private Handler mConversationHandler;
73 | private ConversationCallback mConversationCallback;
74 |
75 | // Assistant Thread and Runnables implementing the push-to-talk functionality.
76 | private ByteString mConversationState;
77 | private String mLanguageCode = "en-US";
78 | private AudioRecord mAudioRecord;
79 | private DeviceLocation mDeviceLocation;
80 | private AudioInConfig mAudioInConfig;
81 | private AudioOutConfig mAudioOutConfig;
82 | private AudioDeviceInfo mAudioInputDevice;
83 | private AudioDeviceInfo mAudioOutputDevice;
84 | private AudioFormat mAudioInputFormat;
85 | private AudioFormat mAudioOutputFormat;
86 | private int mAudioInputBufferSize;
87 | private int mAudioOutputBufferSize;
88 | private int mVolume = 100; // Default to maximum volume.
89 | private ScreenOutConfig mScreenOutConfig;
90 |
91 | private MicrophoneMode mMicrophoneMode;
92 | private HandlerThread mAssistantThread;
93 | private Handler mAssistantHandler;
94 | private ArrayList mAssistantResponses = new ArrayList<>();
95 |
96 | // gRPC client and stream observers.
97 | private int mAudioOutSize; // Tracks the size of audio responses to determine when it ends.
98 | private EmbeddedAssistantGrpc.EmbeddedAssistantStub mAssistantService;
99 | private StreamObserver mAssistantRequestObserver;
    // Observer for the server side of the bidirectional Assist() stream.
    // NOTE(review): these callbacks run on a gRPC transport thread; results are
    // re-posted onto mRequestHandler / mConversationHandler for the client.
    private StreamObserver mAssistantResponseObserver =
            new StreamObserver() {
                @Override
                public void onNext(final AssistResponse value) {
                    if (DEBUG) {
                        Log.d(TAG, "Received response: " + value.toString());
                    }
                    // Device Actions arrive as a JSON document; walk its
                    // "inputs" -> "payload" -> "commands" -> "execution" tree
                    // and dispatch each command to the conversation callback.
                    if (value.getDeviceAction() != null &&
                            !value.getDeviceAction().getDeviceRequestJson().isEmpty()) {
                        // Iterate through JSON object
                        try {
                            JSONObject deviceAction = new JSONObject(value.getDeviceAction()
                                    .getDeviceRequestJson());
                            JSONArray inputs = deviceAction.getJSONArray("inputs");
                            for (int i = 0; i < inputs.length(); i++) {
                                if (inputs.getJSONObject(i).getString("intent").equals(
                                        "action.devices.EXECUTE")) {
                                    JSONArray commands = inputs.getJSONObject(i)
                                            .getJSONObject("payload")
                                            .getJSONArray("commands");
                                    for (int j = 0; j < commands.length(); j++) {
                                        final JSONArray execution = commands.getJSONObject(j)
                                                .getJSONArray("execution");
                                        for (int k = 0; k < execution.length(); k++) {
                                            // Capture the loop index for the anonymous Runnable.
                                            final int finalK = k;
                                            mConversationHandler.post(new Runnable() {
                                                @Override
                                                public void run() {
                                                    try {
                                                        mConversationCallback.onDeviceAction(
                                                                execution
                                                                        .getJSONObject(finalK)
                                                                        .getString("command"),
                                                                execution.getJSONObject(finalK)
                                                                        .optJSONObject("params"));
                                                    } catch (JSONException e) {
                                                        e.printStackTrace();
                                                    }
                                                }
                                            });

                                        }
                                    }
                                }
                            }
                        } catch (JSONException e) {
                            e.printStackTrace();
                        }
                    }
                    // The user has stopped speaking: finish the request phase and
                    // announce that a response is starting.
                    if (value.getEventType() == EventType.END_OF_UTTERANCE) {
                        mRequestHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mRequestCallback.onRequestFinish();
                            }
                        });
                        mConversationHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mConversationCallback.onResponseStarted();
                            }
                        });
                    }
                    // NOTE(review): protobuf getters never return null, so this
                    // check is always true; hasDialogStateOut() would be precise.
                    if (value.getDialogStateOut() != null) {
                        // Remember the opaque conversation state for follow-up requests.
                        mConversationState = value.getDialogStateOut().getConversationState();
                        if (value.getDialogStateOut().getVolumePercentage() != 0) {
                            final int volumePercentage = value.getDialogStateOut().getVolumePercentage();
                            mVolume = volumePercentage;
                            mConversationHandler.post(new Runnable() {
                                @Override
                                public void run() {
                                    mConversationCallback.onVolumeChanged(volumePercentage);
                                }
                            });
                        }
                        mRequestHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mRequestCallback.onSpeechRecognition(value.getSpeechResultsList());
                            }
                        });
                        mMicrophoneMode = value.getDialogStateOut().getMicrophoneMode();
                        // NOTE(review): unlike the other callbacks this one runs
                        // inline on the gRPC thread, not via mConversationHandler.
                        mConversationCallback.onAssistantResponse(value.getDialogStateOut()
                                .getSupplementalDisplayText());
                    }
                    if (value.getAudioOut() != null) {
                        // Per the mAudioOutSize field: chunk sizes are tracked to
                        // detect the end of the audio response — a smaller chunk
                        // than the last one triggers playback via onCompleted().
                        if (mAudioOutSize <= value.getAudioOut().getSerializedSize()) {
                            mAudioOutSize = value.getAudioOut().getSerializedSize();
                        } else {
                            mAudioOutSize = 0;
                            onCompleted();
                        }
                        final ByteBuffer audioData =
                                ByteBuffer.wrap(value.getAudioOut().getAudioData().toByteArray());
                        // Buffer the audio for playback in onCompleted().
                        mAssistantResponses.add(audioData);
                        mConversationHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mConversationCallback.onAudioSample(audioData);
                            }
                        });
                    }
                    // HTML screen-out data, delivered when a screen mode was requested.
                    if (value.hasScreenOut()) {
                        mConversationHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mConversationCallback.onAssistantDisplayOut(
                                        value.getScreenOut().getData().toStringUtf8());
                            }
                        });
                    }
                }

                @Override
                public void onError(final Throwable t) {
                    // Surface stream failures to the client on its own handler.
                    mConversationHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            mConversationCallback.onError(t);
                        }
                    });
                }

                @Override
                public void onCompleted() {
                    // Plays back all buffered response audio.
                    // create a new AudioTrack to workaround audio routing issues.
                    AudioTrack audioTrack = new AudioTrack.Builder()
                            .setAudioFormat(mAudioOutputFormat)
                            .setBufferSizeInBytes(mAudioOutputBufferSize)
                            .setTransferMode(AudioTrack.MODE_STREAM)
                            .build();
                    if (mAudioOutputDevice != null) {
                        audioTrack.setPreferredDevice(mAudioOutputDevice);
                    }
                    // Scale track volume by the Assistant-reported percentage (0-100).
                    audioTrack.setVolume(AudioTrack.getMaxVolume() * mVolume / 100.0f);
                    audioTrack.play();
                    mConversationHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            mConversationCallback.onResponseStarted();
                        }
                    });
                    for (ByteBuffer audioData : mAssistantResponses) {
                        final ByteBuffer buf = audioData;
                        mConversationHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mConversationCallback.onAudioSample(buf);
                            }
                        });
                        // Blocking write keeps this loop in step with playback.
                        audioTrack.write(buf, buf.remaining(),
                                AudioTrack.WRITE_BLOCKING);
                    }
                    mAssistantResponses.clear();
                    audioTrack.stop();
                    audioTrack.release();

                    mConversationHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            mConversationCallback.onResponseFinished();
                        }
                    });
                    if (mMicrophoneMode == MicrophoneMode.DIALOG_FOLLOW_ON) {
                        // Automatically start a new request
                        startConversation();
                    } else {
                        // The conversation is done
                        mConversationHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                mConversationCallback.onConversationFinished();
                            }
                        });
                    }
                }
            };
277 |
    // Audio pump: reads one block of microphone audio, forwards it on the open
    // Assist stream, and re-posts itself on mAssistantHandler. The loop ends
    // when the blocking read returns a negative (error) result or the callback
    // is removed from the handler (stopConversation()/destroy()).
    private Runnable mStreamAssistantRequest = new Runnable() {
        @Override
        public void run() {
            ByteBuffer audioData = ByteBuffer.allocateDirect(AUDIO_RECORD_BLOCK_SIZE);
            int result = mAudioRecord.read(audioData, audioData.capacity(),
                    AudioRecord.READ_BLOCKING);
            if (result < 0) {
                // Read error: stop streaming without re-posting.
                return;
            }
            mRequestHandler.post(new Runnable() {
                @Override
                public void run() {
                    mRequestCallback.onAudioRecording();
                }
            });
            mAssistantRequestObserver.onNext(AssistRequest.newBuilder()
                    .setAudioIn(ByteString.copyFrom(audioData))
                    .build());
            mAssistantHandler.post(mStreamAssistantRequest);
        }
    };
299 |
    // OAuth2 credentials used to authenticate gRPC calls; set via Builder.setCredentials().
    private UserCredentials mUserCredentials;

    // Instances are created through Builder only.
    private EmbeddedAssistant() {}
303 |
304 | /**
305 | * Initializes the Assistant.
306 | */
307 | public void connect() {
308 | mAssistantThread = new HandlerThread("assistantThread");
309 | mAssistantThread.start();
310 | mAssistantHandler = new Handler(mAssistantThread.getLooper());
311 |
312 | ManagedChannel channel = ManagedChannelBuilder.forTarget(ASSISTANT_API_ENDPOINT).build();
313 | mAssistantService = EmbeddedAssistantGrpc.newStub(channel)
314 | .withCallCredentials(MoreCallCredentials.from(mUserCredentials));
315 | }
316 |
317 | /**
318 | * Starts a request to the Assistant.
319 | */
320 | public void startConversation() {
321 | mAudioRecord.startRecording();
322 | mRequestHandler.post(new Runnable() {
323 | @Override
324 | public void run() {
325 | mRequestCallback.onRequestStart();
326 | }
327 | });
328 | mAssistantHandler.post(new Runnable() {
329 | @Override
330 | public void run() {
331 | mAssistantRequestObserver = mAssistantService.assist(mAssistantResponseObserver);
332 | AssistConfig.Builder assistConfigBuilder = AssistConfig.newBuilder()
333 | .setAudioInConfig(mAudioInConfig)
334 | .setAudioOutConfig(mAudioOutConfig)
335 | .setDeviceConfig(mDeviceConfig);
336 | if (mScreenOutConfig != null) {
337 | assistConfigBuilder.setScreenOutConfig(mScreenOutConfig);
338 | }
339 | DialogStateIn.Builder dialogStateInBuilder = DialogStateIn.newBuilder();
340 | if (mConversationState != null) {
341 | dialogStateInBuilder.setConversationState(mConversationState);
342 | }
343 | if (mDeviceLocation != null) {
344 | dialogStateInBuilder.setDeviceLocation(mDeviceLocation);
345 | }
346 | dialogStateInBuilder.setLanguageCode(mLanguageCode);
347 | assistConfigBuilder.setDialogStateIn(dialogStateInBuilder.build());
348 | mAssistantRequestObserver.onNext(
349 | AssistRequest.newBuilder()
350 | .setConfig(assistConfigBuilder.build())
351 | .build());
352 | }
353 | });
354 | mAssistantHandler.post(mStreamAssistantRequest);
355 | }
356 |
357 | public void startConversation(final String inputQuery) {
358 | mRequestCallback.onRequestStart();
359 | mAssistantHandler.post(new Runnable() {
360 | @Override
361 | public void run() {
362 | mAssistantRequestObserver = mAssistantService.assist(mAssistantResponseObserver);
363 | AssistConfig.Builder assistConfigBuilder = AssistConfig.newBuilder()
364 | .setTextQuery(inputQuery)
365 | .setAudioOutConfig(mAudioOutConfig)
366 | .setDeviceConfig(mDeviceConfig)
367 | .setScreenOutConfig(mScreenOutConfig);
368 | DialogStateIn.Builder dialogStateInBuilder = DialogStateIn.newBuilder();
369 | if (mConversationState != null) {
370 | dialogStateInBuilder.setConversationState(mConversationState);
371 | }
372 | if (mDeviceLocation != null) {
373 | dialogStateInBuilder.setDeviceLocation(mDeviceLocation);
374 | }
375 | dialogStateInBuilder.setLanguageCode(mLanguageCode);
376 | assistConfigBuilder.setDialogStateIn(dialogStateInBuilder.build());
377 | mAssistantRequestObserver.onNext(
378 | AssistRequest.newBuilder()
379 | .setConfig(assistConfigBuilder.build())
380 | .build());
381 | }
382 | });
383 | }
384 |
385 | /**
386 | * Manually ends a conversation with the Assistant.
387 | */
388 | public void stopConversation() {
389 | mAssistantHandler.post(new Runnable() {
390 | @Override
391 | public void run() {
392 | mAssistantHandler.removeCallbacks(mStreamAssistantRequest);
393 | if (mAssistantRequestObserver != null) {
394 | mAssistantRequestObserver.onCompleted();
395 | mAssistantRequestObserver = null;
396 | }
397 | }
398 | });
399 |
400 | mAudioRecord.stop();
401 | mConversationHandler.post(new Runnable() {
402 | @Override
403 | public void run() {
404 | mConversationCallback.onConversationFinished();
405 | }
406 | });
407 | }
408 |
    // Response rendering formats selectable via setResponseFormat().
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({TEXT, HTML})
    public @interface ResponseFormat {}
    public static final int TEXT = 0; // plain supplemental display text
    public static final int HTML = 1; // HTML screen-out data
414 |
415 | /**
416 | * Set desired assistant response format.
417 | */
418 | public void setResponseFormat(@ResponseFormat int format) {
419 | mScreenOutConfig = ScreenOutConfig.newBuilder()
420 | .setScreenMode(format == HTML
421 | ? ScreenOutConfig.ScreenMode.PLAYING
422 | : ScreenOutConfig.ScreenMode.SCREEN_MODE_UNSPECIFIED)
423 | .build();
424 | }
425 |
426 | /**
427 | * Removes callbacks and exists the Assistant service. This should be called when an activity is
428 | * closing to safely quit the Assistant service.
429 | */
430 | public void destroy() {
431 | mAssistantHandler.post(new Runnable() {
432 | @Override
433 | public void run() {
434 | mAssistantHandler.removeCallbacks(mStreamAssistantRequest);
435 | }
436 | });
437 | mAssistantThread.quitSafely();
438 | if (mAudioRecord != null) {
439 | mAudioRecord.stop();
440 | mAudioRecord = null;
441 | }
442 | }
443 |
    /**
     * Generates access tokens for the Assistant based on a credentials JSON file.
     * The resource must contain "client_id", "client_secret", and
     * "refresh_token" fields.
     *
     * @param context Application context
     * @param resourceId The raw resource that contains the project credentials
     *
     * @return A {@link UserCredentials} object which can be used by the Assistant.
     * @throws IOException If the resource does not exist or cannot be read.
     * @throws JSONException If the resource is incorrectly formatted.
     */
    public static UserCredentials generateCredentials(Context context, int resourceId)
            throws IOException, JSONException {
        return Credentials.fromResource(context, resourceId);
    }
458 |
459 | /**
460 | * Used to build an AssistantManager object.
461 | */
462 | public static class Builder {
463 | private EmbeddedAssistant mEmbeddedAssistant;
464 | private int mSampleRate;
465 | private String mDeviceModelId;
466 | private String mDeviceInstanceId;
467 |
        /**
         * Creates a Builder wrapping a new, unconfigured {@link EmbeddedAssistant}.
         */
        public Builder() {
            mEmbeddedAssistant = new EmbeddedAssistant();
        }

        /**
         * Sets a preferred {@link AudioDeviceInfo} device for input.
         *
         * @param device The preferred audio device to acquire audio from.
         * @return Returns this builder to allow for chaining.
         */
        public Builder setAudioInputDevice(AudioDeviceInfo device) {
            mEmbeddedAssistant.mAudioInputDevice = device;
            return this;
        }

        /**
         * Sets a preferred {@link AudioDeviceInfo} device for output.
         *
         * @param device The preferred audio device to route audio to.
         * @return Returns this builder to allow for chaining.
         */
        public Builder setAudioOutputDevice(AudioDeviceInfo device) {
            mEmbeddedAssistant.mAudioOutputDevice = device;
            return this;
        }
496 |
        /**
         * Sets a {@link RequestCallback}, which is invoked while a request is being
         * made to the Assistant. Callbacks are dispatched on the calling thread's
         * Handler.
         *
         * @param requestCallback The methods that will run during a request.
         * @return Returns this builder to allow for chaining.
         */
        public Builder setRequestCallback(RequestCallback requestCallback) {
            setRequestCallback(requestCallback, null);
            return this;
        }

        /**
         * Sets a {@link RequestCallback}, which is invoked while a request is being
         * made to the Assistant.
         *
         * @param requestCallback The methods that will run during a request.
         * @param requestHandler Handler used to dispatch the callback; when null, a
         *     Handler bound to the calling thread's Looper is used.
         * @return Returns this builder to allow for chaining.
         */
        public Builder setRequestCallback(RequestCallback requestCallback,
                @Nullable Handler requestHandler) {
            if (requestHandler == null) {
                requestHandler = new Handler();
            }
            mEmbeddedAssistant.mRequestCallback = requestCallback;
            mEmbeddedAssistant.mRequestHandler = requestHandler;
            return this;
        }

        /**
         * Sets a {@link ConversationCallback}, which is invoked while a response is
         * being given from the Assistant. Callbacks are dispatched on the calling
         * thread's Handler.
         *
         * @param responseCallback The methods that will run during a response.
         * @return Returns this builder to allow for chaining.
         */
        public Builder setConversationCallback(ConversationCallback responseCallback) {
            setConversationCallback(responseCallback, null);
            return this;
        }

        /**
         * Sets a {@link ConversationCallback}, which is invoked while a response is
         * being given from the Assistant.
         *
         * @param responseCallback The methods that will run during a response.
         * @param responseHandler Handler used to dispatch the callback; when null, a
         *     Handler bound to the calling thread's Looper is used.
         * @return Returns this builder to allow for chaining.
         */
        public Builder setConversationCallback(ConversationCallback responseCallback,
                @Nullable Handler responseHandler) {
            if (responseHandler == null) {
                responseHandler = new Handler();
            }
            mEmbeddedAssistant.mConversationCallback = responseCallback;
            mEmbeddedAssistant.mConversationHandler = responseHandler;
            return this;
        }
554 |
555 | /**
556 | * Sets the credentials for the user.
557 | *
558 | * @param userCredentials Credentials generated by
559 | * {@link EmbeddedAssistant#generateCredentials(Context, int)}.
560 | * @return Returns this builder to allow for chaining.
561 | */
562 | public Builder setCredentials(UserCredentials userCredentials) {
563 | mEmbeddedAssistant.mUserCredentials = userCredentials;
564 | return this;
565 | }
566 |
567 | /**
568 | * Sets the audio sampling rate for input and output streams
569 | *
570 | * @param sampleRate The audio sample rate
571 | * @return Returns this builder to allow for chaining.
572 | */
573 | public Builder setAudioSampleRate(int sampleRate) {
574 | mSampleRate = sampleRate;
575 | return this;
576 | }
577 |
578 | /**
579 | * Sets the volume for the Assistant response
580 | *
581 | * @param volume The audio volume in the range 0 - 100.
582 | * @return Returns this builder to allow for chaining.
583 | */
584 | public Builder setAudioVolume(int volume) {
585 | mEmbeddedAssistant.mVolume = volume;
586 | return this;
587 | }
588 |
589 | /**
590 | * Sets the model id for each Assistant request.
591 | *
592 | * @param deviceModelId The device model id.
593 | * @return Returns this builder to allow for chaining.
594 | */
595 | public Builder setDeviceModelId(String deviceModelId) {
596 | mDeviceModelId = deviceModelId;
597 | return this;
598 | }
599 |
600 | /**
601 | * Sets the instance id for each Assistant request.
602 | *
603 | * @param deviceInstanceId The device instance id.
604 | * @return Returns this builder to allow for chaining.
605 | */
606 | public Builder setDeviceInstanceId(String deviceInstanceId) {
607 | mDeviceInstanceId = deviceInstanceId;
608 | return this;
609 | }
610 |
611 | /**
612 | * Sets language code of the request using IETF BCP 47 syntax.
613 | * See for the documentation.
614 | * For example: "en-US".
615 | *
616 | * @param languageCode Code for the language. Only Assistant-supported languages are valid.
617 | * @return Returns this builder to allow for chaining.
618 | */
619 | public Builder setLanguageCode(String languageCode) {
620 | mEmbeddedAssistant.mLanguageCode = languageCode;
621 | return this;
622 | }
623 |
        /**
         * Sets the device location reported with each Assistant request.
         *
         * @param deviceLocation Coordinates describing where this device is located.
         * @return Returns this builder to allow for chaining.
         */
        public Builder setDeviceLocation(DeviceLocation deviceLocation) {
            mEmbeddedAssistant.mDeviceLocation = deviceLocation;
            return this;
        }
628 |
629 | /**
630 | * Returns an AssistantManager if all required parameters have been supplied.
631 | *
632 | * @return An inactive AssistantManager. Call {@link EmbeddedAssistant#connect()} to start
633 | * it.
634 | */
635 | public EmbeddedAssistant build() {
636 | if (mEmbeddedAssistant.mRequestCallback == null) {
637 | throw new NullPointerException("There must be a defined RequestCallback");
638 | }
639 | if (mEmbeddedAssistant.mConversationCallback == null) {
640 | throw new NullPointerException("There must be a defined ConversationCallback");
641 | }
642 | if (mEmbeddedAssistant.mUserCredentials == null) {
643 | throw new NullPointerException("There must be provided credentials");
644 | }
645 | if (mSampleRate == 0) {
646 | throw new NullPointerException("There must be a defined sample rate");
647 | }
648 | final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
649 |
650 | // Construct audio configurations.
651 | mEmbeddedAssistant.mAudioInConfig = AudioInConfig.newBuilder()
652 | .setEncoding(AudioInConfig.Encoding.LINEAR16)
653 | .setSampleRateHertz(mSampleRate)
654 | .build();
655 | mEmbeddedAssistant.mAudioOutConfig = AudioOutConfig.newBuilder()
656 | .setEncoding(AudioOutConfig.Encoding.LINEAR16)
657 | .setSampleRateHertz(mSampleRate)
658 | .setVolumePercentage(mEmbeddedAssistant.mVolume)
659 | .build();
660 |
661 | // Initialize Audio framework parameters.
662 | mEmbeddedAssistant.mAudioInputFormat = new AudioFormat.Builder()
663 | .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
664 | .setEncoding(audioEncoding)
665 | .setSampleRate(mSampleRate)
666 | .build();
667 | mEmbeddedAssistant.mAudioInputBufferSize = AudioRecord.getMinBufferSize(
668 | mEmbeddedAssistant.mAudioInputFormat.getSampleRate(),
669 | mEmbeddedAssistant.mAudioInputFormat.getChannelMask(),
670 | mEmbeddedAssistant.mAudioInputFormat.getEncoding());
671 | mEmbeddedAssistant.mAudioOutputFormat = new AudioFormat.Builder()
672 | .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
673 | .setEncoding(audioEncoding)
674 | .setSampleRate(mSampleRate)
675 | .build();
676 | mEmbeddedAssistant.mAudioOutputBufferSize = AudioTrack.getMinBufferSize(
677 | mEmbeddedAssistant.mAudioOutputFormat.getSampleRate(),
678 | mEmbeddedAssistant.mAudioOutputFormat.getChannelMask(),
679 | mEmbeddedAssistant.mAudioOutputFormat.getEncoding());
680 |
681 | // create new AudioRecord to workaround audio routing issues.
682 | mEmbeddedAssistant.mAudioRecord = new AudioRecord.Builder()
683 | .setAudioSource(AudioSource.VOICE_RECOGNITION)
684 | .setAudioFormat(mEmbeddedAssistant.mAudioInputFormat)
685 | .setBufferSizeInBytes(mEmbeddedAssistant.mAudioInputBufferSize)
686 | .build();
687 | if (mEmbeddedAssistant.mAudioInputDevice != null) {
688 | boolean result = mEmbeddedAssistant.mAudioRecord.setPreferredDevice(
689 | mEmbeddedAssistant.mAudioInputDevice);
690 | if (!result) {
691 | Log.e(TAG, "failed to set preferred input device");
692 | }
693 | }
694 |
695 | // Construct DeviceConfig
696 | mEmbeddedAssistant.mDeviceConfig = DeviceConfig.newBuilder()
697 | .setDeviceId(mDeviceInstanceId)
698 | .setDeviceModelId(mDeviceModelId)
699 | .build();
700 |
701 | // Construct default ScreenOutConfig
702 | mEmbeddedAssistant.mScreenOutConfig = ScreenOutConfig.newBuilder()
703 | .setScreenMode(ScreenOutConfig.ScreenMode.SCREEN_MODE_UNSPECIFIED)
704 | .build();
705 |
706 | return mEmbeddedAssistant;
707 | }
708 | }
709 |
710 | /**
711 | * Callback for methods during a request to the Assistant.
712 | */
713 | public static abstract class RequestCallback {
714 |
715 | /**
716 | * Called when a request is first made.
717 | */
718 | public void onRequestStart() {}
719 |
720 | /**
721 | * Called when a request has completed.
722 | */
723 | public void onRequestFinish() {}
724 |
725 | /**
726 | * Called when audio is being recording. This may be called multiple times during a single
727 | * request.
728 | */
729 | public void onAudioRecording() {}
730 |
731 | /**
732 | * Called when the request is complete and the Assistant returns the user's speech-to-text.
733 | */
734 | public void onSpeechRecognition(List results) {}
735 | }
736 |
737 | /**
738 | * Callback for methods during a conversation from the Assistant.
739 | */
740 | public static abstract class ConversationCallback {
741 |
742 | /**
743 | * Called when the user's voice query ends and the response from the Assistant is about to
744 | * start a response.
745 | */
746 | public void onResponseStarted() {}
747 |
748 | /**
749 | * Called when the Assistant's response is complete.
750 | */
751 | public void onResponseFinished() {}
752 |
753 | /**
754 | * Called when audio is being played. This may be called multiple times during a single
755 | * response. The audio will play using the AudioTrack, although this method may be used
756 | * to provide auxiliary effects.
757 | *
758 | * @param audioSample The raw audio sample from the Assistant
759 | */
760 | public void onAudioSample(ByteBuffer audioSample) {}
761 |
762 | /**
763 | * Called when an error occurs during the response
764 | *
765 | * @param throwable A {@link Throwable} which contains information about the response error.
766 | */
767 | public void onError(Throwable throwable) {}
768 |
769 | /**
770 | * Called when the user requests to change the Assistant's volume.
771 | *
772 | * @param percentage The desired volume as a percentage of intensity, in the range 0 - 100.
773 | */
774 | public void onVolumeChanged(int percentage) {}
775 |
776 | /**
777 | * Called when the response contains a DeviceAction.
778 | *
779 | * @param intentName The name of the intent to execute.
780 | * @param parameters A JSONObject containing parameters related to this intent.
781 | */
782 | public void onDeviceAction(String intentName, JSONObject parameters) {}
783 |
784 | /**
785 | * Called when the response contains supplemental display text from the Assistant.
786 | *
787 | * @param response Supplemental display text.
788 | */
789 | public void onAssistantResponse(String response) {}
790 |
791 | /**
792 | * Called when the response contains HTML output from the Assistant.
793 | *
794 | * @param html HTML data showing a rich response
795 | */
796 | public void onAssistantDisplayOut(String html) {}
797 |
798 | /**
799 | * Called when the entire conversation is finished.
800 | */
801 | public void onConversationFinished() {}
802 | }
803 | }
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
29 |
35 |
41 |
47 |
48 |
49 |
54 |
59 |
60 |
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 | Google Assistant Service Sample
19 | New Request
20 | Retry
21 | Listening
22 |
23 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
buildscript {
    repositories {
        google()
        // NOTE(review): JCenter has been sunset by JFrog; resolution from it may fail
        // on clean machines. Consider migrating to mavenCentral() — confirm artifacts
        // are available there before switching.
        jcenter()
    }
    dependencies {
        // Android Gradle plugin and the protobuf code-generation plugin used by :grpc.
        classpath 'com.android.tools.build:gradle:3.0.1'
        classpath "com.google.protobuf:protobuf-gradle-plugin:0.8.2"

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        // NOTE(review): same JCenter sunset caveat as in buildscript above.
        jcenter()
    }
}

// Removes the root build directory on `gradle clean`.
task clean(type: Delete) {
    delete rootProject.buildDir
}
41 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-googleassistant/b65f4b06f500cd41222b85933cd84e9b832afc91/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Mar 29 02:45:55 PDT 2017
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.1-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
@rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/grpc/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/grpc/build.gradle:
--------------------------------------------------------------------------------
// Android library module that compiles the Google Assistant gRPC/protobuf bindings
// from the .proto files under src/main/proto.
apply plugin: 'com.android.library'
apply plugin: 'com.google.protobuf'

android {
    compileSdkVersion 27

    defaultConfig {
        minSdkVersion 27
        targetSdkVersion 27
        versionCode 1
        versionName "1.0"

        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"

    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
}

protobuf {
    protoc {
        artifact = 'com.google.protobuf:protoc:3.2.0'
    }
    plugins {
        // javalite generates lightweight message classes suitable for Android;
        // grpc generates the service stubs.
        javalite {
            artifact = "com.google.protobuf:protoc-gen-javalite:3.0.0"
        }
        grpc {
            artifact = 'io.grpc:protoc-gen-grpc-java:1.2.0'
        }
    }
    generateProtoTasks {
        all().each { task ->
            task.plugins {
                javalite {}
                grpc {
                    // Options added to --grpc_out
                    option 'lite'
                }
            }
        }
    }
}

// NOTE(review): despite the name, this pins the io.grpc runtime version (1.9.0),
// not the protobuf version — a rename to grpcVersion would be clearer.
ext.protobufVersion = "1.9.0"

dependencies {
    api "io.grpc:grpc-okhttp:$protobufVersion"
    api "io.grpc:grpc-protobuf-lite:$protobufVersion"
    api "io.grpc:grpc-stub:$protobufVersion"
    api "io.grpc:grpc-auth:$protobufVersion"

    implementation 'javax.annotation:javax.annotation-api:1.2'
}
59 |
--------------------------------------------------------------------------------
/grpc/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /usr/local/google/home/fleker/Android/Sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
19 | # Uncomment this to preserve the line number information for
20 | # debugging stack traces.
21 | #-keepattributes SourceFile,LineNumberTable
22 |
23 | # If you keep the line number information, uncomment this to
24 | # hide the original source file name.
25 | #-renamesourcefileattribute SourceFile
26 |
--------------------------------------------------------------------------------
/grpc/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/api/annotations.proto:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2015, Google Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | syntax = "proto3";
16 |
17 | package google.api;
18 |
19 | import "google/api/http.proto";
20 | import "google/protobuf/descriptor.proto";
21 |
22 | option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
23 | option java_multiple_files = true;
24 | option java_outer_classname = "AnnotationsProto";
25 | option java_package = "com.google.api";
26 | option objc_class_prefix = "GAPI";
27 |
28 | extend google.protobuf.MethodOptions {
29 | // See `HttpRule`.
30 | HttpRule http = 72295728;
31 | }
32 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/api/http.proto:
--------------------------------------------------------------------------------
1 | // Copyright 2016 Google Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | syntax = "proto3";
16 |
17 | package google.api;
18 |
19 | option cc_enable_arenas = true;
20 | option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
21 | option java_multiple_files = true;
22 | option java_outer_classname = "HttpProto";
23 | option java_package = "com.google.api";
24 | option objc_class_prefix = "GAPI";
25 |
26 |
27 | // Defines the HTTP configuration for a service. It contains a list of
28 | // [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method
29 | // to one or more HTTP REST API methods.
30 | message Http {
31 | // A list of HTTP configuration rules that apply to individual API methods.
32 | //
33 | // **NOTE:** All service configuration rules follow "last one wins" order.
34 | repeated HttpRule rules = 1;
35 | }
36 |
37 | // `HttpRule` defines the mapping of an RPC method to one or more HTTP
38 | // REST APIs. The mapping determines what portions of the request
39 | // message are populated from the path, query parameters, or body of
40 | // the HTTP request. The mapping is typically specified as an
41 | // `google.api.http` annotation, see "google/api/annotations.proto"
42 | // for details.
43 | //
44 | // The mapping consists of a field specifying the path template and
45 | // method kind. The path template can refer to fields in the request
46 | // message, as in the example below which describes a REST GET
47 | // operation on a resource collection of messages:
48 | //
49 | //
50 | // service Messaging {
51 | // rpc GetMessage(GetMessageRequest) returns (Message) {
52 | // option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}";
53 | // }
54 | // }
55 | // message GetMessageRequest {
56 | // message SubMessage {
57 | // string subfield = 1;
58 | // }
59 | // string message_id = 1; // mapped to the URL
60 | // SubMessage sub = 2; // `sub.subfield` is url-mapped
61 | // }
62 | // message Message {
63 | // string text = 1; // content of the resource
64 | // }
65 | //
66 | // The same http annotation can alternatively be expressed inside the
67 | // `GRPC API Configuration` YAML file.
68 | //
69 | // http:
70 | // rules:
71 | // - selector: .Messaging.GetMessage
72 | // get: /v1/messages/{message_id}/{sub.subfield}
73 | //
// This definition enables an automatic, bidirectional mapping of HTTP
75 | // JSON to RPC. Example:
76 | //
77 | // HTTP | RPC
78 | // -----|-----
79 | // `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))`
80 | //
81 | // In general, not only fields but also field paths can be referenced
82 | // from a path pattern. Fields mapped to the path pattern cannot be
83 | // repeated and must have a primitive (non-message) type.
84 | //
85 | // Any fields in the request message which are not bound by the path
86 | // pattern automatically become (optional) HTTP query
87 | // parameters. Assume the following definition of the request message:
88 | //
89 | //
90 | // message GetMessageRequest {
91 | // message SubMessage {
92 | // string subfield = 1;
93 | // }
94 | // string message_id = 1; // mapped to the URL
95 | // int64 revision = 2; // becomes a parameter
96 | // SubMessage sub = 3; // `sub.subfield` becomes a parameter
97 | // }
98 | //
99 | //
100 | // This enables a HTTP JSON to RPC mapping as below:
101 | //
102 | // HTTP | RPC
103 | // -----|-----
104 | // `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))`
105 | //
106 | // Note that fields which are mapped to HTTP parameters must have a
107 | // primitive type or a repeated primitive type. Message types are not
108 | // allowed. In the case of a repeated type, the parameter can be
// repeated in the URL, as in `...?param=A&param=B`.
110 | //
111 | // For HTTP method kinds which allow a request body, the `body` field
112 | // specifies the mapping. Consider a REST update method on the
113 | // message resource collection:
114 | //
115 | //
116 | // service Messaging {
117 | // rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
118 | // option (google.api.http) = {
119 | // put: "/v1/messages/{message_id}"
120 | // body: "message"
121 | // };
122 | // }
123 | // }
124 | // message UpdateMessageRequest {
125 | // string message_id = 1; // mapped to the URL
126 | // Message message = 2; // mapped to the body
127 | // }
128 | //
129 | //
130 | // The following HTTP JSON to RPC mapping is enabled, where the
131 | // representation of the JSON in the request body is determined by
132 | // protos JSON encoding:
133 | //
134 | // HTTP | RPC
135 | // -----|-----
136 | // `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })`
137 | //
138 | // The special name `*` can be used in the body mapping to define that
139 | // every field not bound by the path template should be mapped to the
140 | // request body. This enables the following alternative definition of
141 | // the update method:
142 | //
143 | // service Messaging {
144 | // rpc UpdateMessage(Message) returns (Message) {
145 | // option (google.api.http) = {
146 | // put: "/v1/messages/{message_id}"
147 | // body: "*"
148 | // };
149 | // }
150 | // }
151 | // message Message {
152 | // string message_id = 1;
153 | // string text = 2;
154 | // }
155 | //
156 | //
157 | // The following HTTP JSON to RPC mapping is enabled:
158 | //
159 | // HTTP | RPC
160 | // -----|-----
161 | // `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")`
162 | //
163 | // Note that when using `*` in the body mapping, it is not possible to
164 | // have HTTP parameters, as all fields not bound by the path end in
165 | // the body. This makes this option more rarely used in practice of
166 | // defining REST APIs. The common usage of `*` is in custom methods
167 | // which don't use the URL at all for transferring data.
168 | //
169 | // It is possible to define multiple HTTP methods for one RPC by using
170 | // the `additional_bindings` option. Example:
171 | //
172 | // service Messaging {
173 | // rpc GetMessage(GetMessageRequest) returns (Message) {
174 | // option (google.api.http) = {
175 | // get: "/v1/messages/{message_id}"
176 | // additional_bindings {
177 | // get: "/v1/users/{user_id}/messages/{message_id}"
178 | // }
179 | // };
180 | // }
181 | // }
182 | // message GetMessageRequest {
183 | // string message_id = 1;
184 | // string user_id = 2;
185 | // }
186 | //
187 | //
188 | // This enables the following two alternative HTTP JSON to RPC
189 | // mappings:
190 | //
191 | // HTTP | RPC
192 | // -----|-----
193 | // `GET /v1/messages/123456` | `GetMessage(message_id: "123456")`
194 | // `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")`
195 | //
196 | // # Rules for HTTP mapping
197 | //
198 | // The rules for mapping HTTP path, query parameters, and body fields
199 | // to the request message are as follows:
200 | //
201 | // 1. The `body` field specifies either `*` or a field path, or is
202 | // omitted. If omitted, it assumes there is no HTTP body.
203 | // 2. Leaf fields (recursive expansion of nested messages in the
204 | // request) can be classified into three types:
205 | // (a) Matched in the URL template.
206 | // (b) Covered by body (if body is `*`, everything except (a) fields;
207 | // else everything under the body field)
208 | // (c) All other fields.
209 | // 3. URL query parameters found in the HTTP request are mapped to (c) fields.
210 | // 4. Any body sent with an HTTP request can contain only (b) fields.
211 | //
212 | // The syntax of the path template is as follows:
213 | //
214 | // Template = "/" Segments [ Verb ] ;
215 | // Segments = Segment { "/" Segment } ;
216 | // Segment = "*" | "**" | LITERAL | Variable ;
217 | // Variable = "{" FieldPath [ "=" Segments ] "}" ;
218 | // FieldPath = IDENT { "." IDENT } ;
219 | // Verb = ":" LITERAL ;
220 | //
221 | // The syntax `*` matches a single path segment. It follows the semantics of
222 | // [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String
223 | // Expansion.
224 | //
225 | // The syntax `**` matches zero or more path segments. It follows the semantics
226 | // of [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.3 Reserved
227 | // Expansion. NOTE: it must be the last segment in the path except the Verb.
228 | //
229 | // The syntax `LITERAL` matches literal text in the URL path.
230 | //
231 | // The syntax `Variable` matches the entire path as specified by its template;
232 | // this nested template must not contain further variables. If a variable
233 | // matches a single path segment, its template may be omitted, e.g. `{var}`
234 | // is equivalent to `{var=*}`.
235 | //
236 | // NOTE: the field paths in variables and in the `body` must not refer to
237 | // repeated fields or map fields.
238 | //
239 | // Use CustomHttpPattern to specify any HTTP method that is not included in the
240 | // `pattern` field, such as HEAD, or "*" to leave the HTTP method unspecified for
241 | // a given URL path rule. The wild-card rule is useful for services that provide
242 | // content to Web (HTML) clients.
243 | message HttpRule {
244 | // Selects methods to which this rule applies.
245 | //
246 | // Refer to [selector][google.api.DocumentationRule.selector] for syntax details.
247 | string selector = 1;
248 |
249 |   // Determines the URL pattern that is matched by this rule. This pattern can be
250 | // used with any of the {get|put|post|delete|patch} methods. A custom method
251 | // can be defined using the 'custom' field.
252 | oneof pattern {
253 | // Used for listing and getting information about resources.
254 | string get = 2;
255 |
256 | // Used for updating a resource.
257 | string put = 3;
258 |
259 | // Used for creating a resource.
260 | string post = 4;
261 |
262 | // Used for deleting a resource.
263 | string delete = 5;
264 |
265 | // Used for updating a resource.
266 | string patch = 6;
267 |
268 | // Custom pattern is used for defining custom verbs.
269 | CustomHttpPattern custom = 8;
270 | }
271 |
272 | // The name of the request field whose value is mapped to the HTTP body, or
273 | // `*` for mapping all fields not captured by the path pattern to the HTTP
274 | // body. NOTE: the referred field must not be a repeated field and must be
275 | // present at the top-level of request message type.
276 | string body = 7;
277 |
278 | // Additional HTTP bindings for the selector. Nested bindings must
279 | // not contain an `additional_bindings` field themselves (that is,
280 | // the nesting may only be one level deep).
281 | repeated HttpRule additional_bindings = 11;
282 | }
283 |
284 | // A custom pattern is used for defining custom HTTP verb.
285 | message CustomHttpPattern {
286 | // The name of this custom HTTP verb.
287 | string kind = 1;
288 |
289 | // The path matched by this custom verb.
290 | string path = 2;
291 | }
292 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/assistant/embedded/README.md:
--------------------------------------------------------------------------------
1 | The `Google Assistant API` allows developers to embed the Google Assistant into
2 | their devices. It provides an audio-in (spoken user query) and
3 | audio-out (Assistant spoken response).
4 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/assistant/embedded/v1alpha2/embedded_assistant.proto:
--------------------------------------------------------------------------------
1 | // Copyright 2018 Google Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | syntax = "proto3";
16 |
17 | package google.assistant.embedded.v1alpha2;
18 |
19 | import "google/api/annotations.proto";
20 | import "google/type/latlng.proto";
21 |
22 | option go_package = "google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2;embedded";
23 | option java_multiple_files = true;
24 | option java_outer_classname = "AssistantProto";
25 | option java_package = "com.google.assistant.embedded.v1alpha2";
26 | option objc_class_prefix = "ASTSDK";
27 |
28 |
29 | // Service that implements the Google Assistant API.
30 | service EmbeddedAssistant {
31 | // Initiates or continues a conversation with the embedded Assistant Service.
32 | // Each call performs one round-trip, sending an audio request to the service
33 | // and receiving the audio response. Uses bidirectional streaming to receive
34 | // results, such as the `END_OF_UTTERANCE` event, while sending audio.
35 | //
36 | // A conversation is one or more gRPC connections, each consisting of several
37 | // streamed requests and responses.
38 | // For example, the user says *Add to my shopping list* and the Assistant
39 | // responds *What do you want to add?*. The sequence of streamed requests and
40 | // responses in the first gRPC message could be:
41 | //
42 | // * AssistRequest.config
43 | // * AssistRequest.audio_in
44 | // * AssistRequest.audio_in
45 | // * AssistRequest.audio_in
46 | // * AssistRequest.audio_in
47 | // * AssistResponse.event_type.END_OF_UTTERANCE
48 | // * AssistResponse.speech_results.transcript "add to my shopping list"
49 | // * AssistResponse.dialog_state_out.microphone_mode.DIALOG_FOLLOW_ON
50 | // * AssistResponse.audio_out
51 | // * AssistResponse.audio_out
52 | // * AssistResponse.audio_out
53 | //
54 | //
55 | // The user then says *bagels* and the Assistant responds
56 | // *OK, I've added bagels to your shopping list*. This is sent as another gRPC
57 | // connection call to the `Assist` method, again with streamed requests and
58 | // responses, such as:
59 | //
60 | // * AssistRequest.config
61 | // * AssistRequest.audio_in
62 | // * AssistRequest.audio_in
63 | // * AssistRequest.audio_in
64 | // * AssistResponse.event_type.END_OF_UTTERANCE
65 | // * AssistResponse.dialog_state_out.microphone_mode.CLOSE_MICROPHONE
66 | // * AssistResponse.audio_out
67 | // * AssistResponse.audio_out
68 | // * AssistResponse.audio_out
69 | // * AssistResponse.audio_out
70 | //
71 | // Although the precise order of responses is not guaranteed, sequential
72 | // `AssistResponse.audio_out` messages will always contain sequential portions
73 | // of audio.
74 | rpc Assist(stream AssistRequest) returns (stream AssistResponse);
75 | }
76 |
77 | // The top-level message sent by the client. Clients must send at least two, and
78 | // typically numerous `AssistRequest` messages. The first message must
79 | // contain a `config` message and must not contain `audio_in` data. All
80 | // subsequent messages must contain `audio_in` data and must not contain a
81 | // `config` message.
82 | message AssistRequest {
83 | // Exactly one of these fields must be specified in each `AssistRequest`.
84 | oneof type {
85 | // The `config` message provides information to the recognizer that
86 | // specifies how to process the request.
87 | // The first `AssistRequest` message must contain a `config` message.
88 | AssistConfig config = 1;
89 |
90 | // The audio data to be recognized. Sequential chunks of audio data are sent
91 | // in sequential `AssistRequest` messages. The first `AssistRequest`
92 | // message must not contain `audio_in` data and all subsequent
93 | // `AssistRequest` messages must contain `audio_in` data. The audio bytes
94 | // must be encoded as specified in `AudioInConfig`.
95 | // Audio must be sent at approximately real-time (16000 samples per second).
96 | // An error will be returned if audio is sent significantly faster or
97 | // slower.
98 | bytes audio_in = 2;
99 | }
100 | }
101 |
102 | // The top-level message received by the client. A series of one or more
103 | // `AssistResponse` messages are streamed back to the client.
104 | message AssistResponse {
105 | // Indicates the type of event.
106 | enum EventType {
107 | // No event specified.
108 | EVENT_TYPE_UNSPECIFIED = 0;
109 |
110 | // This event indicates that the server has detected the end of the user's
111 | // speech utterance and expects no additional speech. Therefore, the server
112 | // will not process additional audio (although it may subsequently return
113 | // additional results). The client should stop sending additional audio
114 | // data, half-close the gRPC connection, and wait for any additional results
115 | // until the server closes the gRPC connection.
116 | END_OF_UTTERANCE = 1;
117 | }
118 |
119 | // *Output-only* Indicates the type of event.
120 | EventType event_type = 1;
121 |
122 | // *Output-only* The audio containing the Assistant's response to the query.
123 | AudioOut audio_out = 3;
124 |
125 | // *Output-only* Contains the Assistant's visual response to the query.
126 | ScreenOut screen_out = 4;
127 |
128 | // *Output-only* Contains the action triggered by the query with the
129 | // appropriate payloads and semantic parsing.
130 | DeviceAction device_action = 6;
131 |
132 | // *Output-only* This repeated list contains zero or more speech recognition
133 | // results that correspond to consecutive portions of the audio currently
134 | // being processed, starting with the portion corresponding to the earliest
135 | // audio (and most stable portion) to the portion corresponding to the most
136 | // recent audio. The strings can be concatenated to view the full
137 | // in-progress response. When the speech recognition completes, this list
138 | // will contain one item with `stability` of `1.0`.
139 | repeated SpeechRecognitionResult speech_results = 2;
140 |
141 | // *Output-only* Contains output related to the user's query.
142 | DialogStateOut dialog_state_out = 5;
143 | }
144 |
145 | // Specifies how to process the `AssistRequest` messages.
146 | message AssistConfig {
147 | oneof type {
148 | // Specifies how to process the subsequent incoming audio. Required if
149 | // [AssistRequest.audio_in][google.assistant.embedded.v1alpha2.AssistRequest.audio_in] bytes will be provided in subsequent requests.
150 | AudioInConfig audio_in_config = 1;
151 |
152 | // The text input to be sent to the Assistant. This can be populated from a
153 | // text interface if audio input is not available.
154 | string text_query = 6;
155 | }
156 |
157 | // *Required* Specifies how to format the audio that will be returned.
158 | AudioOutConfig audio_out_config = 2;
159 |
160 | // *Optional* Specifies the desired format to use when server returns a
161 | // visual screen response.
162 | ScreenOutConfig screen_out_config = 8;
163 |
164 | // *Required* Represents the current dialog state.
165 | DialogStateIn dialog_state_in = 3;
166 |
167 | // Device configuration that uniquely identifies a specific device.
168 | DeviceConfig device_config = 4;
169 | }
170 |
171 | // Specifies how to process the `audio_in` data that will be provided in
172 | // subsequent requests. For recommended settings, see the Google Assistant SDK
173 | // [best practices](https://developers.google.com/assistant/sdk/guides/service/python/best-practices/audio).
174 | message AudioInConfig {
175 | // Audio encoding of the data sent in the audio message.
176 | // Audio must be one-channel (mono).
177 | enum Encoding {
178 | // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][].
179 | ENCODING_UNSPECIFIED = 0;
180 |
181 | // Uncompressed 16-bit signed little-endian samples (Linear PCM).
182 | // This encoding includes no header, only the raw audio bytes.
183 | LINEAR16 = 1;
184 |
185 | // [`FLAC`](https://xiph.org/flac/documentation.html) (Free Lossless Audio
186 | // Codec) is the recommended encoding because it is
187 | // lossless--therefore recognition is not compromised--and
188 | // requires only about half the bandwidth of `LINEAR16`. This encoding
189 | // includes the `FLAC` stream header followed by audio data. It supports
190 | // 16-bit and 24-bit samples, however, not all fields in `STREAMINFO` are
191 | // supported.
192 | FLAC = 2;
193 | }
194 |
195 | // *Required* Encoding of audio data sent in all `audio_in` messages.
196 | Encoding encoding = 1;
197 |
198 | // *Required* Sample rate (in Hertz) of the audio data sent in all `audio_in`
199 | // messages. Valid values are from 16000-24000, but 16000 is optimal.
200 | // For best results, set the sampling rate of the audio source to 16000 Hz.
201 | // If that's not possible, use the native sample rate of the audio source
202 | // (instead of re-sampling).
203 | int32 sample_rate_hertz = 2;
204 | }
205 |
206 | // Specifies the desired format for the server to use when it returns
207 | // `audio_out` messages.
208 | message AudioOutConfig {
209 | // Audio encoding of the data returned in the audio message. All encodings are
210 | // raw audio bytes with no header, except as indicated below.
211 | enum Encoding {
212 | // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][].
213 | ENCODING_UNSPECIFIED = 0;
214 |
215 | // Uncompressed 16-bit signed little-endian samples (Linear PCM).
216 | LINEAR16 = 1;
217 |
218 | // MP3 audio encoding. The sample rate is encoded in the payload.
219 | MP3 = 2;
220 |
221 | // Opus-encoded audio wrapped in an ogg container. The result will be a
222 | // file which can be played natively on Android and in some browsers (such
223 | // as Chrome). The quality of the encoding is considerably higher than MP3
224 | // while using the same bitrate. The sample rate is encoded in the payload.
225 | OPUS_IN_OGG = 3;
226 | }
227 |
228 | // *Required* The encoding of audio data to be returned in all `audio_out`
229 | // messages.
230 | Encoding encoding = 1;
231 |
232 | // *Required* The sample rate in Hertz of the audio data returned in
233 | // `audio_out` messages. Valid values are: 16000-24000.
234 | int32 sample_rate_hertz = 2;
235 |
236 | // *Required* Current volume setting of the device's audio output.
237 | // Valid values are 1 to 100 (corresponding to 1% to 100%).
238 | int32 volume_percentage = 3;
239 | }
240 |
241 | // Specifies the desired format for the server to use when it returns
242 | // `screen_out` response.
243 | message ScreenOutConfig {
244 | // Possible modes for visual screen-output on the device.
245 | enum ScreenMode {
246 | // No video mode specified.
247 | // The Assistant may respond as if in `OFF` mode.
248 | SCREEN_MODE_UNSPECIFIED = 0;
249 |
250 | // Screen is off (or has brightness or other settings set so low it is
251 | // not visible). The Assistant will typically not return a screen response
252 | // in this mode.
253 | OFF = 1;
254 |
255 | // The Assistant will typically return a partial-screen response in this
256 | // mode.
257 | PLAYING = 3;
258 | }
259 |
260 | // Current visual screen-mode for the device while issuing the query.
261 | ScreenMode screen_mode = 1;
262 | }
263 |
264 | // Provides information about the current dialog state.
265 | message DialogStateIn {
266 | // *Required* This field must always be set to the
267 | // [DialogStateOut.conversation_state][google.assistant.embedded.v1alpha2.DialogStateOut.conversation_state] value that was returned in the prior
268 | // `Assist` RPC. It should only be omitted (field not set) if there was no
269 | // prior `Assist` RPC because this is the first `Assist` RPC made by this
270 | // device after it was first setup and/or a factory-default reset.
271 | bytes conversation_state = 1;
272 |
273 | // *Required* Language of the request in
274 | // [IETF BCP 47 syntax](https://tools.ietf.org/html/bcp47). For example:
275 | // "en-US". If you have selected a language for this `device_id` using the
276 | // [Settings](https://developers.google.com/assistant/sdk/guides/assistant-settings)
277 | // menu in your phone's Google Assistant app, that selection will override
278 | // this value.
279 | string language_code = 2;
280 |
281 | // *Optional* Location of the device where the query originated.
282 | DeviceLocation device_location = 5;
283 |
284 | // *Optional* If true, the server will treat the request as a new conversation
285 | // and not use state from the prior request. Set this field to true when the
286 | // conversation should be restarted, such as after a device reboot, or after a
287 | // significant lapse of time since the prior query.
288 | bool is_new_conversation = 7;
289 | }
290 |
291 | // *Required* Fields that identify the device to the Assistant.
292 | //
293 | // See also:
294 | //
295 | // * [Register a Device - REST
296 | // API](https://developers.google.com/assistant/sdk/reference/device-registration/register-device-manual)
297 | // * [Device Model and Instance
298 | // Schemas](https://developers.google.com/assistant/sdk/reference/device-registration/model-and-instance-schemas)
299 | // * [Device
300 | // Proto](https://developers.google.com/assistant/sdk/reference/rpc/google.assistant.devices.v1alpha2#device)
301 | message DeviceConfig {
302 | // *Required* Unique identifier for the device. The id length must be 128
303 | // characters or less. Example: DBCDW098234. This MUST match the device_id
304 | // returned from device registration. This device_id is used to match against
305 | // the user's registered devices to lookup the supported traits and
306 | // capabilities of this device. This information should not change across
307 | // device reboots. However, it should not be saved across
308 | // factory-default resets.
309 | string device_id = 1;
310 |
311 | // *Required* Unique identifier for the device model. The combination of
312 | // device_model_id and device_id must have been previously associated through
313 | // device registration.
314 | string device_model_id = 3;
315 | }
316 |
317 | // The audio containing the Assistant's response to the query. Sequential chunks
318 | // of audio data are received in sequential `AssistResponse` messages.
319 | message AudioOut {
320 | // *Output-only* The audio data containing the Assistant's response to the
321 | // query. Sequential chunks of audio data are received in sequential
322 | // `AssistResponse` messages.
323 | bytes audio_data = 1;
324 | }
325 |
326 | // The Assistant's visual response to query. Contains the entire visual output.
327 | message ScreenOut {
328 | // Possible formats of the screen data.
329 | enum Format {
330 | // No format specified.
331 | FORMAT_UNSPECIFIED = 0;
332 |
333 | // Data will contain a fully-formed HTML5 layout encoded in UTF-8, e.g.
334 |     // "<html><body><div>...</div></body></html>". It is intended to be rendered
335 | // along with the audio response. Note that HTML5 doctype should be included
336 | // in the actual HTML data.
337 | HTML = 1;
338 | }
339 |
340 | // *Output-only* The format of the provided screen data.
341 | Format format = 1;
342 |
343 | // *Output-only* The raw screen data to be displayed as the result of the
344 | // Assistant query.
345 | bytes data = 2;
346 | }
347 |
348 | // The response returned to the device if the user has triggered a Device
349 | // Action. For example, a device which supports the query *Turn on the light*
350 | // would receive a `DeviceAction` with a JSON payload containing the semantics
351 | // of the request.
352 | message DeviceAction {
353 | // JSON containing the device command response generated from the triggered
354 | // Device Action grammar. The format is given by the
355 | // `action.devices.EXECUTE` intent for a given
356 | // [trait](https://developers.google.com/assistant/sdk/reference/traits/).
357 | string device_request_json = 1;
358 | }
359 |
360 | // The estimated transcription of a phrase the user has spoken. This could be
361 | // a single segment or the full guess of the user's spoken query.
362 | message SpeechRecognitionResult {
363 | // *Output-only* Transcript text representing the words that the user spoke.
364 | string transcript = 1;
365 |
366 | // *Output-only* An estimate of the likelihood that the Assistant will not
367 | // change its guess about this result. Values range from 0.0 (completely
368 | // unstable) to 1.0 (completely stable and final). The default of 0.0 is a
369 | // sentinel value indicating `stability` was not set.
370 | float stability = 2;
371 | }
372 |
373 | // The dialog state resulting from the user's query. Multiple of these messages
374 | // may be received.
375 | message DialogStateOut {
376 | // Possible states of the microphone after a `Assist` RPC completes.
377 | enum MicrophoneMode {
378 | // No mode specified.
379 | MICROPHONE_MODE_UNSPECIFIED = 0;
380 |
381 | // The service is not expecting a follow-on question from the user.
382 | // The microphone should remain off until the user re-activates it.
383 | CLOSE_MICROPHONE = 1;
384 |
385 | // The service is expecting a follow-on question from the user. The
386 | // microphone should be re-opened when the `AudioOut` playback completes
387 | // (by starting a new `Assist` RPC call to send the new audio).
388 | DIALOG_FOLLOW_ON = 2;
389 | }
390 |
391 | // *Output-only* Supplemental display text from the Assistant. This could be
392 | // the same as the speech spoken in `AssistResponse.audio_out` or it could
393 | // be some additional information which aids the user's understanding.
394 | string supplemental_display_text = 1;
395 |
396 | // *Output-only* State information for the subsequent `Assist` RPC. This
397 | // value should be saved in the client and returned in the
398 | // [`DialogStateIn.conversation_state`](#dialogstatein) field with the next
399 | // `Assist` RPC. (The client does not need to interpret or otherwise use this
400 | // value.) This information should be saved across device reboots. However,
401 | // this value should be cleared (not saved in the client) during a
402 | // factory-default reset.
403 | bytes conversation_state = 2;
404 |
405 | // *Output-only* Specifies the mode of the microphone after this `Assist`
406 | // RPC is processed.
407 | MicrophoneMode microphone_mode = 3;
408 |
409 | // *Output-only* Updated volume level. The value will be 0 or omitted
410 | // (indicating no change) unless a voice command such as *Increase the volume*
411 | // or *Set volume level 4* was recognized, in which case the value will be
412 | // between 1 and 100 (corresponding to the new volume level of 1% to 100%).
413 | // Typically, a client should use this volume level when playing the
414 | // `audio_out` data, and retain this value as the current volume level and
415 | // supply it in the `AudioOutConfig` of the next `AssistRequest`. (Some
416 | // clients may also implement other ways to allow the current volume level to
417 | // be changed, for example, by providing a knob that the user can turn.)
418 | int32 volume_percentage = 4;
419 | }
420 |
421 | // There are three sources of locations. They are used with this precedence:
422 | //
423 | // 1. This `DeviceLocation`, which is primarily used for mobile devices with
424 | //    GPS.
425 | // 2. Location specified by the user during device setup; this is per-user, per
426 | // device. This location is used if `DeviceLocation` is not specified.
427 | // 3. Inferred location based on IP address. This is used only if neither of the
428 | // above are specified.
429 | message DeviceLocation {
430 | oneof type {
431 | // Latitude and longitude of device.
432 | google.type.LatLng coordinates = 1;
433 | }
434 | }
435 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/protobuf/any.proto:
--------------------------------------------------------------------------------
1 | // Protocol Buffers - Google's data interchange format
2 | // Copyright 2008 Google Inc. All rights reserved.
3 | // https://developers.google.com/protocol-buffers/
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright
10 | // notice, this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above
12 | // copyright notice, this list of conditions and the following disclaimer
13 | // in the documentation and/or other materials provided with the
14 | // distribution.
15 | // * Neither the name of Google Inc. nor the names of its
16 | // contributors may be used to endorse or promote products derived from
17 | // this software without specific prior written permission.
18 | //
19 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
31 | syntax = "proto3";
32 |
33 | package google.protobuf;
34 |
35 | option csharp_namespace = "Google.Protobuf.WellKnownTypes";
36 | option go_package = "github.com/golang/protobuf/ptypes/any";
37 | option java_package = "com.google.protobuf";
38 | option java_outer_classname = "AnyProto";
39 | option java_multiple_files = true;
40 | option objc_class_prefix = "GPB";
41 |
42 | // `Any` contains an arbitrary serialized protocol buffer message along with a
43 | // URL that describes the type of the serialized message.
44 | //
45 | // Protobuf library provides support to pack/unpack Any values in the form
46 | // of utility functions or additional generated methods of the Any type.
47 | //
48 | // Example 1: Pack and unpack a message in C++.
49 | //
50 | // Foo foo = ...;
51 | // Any any;
52 | // any.PackFrom(foo);
53 | // ...
54 | // if (any.UnpackTo(&foo)) {
55 | // ...
56 | // }
57 | //
58 | // Example 2: Pack and unpack a message in Java.
59 | //
60 | // Foo foo = ...;
61 | // Any any = Any.pack(foo);
62 | // ...
63 | // if (any.is(Foo.class)) {
64 | // foo = any.unpack(Foo.class);
65 | // }
66 | //
67 | // Example 3: Pack and unpack a message in Python.
68 | //
69 | // foo = Foo(...)
70 | // any = Any()
71 | // any.Pack(foo)
72 | // ...
73 | // if any.Is(Foo.DESCRIPTOR):
74 | // any.Unpack(foo)
75 | // ...
76 | //
77 | // The pack methods provided by protobuf library will by default use
78 | // 'type.googleapis.com/full.type.name' as the type URL and the unpack
79 | // methods only use the fully qualified type name after the last '/'
80 | // in the type URL, for example "foo.bar.com/x/y.z" will yield type
81 | // name "y.z".
82 | //
83 | //
84 | // JSON
85 | // ====
86 | // The JSON representation of an `Any` value uses the regular
87 | // representation of the deserialized, embedded message, with an
88 | // additional field `@type` which contains the type URL. Example:
89 | //
90 | // package google.profile;
91 | // message Person {
92 | // string first_name = 1;
93 | // string last_name = 2;
94 | // }
95 | //
96 | // {
97 | // "@type": "type.googleapis.com/google.profile.Person",
98 | //       "firstName": <string>,
99 | //       "lastName": <string>
100 | // }
101 | //
102 | // If the embedded message type is well-known and has a custom JSON
103 | // representation, that representation will be embedded adding a field
104 | // `value` which holds the custom JSON in addition to the `@type`
105 | // field. Example (for message [google.protobuf.Duration][]):
106 | //
107 | // {
108 | // "@type": "type.googleapis.com/google.protobuf.Duration",
109 | // "value": "1.212s"
110 | // }
111 | //
112 | message Any {
113 | // A URL/resource name whose content describes the type of the
114 | // serialized protocol buffer message.
115 | //
116 | // For URLs which use the scheme `http`, `https`, or no scheme, the
117 | // following restrictions and interpretations apply:
118 | //
119 | // * If no scheme is provided, `https` is assumed.
120 | // * The last segment of the URL's path must represent the fully
121 | // qualified name of the type (as in `path/google.protobuf.Duration`).
122 | // The name should be in a canonical form (e.g., leading "." is
123 | // not accepted).
124 | // * An HTTP GET on the URL must yield a [google.protobuf.Type][]
125 | // value in binary format, or produce an error.
126 | // * Applications are allowed to cache lookup results based on the
127 | // URL, or have them precompiled into a binary to avoid any
128 | // lookup. Therefore, binary compatibility needs to be preserved
129 | // on changes to types. (Use versioned type names to manage
130 | // breaking changes.)
131 | //
132 | // Schemes other than `http`, `https` (or the empty scheme) might be
133 | // used with implementation specific semantics.
134 | //
135 | string type_url = 1;
136 |
137 | // Must be a valid serialized protocol buffer of the above specified type.
138 | bytes value = 2;
139 | }
140 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/protobuf/descriptor.proto:
--------------------------------------------------------------------------------
1 | // Protocol Buffers - Google's data interchange format
2 | // Copyright 2008 Google Inc. All rights reserved.
3 | // https://developers.google.com/protocol-buffers/
4 | //
5 | // Redistribution and use in source and binary forms, with or without
6 | // modification, are permitted provided that the following conditions are
7 | // met:
8 | //
9 | // * Redistributions of source code must retain the above copyright
10 | // notice, this list of conditions and the following disclaimer.
11 | // * Redistributions in binary form must reproduce the above
12 | // copyright notice, this list of conditions and the following disclaimer
13 | // in the documentation and/or other materials provided with the
14 | // distribution.
15 | // * Neither the name of Google Inc. nor the names of its
16 | // contributors may be used to endorse or promote products derived from
17 | // this software without specific prior written permission.
18 | //
19 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
31 | // Author: kenton@google.com (Kenton Varda)
32 | // Based on original Protocol Buffers design by
33 | // Sanjay Ghemawat, Jeff Dean, and others.
34 | //
35 | // The messages in this file describe the definitions found in .proto files.
36 | // A valid .proto file can be translated directly to a FileDescriptorProto
37 | // without any other information (e.g. without reading its imports).
38 |
39 |
40 | syntax = "proto2";
41 |
42 | package google.protobuf;
43 | option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor";
44 | option java_package = "com.google.protobuf";
45 | option java_outer_classname = "DescriptorProtos";
46 | option csharp_namespace = "Google.Protobuf.Reflection";
47 | option objc_class_prefix = "GPB";
48 |
49 | // descriptor.proto must be optimized for speed because reflection-based
50 | // algorithms don't work during bootstrapping.
51 | option optimize_for = SPEED;
52 |
53 | // The protocol compiler can output a FileDescriptorSet containing the .proto
54 | // files it parses.
55 | message FileDescriptorSet {
56 | repeated FileDescriptorProto file = 1;
57 | }
58 |
59 | // Describes a complete .proto file.
60 | message FileDescriptorProto {
61 | optional string name = 1; // file name, relative to root of source tree
62 | optional string package = 2; // e.g. "foo", "foo.bar", etc.
63 |
64 | // Names of files imported by this file.
65 | repeated string dependency = 3;
66 | // Indexes of the public imported files in the dependency list above.
67 | repeated int32 public_dependency = 10;
68 | // Indexes of the weak imported files in the dependency list.
69 | // For Google-internal migration only. Do not use.
70 | repeated int32 weak_dependency = 11;
71 |
72 | // All top-level definitions in this file.
73 | repeated DescriptorProto message_type = 4;
74 | repeated EnumDescriptorProto enum_type = 5;
75 | repeated ServiceDescriptorProto service = 6;
76 | repeated FieldDescriptorProto extension = 7;
77 |
78 | optional FileOptions options = 8;
79 |
80 | // This field contains optional information about the original source code.
81 | // You may safely remove this entire field without harming runtime
82 | // functionality of the descriptors -- the information is needed only by
83 | // development tools.
84 | optional SourceCodeInfo source_code_info = 9;
85 |
86 | // The syntax of the proto file.
87 | // The supported values are "proto2" and "proto3".
88 | optional string syntax = 12;
89 | }
90 |
91 | // Describes a message type.
92 | message DescriptorProto {
93 | optional string name = 1;
94 |
95 | repeated FieldDescriptorProto field = 2;
96 | repeated FieldDescriptorProto extension = 6;
97 |
98 | repeated DescriptorProto nested_type = 3;
99 | repeated EnumDescriptorProto enum_type = 4;
100 |
101 | message ExtensionRange {
102 | optional int32 start = 1;
103 | optional int32 end = 2;
104 | }
105 | repeated ExtensionRange extension_range = 5;
106 |
107 | repeated OneofDescriptorProto oneof_decl = 8;
108 |
109 | optional MessageOptions options = 7;
110 |
111 | // Range of reserved tag numbers. Reserved tag numbers may not be used by
112 | // fields or extension ranges in the same message. Reserved ranges may
113 | // not overlap.
114 | message ReservedRange {
115 | optional int32 start = 1; // Inclusive.
116 | optional int32 end = 2; // Exclusive.
117 | }
118 | repeated ReservedRange reserved_range = 9;
119 | // Reserved field names, which may not be used by fields in the same message.
120 | // A given name may only be reserved once.
121 | repeated string reserved_name = 10;
122 | }
123 |
124 | // Describes a field within a message.
125 | message FieldDescriptorProto {
126 | enum Type {
127 | // 0 is reserved for errors.
128 | // Order is weird for historical reasons.
129 | TYPE_DOUBLE = 1;
130 | TYPE_FLOAT = 2;
131 | // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
132 | // negative values are likely.
133 | TYPE_INT64 = 3;
134 | TYPE_UINT64 = 4;
135 | // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
136 | // negative values are likely.
137 | TYPE_INT32 = 5;
138 | TYPE_FIXED64 = 6;
139 | TYPE_FIXED32 = 7;
140 | TYPE_BOOL = 8;
141 | TYPE_STRING = 9;
142 | // Tag-delimited aggregate.
143 | // Group type is deprecated and not supported in proto3. However, Proto3
144 | // implementations should still be able to parse the group wire format and
145 | // treat group fields as unknown fields.
146 | TYPE_GROUP = 10;
147 | TYPE_MESSAGE = 11; // Length-delimited aggregate.
148 |
149 | // New in version 2.
150 | TYPE_BYTES = 12;
151 | TYPE_UINT32 = 13;
152 | TYPE_ENUM = 14;
153 | TYPE_SFIXED32 = 15;
154 | TYPE_SFIXED64 = 16;
155 | TYPE_SINT32 = 17; // Uses ZigZag encoding.
156 | TYPE_SINT64 = 18; // Uses ZigZag encoding.
157 | };
158 |
159 | enum Label {
160 | // 0 is reserved for errors
161 | LABEL_OPTIONAL = 1;
162 | LABEL_REQUIRED = 2;
163 | LABEL_REPEATED = 3;
164 | };
165 |
166 | optional string name = 1;
167 | optional int32 number = 3;
168 | optional Label label = 4;
169 |
170 | // If type_name is set, this need not be set. If both this and type_name
171 | // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
172 | optional Type type = 5;
173 |
174 | // For message and enum types, this is the name of the type. If the name
175 | // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
176 | // rules are used to find the type (i.e. first the nested types within this
177 | // message are searched, then within the parent, on up to the root
178 | // namespace).
179 | optional string type_name = 6;
180 |
181 | // For extensions, this is the name of the type being extended. It is
182 | // resolved in the same manner as type_name.
183 | optional string extendee = 2;
184 |
185 | // For numeric types, contains the original text representation of the value.
186 | // For booleans, "true" or "false".
187 | // For strings, contains the default text contents (not escaped in any way).
188 | // For bytes, contains the C escaped value. All bytes >= 128 are escaped.
189 | // TODO(kenton): Base-64 encode?
190 | optional string default_value = 7;
191 |
192 | // If set, gives the index of a oneof in the containing type's oneof_decl
193 | // list. This field is a member of that oneof.
194 | optional int32 oneof_index = 9;
195 |
196 | // JSON name of this field. The value is set by protocol compiler. If the
197 | // user has set a "json_name" option on this field, that option's value
198 | // will be used. Otherwise, it's deduced from the field's name by converting
199 | // it to camelCase.
200 | optional string json_name = 10;
201 |
202 | optional FieldOptions options = 8;
203 | }
204 |
205 | // Describes a oneof.
206 | message OneofDescriptorProto {
207 | optional string name = 1;
208 | optional OneofOptions options = 2;
209 | }
210 |
211 | // Describes an enum type.
212 | message EnumDescriptorProto {
213 | optional string name = 1;
214 |
215 | repeated EnumValueDescriptorProto value = 2;
216 |
217 | optional EnumOptions options = 3;
218 | }
219 |
220 | // Describes a value within an enum.
221 | message EnumValueDescriptorProto {
222 | optional string name = 1;
223 | optional int32 number = 2;
224 |
225 | optional EnumValueOptions options = 3;
226 | }
227 |
228 | // Describes a service.
229 | message ServiceDescriptorProto {
230 | optional string name = 1;
231 | repeated MethodDescriptorProto method = 2;
232 |
233 | optional ServiceOptions options = 3;
234 | }
235 |
236 | // Describes a method of a service.
237 | message MethodDescriptorProto {
238 | optional string name = 1;
239 |
240 | // Input and output type names. These are resolved in the same way as
241 | // FieldDescriptorProto.type_name, but must refer to a message type.
242 | optional string input_type = 2;
243 | optional string output_type = 3;
244 |
245 | optional MethodOptions options = 4;
246 |
247 | // Identifies if client streams multiple client messages
248 | optional bool client_streaming = 5 [default=false];
249 | // Identifies if server streams multiple server messages
250 | optional bool server_streaming = 6 [default=false];
251 | }
252 |
253 |
254 | // ===================================================================
255 | // Options
256 |
257 | // Each of the definitions above may have "options" attached. These are
258 | // just annotations which may cause code to be generated slightly differently
259 | // or may contain hints for code that manipulates protocol messages.
260 | //
261 | // Clients may define custom options as extensions of the *Options messages.
262 | // These extensions may not yet be known at parsing time, so the parser cannot
263 | // store the values in them. Instead it stores them in a field in the *Options
264 | // message called uninterpreted_option. This field must have the same name
265 | // across all *Options messages. We then use this field to populate the
266 | // extensions when we build a descriptor, at which point all protos have been
267 | // parsed and so all extensions are known.
268 | //
269 | // Extension numbers for custom options may be chosen as follows:
270 | // * For options which will only be used within a single application or
271 | // organization, or for experimental options, use field numbers 50000
272 | // through 99999. It is up to you to ensure that you do not use the
273 | // same number for multiple options.
274 | // * For options which will be published and used publicly by multiple
275 | // independent entities, e-mail protobuf-global-extension-registry@google.com
276 | // to reserve extension numbers. Simply provide your project name (e.g.
277 | // Objective-C plugin) and your project website (if available) -- there's no
278 | // need to explain how you intend to use them. Usually you only need one
279 | // extension number. You can declare multiple options with only one extension
280 | // number by putting them in a sub-message. See the Custom Options section of
281 | // the docs for examples:
282 | // https://developers.google.com/protocol-buffers/docs/proto#options
283 | // If this turns out to be popular, a web service will be set up
284 | // to automatically assign option numbers.
285 |
286 |
287 | message FileOptions {
288 |
289 | // Sets the Java package where classes generated from this .proto will be
290 | // placed. By default, the proto package is used, but this is often
291 | // inappropriate because proto packages do not normally start with backwards
292 | // domain names.
293 | optional string java_package = 1;
294 |
295 |
296 | // If set, all the classes from the .proto file are wrapped in a single
297 | // outer class with the given name. This applies to both Proto1
298 | // (equivalent to the old "--one_java_file" option) and Proto2 (where
299 | // a .proto always translates to a single class, but you may want to
300 | // explicitly choose the class name).
301 | optional string java_outer_classname = 8;
302 |
303 | // If set true, then the Java code generator will generate a separate .java
304 | // file for each top-level message, enum, and service defined in the .proto
305 | // file. Thus, these types will *not* be nested inside the outer class
306 | // named by java_outer_classname. However, the outer class will still be
307 | // generated to contain the file's getDescriptor() method as well as any
308 | // top-level extensions defined in the file.
309 | optional bool java_multiple_files = 10 [default=false];
310 |
311 | // This option does nothing.
312 | optional bool java_generate_equals_and_hash = 20 [deprecated=true];
313 |
314 | // If set true, then the Java2 code generator will generate code that
315 | // throws an exception whenever an attempt is made to assign a non-UTF-8
316 | // byte sequence to a string field.
317 | // Message reflection will do the same.
318 | // However, an extension field still accepts non-UTF-8 byte sequences.
319 |   // This option has no effect when used with the lite runtime.
320 | optional bool java_string_check_utf8 = 27 [default=false];
321 |
322 |
323 | // Generated classes can be optimized for speed or code size.
324 | enum OptimizeMode {
325 | SPEED = 1; // Generate complete code for parsing, serialization,
326 | // etc.
327 | CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
328 | LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
329 | }
330 | optional OptimizeMode optimize_for = 9 [default=SPEED];
331 |
332 | // Sets the Go package where structs generated from this .proto will be
333 | // placed. If omitted, the Go package will be derived from the following:
334 | // - The basename of the package import path, if provided.
335 | // - Otherwise, the package statement in the .proto file, if present.
336 | // - Otherwise, the basename of the .proto file, without extension.
337 | optional string go_package = 11;
338 |
339 |
340 |
341 | // Should generic services be generated in each language? "Generic" services
342 | // are not specific to any particular RPC system. They are generated by the
343 | // main code generators in each language (without additional plugins).
344 | // Generic services were the only kind of service generation supported by
345 | // early versions of google.protobuf.
346 | //
347 | // Generic services are now considered deprecated in favor of using plugins
348 | // that generate code specific to your particular RPC system. Therefore,
349 | // these default to false. Old code which depends on generic services should
350 | // explicitly set them to true.
351 | optional bool cc_generic_services = 16 [default=false];
352 | optional bool java_generic_services = 17 [default=false];
353 | optional bool py_generic_services = 18 [default=false];
354 |
355 | // Is this file deprecated?
356 | // Depending on the target platform, this can emit Deprecated annotations
357 | // for everything in the file, or it will be completely ignored; in the very
358 | // least, this is a formalization for deprecating files.
359 | optional bool deprecated = 23 [default=false];
360 |
361 | // Enables the use of arenas for the proto messages in this file. This applies
362 | // only to generated classes for C++.
363 | optional bool cc_enable_arenas = 31 [default=false];
364 |
365 |
366 | // Sets the objective c class prefix which is prepended to all objective c
367 | // generated classes from this .proto. There is no default.
368 | optional string objc_class_prefix = 36;
369 |
370 | // Namespace for generated classes; defaults to the package.
371 | optional string csharp_namespace = 37;
372 |
373 | // By default Swift generators will take the proto package and CamelCase it
374 | // replacing '.' with underscore and use that to prefix the types/symbols
375 |   // defined. When this option is provided, they will use this value instead
376 | // to prefix the types/symbols defined.
377 | optional string swift_prefix = 39;
378 |
379 | // Sets the php class prefix which is prepended to all php generated classes
380 | // from this .proto. Default is empty.
381 | optional string php_class_prefix = 40;
382 |
383 | // The parser stores options it doesn't recognize here. See above.
384 | repeated UninterpretedOption uninterpreted_option = 999;
385 |
386 | // Clients can define custom options in extensions of this message. See above.
387 | extensions 1000 to max;
388 |
389 | reserved 38;
390 | }
391 |
392 | message MessageOptions {
393 | // Set true to use the old proto1 MessageSet wire format for extensions.
394 | // This is provided for backwards-compatibility with the MessageSet wire
395 | // format. You should not use this for any other reason: It's less
396 | // efficient, has fewer features, and is more complicated.
397 | //
398 | // The message must be defined exactly as follows:
399 | // message Foo {
400 | // option message_set_wire_format = true;
401 | // extensions 4 to max;
402 | // }
403 | // Note that the message cannot have any defined fields; MessageSets only
404 | // have extensions.
405 | //
406 | // All extensions of your type must be singular messages; e.g. they cannot
407 | // be int32s, enums, or repeated messages.
408 | //
409 | // Because this is an option, the above two restrictions are not enforced by
410 | // the protocol compiler.
411 | optional bool message_set_wire_format = 1 [default=false];
412 |
413 | // Disables the generation of the standard "descriptor()" accessor, which can
414 | // conflict with a field of the same name. This is meant to make migration
415 | // from proto1 easier; new code should avoid fields named "descriptor".
416 | optional bool no_standard_descriptor_accessor = 2 [default=false];
417 |
418 | // Is this message deprecated?
419 | // Depending on the target platform, this can emit Deprecated annotations
420 | // for the message, or it will be completely ignored; in the very least,
421 | // this is a formalization for deprecating messages.
422 | optional bool deprecated = 3 [default=false];
423 |
424 | // Whether the message is an automatically generated map entry type for the
425 | // maps field.
426 | //
427 | // For maps fields:
428 |   //     map<KeyType, ValueType> map_field = 1;
429 | // The parsed descriptor looks like:
430 | // message MapFieldEntry {
431 | // option map_entry = true;
432 | // optional KeyType key = 1;
433 | // optional ValueType value = 2;
434 | // }
435 | // repeated MapFieldEntry map_field = 1;
436 | //
437 | // Implementations may choose not to generate the map_entry=true message, but
438 | // use a native map in the target language to hold the keys and values.
439 |   // The reflection APIs in such implementations still need to work as
440 | // if the field is a repeated message field.
441 | //
442 | // NOTE: Do not set the option in .proto files. Always use the maps syntax
443 | // instead. The option should only be implicitly set by the proto compiler
444 | // parser.
445 | optional bool map_entry = 7;
446 |
447 | reserved 8; // javalite_serializable
448 | reserved 9; // javanano_as_lite
449 |
450 | // The parser stores options it doesn't recognize here. See above.
451 | repeated UninterpretedOption uninterpreted_option = 999;
452 |
453 | // Clients can define custom options in extensions of this message. See above.
454 | extensions 1000 to max;
455 | }
456 |
457 | message FieldOptions {
458 | // The ctype option instructs the C++ code generator to use a different
459 | // representation of the field than it normally would. See the specific
460 | // options below. This option is not yet implemented in the open source
461 | // release -- sorry, we'll try to include it in a future version!
462 | optional CType ctype = 1 [default = STRING];
463 | enum CType {
464 | // Default mode.
465 | STRING = 0;
466 |
467 | CORD = 1;
468 |
469 | STRING_PIECE = 2;
470 | }
471 | // The packed option can be enabled for repeated primitive fields to enable
472 | // a more efficient representation on the wire. Rather than repeatedly
473 | // writing the tag and type for each element, the entire array is encoded as
474 | // a single length-delimited blob. In proto3, only explicit setting it to
475 | // false will avoid using packed encoding.
476 | optional bool packed = 2;
477 |
478 | // The jstype option determines the JavaScript type used for values of the
479 | // field. The option is permitted only for 64 bit integral and fixed types
480 | // (int64, uint64, sint64, fixed64, sfixed64). By default these types are
481 | // represented as JavaScript strings. This avoids loss of precision that can
482 | // happen when a large value is converted to a floating point JavaScript
483 | // numbers. Specifying JS_NUMBER for the jstype causes the generated
484 | // JavaScript code to use the JavaScript "number" type instead of strings.
485 | // This option is an enum to permit additional types to be added,
486 | // e.g. goog.math.Integer.
487 | optional JSType jstype = 6 [default = JS_NORMAL];
488 | enum JSType {
489 | // Use the default type.
490 | JS_NORMAL = 0;
491 |
492 | // Use JavaScript strings.
493 | JS_STRING = 1;
494 |
495 | // Use JavaScript numbers.
496 | JS_NUMBER = 2;
497 | }
498 |
499 | // Should this field be parsed lazily? Lazy applies only to message-type
500 | // fields. It means that when the outer message is initially parsed, the
501 | // inner message's contents will not be parsed but instead stored in encoded
502 | // form. The inner message will actually be parsed when it is first accessed.
503 | //
504 | // This is only a hint. Implementations are free to choose whether to use
505 | // eager or lazy parsing regardless of the value of this option. However,
506 | // setting this option true suggests that the protocol author believes that
507 | // using lazy parsing on this field is worth the additional bookkeeping
508 | // overhead typically needed to implement it.
509 | //
510 | // This option does not affect the public interface of any generated code;
511 | // all method signatures remain the same. Furthermore, thread-safety of the
512 | // interface is not affected by this option; const methods remain safe to
513 | // call from multiple threads concurrently, while non-const methods continue
514 | // to require exclusive access.
515 | //
516 | //
517 | // Note that implementations may choose not to check required fields within
518 | // a lazy sub-message. That is, calling IsInitialized() on the outer message
519 | // may return true even if the inner message has missing required fields.
520 | // This is necessary because otherwise the inner message would have to be
521 | // parsed in order to perform the check, defeating the purpose of lazy
522 | // parsing. An implementation which chooses not to check required fields
523 | // must be consistent about it. That is, for any particular sub-message, the
524 | // implementation must either *always* check its required fields, or *never*
525 | // check its required fields, regardless of whether or not the message has
526 | // been parsed.
527 | optional bool lazy = 5 [default=false];
528 |
529 | // Is this field deprecated?
530 | // Depending on the target platform, this can emit Deprecated annotations
531 | // for accessors, or it will be completely ignored; in the very least, this
532 | // is a formalization for deprecating fields.
533 | optional bool deprecated = 3 [default=false];
534 |
535 | // For Google-internal migration only. Do not use.
536 | optional bool weak = 10 [default=false];
537 |
538 |
539 | // The parser stores options it doesn't recognize here. See above.
540 | repeated UninterpretedOption uninterpreted_option = 999;
541 |
542 | // Clients can define custom options in extensions of this message. See above.
543 | extensions 1000 to max;
544 |
545 | reserved 4; // removed jtype
546 | }
547 |
548 | message OneofOptions {
549 | // The parser stores options it doesn't recognize here. See above.
550 | repeated UninterpretedOption uninterpreted_option = 999;
551 |
552 | // Clients can define custom options in extensions of this message. See above.
553 | extensions 1000 to max;
554 | }
555 |
556 | message EnumOptions {
557 |
558 | // Set this option to true to allow mapping different tag names to the same
559 | // value.
560 | optional bool allow_alias = 2;
561 |
562 | // Is this enum deprecated?
563 | // Depending on the target platform, this can emit Deprecated annotations
564 | // for the enum, or it will be completely ignored; in the very least, this
565 | // is a formalization for deprecating enums.
566 | optional bool deprecated = 3 [default=false];
567 |
568 | reserved 5; // javanano_as_lite
569 |
570 | // The parser stores options it doesn't recognize here. See above.
571 | repeated UninterpretedOption uninterpreted_option = 999;
572 |
573 | // Clients can define custom options in extensions of this message. See above.
574 | extensions 1000 to max;
575 | }
576 |
577 | message EnumValueOptions {
578 | // Is this enum value deprecated?
579 | // Depending on the target platform, this can emit Deprecated annotations
580 | // for the enum value, or it will be completely ignored; in the very least,
581 | // this is a formalization for deprecating enum values.
582 | optional bool deprecated = 1 [default=false];
583 |
584 | // The parser stores options it doesn't recognize here. See above.
585 | repeated UninterpretedOption uninterpreted_option = 999;
586 |
587 | // Clients can define custom options in extensions of this message. See above.
588 | extensions 1000 to max;
589 | }
590 |
591 | message ServiceOptions {
592 |
593 | // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
594 | // framework. We apologize for hoarding these numbers to ourselves, but
595 | // we were already using them long before we decided to release Protocol
596 | // Buffers.
597 |
598 | // Is this service deprecated?
599 | // Depending on the target platform, this can emit Deprecated annotations
600 | // for the service, or it will be completely ignored; in the very least,
601 | // this is a formalization for deprecating services.
602 | optional bool deprecated = 33 [default=false];
603 |
604 | // The parser stores options it doesn't recognize here. See above.
605 | repeated UninterpretedOption uninterpreted_option = 999;
606 |
607 | // Clients can define custom options in extensions of this message. See above.
608 | extensions 1000 to max;
609 | }
610 |
611 | message MethodOptions {
612 |
613 | // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
614 | // framework. We apologize for hoarding these numbers to ourselves, but
615 | // we were already using them long before we decided to release Protocol
616 | // Buffers.
617 |
618 | // Is this method deprecated?
619 | // Depending on the target platform, this can emit Deprecated annotations
620 | // for the method, or it will be completely ignored; in the very least,
621 | // this is a formalization for deprecating methods.
622 | optional bool deprecated = 33 [default=false];
623 |
624 | // Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
625 | // or neither? HTTP based RPC implementation may choose GET verb for safe
626 | // methods, and PUT verb for idempotent methods instead of the default POST.
627 | enum IdempotencyLevel {
628 | IDEMPOTENCY_UNKNOWN = 0;
629 | NO_SIDE_EFFECTS = 1; // implies idempotent
630 | IDEMPOTENT = 2; // idempotent, but may have side effects
631 | }
632 | optional IdempotencyLevel idempotency_level =
633 | 34 [default=IDEMPOTENCY_UNKNOWN];
634 |
635 | // The parser stores options it doesn't recognize here. See above.
636 | repeated UninterpretedOption uninterpreted_option = 999;
637 |
638 | // Clients can define custom options in extensions of this message. See above.
639 | extensions 1000 to max;
640 | }
641 |
642 |
643 | // A message representing an option the parser does not recognize. This only
644 | // appears in options protos created by the compiler::Parser class.
645 | // DescriptorPool resolves these when building Descriptor objects. Therefore,
646 | // options protos in descriptor objects (e.g. returned by Descriptor::options(),
647 | // or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
648 | // in them.
649 | message UninterpretedOption {
650 | // The name of the uninterpreted option. Each string represents a segment in
651 | // a dot-separated name. is_extension is true iff a segment represents an
652 | // extension (denoted with parentheses in options specs in .proto files).
653 | // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
654 | // "foo.(bar.baz).qux".
655 | message NamePart {
656 | required string name_part = 1;
657 | required bool is_extension = 2;
658 | }
659 | repeated NamePart name = 2;
660 |
661 | // The value of the uninterpreted option, in whatever type the tokenizer
662 | // identified it as during parsing. Exactly one of these should be set.
663 | optional string identifier_value = 3;
664 | optional uint64 positive_int_value = 4;
665 | optional int64 negative_int_value = 5;
666 | optional double double_value = 6;
667 | optional bytes string_value = 7;
668 | optional string aggregate_value = 8;
669 | }
670 |
671 | // ===================================================================
672 | // Optional source code info
673 |
674 | // Encapsulates information about the original source file from which a
675 | // FileDescriptorProto was generated.
676 | message SourceCodeInfo {
677 | // A Location identifies a piece of source code in a .proto file which
678 | // corresponds to a particular definition. This information is intended
679 | // to be useful to IDEs, code indexers, documentation generators, and similar
680 | // tools.
681 | //
682 | // For example, say we have a file like:
683 | // message Foo {
684 | // optional string foo = 1;
685 | // }
686 | // Let's look at just the field definition:
687 | // optional string foo = 1;
688 | // ^ ^^ ^^ ^ ^^^
689 | // a bc de f ghi
690 | // We have the following locations:
691 | // span path represents
692 | // [a,i) [ 4, 0, 2, 0 ] The whole field definition.
693 | // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
694 | // [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
695 | // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
696 | // [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
697 | //
698 | // Notes:
699 | // - A location may refer to a repeated field itself (i.e. not to any
700 | // particular index within it). This is used whenever a set of elements are
701 | // logically enclosed in a single code segment. For example, an entire
702 | // extend block (possibly containing multiple extension definitions) will
703 | // have an outer location whose path refers to the "extensions" repeated
704 | // field without an index.
705 | // - Multiple locations may have the same path. This happens when a single
706 | // logical declaration is spread out across multiple places. The most
707 | // obvious example is the "extend" block again -- there may be multiple
708 | // extend blocks in the same scope, each of which will have the same path.
709 | // - A location's span is not always a subset of its parent's span. For
710 | // example, the "extendee" of an extension declaration appears at the
711 | // beginning of the "extend" block and is shared by all extensions within
712 | // the block.
713 | // - Just because a location's span is a subset of some other location's span
714 | //   does not mean that it is a descendant. For example, a "group" defines
715 | // both a type and a field in a single declaration. Thus, the locations
716 | // corresponding to the type and field and their components will overlap.
717 | // - Code which tries to interpret locations should probably be designed to
718 | // ignore those that it doesn't understand, as more types of locations could
719 | // be recorded in the future.
720 | repeated Location location = 1;
721 | message Location {
722 | // Identifies which part of the FileDescriptorProto was defined at this
723 | // location.
724 | //
725 | // Each element is a field number or an index. They form a path from
726 |     // the root FileDescriptorProto to the place where the definition appears. For
727 | // example, this path:
728 | // [ 4, 3, 2, 7, 1 ]
729 | // refers to:
730 | // file.message_type(3) // 4, 3
731 | // .field(7) // 2, 7
732 | // .name() // 1
733 | // This is because FileDescriptorProto.message_type has field number 4:
734 | // repeated DescriptorProto message_type = 4;
735 | // and DescriptorProto.field has field number 2:
736 | // repeated FieldDescriptorProto field = 2;
737 | // and FieldDescriptorProto.name has field number 1:
738 | // optional string name = 1;
739 | //
740 | // Thus, the above path gives the location of a field name. If we removed
741 | // the last element:
742 | // [ 4, 3, 2, 7 ]
743 | // this path refers to the whole field declaration (from the beginning
744 | // of the label to the terminating semicolon).
745 | repeated int32 path = 1 [packed=true];
746 |
747 | // Always has exactly three or four elements: start line, start column,
748 | // end line (optional, otherwise assumed same as start line), end column.
749 | // These are packed into a single field for efficiency. Note that line
750 | // and column numbers are zero-based -- typically you will want to add
751 | // 1 to each before displaying to a user.
752 | repeated int32 span = 2 [packed=true];
753 |
754 | // If this SourceCodeInfo represents a complete declaration, these are any
755 | // comments appearing before and after the declaration which appear to be
756 | // attached to the declaration.
757 | //
758 | // A series of line comments appearing on consecutive lines, with no other
759 | // tokens appearing on those lines, will be treated as a single comment.
760 | //
761 | // leading_detached_comments will keep paragraphs of comments that appear
762 | // before (but not connected to) the current element. Each paragraph,
763 | // separated by empty lines, will be one comment element in the repeated
764 | // field.
765 | //
766 | // Only the comment content is provided; comment markers (e.g. //) are
767 | // stripped out. For block comments, leading whitespace and an asterisk
768 | // will be stripped from the beginning of each line other than the first.
769 | // Newlines are included in the output.
770 | //
771 | // Examples:
772 | //
773 | // optional int32 foo = 1; // Comment attached to foo.
774 | // // Comment attached to bar.
775 | // optional int32 bar = 2;
776 | //
777 | // optional string baz = 3;
778 | // // Comment attached to baz.
779 | // // Another line attached to baz.
780 | //
781 | // // Comment attached to qux.
782 | // //
783 | // // Another line attached to qux.
784 | // optional double qux = 4;
785 | //
786 | // // Detached comment for corge. This is not leading or trailing comments
787 | // // to qux or corge because there are blank lines separating it from
788 | // // both.
789 | //
790 | // // Detached comment for corge paragraph 2.
791 | //
792 | // optional string corge = 5;
793 | // /* Block comment attached
794 | // * to corge. Leading asterisks
795 | // * will be removed. */
796 | // /* Block comment attached to
797 | // * grault. */
798 | // optional int32 grault = 6;
799 | //
800 | // // ignored detached comments.
801 | optional string leading_comments = 3;
802 | optional string trailing_comments = 4;
803 | repeated string leading_detached_comments = 6;
804 | }
805 | }
806 |
807 | // Describes the relationship between generated code and its original source
808 | // file. A GeneratedCodeInfo message is associated with only one generated
809 | // source file, but may contain references to different source .proto files.
810 | message GeneratedCodeInfo {
811 | // An Annotation connects some span of text in generated code to an element
812 | // of its generating .proto file.
813 | repeated Annotation annotation = 1;
814 | message Annotation {
815 | // Identifies the element in the original source .proto file. This field
816 | // is formatted the same as SourceCodeInfo.Location.path.
817 | repeated int32 path = 1 [packed=true];
818 |
819 | // Identifies the filesystem path to the original source .proto.
820 | optional string source_file = 2;
821 |
822 | // Identifies the starting offset in bytes in the generated code
823 | // that relates to the identified object.
824 | optional int32 begin = 3;
825 |
826 | // Identifies the ending offset in bytes in the generated code that
827 | // relates to the identified object. The end offset should be one past
828 | // the last relevant byte (so the length of the text = end - begin).
829 | optional int32 end = 4;
830 | }
831 | }
832 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/rpc/status.proto:
--------------------------------------------------------------------------------
1 | // Copyright 2016 Google Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | syntax = "proto3";
16 |
17 | package google.rpc;
18 |
19 | import "google/protobuf/any.proto";
20 |
21 | option go_package = "google.golang.org/genproto/googleapis/rpc/status;status";
22 | option java_multiple_files = true;
23 | option java_outer_classname = "StatusProto";
24 | option java_package = "com.google.rpc";
25 | option objc_class_prefix = "RPC";
26 |
27 |
28 | // The `Status` type defines a logical error model that is suitable for different
29 | // programming environments, including REST APIs and RPC APIs. It is used by
30 | // [gRPC](https://github.com/grpc). The error model is designed to be:
31 | //
32 | // - Simple to use and understand for most users
33 | // - Flexible enough to meet unexpected needs
34 | //
35 | // # Overview
36 | //
37 | // The `Status` message contains three pieces of data: error code, error message,
38 | // and error details. The error code should be an enum value of
39 | // [google.rpc.Code][google.rpc.Code], but it may accept additional error codes if needed. The
40 | // error message should be a developer-facing English message that helps
41 | // developers *understand* and *resolve* the error. If a localized user-facing
42 | // error message is needed, put the localized message in the error details or
43 | // localize it in the client. The optional error details may contain arbitrary
44 | // information about the error. There is a predefined set of error detail types
45 | // in the package `google.rpc` which can be used for common error conditions.
46 | //
47 | // # Language mapping
48 | //
49 | // The `Status` message is the logical representation of the error model, but it
50 | // is not necessarily the actual wire format. When the `Status` message is
51 | // exposed in different client libraries and different wire protocols, it can be
52 | // mapped differently. For example, it will likely be mapped to some exceptions
53 | // in Java, but more likely mapped to some error codes in C.
54 | //
55 | // # Other uses
56 | //
57 | // The error model and the `Status` message can be used in a variety of
58 | // environments, either with or without APIs, to provide a
59 | // consistent developer experience across different environments.
60 | //
61 | // Example uses of this error model include:
62 | //
63 | // - Partial errors. If a service needs to return partial errors to the client,
64 | // it may embed the `Status` in the normal response to indicate the partial
65 | // errors.
66 | //
67 | // - Workflow errors. A typical workflow has multiple steps. Each step may
68 | // have a `Status` message for error reporting purpose.
69 | //
70 | // - Batch operations. If a client uses batch request and batch response, the
71 | // `Status` message should be used directly inside batch response, one for
72 | // each error sub-response.
73 | //
74 | // - Asynchronous operations. If an API call embeds asynchronous operation
75 | // results in its response, the status of those operations should be
76 | // represented directly using the `Status` message.
77 | //
78 | // - Logging. If some API errors are stored in logs, the message `Status` could
79 | // be used directly after any stripping needed for security/privacy reasons.
80 | message Status {
81 | // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].
82 | int32 code = 1;
83 |
84 | // A developer-facing error message, which should be in English. Any
85 | // user-facing error message should be localized and sent in the
86 | // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client.
87 | string message = 2;
88 |
89 | // A list of messages that carry the error details. There is a
90 | // common set of message types for APIs to use.
91 | repeated google.protobuf.Any details = 3;
92 | }
93 |
--------------------------------------------------------------------------------
/grpc/src/main/proto/google/type/latlng.proto:
--------------------------------------------------------------------------------
1 | // Copyright 2016 Google Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | syntax = "proto3";
16 |
17 | package google.type;
18 |
19 | option go_package = "google.golang.org/genproto/googleapis/type/latlng;latlng";
20 | option java_multiple_files = true;
21 | option java_outer_classname = "LatLngProto";
22 | option java_package = "com.google.type";
23 | option objc_class_prefix = "GTP";
24 |
25 |
26 | // An object representing a latitude/longitude pair. This is expressed as a pair
27 | // of doubles representing degrees latitude and degrees longitude. Unless
28 | // specified otherwise, this must conform to the WGS84 standard
29 | // (http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf).
30 | // Values must be within normalized ranges.
31 | //
32 | // Example of normalization code in Python:
33 | //
34 | // def NormalizeLongitude(longitude):
35 | // """Wraps decimal degrees longitude to [-180.0, 180.0]."""
36 | // q, r = divmod(longitude, 360.0)
37 | // if r > 180.0 or (r == 180.0 and q <= -1.0):
38 | // return r - 360.0
39 | // return r
40 | //
41 | // def NormalizeLatLng(latitude, longitude):
42 | // """Wraps decimal degrees latitude and longitude to
43 | // [-90.0, 90.0] and [-180.0, 180.0], respectively."""
44 | // r = latitude % 360.0
45 | // if r <= 90.0:
46 | // return r, NormalizeLongitude(longitude)
47 | // elif r >= 270.0:
48 | // return r - 360, NormalizeLongitude(longitude)
49 | // else:
50 | // return 180 - r, NormalizeLongitude(longitude + 180.0)
51 | //
52 | // assert 180.0 == NormalizeLongitude(180.0)
53 | // assert -180.0 == NormalizeLongitude(-180.0)
54 | // assert -179.0 == NormalizeLongitude(181.0)
55 | // assert (0.0, 0.0) == NormalizeLatLng(360.0, 0.0)
56 | // assert (0.0, 0.0) == NormalizeLatLng(-360.0, 0.0)
57 | // assert (85.0, 180.0) == NormalizeLatLng(95.0, 0.0)
58 | // assert (-85.0, -170.0) == NormalizeLatLng(-95.0, 10.0)
59 | // assert (90.0, 10.0) == NormalizeLatLng(90.0, 10.0)
60 | // assert (-90.0, -10.0) == NormalizeLatLng(-90.0, -10.0)
61 | // assert (0.0, -170.0) == NormalizeLatLng(-180.0, 10.0)
62 | // assert (0.0, -170.0) == NormalizeLatLng(180.0, 10.0)
63 | // assert (-90.0, 10.0) == NormalizeLatLng(270.0, 10.0)
64 | // assert (90.0, 10.0) == NormalizeLatLng(-270.0, 10.0)
65 | message LatLng {
66 | // The latitude in degrees. It must be in the range [-90.0, +90.0].
67 | // NOTE: out-of-range values are not normalized by this message; callers
68 | // must normalize first (see the example in the message comment above).
69 | double latitude = 1;
70 |
71 | // The longitude in degrees. It must be in the range [-180.0, +180.0].
72 | // NOTE: same normalization responsibility as `latitude` above.
73 | double longitude = 2;
74 | }
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | include ':app', ':grpc' // Build modules: the Android sample app and the gRPC proto-stub library.
18 |
--------------------------------------------------------------------------------