1 /*
2  * Copyright 2020 Google LLC
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     https://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 // Generated by the protocol buffer compiler.  DO NOT EDIT!
17 // source: google/cloud/dialogflow/cx/v3/session.proto
18 
19 package com.google.cloud.dialogflow.cx.v3;
20 
21 /**
22  *
23  *
24  * <pre>
25  * Contains a speech recognition result corresponding to a portion of the audio
26  * that is currently being processed or an indication that this is the end
27  * of the single requested utterance.
28  * While end-user audio is being processed, Dialogflow sends a series of
29  * results. Each result may contain a `transcript` value. A transcript
30  * represents a portion of the utterance. While the recognizer is processing
31  * audio, transcript values may be interim values or finalized values.
32  * Once a transcript is finalized, the `is_final` value is set to true and
33  * processing continues for the next transcript.
34  * If `StreamingDetectIntentRequest.query_input.audio.config.single_utterance`
35  * was true, and the recognizer has completed processing audio,
36  * the `message_type` value is set to `END_OF_SINGLE_UTTERANCE` and the
37  * following (last) result contains the last finalized transcript.
38  * The complete end-user utterance is determined by concatenating the
39  * finalized transcript values received for the series of results.
40  * In the following example, single utterance is enabled. In the case where
41  * single utterance is not enabled, result 7 would not occur.
42  * ```
43  * Num | transcript              | message_type            | is_final
44  * --- | ----------------------- | ----------------------- | --------
45  * 1   | "tube"                  | TRANSCRIPT              | false
46  * 2   | "to be a"               | TRANSCRIPT              | false
47  * 3   | "to be"                 | TRANSCRIPT              | false
48  * 4   | "to be or not to be"    | TRANSCRIPT              | true
49  * 5   | "that's"                | TRANSCRIPT              | false
50  * 6   | "that is"               | TRANSCRIPT              | false
51  * 7   | unset                   | END_OF_SINGLE_UTTERANCE | unset
52  * 8   | " that is the question" | TRANSCRIPT              | true
53  * ```
54  * Concatenating the finalized transcripts with `is_final` set to true,
55  * the complete utterance becomes "to be or not to be that is the question".
56  * </pre>
57  *
58  * Protobuf type {@code google.cloud.dialogflow.cx.v3.StreamingRecognitionResult}
59  */
60 public final class StreamingRecognitionResult extends com.google.protobuf.GeneratedMessageV3
61     implements
62     // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
63     StreamingRecognitionResultOrBuilder {
64   private static final long serialVersionUID = 0L;
65   // Use StreamingRecognitionResult.newBuilder() to construct.
66   private StreamingRecognitionResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
67     super(builder);
68   }
69 
70   private StreamingRecognitionResult() {
71     messageType_ = 0;
72     transcript_ = "";
73     speechWordInfo_ = java.util.Collections.emptyList();
74     languageCode_ = "";
75   }
76 
77   @java.lang.Override
78   @SuppressWarnings({"unused"})
79   protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
80     return new StreamingRecognitionResult();
81   }
82 
83   @java.lang.Override
84   public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
85     return this.unknownFields;
86   }
87 
88   public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
89     return com.google.cloud.dialogflow.cx.v3.SessionProto
90         .internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_descriptor;
91   }
92 
93   @java.lang.Override
94   protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
95       internalGetFieldAccessorTable() {
96     return com.google.cloud.dialogflow.cx.v3.SessionProto
97         .internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_fieldAccessorTable
98         .ensureFieldAccessorsInitialized(
99             com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.class,
100             com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.Builder.class);
101   }
102 
103   /**
104    *
105    *
106    * <pre>
107    * Type of the response message.
108    * </pre>
109    *
110    * Protobuf enum {@code google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType}
111    */
112   public enum MessageType implements com.google.protobuf.ProtocolMessageEnum {
113     /**
114      *
115      *
116      * <pre>
117      * Not specified. Should never be used.
118      * </pre>
119      *
120      * <code>MESSAGE_TYPE_UNSPECIFIED = 0;</code>
121      */
122     MESSAGE_TYPE_UNSPECIFIED(0),
123     /**
124      *
125      *
126      * <pre>
127      * Message contains a (possibly partial) transcript.
128      * </pre>
129      *
130      * <code>TRANSCRIPT = 1;</code>
131      */
132     TRANSCRIPT(1),
133     /**
134      *
135      *
136      * <pre>
137      * Event indicates that the server has detected the end of the user's speech
138      * utterance and expects no additional speech. Therefore, the server will
139      * not process additional audio (although it may subsequently return
140      * additional results). The client should stop sending additional audio
141      * data, half-close the gRPC connection, and wait for any additional results
142      * until the server closes the gRPC connection. This message is only sent if
143      * [`single_utterance`][google.cloud.dialogflow.cx.v3.InputAudioConfig.single_utterance]
144      * was set to `true`, and is not used otherwise.
145      * </pre>
146      *
147      * <code>END_OF_SINGLE_UTTERANCE = 2;</code>
148      */
149     END_OF_SINGLE_UTTERANCE(2),
150     UNRECOGNIZED(-1),
151     ;
152 
153     /**
154      *
155      *
156      * <pre>
157      * Not specified. Should never be used.
158      * </pre>
159      *
160      * <code>MESSAGE_TYPE_UNSPECIFIED = 0;</code>
161      */
162     public static final int MESSAGE_TYPE_UNSPECIFIED_VALUE = 0;
163     /**
164      *
165      *
166      * <pre>
167      * Message contains a (possibly partial) transcript.
168      * </pre>
169      *
170      * <code>TRANSCRIPT = 1;</code>
171      */
172     public static final int TRANSCRIPT_VALUE = 1;
173     /**
174      *
175      *
176      * <pre>
177      * Event indicates that the server has detected the end of the user's speech
178      * utterance and expects no additional speech. Therefore, the server will
179      * not process additional audio (although it may subsequently return
180      * additional results). The client should stop sending additional audio
181      * data, half-close the gRPC connection, and wait for any additional results
182      * until the server closes the gRPC connection. This message is only sent if
183      * [`single_utterance`][google.cloud.dialogflow.cx.v3.InputAudioConfig.single_utterance]
184      * was set to `true`, and is not used otherwise.
185      * </pre>
186      *
187      * <code>END_OF_SINGLE_UTTERANCE = 2;</code>
188      */
189     public static final int END_OF_SINGLE_UTTERANCE_VALUE = 2;
190 
191     public final int getNumber() {
192       if (this == UNRECOGNIZED) {
193         throw new java.lang.IllegalArgumentException(
194             "Can't get the number of an unknown enum value.");
195       }
196       return value;
197     }
198 
199     /**
200      * @param value The numeric wire value of the corresponding enum entry.
201      * @return The enum associated with the given numeric wire value.
202      * @deprecated Use {@link #forNumber(int)} instead.
203      */
204     @java.lang.Deprecated
205     public static MessageType valueOf(int value) {
206       return forNumber(value);
207     }
208 
209     /**
210      * @param value The numeric wire value of the corresponding enum entry.
211      * @return The enum associated with the given numeric wire value.
212      */
213     public static MessageType forNumber(int value) {
214       switch (value) {
215         case 0:
216           return MESSAGE_TYPE_UNSPECIFIED;
217         case 1:
218           return TRANSCRIPT;
219         case 2:
220           return END_OF_SINGLE_UTTERANCE;
221         default:
222           return null;
223       }
224     }
225 
226     public static com.google.protobuf.Internal.EnumLiteMap<MessageType> internalGetValueMap() {
227       return internalValueMap;
228     }
229 
230     private static final com.google.protobuf.Internal.EnumLiteMap<MessageType> internalValueMap =
231         new com.google.protobuf.Internal.EnumLiteMap<MessageType>() {
232           public MessageType findValueByNumber(int number) {
233             return MessageType.forNumber(number);
234           }
235         };
236 
237     public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
238       if (this == UNRECOGNIZED) {
239         throw new java.lang.IllegalStateException(
240             "Can't get the descriptor of an unrecognized enum value.");
241       }
242       return getDescriptor().getValues().get(ordinal());
243     }
244 
245     public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
246       return getDescriptor();
247     }
248 
249     public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
250       return com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.getDescriptor()
251           .getEnumTypes()
252           .get(0);
253     }
254 
255     private static final MessageType[] VALUES = values();
256 
257     public static MessageType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
258       if (desc.getType() != getDescriptor()) {
259         throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
260       }
261       if (desc.getIndex() == -1) {
262         return UNRECOGNIZED;
263       }
264       return VALUES[desc.getIndex()];
265     }
266 
267     private final int value;
268 
269     private MessageType(int value) {
270       this.value = value;
271     }
272 
273     // @@protoc_insertion_point(enum_scope:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType)
274   }
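  /*
   * Illustrative sketch (not part of the generated code): a streaming caller
   * typically branches on the message type of each result it receives. The
   * names `results` and `requestObserver` below are hypothetical placeholders
   * for the application's own streaming client objects.
   *
   *   for (StreamingRecognitionResult result : results) {
   *     switch (result.getMessageType()) {
   *       case TRANSCRIPT:
   *         // Interim or finalized transcript; check getIsFinal().
   *         break;
   *       case END_OF_SINGLE_UTTERANCE:
   *         // Stop sending audio and half-close the request stream, then keep
   *         // reading until the server closes its side of the stream.
   *         requestObserver.onCompleted();
   *         break;
   *       default:
   *         break;
   *     }
   *   }
   */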
275 
276   public static final int MESSAGE_TYPE_FIELD_NUMBER = 1;
277   private int messageType_ = 0;
278   /**
279    *
280    *
281    * <pre>
282    * Type of the result message.
283    * </pre>
284    *
285    * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
286    * </code>
287    *
288    * @return The enum numeric value on the wire for messageType.
289    */
290   @java.lang.Override
291   public int getMessageTypeValue() {
292     return messageType_;
293   }
294   /**
295    *
296    *
297    * <pre>
298    * Type of the result message.
299    * </pre>
300    *
301    * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
302    * </code>
303    *
304    * @return The messageType.
305    */
306   @java.lang.Override
307   public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType getMessageType() {
308     com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType result =
309         com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.forNumber(
310             messageType_);
311     return result == null
312         ? com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.UNRECOGNIZED
313         : result;
314   }
315 
316   public static final int TRANSCRIPT_FIELD_NUMBER = 2;
317 
318   @SuppressWarnings("serial")
319   private volatile java.lang.Object transcript_ = "";
320   /**
321    *
322    *
323    * <pre>
324    * Transcript text representing the words that the user spoke.
325    * Populated if and only if `message_type` = `TRANSCRIPT`.
326    * </pre>
327    *
328    * <code>string transcript = 2;</code>
329    *
330    * @return The transcript.
331    */
332   @java.lang.Override
333   public java.lang.String getTranscript() {
334     java.lang.Object ref = transcript_;
335     if (ref instanceof java.lang.String) {
336       return (java.lang.String) ref;
337     } else {
338       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
339       java.lang.String s = bs.toStringUtf8();
340       transcript_ = s;
341       return s;
342     }
343   }
344   /**
345    *
346    *
347    * <pre>
348    * Transcript text representing the words that the user spoke.
349    * Populated if and only if `message_type` = `TRANSCRIPT`.
350    * </pre>
351    *
352    * <code>string transcript = 2;</code>
353    *
354    * @return The bytes for transcript.
355    */
356   @java.lang.Override
357   public com.google.protobuf.ByteString getTranscriptBytes() {
358     java.lang.Object ref = transcript_;
359     if (ref instanceof java.lang.String) {
360       com.google.protobuf.ByteString b =
361           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
362       transcript_ = b;
363       return b;
364     } else {
365       return (com.google.protobuf.ByteString) ref;
366     }
367   }
368 
369   public static final int IS_FINAL_FIELD_NUMBER = 3;
370   private boolean isFinal_ = false;
371   /**
372    *
373    *
374    * <pre>
375    * If `false`, the `StreamingRecognitionResult` represents an
376    * interim result that may change. If `true`, the recognizer will not return
377    * any further hypotheses about this piece of the audio. May only be populated
378    * for `message_type` = `TRANSCRIPT`.
379    * </pre>
380    *
381    * <code>bool is_final = 3;</code>
382    *
383    * @return The isFinal.
384    */
385   @java.lang.Override
386   public boolean getIsFinal() {
387     return isFinal_;
388   }
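  // Illustrative helper (not part of the generated API): per the class comment,
  // the complete end-user utterance is the concatenation of the transcripts
  // whose `is_final` flag is true, in the order the results were received.
  private static java.lang.String concatenateFinalTranscriptsExample(
      java.util.List<StreamingRecognitionResult> results) {
    java.lang.StringBuilder utterance = new java.lang.StringBuilder();
    for (StreamingRecognitionResult result : results) {
      if (result.getIsFinal()) {
        utterance.append(result.getTranscript());
      }
    }
    return utterance.toString();
  }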
389 
390   public static final int CONFIDENCE_FIELD_NUMBER = 4;
391   private float confidence_ = 0F;
392   /**
393    *
394    *
395    * <pre>
396    * The Speech confidence between 0.0 and 1.0 for the current portion of audio.
397    * A higher number indicates an estimated greater likelihood that the
398    * recognized words are correct. The default of 0.0 is a sentinel value
399    * indicating that confidence was not set.
400    * This field is typically only provided if `is_final` is true and you should
401    * not rely on it being accurate or even set.
402    * </pre>
403    *
404    * <code>float confidence = 4;</code>
405    *
406    * @return The confidence.
407    */
408   @java.lang.Override
409   public float getConfidence() {
410     return confidence_;
411   }
412 
413   public static final int STABILITY_FIELD_NUMBER = 6;
414   private float stability_ = 0F;
415   /**
416    *
417    *
418    * <pre>
419    * An estimate of the likelihood that the speech recognizer will
420    * not change its guess about this interim recognition result:
421    * * If the value is unspecified or 0.0, Dialogflow didn't compute the
422    *   stability. In particular, Dialogflow will only provide stability for
423    *   `TRANSCRIPT` results with `is_final = false`.
424    * * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
425    *   unstable and 1.0 means completely stable.
426    * </pre>
427    *
428    * <code>float stability = 6;</code>
429    *
430    * @return The stability.
431    */
432   @java.lang.Override
433   public float getStability() {
434     return stability_;
435   }
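  // Illustrative note (not part of the generated code): both `confidence` and
  // `stability` use 0.0 as a "not set" sentinel, so callers usually read them
  // defensively, e.g. only trusting confidence on finalized transcripts:
  //
  //   if (result.getIsFinal() && result.getConfidence() > 0F) {
  //     float confidence = result.getConfidence();  // estimate only; may be absent
  //   }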
436 
437   public static final int SPEECH_WORD_INFO_FIELD_NUMBER = 7;
438 
439   @SuppressWarnings("serial")
440   private java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo> speechWordInfo_;
441   /**
442    *
443    *
444    * <pre>
445    * Word-specific information for the words recognized by Speech in
446    * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
447    * Populated if and only if `message_type` = `TRANSCRIPT` and
448    * [InputAudioConfig.enable_word_info] is set.
449    * </pre>
450    *
451    * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
452    */
453   @java.lang.Override
454   public java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo> getSpeechWordInfoList() {
455     return speechWordInfo_;
456   }
457   /**
458    *
459    *
460    * <pre>
461    * Word-specific information for the words recognized by Speech in
462    * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
463    * Populated if and only if `message_type` = `TRANSCRIPT` and
464    * [InputAudioConfig.enable_word_info] is set.
465    * </pre>
466    *
467    * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
468    */
469   @java.lang.Override
470   public java.util.List<? extends com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
471       getSpeechWordInfoOrBuilderList() {
472     return speechWordInfo_;
473   }
474   /**
475    *
476    *
477    * <pre>
478    * Word-specific information for the words recognized by Speech in
479    * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
480    * Populated if and only if `message_type` = `TRANSCRIPT` and
481    * [InputAudioConfig.enable_word_info] is set.
482    * </pre>
483    *
484    * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
485    */
486   @java.lang.Override
487   public int getSpeechWordInfoCount() {
488     return speechWordInfo_.size();
489   }
490   /**
491    *
492    *
493    * <pre>
494    * Word-specific information for the words recognized by Speech in
495    * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
496    * Populated if and only if `message_type` = `TRANSCRIPT` and
497    * [InputAudioConfig.enable_word_info] is set.
498    * </pre>
499    *
500    * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
501    */
502   @java.lang.Override
503   public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo getSpeechWordInfo(int index) {
504     return speechWordInfo_.get(index);
505   }
506   /**
507    *
508    *
509    * <pre>
510    * Word-specific information for the words recognized by Speech in
511    * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
512    * Populated if and only if `message_type` = `TRANSCRIPT` and
513    * [InputAudioConfig.enable_word_info] is set.
514    * </pre>
515    *
516    * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
517    */
518   @java.lang.Override
519   public com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder getSpeechWordInfoOrBuilder(
520       int index) {
521     return speechWordInfo_.get(index);
522   }
523 
524   public static final int SPEECH_END_OFFSET_FIELD_NUMBER = 8;
525   private com.google.protobuf.Duration speechEndOffset_;
526   /**
527    *
528    *
529    * <pre>
530    * Time offset of the end of this Speech recognition result relative to the
531    * beginning of the audio. Only populated for `message_type` =
532    * `TRANSCRIPT`.
533    * </pre>
534    *
535    * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
536    *
537    * @return Whether the speechEndOffset field is set.
538    */
539   @java.lang.Override
540   public boolean hasSpeechEndOffset() {
541     return speechEndOffset_ != null;
542   }
543   /**
544    *
545    *
546    * <pre>
547    * Time offset of the end of this Speech recognition result relative to the
548    * beginning of the audio. Only populated for `message_type` =
549    * `TRANSCRIPT`.
550    * </pre>
551    *
552    * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
553    *
554    * @return The speechEndOffset.
555    */
556   @java.lang.Override
557   public com.google.protobuf.Duration getSpeechEndOffset() {
558     return speechEndOffset_ == null
559         ? com.google.protobuf.Duration.getDefaultInstance()
560         : speechEndOffset_;
561   }
562   /**
563    *
564    *
565    * <pre>
566    * Time offset of the end of this Speech recognition result relative to the
567    * beginning of the audio. Only populated for `message_type` =
568    * `TRANSCRIPT`.
569    * </pre>
570    *
571    * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
572    */
573   @java.lang.Override
574   public com.google.protobuf.DurationOrBuilder getSpeechEndOffsetOrBuilder() {
575     return speechEndOffset_ == null
576         ? com.google.protobuf.Duration.getDefaultInstance()
577         : speechEndOffset_;
578   }
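  // Illustrative note (not part of the generated code): `speech_end_offset` is
  // a protobuf Duration measured from the start of the audio; a caller could
  // convert it to milliseconds roughly like this, guarding on
  // hasSpeechEndOffset() since the field is only set for TRANSCRIPT results:
  //
  //   if (result.hasSpeechEndOffset()) {
  //     com.google.protobuf.Duration offset = result.getSpeechEndOffset();
  //     long endOffsetMillis = offset.getSeconds() * 1000L + offset.getNanos() / 1_000_000L;
  //   }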
579 
580   public static final int LANGUAGE_CODE_FIELD_NUMBER = 10;
581 
582   @SuppressWarnings("serial")
583   private volatile java.lang.Object languageCode_ = "";
584   /**
585    *
586    *
587    * <pre>
588    * Detected language code for the transcript.
589    * </pre>
590    *
591    * <code>string language_code = 10;</code>
592    *
593    * @return The languageCode.
594    */
595   @java.lang.Override
596   public java.lang.String getLanguageCode() {
597     java.lang.Object ref = languageCode_;
598     if (ref instanceof java.lang.String) {
599       return (java.lang.String) ref;
600     } else {
601       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
602       java.lang.String s = bs.toStringUtf8();
603       languageCode_ = s;
604       return s;
605     }
606   }
607   /**
608    *
609    *
610    * <pre>
611    * Detected language code for the transcript.
612    * </pre>
613    *
614    * <code>string language_code = 10;</code>
615    *
616    * @return The bytes for languageCode.
617    */
618   @java.lang.Override
619   public com.google.protobuf.ByteString getLanguageCodeBytes() {
620     java.lang.Object ref = languageCode_;
621     if (ref instanceof java.lang.String) {
622       com.google.protobuf.ByteString b =
623           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
624       languageCode_ = b;
625       return b;
626     } else {
627       return (com.google.protobuf.ByteString) ref;
628     }
629   }
630 
631   private byte memoizedIsInitialized = -1;
632 
633   @java.lang.Override
634   public final boolean isInitialized() {
635     byte isInitialized = memoizedIsInitialized;
636     if (isInitialized == 1) return true;
637     if (isInitialized == 0) return false;
638 
639     memoizedIsInitialized = 1;
640     return true;
641   }
642 
643   @java.lang.Override
644   public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
645     if (messageType_
646         != com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType
647             .MESSAGE_TYPE_UNSPECIFIED
648             .getNumber()) {
649       output.writeEnum(1, messageType_);
650     }
651     if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(transcript_)) {
652       com.google.protobuf.GeneratedMessageV3.writeString(output, 2, transcript_);
653     }
654     if (isFinal_ != false) {
655       output.writeBool(3, isFinal_);
656     }
657     if (java.lang.Float.floatToRawIntBits(confidence_) != 0) {
658       output.writeFloat(4, confidence_);
659     }
660     if (java.lang.Float.floatToRawIntBits(stability_) != 0) {
661       output.writeFloat(6, stability_);
662     }
663     for (int i = 0; i < speechWordInfo_.size(); i++) {
664       output.writeMessage(7, speechWordInfo_.get(i));
665     }
666     if (speechEndOffset_ != null) {
667       output.writeMessage(8, getSpeechEndOffset());
668     }
669     if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
670       com.google.protobuf.GeneratedMessageV3.writeString(output, 10, languageCode_);
671     }
672     getUnknownFields().writeTo(output);
673   }
674 
675   @java.lang.Override
676   public int getSerializedSize() {
677     int size = memoizedSize;
678     if (size != -1) return size;
679 
680     size = 0;
681     if (messageType_
682         != com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType
683             .MESSAGE_TYPE_UNSPECIFIED
684             .getNumber()) {
685       size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, messageType_);
686     }
687     if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(transcript_)) {
688       size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, transcript_);
689     }
690     if (isFinal_ != false) {
691       size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, isFinal_);
692     }
693     if (java.lang.Float.floatToRawIntBits(confidence_) != 0) {
694       size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, confidence_);
695     }
696     if (java.lang.Float.floatToRawIntBits(stability_) != 0) {
697       size += com.google.protobuf.CodedOutputStream.computeFloatSize(6, stability_);
698     }
699     for (int i = 0; i < speechWordInfo_.size(); i++) {
700       size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, speechWordInfo_.get(i));
701     }
702     if (speechEndOffset_ != null) {
703       size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getSpeechEndOffset());
704     }
705     if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
706       size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, languageCode_);
707     }
708     size += getUnknownFields().getSerializedSize();
709     memoizedSize = size;
710     return size;
711   }
712 
713   @java.lang.Override
714   public boolean equals(final java.lang.Object obj) {
715     if (obj == this) {
716       return true;
717     }
718     if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)) {
719       return super.equals(obj);
720     }
721     com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult other =
722         (com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult) obj;
723 
724     if (messageType_ != other.messageType_) return false;
725     if (!getTranscript().equals(other.getTranscript())) return false;
726     if (getIsFinal() != other.getIsFinal()) return false;
727     if (java.lang.Float.floatToIntBits(getConfidence())
728         != java.lang.Float.floatToIntBits(other.getConfidence())) return false;
729     if (java.lang.Float.floatToIntBits(getStability())
730         != java.lang.Float.floatToIntBits(other.getStability())) return false;
731     if (!getSpeechWordInfoList().equals(other.getSpeechWordInfoList())) return false;
732     if (hasSpeechEndOffset() != other.hasSpeechEndOffset()) return false;
733     if (hasSpeechEndOffset()) {
734       if (!getSpeechEndOffset().equals(other.getSpeechEndOffset())) return false;
735     }
736     if (!getLanguageCode().equals(other.getLanguageCode())) return false;
737     if (!getUnknownFields().equals(other.getUnknownFields())) return false;
738     return true;
739   }
740 
741   @java.lang.Override
742   public int hashCode() {
743     if (memoizedHashCode != 0) {
744       return memoizedHashCode;
745     }
746     int hash = 41;
747     hash = (19 * hash) + getDescriptor().hashCode();
748     hash = (37 * hash) + MESSAGE_TYPE_FIELD_NUMBER;
749     hash = (53 * hash) + messageType_;
750     hash = (37 * hash) + TRANSCRIPT_FIELD_NUMBER;
751     hash = (53 * hash) + getTranscript().hashCode();
752     hash = (37 * hash) + IS_FINAL_FIELD_NUMBER;
753     hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getIsFinal());
754     hash = (37 * hash) + CONFIDENCE_FIELD_NUMBER;
755     hash = (53 * hash) + java.lang.Float.floatToIntBits(getConfidence());
756     hash = (37 * hash) + STABILITY_FIELD_NUMBER;
757     hash = (53 * hash) + java.lang.Float.floatToIntBits(getStability());
758     if (getSpeechWordInfoCount() > 0) {
759       hash = (37 * hash) + SPEECH_WORD_INFO_FIELD_NUMBER;
760       hash = (53 * hash) + getSpeechWordInfoList().hashCode();
761     }
762     if (hasSpeechEndOffset()) {
763       hash = (37 * hash) + SPEECH_END_OFFSET_FIELD_NUMBER;
764       hash = (53 * hash) + getSpeechEndOffset().hashCode();
765     }
766     hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
767     hash = (53 * hash) + getLanguageCode().hashCode();
768     hash = (29 * hash) + getUnknownFields().hashCode();
769     memoizedHashCode = hash;
770     return hash;
771   }
772 
773   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
774       java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
775     return PARSER.parseFrom(data);
776   }
777 
778   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
779       java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
780       throws com.google.protobuf.InvalidProtocolBufferException {
781     return PARSER.parseFrom(data, extensionRegistry);
782   }
783 
784   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
785       com.google.protobuf.ByteString data)
786       throws com.google.protobuf.InvalidProtocolBufferException {
787     return PARSER.parseFrom(data);
788   }
789 
790   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
791       com.google.protobuf.ByteString data,
792       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
793       throws com.google.protobuf.InvalidProtocolBufferException {
794     return PARSER.parseFrom(data, extensionRegistry);
795   }
796 
797   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(byte[] data)
798       throws com.google.protobuf.InvalidProtocolBufferException {
799     return PARSER.parseFrom(data);
800   }
801 
802   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
803       byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
804       throws com.google.protobuf.InvalidProtocolBufferException {
805     return PARSER.parseFrom(data, extensionRegistry);
806   }
807 
808   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
809       java.io.InputStream input) throws java.io.IOException {
810     return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
811   }
812 
813   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
814       java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
815       throws java.io.IOException {
816     return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
817         PARSER, input, extensionRegistry);
818   }
819 
820   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseDelimitedFrom(
821       java.io.InputStream input) throws java.io.IOException {
822     return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
823   }
824 
825   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseDelimitedFrom(
826       java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
827       throws java.io.IOException {
828     return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
829         PARSER, input, extensionRegistry);
830   }
831 
832   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
833       com.google.protobuf.CodedInputStream input) throws java.io.IOException {
834     return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
835   }
836 
837   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
838       com.google.protobuf.CodedInputStream input,
839       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
840       throws java.io.IOException {
841     return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
842         PARSER, input, extensionRegistry);
843   }
844 
845   @java.lang.Override
846   public Builder newBuilderForType() {
847     return newBuilder();
848   }
849 
850   public static Builder newBuilder() {
851     return DEFAULT_INSTANCE.toBuilder();
852   }
853 
854   public static Builder newBuilder(
855       com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult prototype) {
856     return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
857   }
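  // Illustrative sketch (not part of the generated code): results are normally
  // produced by the Dialogflow service, but an instance can also be constructed
  // locally (for example in tests) through the generated Builder:
  //
  //   StreamingRecognitionResult result =
  //       StreamingRecognitionResult.newBuilder()
  //           .setMessageType(MessageType.TRANSCRIPT)
  //           .setTranscript("to be or not to be")
  //           .setIsFinal(true)
  //           .build();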
858 
859   @java.lang.Override
860   public Builder toBuilder() {
861     return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
862   }
863 
864   @java.lang.Override
865   protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
866     Builder builder = new Builder(parent);
867     return builder;
868   }
869   /**
870    *
871    *
872    * <pre>
873    * Contains a speech recognition result corresponding to a portion of the audio
874    * that is currently being processed or an indication that this is the end
875    * of the single requested utterance.
876    * While end-user audio is being processed, Dialogflow sends a series of
877    * results. Each result may contain a `transcript` value. A transcript
878    * represents a portion of the utterance. While the recognizer is processing
879    * audio, transcript values may be interim values or finalized values.
880    * Once a transcript is finalized, the `is_final` value is set to true and
881    * processing continues for the next transcript.
882    * If `StreamingDetectIntentRequest.query_input.audio.config.single_utterance`
883    * was true, and the recognizer has completed processing audio,
884    * the `message_type` value is set to `END_OF_SINGLE_UTTERANCE` and the
885    * following (last) result contains the last finalized transcript.
886    * The complete end-user utterance is determined by concatenating the
887    * finalized transcript values received for the series of results.
888    * In the following example, single utterance is enabled. In the case where
889    * single utterance is not enabled, result 7 would not occur.
890    * ```
891    * Num | transcript              | message_type            | is_final
892    * --- | ----------------------- | ----------------------- | --------
893    * 1   | "tube"                  | TRANSCRIPT              | false
894    * 2   | "to be a"               | TRANSCRIPT              | false
895    * 3   | "to be"                 | TRANSCRIPT              | false
896    * 4   | "to be or not to be"    | TRANSCRIPT              | true
897    * 5   | "that's"                | TRANSCRIPT              | false
898    * 6   | "that is"               | TRANSCRIPT              | false
899    * 7   | unset                   | END_OF_SINGLE_UTTERANCE | unset
900    * 8   | " that is the question" | TRANSCRIPT              | true
901    * ```
902    * Concatenating the finalized transcripts with `is_final` set to true,
903    * the complete utterance becomes "to be or not to be that is the question".
904    * </pre>
905    *
906    * Protobuf type {@code google.cloud.dialogflow.cx.v3.StreamingRecognitionResult}
907    */
908   public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
909       implements
910       // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
911       com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResultOrBuilder {
912     public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
913       return com.google.cloud.dialogflow.cx.v3.SessionProto
914           .internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_descriptor;
915     }
916 
917     @java.lang.Override
918     protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
919         internalGetFieldAccessorTable() {
920       return com.google.cloud.dialogflow.cx.v3.SessionProto
921           .internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_fieldAccessorTable
922           .ensureFieldAccessorsInitialized(
923               com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.class,
924               com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.Builder.class);
925     }
926 
927     // Construct using com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.newBuilder()
928     private Builder() {}
929 
930     private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
931       super(parent);
932     }
933 
934     @java.lang.Override
935     public Builder clear() {
936       super.clear();
937       bitField0_ = 0;
938       messageType_ = 0;
939       transcript_ = "";
940       isFinal_ = false;
941       confidence_ = 0F;
942       stability_ = 0F;
943       if (speechWordInfoBuilder_ == null) {
944         speechWordInfo_ = java.util.Collections.emptyList();
945       } else {
946         speechWordInfo_ = null;
947         speechWordInfoBuilder_.clear();
948       }
949       bitField0_ = (bitField0_ & ~0x00000020);
950       speechEndOffset_ = null;
951       if (speechEndOffsetBuilder_ != null) {
952         speechEndOffsetBuilder_.dispose();
953         speechEndOffsetBuilder_ = null;
954       }
955       languageCode_ = "";
956       return this;
957     }
958 
959     @java.lang.Override
960     public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
961       return com.google.cloud.dialogflow.cx.v3.SessionProto
962           .internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_descriptor;
963     }
964 
965     @java.lang.Override
966     public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult
967         getDefaultInstanceForType() {
968       return com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.getDefaultInstance();
969     }
970 
971     @java.lang.Override
972     public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult build() {
973       com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult result = buildPartial();
974       if (!result.isInitialized()) {
975         throw newUninitializedMessageException(result);
976       }
977       return result;
978     }
979 
980     @java.lang.Override
981     public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult buildPartial() {
982       com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult result =
983           new com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult(this);
984       buildPartialRepeatedFields(result);
985       if (bitField0_ != 0) {
986         buildPartial0(result);
987       }
988       onBuilt();
989       return result;
990     }
991 
992     private void buildPartialRepeatedFields(
993         com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult result) {
994       if (speechWordInfoBuilder_ == null) {
995         if (((bitField0_ & 0x00000020) != 0)) {
996           speechWordInfo_ = java.util.Collections.unmodifiableList(speechWordInfo_);
997           bitField0_ = (bitField0_ & ~0x00000020);
998         }
999         result.speechWordInfo_ = speechWordInfo_;
1000       } else {
1001         result.speechWordInfo_ = speechWordInfoBuilder_.build();
1002       }
1003     }
1004 
1005     private void buildPartial0(
1006         com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult result) {
1007       int from_bitField0_ = bitField0_;
1008       if (((from_bitField0_ & 0x00000001) != 0)) {
1009         result.messageType_ = messageType_;
1010       }
1011       if (((from_bitField0_ & 0x00000002) != 0)) {
1012         result.transcript_ = transcript_;
1013       }
1014       if (((from_bitField0_ & 0x00000004) != 0)) {
1015         result.isFinal_ = isFinal_;
1016       }
1017       if (((from_bitField0_ & 0x00000008) != 0)) {
1018         result.confidence_ = confidence_;
1019       }
1020       if (((from_bitField0_ & 0x00000010) != 0)) {
1021         result.stability_ = stability_;
1022       }
1023       if (((from_bitField0_ & 0x00000040) != 0)) {
1024         result.speechEndOffset_ =
1025             speechEndOffsetBuilder_ == null ? speechEndOffset_ : speechEndOffsetBuilder_.build();
1026       }
1027       if (((from_bitField0_ & 0x00000080) != 0)) {
1028         result.languageCode_ = languageCode_;
1029       }
1030     }
1031 
1032     @java.lang.Override
1033     public Builder clone() {
1034       return super.clone();
1035     }
1036 
1037     @java.lang.Override
1038     public Builder setField(
1039         com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
1040       return super.setField(field, value);
1041     }
1042 
1043     @java.lang.Override
1044     public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
1045       return super.clearField(field);
1046     }
1047 
1048     @java.lang.Override
1049     public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
1050       return super.clearOneof(oneof);
1051     }
1052 
1053     @java.lang.Override
1054     public Builder setRepeatedField(
1055         com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
1056       return super.setRepeatedField(field, index, value);
1057     }
1058 
1059     @java.lang.Override
1060     public Builder addRepeatedField(
1061         com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
1062       return super.addRepeatedField(field, value);
1063     }
1064 
1065     @java.lang.Override
1066     public Builder mergeFrom(com.google.protobuf.Message other) {
1067       if (other instanceof com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult) {
1068         return mergeFrom((com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult) other);
1069       } else {
1070         super.mergeFrom(other);
1071         return this;
1072       }
1073     }
1074 
1075     public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult other) {
1076       if (other
1077           == com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.getDefaultInstance())
1078         return this;
1079       if (other.messageType_ != 0) {
1080         setMessageTypeValue(other.getMessageTypeValue());
1081       }
1082       if (!other.getTranscript().isEmpty()) {
1083         transcript_ = other.transcript_;
1084         bitField0_ |= 0x00000002;
1085         onChanged();
1086       }
1087       if (other.getIsFinal() != false) {
1088         setIsFinal(other.getIsFinal());
1089       }
1090       if (other.getConfidence() != 0F) {
1091         setConfidence(other.getConfidence());
1092       }
1093       if (other.getStability() != 0F) {
1094         setStability(other.getStability());
1095       }
1096       if (speechWordInfoBuilder_ == null) {
1097         if (!other.speechWordInfo_.isEmpty()) {
1098           if (speechWordInfo_.isEmpty()) {
1099             speechWordInfo_ = other.speechWordInfo_;
1100             bitField0_ = (bitField0_ & ~0x00000020);
1101           } else {
1102             ensureSpeechWordInfoIsMutable();
1103             speechWordInfo_.addAll(other.speechWordInfo_);
1104           }
1105           onChanged();
1106         }
1107       } else {
1108         if (!other.speechWordInfo_.isEmpty()) {
1109           if (speechWordInfoBuilder_.isEmpty()) {
1110             speechWordInfoBuilder_.dispose();
1111             speechWordInfoBuilder_ = null;
1112             speechWordInfo_ = other.speechWordInfo_;
1113             bitField0_ = (bitField0_ & ~0x00000020);
1114             speechWordInfoBuilder_ =
1115                 com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
1116                     ? getSpeechWordInfoFieldBuilder()
1117                     : null;
1118           } else {
1119             speechWordInfoBuilder_.addAllMessages(other.speechWordInfo_);
1120           }
1121         }
1122       }
1123       if (other.hasSpeechEndOffset()) {
1124         mergeSpeechEndOffset(other.getSpeechEndOffset());
1125       }
1126       if (!other.getLanguageCode().isEmpty()) {
1127         languageCode_ = other.languageCode_;
1128         bitField0_ |= 0x00000080;
1129         onChanged();
1130       }
1131       this.mergeUnknownFields(other.getUnknownFields());
1132       onChanged();
1133       return this;
1134     }
1135 
1136     @java.lang.Override
1137     public final boolean isInitialized() {
1138       return true;
1139     }
1140 
1141     @java.lang.Override
1142     public Builder mergeFrom(
1143         com.google.protobuf.CodedInputStream input,
1144         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1145         throws java.io.IOException {
1146       if (extensionRegistry == null) {
1147         throw new java.lang.NullPointerException();
1148       }
1149       try {
1150         boolean done = false;
1151         while (!done) {
1152           int tag = input.readTag();
1153           switch (tag) {
1154             case 0:
1155               done = true;
1156               break;
1157             case 8:
1158               {
1159                 messageType_ = input.readEnum();
1160                 bitField0_ |= 0x00000001;
1161                 break;
1162               } // case 8
1163             case 18:
1164               {
1165                 transcript_ = input.readStringRequireUtf8();
1166                 bitField0_ |= 0x00000002;
1167                 break;
1168               } // case 18
1169             case 24:
1170               {
1171                 isFinal_ = input.readBool();
1172                 bitField0_ |= 0x00000004;
1173                 break;
1174               } // case 24
1175             case 37:
1176               {
1177                 confidence_ = input.readFloat();
1178                 bitField0_ |= 0x00000008;
1179                 break;
1180               } // case 37
1181             case 53:
1182               {
1183                 stability_ = input.readFloat();
1184                 bitField0_ |= 0x00000010;
1185                 break;
1186               } // case 53
1187             case 58:
1188               {
1189                 com.google.cloud.dialogflow.cx.v3.SpeechWordInfo m =
1190                     input.readMessage(
1191                         com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.parser(),
1192                         extensionRegistry);
1193                 if (speechWordInfoBuilder_ == null) {
1194                   ensureSpeechWordInfoIsMutable();
1195                   speechWordInfo_.add(m);
1196                 } else {
1197                   speechWordInfoBuilder_.addMessage(m);
1198                 }
1199                 break;
1200               } // case 58
1201             case 66:
1202               {
1203                 input.readMessage(getSpeechEndOffsetFieldBuilder().getBuilder(), extensionRegistry);
1204                 bitField0_ |= 0x00000040;
1205                 break;
1206               } // case 66
1207             case 82:
1208               {
1209                 languageCode_ = input.readStringRequireUtf8();
1210                 bitField0_ |= 0x00000080;
1211                 break;
1212               } // case 82
1213             default:
1214               {
1215                 if (!super.parseUnknownField(input, extensionRegistry, tag)) {
1216                   done = true; // was an endgroup tag
1217                 }
1218                 break;
1219               } // default:
1220           } // switch (tag)
1221         } // while (!done)
1222       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1223         throw e.unwrapIOException();
1224       } finally {
1225         onChanged();
1226       } // finally
1227       return this;
1228     }
1229 
1230     private int bitField0_;
1231 
1232     private int messageType_ = 0;
1233     /**
1234      *
1235      *
1236      * <pre>
1237      * Type of the result message.
1238      * </pre>
1239      *
1240      * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
1241      * </code>
1242      *
1243      * @return The enum numeric value on the wire for messageType.
1244      */
1245     @java.lang.Override
1246     public int getMessageTypeValue() {
1247       return messageType_;
1248     }
1249     /**
1250      *
1251      *
1252      * <pre>
1253      * Type of the result message.
1254      * </pre>
1255      *
1256      * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
1257      * </code>
1258      *
1259      * @param value The enum numeric value on the wire for messageType to set.
1260      * @return This builder for chaining.
1261      */
1262     public Builder setMessageTypeValue(int value) {
1263       messageType_ = value;
1264       bitField0_ |= 0x00000001;
1265       onChanged();
1266       return this;
1267     }
1268     /**
1269      *
1270      *
1271      * <pre>
1272      * Type of the result message.
1273      * </pre>
1274      *
1275      * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
1276      * </code>
1277      *
1278      * @return The messageType.
1279      */
1280     @java.lang.Override
1281     public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType
1282         getMessageType() {
1283       com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType result =
1284           com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.forNumber(
1285               messageType_);
1286       return result == null
1287           ? com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.UNRECOGNIZED
1288           : result;
1289     }
1290     /**
1291      *
1292      *
1293      * <pre>
1294      * Type of the result message.
1295      * </pre>
1296      *
1297      * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
1298      * </code>
1299      *
1300      * @param value The messageType to set.
1301      * @return This builder for chaining.
1302      */
1303     public Builder setMessageType(
1304         com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType value) {
1305       if (value == null) {
1306         throw new NullPointerException();
1307       }
1308       bitField0_ |= 0x00000001;
1309       messageType_ = value.getNumber();
1310       onChanged();
1311       return this;
1312     }
1313     /**
1314      *
1315      *
1316      * <pre>
1317      * Type of the result message.
1318      * </pre>
1319      *
1320      * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
1321      * </code>
1322      *
1323      * @return This builder for chaining.
1324      */
1325     public Builder clearMessageType() {
1326       bitField0_ = (bitField0_ & ~0x00000001);
1327       messageType_ = 0;
1328       onChanged();
1329       return this;
1330     }
1331 
1332     private java.lang.Object transcript_ = "";
1333     /**
1334      *
1335      *
1336      * <pre>
1337      * Transcript text representing the words that the user spoke.
1338      * Populated if and only if `message_type` = `TRANSCRIPT`.
1339      * </pre>
1340      *
1341      * <code>string transcript = 2;</code>
1342      *
1343      * @return The transcript.
1344      */
1345     public java.lang.String getTranscript() {
1346       java.lang.Object ref = transcript_;
1347       if (!(ref instanceof java.lang.String)) {
1348         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
1349         java.lang.String s = bs.toStringUtf8();
1350         transcript_ = s;
1351         return s;
1352       } else {
1353         return (java.lang.String) ref;
1354       }
1355     }
1356     /**
1357      *
1358      *
1359      * <pre>
1360      * Transcript text representing the words that the user spoke.
1361      * Populated if and only if `message_type` = `TRANSCRIPT`.
1362      * </pre>
1363      *
1364      * <code>string transcript = 2;</code>
1365      *
1366      * @return The bytes for transcript.
1367      */
1368     public com.google.protobuf.ByteString getTranscriptBytes() {
1369       java.lang.Object ref = transcript_;
1370       if (ref instanceof String) {
1371         com.google.protobuf.ByteString b =
1372             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
1373         transcript_ = b;
1374         return b;
1375       } else {
1376         return (com.google.protobuf.ByteString) ref;
1377       }
1378     }
1379     /**
1380      *
1381      *
1382      * <pre>
1383      * Transcript text representing the words that the user spoke.
1384      * Populated if and only if `message_type` = `TRANSCRIPT`.
1385      * </pre>
1386      *
1387      * <code>string transcript = 2;</code>
1388      *
1389      * @param value The transcript to set.
1390      * @return This builder for chaining.
1391      */
1392     public Builder setTranscript(java.lang.String value) {
1393       if (value == null) {
1394         throw new NullPointerException();
1395       }
1396       transcript_ = value;
1397       bitField0_ |= 0x00000002;
1398       onChanged();
1399       return this;
1400     }
1401     /**
1402      *
1403      *
1404      * <pre>
1405      * Transcript text representing the words that the user spoke.
1406      * Populated if and only if `message_type` = `TRANSCRIPT`.
1407      * </pre>
1408      *
1409      * <code>string transcript = 2;</code>
1410      *
1411      * @return This builder for chaining.
1412      */
1413     public Builder clearTranscript() {
1414       transcript_ = getDefaultInstance().getTranscript();
1415       bitField0_ = (bitField0_ & ~0x00000002);
1416       onChanged();
1417       return this;
1418     }
1419     /**
1420      *
1421      *
1422      * <pre>
1423      * Transcript text representing the words that the user spoke.
1424      * Populated if and only if `message_type` = `TRANSCRIPT`.
1425      * </pre>
1426      *
1427      * <code>string transcript = 2;</code>
1428      *
1429      * @param value The bytes for transcript to set.
1430      * @return This builder for chaining.
1431      */
1432     public Builder setTranscriptBytes(com.google.protobuf.ByteString value) {
1433       if (value == null) {
1434         throw new NullPointerException();
1435       }
1436       checkByteStringIsUtf8(value);
1437       transcript_ = value;
1438       bitField0_ |= 0x00000002;
1439       onChanged();
1440       return this;
1441     }
1442 
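    /*
     * Editorial note (not generated code): a small usage sketch, assuming a Builder
     * named `builder` is in scope. The transcript is stored either as a String or as
     * a UTF-8 ByteString and lazily converted by the accessors above, so repeated
     * reads in either form are cheap after the first conversion.
     *
     *   builder.setTranscript("to be or not to be");
     *   String text = builder.getTranscript();                              // "to be or not to be"
     *   com.google.protobuf.ByteString raw = builder.getTranscriptBytes();  // UTF-8 bytes of the same text
     */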
1443     private boolean isFinal_;
1444     /**
1445      *
1446      *
1447      * <pre>
1448      * If `false`, the `StreamingRecognitionResult` represents an
1449      * interim result that may change. If `true`, the recognizer will not return
1450      * any further hypotheses about this piece of the audio. May only be populated
1451      * for `message_type` = `TRANSCRIPT`.
1452      * </pre>
1453      *
1454      * <code>bool is_final = 3;</code>
1455      *
1456      * @return The isFinal.
1457      */
1458     @java.lang.Override
1459     public boolean getIsFinal() {
1460       return isFinal_;
1461     }
1462     /**
1463      *
1464      *
1465      * <pre>
1466      * If `false`, the `StreamingRecognitionResult` represents an
1467      * interim result that may change. If `true`, the recognizer will not return
1468      * any further hypotheses about this piece of the audio. May only be populated
1469      * for `message_type` = `TRANSCRIPT`.
1470      * </pre>
1471      *
1472      * <code>bool is_final = 3;</code>
1473      *
1474      * @param value The isFinal to set.
1475      * @return This builder for chaining.
1476      */
1477     public Builder setIsFinal(boolean value) {
1478 
1479       isFinal_ = value;
1480       bitField0_ |= 0x00000004;
1481       onChanged();
1482       return this;
1483     }
1484     /**
1485      *
1486      *
1487      * <pre>
1488      * If `false`, the `StreamingRecognitionResult` represents an
1489      * interim result that may change. If `true`, the recognizer will not return
1490      * any further hypotheses about this piece of the audio. May only be populated
1491      * for `message_type` = `TRANSCRIPT`.
1492      * </pre>
1493      *
1494      * <code>bool is_final = 3;</code>
1495      *
1496      * @return This builder for chaining.
1497      */
1498     public Builder clearIsFinal() {
1499       bitField0_ = (bitField0_ & ~0x00000004);
1500       isFinal_ = false;
1501       onChanged();
1502       return this;
1503     }
1504 
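    /*
     * Editorial note (not generated code): a sketch of how a client might assemble
     * the complete utterance, assuming it has collected the streamed messages in an
     * Iterable named `results`. Only finalized TRANSCRIPT results contribute.
     *
     *   StringBuilder utterance = new StringBuilder();
     *   for (StreamingRecognitionResult r : results) {
     *     if (r.getMessageType() == StreamingRecognitionResult.MessageType.TRANSCRIPT
     *         && r.getIsFinal()) {
     *       utterance.append(r.getTranscript());
     *     }
     *   }
     */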
1505     private float confidence_;
1506     /**
1507      *
1508      *
1509      * <pre>
1510      * The Speech confidence between 0.0 and 1.0 for the current portion of audio.
1511      * A higher number indicates an estimated greater likelihood that the
1512      * recognized words are correct. The default of 0.0 is a sentinel value
1513      * indicating that confidence was not set.
1514      * This field is typically only provided if `is_final` is true and you should
1515      * not rely on it being accurate or even set.
1516      * </pre>
1517      *
1518      * <code>float confidence = 4;</code>
1519      *
1520      * @return The confidence.
1521      */
1522     @java.lang.Override
1523     public float getConfidence() {
1524       return confidence_;
1525     }
1526     /**
1527      *
1528      *
1529      * <pre>
1530      * The Speech confidence between 0.0 and 1.0 for the current portion of audio.
1531      * A higher number indicates an estimated greater likelihood that the
1532      * recognized words are correct. The default of 0.0 is a sentinel value
1533      * indicating that confidence was not set.
1534      * This field is typically only provided if `is_final` is true and you should
1535      * not rely on it being accurate or even set.
1536      * </pre>
1537      *
1538      * <code>float confidence = 4;</code>
1539      *
1540      * @param value The confidence to set.
1541      * @return This builder for chaining.
1542      */
1543     public Builder setConfidence(float value) {
1544 
1545       confidence_ = value;
1546       bitField0_ |= 0x00000008;
1547       onChanged();
1548       return this;
1549     }
1550     /**
1551      *
1552      *
1553      * <pre>
1554      * The Speech confidence between 0.0 and 1.0 for the current portion of audio.
1555      * A higher number indicates an estimated greater likelihood that the
1556      * recognized words are correct. The default of 0.0 is a sentinel value
1557      * indicating that confidence was not set.
1558      * This field is typically only provided if `is_final` is true and you should
1559      * not rely on it being accurate or even set.
1560      * </pre>
1561      *
1562      * <code>float confidence = 4;</code>
1563      *
1564      * @return This builder for chaining.
1565      */
1566     public Builder clearConfidence() {
1567       bitField0_ = (bitField0_ & ~0x00000008);
1568       confidence_ = 0F;
1569       onChanged();
1570       return this;
1571     }
1572 
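    /*
     * Editorial note (not generated code): because 0.0 is the "not set" sentinel,
     * readers should treat it as "unknown" rather than "zero confidence". A
     * hypothetical guard, assuming `result` is in scope; the 0.5f threshold is
     * illustrative only.
     *
     *   float confidence = result.getConfidence();
     *   boolean confidenceKnown = result.getIsFinal() && confidence > 0.0f;
     *   if (confidenceKnown && confidence < 0.5f) {
     *     // e.g. ask the end-user to repeat the utterance
     *   }
     */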
1573     private float stability_;
1574     /**
1575      *
1576      *
1577      * <pre>
1578      * An estimate of the likelihood that the speech recognizer will
1579      * not change its guess about this interim recognition result:
1580      * * If the value is unspecified or 0.0, Dialogflow didn't compute the
1581      *   stability. In particular, Dialogflow will only provide stability for
1582      *   `TRANSCRIPT` results with `is_final = false`.
1583      * * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
1584      *   unstable and 1.0 means completely stable.
1585      * </pre>
1586      *
1587      * <code>float stability = 6;</code>
1588      *
1589      * @return The stability.
1590      */
1591     @java.lang.Override
1592     public float getStability() {
1593       return stability_;
1594     }
1595     /**
1596      *
1597      *
1598      * <pre>
1599      * An estimate of the likelihood that the speech recognizer will
1600      * not change its guess about this interim recognition result:
1601      * * If the value is unspecified or 0.0, Dialogflow didn't compute the
1602      *   stability. In particular, Dialogflow will only provide stability for
1603      *   `TRANSCRIPT` results with `is_final = false`.
1604      * * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
1605      *   unstable and 1.0 means completely stable.
1606      * </pre>
1607      *
1608      * <code>float stability = 6;</code>
1609      *
1610      * @param value The stability to set.
1611      * @return This builder for chaining.
1612      */
1613     public Builder setStability(float value) {
1614 
1615       stability_ = value;
1616       bitField0_ |= 0x00000010;
1617       onChanged();
1618       return this;
1619     }
1620     /**
1621      *
1622      *
1623      * <pre>
1624      * An estimate of the likelihood that the speech recognizer will
1625      * not change its guess about this interim recognition result:
1626      * * If the value is unspecified or 0.0, Dialogflow didn't compute the
1627      *   stability. In particular, Dialogflow will only provide stability for
1628      *   `TRANSCRIPT` results with `is_final = false`.
1629      * * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
1630      *   unstable and 1.0 means completely stable.
1631      * </pre>
1632      *
1633      * <code>float stability = 6;</code>
1634      *
1635      * @return This builder for chaining.
1636      */
1637     public Builder clearStability() {
1638       bitField0_ = (bitField0_ & ~0x00000010);
1639       stability_ = 0F;
1640       onChanged();
1641       return this;
1642     }
1643 
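    /*
     * Editorial note (not generated code): stability only applies to interim
     * (is_final == false) TRANSCRIPT results. A sketch that surfaces interim text
     * only once it is reasonably stable, assuming `result` is in scope; the 0.8f
     * cutoff and the `display` hook are hypothetical.
     *
     *   if (!result.getIsFinal() && result.getStability() >= 0.8f) {
     *     display(result.getTranscript());
     *   }
     */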
1644     private java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo> speechWordInfo_ =
1645         java.util.Collections.emptyList();
1646 
1647     private void ensureSpeechWordInfoIsMutable() {
1648       if (!((bitField0_ & 0x00000020) != 0)) {
1649         speechWordInfo_ =
1650             new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo>(
1651                 speechWordInfo_);
1652         bitField0_ |= 0x00000020;
1653       }
1654     }
1655 
1656     private com.google.protobuf.RepeatedFieldBuilderV3<
1657             com.google.cloud.dialogflow.cx.v3.SpeechWordInfo,
1658             com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder,
1659             com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
1660         speechWordInfoBuilder_;
1661 
1662     /**
1663      *
1664      *
1665      * <pre>
1666      * Word-specific information for the words recognized by Speech in
1667      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1668      * Populated if and only if `message_type` = `TRANSCRIPT` and
1669      * [InputAudioConfig.enable_word_info] is set.
1670      * </pre>
1671      *
1672      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1673      */
1674     public java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo>
1675         getSpeechWordInfoList() {
1676       if (speechWordInfoBuilder_ == null) {
1677         return java.util.Collections.unmodifiableList(speechWordInfo_);
1678       } else {
1679         return speechWordInfoBuilder_.getMessageList();
1680       }
1681     }
1682     /**
1683      *
1684      *
1685      * <pre>
1686      * Word-specific information for the words recognized by Speech in
1687      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1688      * Populated if and only if `message_type` = `TRANSCRIPT` and
1689      * [InputAudioConfig.enable_word_info] is set.
1690      * </pre>
1691      *
1692      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1693      */
1694     public int getSpeechWordInfoCount() {
1695       if (speechWordInfoBuilder_ == null) {
1696         return speechWordInfo_.size();
1697       } else {
1698         return speechWordInfoBuilder_.getCount();
1699       }
1700     }
1701     /**
1702      *
1703      *
1704      * <pre>
1705      * Word-specific information for the words recognized by Speech in
1706      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1707      * Populated if and only if `message_type` = `TRANSCRIPT` and
1708      * [InputAudioConfig.enable_word_info] is set.
1709      * </pre>
1710      *
1711      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1712      */
1713     public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo getSpeechWordInfo(int index) {
1714       if (speechWordInfoBuilder_ == null) {
1715         return speechWordInfo_.get(index);
1716       } else {
1717         return speechWordInfoBuilder_.getMessage(index);
1718       }
1719     }
1720     /**
1721      *
1722      *
1723      * <pre>
1724      * Word-specific information for the words recognized by Speech in
1725      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1726      * Populated if and only if `message_type` = `TRANSCRIPT` and
1727      * [InputAudioConfig.enable_word_info] is set.
1728      * </pre>
1729      *
1730      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1731      */
1732     public Builder setSpeechWordInfo(
1733         int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo value) {
1734       if (speechWordInfoBuilder_ == null) {
1735         if (value == null) {
1736           throw new NullPointerException();
1737         }
1738         ensureSpeechWordInfoIsMutable();
1739         speechWordInfo_.set(index, value);
1740         onChanged();
1741       } else {
1742         speechWordInfoBuilder_.setMessage(index, value);
1743       }
1744       return this;
1745     }
1746     /**
1747      *
1748      *
1749      * <pre>
1750      * Word-specific information for the words recognized by Speech in
1751      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1752      * Populated if and only if `message_type` = `TRANSCRIPT` and
1753      * [InputAudioConfig.enable_word_info] is set.
1754      * </pre>
1755      *
1756      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1757      */
1758     public Builder setSpeechWordInfo(
1759         int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder builderForValue) {
1760       if (speechWordInfoBuilder_ == null) {
1761         ensureSpeechWordInfoIsMutable();
1762         speechWordInfo_.set(index, builderForValue.build());
1763         onChanged();
1764       } else {
1765         speechWordInfoBuilder_.setMessage(index, builderForValue.build());
1766       }
1767       return this;
1768     }
1769     /**
1770      *
1771      *
1772      * <pre>
1773      * Word-specific information for the words recognized by Speech in
1774      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1775      * Populated if and only if `message_type` = `TRANSCRIPT` and
1776      * [InputAudioConfig.enable_word_info] is set.
1777      * </pre>
1778      *
1779      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1780      */
1781     public Builder addSpeechWordInfo(com.google.cloud.dialogflow.cx.v3.SpeechWordInfo value) {
1782       if (speechWordInfoBuilder_ == null) {
1783         if (value == null) {
1784           throw new NullPointerException();
1785         }
1786         ensureSpeechWordInfoIsMutable();
1787         speechWordInfo_.add(value);
1788         onChanged();
1789       } else {
1790         speechWordInfoBuilder_.addMessage(value);
1791       }
1792       return this;
1793     }
1794     /**
1795      *
1796      *
1797      * <pre>
1798      * Word-specific information for the words recognized by Speech in
1799      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1800      * Populated if and only if `message_type` = `TRANSCRIPT` and
1801      * [InputAudioConfig.enable_word_info] is set.
1802      * </pre>
1803      *
1804      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1805      */
1806     public Builder addSpeechWordInfo(
1807         int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo value) {
1808       if (speechWordInfoBuilder_ == null) {
1809         if (value == null) {
1810           throw new NullPointerException();
1811         }
1812         ensureSpeechWordInfoIsMutable();
1813         speechWordInfo_.add(index, value);
1814         onChanged();
1815       } else {
1816         speechWordInfoBuilder_.addMessage(index, value);
1817       }
1818       return this;
1819     }
1820     /**
1821      *
1822      *
1823      * <pre>
1824      * Word-specific information for the words recognized by Speech in
1825      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1826      * Populated if and only if `message_type` = `TRANSCRIPT` and
1827      * [InputAudioConfig.enable_word_info] is set.
1828      * </pre>
1829      *
1830      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1831      */
1832     public Builder addSpeechWordInfo(
1833         com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder builderForValue) {
1834       if (speechWordInfoBuilder_ == null) {
1835         ensureSpeechWordInfoIsMutable();
1836         speechWordInfo_.add(builderForValue.build());
1837         onChanged();
1838       } else {
1839         speechWordInfoBuilder_.addMessage(builderForValue.build());
1840       }
1841       return this;
1842     }
1843     /**
1844      *
1845      *
1846      * <pre>
1847      * Word-specific information for the words recognized by Speech in
1848      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1849      * Populated if and only if `message_type` = `TRANSCRIPT` and
1850      * [InputAudioConfig.enable_word_info] is set.
1851      * </pre>
1852      *
1853      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1854      */
1855     public Builder addSpeechWordInfo(
1856         int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder builderForValue) {
1857       if (speechWordInfoBuilder_ == null) {
1858         ensureSpeechWordInfoIsMutable();
1859         speechWordInfo_.add(index, builderForValue.build());
1860         onChanged();
1861       } else {
1862         speechWordInfoBuilder_.addMessage(index, builderForValue.build());
1863       }
1864       return this;
1865     }
1866     /**
1867      *
1868      *
1869      * <pre>
1870      * Word-specific information for the words recognized by Speech in
1871      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1872      * Populated if and only if `message_type` = `TRANSCRIPT` and
1873      * [InputAudioConfig.enable_word_info] is set.
1874      * </pre>
1875      *
1876      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1877      */
1878     public Builder addAllSpeechWordInfo(
1879         java.lang.Iterable<? extends com.google.cloud.dialogflow.cx.v3.SpeechWordInfo> values) {
1880       if (speechWordInfoBuilder_ == null) {
1881         ensureSpeechWordInfoIsMutable();
1882         com.google.protobuf.AbstractMessageLite.Builder.addAll(values, speechWordInfo_);
1883         onChanged();
1884       } else {
1885         speechWordInfoBuilder_.addAllMessages(values);
1886       }
1887       return this;
1888     }
1889     /**
1890      *
1891      *
1892      * <pre>
1893      * Word-specific information for the words recognized by Speech in
1894      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1895      * Populated if and only if `message_type` = `TRANSCRIPT` and
1896      * [InputAudioConfig.enable_word_info] is set.
1897      * </pre>
1898      *
1899      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1900      */
1901     public Builder clearSpeechWordInfo() {
1902       if (speechWordInfoBuilder_ == null) {
1903         speechWordInfo_ = java.util.Collections.emptyList();
1904         bitField0_ = (bitField0_ & ~0x00000020);
1905         onChanged();
1906       } else {
1907         speechWordInfoBuilder_.clear();
1908       }
1909       return this;
1910     }
1911     /**
1912      *
1913      *
1914      * <pre>
1915      * Word-specific information for the words recognized by Speech in
1916      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1917      * Populated if and only if `message_type` = `TRANSCRIPT` and
1918      * [InputAudioConfig.enable_word_info] is set.
1919      * </pre>
1920      *
1921      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1922      */
1923     public Builder removeSpeechWordInfo(int index) {
1924       if (speechWordInfoBuilder_ == null) {
1925         ensureSpeechWordInfoIsMutable();
1926         speechWordInfo_.remove(index);
1927         onChanged();
1928       } else {
1929         speechWordInfoBuilder_.remove(index);
1930       }
1931       return this;
1932     }
1933     /**
1934      *
1935      *
1936      * <pre>
1937      * Word-specific information for the words recognized by Speech in
1938      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1939      * Populated if and only if `message_type` = `TRANSCRIPT` and
1940      * [InputAudioConfig.enable_word_info] is set.
1941      * </pre>
1942      *
1943      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1944      */
1945     public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder getSpeechWordInfoBuilder(
1946         int index) {
1947       return getSpeechWordInfoFieldBuilder().getBuilder(index);
1948     }
1949     /**
1950      *
1951      *
1952      * <pre>
1953      * Word-specific information for the words recognized by Speech in
1954      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1955      * Populated if and only if `message_type` = `TRANSCRIPT` and
1956      * [InputAudioConfig.enable_word_info] is set.
1957      * </pre>
1958      *
1959      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1960      */
1961     public com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder getSpeechWordInfoOrBuilder(
1962         int index) {
1963       if (speechWordInfoBuilder_ == null) {
1964         return speechWordInfo_.get(index);
1965       } else {
1966         return speechWordInfoBuilder_.getMessageOrBuilder(index);
1967       }
1968     }
1969     /**
1970      *
1971      *
1972      * <pre>
1973      * Word-specific information for the words recognized by Speech in
1974      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1975      * Populated if and only if `message_type` = `TRANSCRIPT` and
1976      * [InputAudioConfig.enable_word_info] is set.
1977      * </pre>
1978      *
1979      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
1980      */
1981     public java.util.List<? extends com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
1982         getSpeechWordInfoOrBuilderList() {
1983       if (speechWordInfoBuilder_ != null) {
1984         return speechWordInfoBuilder_.getMessageOrBuilderList();
1985       } else {
1986         return java.util.Collections.unmodifiableList(speechWordInfo_);
1987       }
1988     }
1989     /**
1990      *
1991      *
1992      * <pre>
1993      * Word-specific information for the words recognized by Speech in
1994      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
1995      * Populated if and only if `message_type` = `TRANSCRIPT` and
1996      * [InputAudioConfig.enable_word_info] is set.
1997      * </pre>
1998      *
1999      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
2000      */
2001     public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder addSpeechWordInfoBuilder() {
2002       return getSpeechWordInfoFieldBuilder()
2003           .addBuilder(com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.getDefaultInstance());
2004     }
2005     /**
2006      *
2007      *
2008      * <pre>
2009      * Word-specific information for the words recognized by Speech in
2010      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
2011      * Populated if and only if `message_type` = `TRANSCRIPT` and
2012      * [InputAudioConfig.enable_word_info] is set.
2013      * </pre>
2014      *
2015      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
2016      */
2017     public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder addSpeechWordInfoBuilder(
2018         int index) {
2019       return getSpeechWordInfoFieldBuilder()
2020           .addBuilder(index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.getDefaultInstance());
2021     }
2022     /**
2023      *
2024      *
2025      * <pre>
2026      * Word-specific information for the words recognized by Speech in
2027      * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript].
2028      * Populated if and only if `message_type` = `TRANSCRIPT` and
2029      * [InputAudioConfig.enable_word_info] is set.
2030      * </pre>
2031      *
2032      * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
2033      */
2034     public java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder>
2035         getSpeechWordInfoBuilderList() {
2036       return getSpeechWordInfoFieldBuilder().getBuilderList();
2037     }
2038 
2039     private com.google.protobuf.RepeatedFieldBuilderV3<
2040             com.google.cloud.dialogflow.cx.v3.SpeechWordInfo,
2041             com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder,
2042             com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
2043         getSpeechWordInfoFieldBuilder() {
2044       if (speechWordInfoBuilder_ == null) {
2045         speechWordInfoBuilder_ =
2046             new com.google.protobuf.RepeatedFieldBuilderV3<
2047                 com.google.cloud.dialogflow.cx.v3.SpeechWordInfo,
2048                 com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder,
2049                 com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>(
2050                 speechWordInfo_,
2051                 ((bitField0_ & 0x00000020) != 0),
2052                 getParentForChildren(),
2053                 isClean());
2054         speechWordInfo_ = null;
2055       }
2056       return speechWordInfoBuilder_;
2057     }
2058 
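    /*
     * Editorial note (not generated code): the repeated field can be populated with
     * prebuilt messages or through the nested builders managed by
     * RepeatedFieldBuilderV3. A sketch, assuming `builder` is in scope and that
     * SpeechWordInfo exposes the usual generated per-word accessors:
     *
     *   builder.addSpeechWordInfo(
     *       com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.getDefaultInstance());
     *   builder.addSpeechWordInfoBuilder();  // appends a default entry and returns its builder
     *   int wordCount = builder.getSpeechWordInfoCount();
     *   for (com.google.cloud.dialogflow.cx.v3.SpeechWordInfo info :
     *       builder.getSpeechWordInfoList()) {
     *     // inspect per-word timing/confidence here
     *   }
     */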
2059     private com.google.protobuf.Duration speechEndOffset_;
2060     private com.google.protobuf.SingleFieldBuilderV3<
2061             com.google.protobuf.Duration,
2062             com.google.protobuf.Duration.Builder,
2063             com.google.protobuf.DurationOrBuilder>
2064         speechEndOffsetBuilder_;
2065     /**
2066      *
2067      *
2068      * <pre>
2069      * Time offset of the end of this Speech recognition result relative to the
2070      * beginning of the audio. Only populated for `message_type` =
2071      * `TRANSCRIPT`.
2072      * </pre>
2073      *
2074      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2075      *
2076      * @return Whether the speechEndOffset field is set.
2077      */
2078     public boolean hasSpeechEndOffset() {
2079       return ((bitField0_ & 0x00000040) != 0);
2080     }
2081     /**
2082      *
2083      *
2084      * <pre>
2085      * Time offset of the end of this Speech recognition result relative to the
2086      * beginning of the audio. Only populated for `message_type` =
2087      * `TRANSCRIPT`.
2088      * </pre>
2089      *
2090      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2091      *
2092      * @return The speechEndOffset.
2093      */
2094     public com.google.protobuf.Duration getSpeechEndOffset() {
2095       if (speechEndOffsetBuilder_ == null) {
2096         return speechEndOffset_ == null
2097             ? com.google.protobuf.Duration.getDefaultInstance()
2098             : speechEndOffset_;
2099       } else {
2100         return speechEndOffsetBuilder_.getMessage();
2101       }
2102     }
2103     /**
2104      *
2105      *
2106      * <pre>
2107      * Time offset of the end of this Speech recognition result relative to the
2108      * beginning of the audio. Only populated for `message_type` =
2109      * `TRANSCRIPT`.
2110      * </pre>
2111      *
2112      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2113      */
2114     public Builder setSpeechEndOffset(com.google.protobuf.Duration value) {
2115       if (speechEndOffsetBuilder_ == null) {
2116         if (value == null) {
2117           throw new NullPointerException();
2118         }
2119         speechEndOffset_ = value;
2120       } else {
2121         speechEndOffsetBuilder_.setMessage(value);
2122       }
2123       bitField0_ |= 0x00000040;
2124       onChanged();
2125       return this;
2126     }
2127     /**
2128      *
2129      *
2130      * <pre>
2131      * Time offset of the end of this Speech recognition result relative to the
2132      * beginning of the audio. Only populated for `message_type` =
2133      * `TRANSCRIPT`.
2134      * </pre>
2135      *
2136      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2137      */
2138     public Builder setSpeechEndOffset(com.google.protobuf.Duration.Builder builderForValue) {
2139       if (speechEndOffsetBuilder_ == null) {
2140         speechEndOffset_ = builderForValue.build();
2141       } else {
2142         speechEndOffsetBuilder_.setMessage(builderForValue.build());
2143       }
2144       bitField0_ |= 0x00000040;
2145       onChanged();
2146       return this;
2147     }
2148     /**
2149      *
2150      *
2151      * <pre>
2152      * Time offset of the end of this Speech recognition result relative to the
2153      * beginning of the audio. Only populated for `message_type` =
2154      * `TRANSCRIPT`.
2155      * </pre>
2156      *
2157      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2158      */
2159     public Builder mergeSpeechEndOffset(com.google.protobuf.Duration value) {
2160       if (speechEndOffsetBuilder_ == null) {
2161         if (((bitField0_ & 0x00000040) != 0)
2162             && speechEndOffset_ != null
2163             && speechEndOffset_ != com.google.protobuf.Duration.getDefaultInstance()) {
2164           getSpeechEndOffsetBuilder().mergeFrom(value);
2165         } else {
2166           speechEndOffset_ = value;
2167         }
2168       } else {
2169         speechEndOffsetBuilder_.mergeFrom(value);
2170       }
2171       bitField0_ |= 0x00000040;
2172       onChanged();
2173       return this;
2174     }
2175     /**
2176      *
2177      *
2178      * <pre>
2179      * Time offset of the end of this Speech recognition result relative to the
2180      * beginning of the audio. Only populated for `message_type` =
2181      * `TRANSCRIPT`.
2182      * </pre>
2183      *
2184      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2185      */
2186     public Builder clearSpeechEndOffset() {
2187       bitField0_ = (bitField0_ & ~0x00000040);
2188       speechEndOffset_ = null;
2189       if (speechEndOffsetBuilder_ != null) {
2190         speechEndOffsetBuilder_.dispose();
2191         speechEndOffsetBuilder_ = null;
2192       }
2193       onChanged();
2194       return this;
2195     }
2196     /**
2197      *
2198      *
2199      * <pre>
2200      * Time offset of the end of this Speech recognition result relative to the
2201      * beginning of the audio. Only populated for `message_type` =
2202      * `TRANSCRIPT`.
2203      * </pre>
2204      *
2205      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2206      */
2207     public com.google.protobuf.Duration.Builder getSpeechEndOffsetBuilder() {
2208       bitField0_ |= 0x00000040;
2209       onChanged();
2210       return getSpeechEndOffsetFieldBuilder().getBuilder();
2211     }
2212     /**
2213      *
2214      *
2215      * <pre>
2216      * Time offset of the end of this Speech recognition result relative to the
2217      * beginning of the audio. Only populated for `message_type` =
2218      * `TRANSCRIPT`.
2219      * </pre>
2220      *
2221      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2222      */
2223     public com.google.protobuf.DurationOrBuilder getSpeechEndOffsetOrBuilder() {
2224       if (speechEndOffsetBuilder_ != null) {
2225         return speechEndOffsetBuilder_.getMessageOrBuilder();
2226       } else {
2227         return speechEndOffset_ == null
2228             ? com.google.protobuf.Duration.getDefaultInstance()
2229             : speechEndOffset_;
2230       }
2231     }
2232     /**
2233      *
2234      *
2235      * <pre>
2236      * Time offset of the end of this Speech recognition result relative to the
2237      * beginning of the audio. Only populated for `message_type` =
2238      * `TRANSCRIPT`.
2239      * </pre>
2240      *
2241      * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
2242      */
2243     private com.google.protobuf.SingleFieldBuilderV3<
2244             com.google.protobuf.Duration,
2245             com.google.protobuf.Duration.Builder,
2246             com.google.protobuf.DurationOrBuilder>
2247         getSpeechEndOffsetFieldBuilder() {
2248       if (speechEndOffsetBuilder_ == null) {
2249         speechEndOffsetBuilder_ =
2250             new com.google.protobuf.SingleFieldBuilderV3<
2251                 com.google.protobuf.Duration,
2252                 com.google.protobuf.Duration.Builder,
2253                 com.google.protobuf.DurationOrBuilder>(
2254                 getSpeechEndOffset(), getParentForChildren(), isClean());
2255         speechEndOffset_ = null;
2256       }
2257       return speechEndOffsetBuilder_;
2258     }
2259 
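    /*
     * Editorial note (not generated code): speech_end_offset is a well-known
     * google.protobuf.Duration message, so presence should be checked before
     * reading. A sketch, assuming `builder` and `result` are in scope:
     *
     *   builder.setSpeechEndOffset(
     *       com.google.protobuf.Duration.newBuilder().setSeconds(2).setNanos(500_000_000).build());
     *   if (result.hasSpeechEndOffset()) {
     *     com.google.protobuf.Duration offset = result.getSpeechEndOffset();  // 2.5s in this sketch
     *   }
     */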
2260     private java.lang.Object languageCode_ = "";
2261     /**
2262      *
2263      *
2264      * <pre>
2265      * Detected language code for the transcript.
2266      * </pre>
2267      *
2268      * <code>string language_code = 10;</code>
2269      *
2270      * @return The languageCode.
2271      */
2272     public java.lang.String getLanguageCode() {
2273       java.lang.Object ref = languageCode_;
2274       if (!(ref instanceof java.lang.String)) {
2275         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
2276         java.lang.String s = bs.toStringUtf8();
2277         languageCode_ = s;
2278         return s;
2279       } else {
2280         return (java.lang.String) ref;
2281       }
2282     }
2283     /**
2284      *
2285      *
2286      * <pre>
2287      * Detected language code for the transcript.
2288      * </pre>
2289      *
2290      * <code>string language_code = 10;</code>
2291      *
2292      * @return The bytes for languageCode.
2293      */
2294     public com.google.protobuf.ByteString getLanguageCodeBytes() {
2295       java.lang.Object ref = languageCode_;
2296       if (ref instanceof String) {
2297         com.google.protobuf.ByteString b =
2298             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
2299         languageCode_ = b;
2300         return b;
2301       } else {
2302         return (com.google.protobuf.ByteString) ref;
2303       }
2304     }
2305     /**
2306      *
2307      *
2308      * <pre>
2309      * Detected language code for the transcript.
2310      * </pre>
2311      *
2312      * <code>string language_code = 10;</code>
2313      *
2314      * @param value The languageCode to set.
2315      * @return This builder for chaining.
2316      */
2317     public Builder setLanguageCode(java.lang.String value) {
2318       if (value == null) {
2319         throw new NullPointerException();
2320       }
2321       languageCode_ = value;
2322       bitField0_ |= 0x00000080;
2323       onChanged();
2324       return this;
2325     }
2326     /**
2327      *
2328      *
2329      * <pre>
2330      * Detected language code for the transcript.
2331      * </pre>
2332      *
2333      * <code>string language_code = 10;</code>
2334      *
2335      * @return This builder for chaining.
2336      */
2337     public Builder clearLanguageCode() {
2338       languageCode_ = getDefaultInstance().getLanguageCode();
2339       bitField0_ = (bitField0_ & ~0x00000080);
2340       onChanged();
2341       return this;
2342     }
2343     /**
2344      *
2345      *
2346      * <pre>
2347      * Detected language code for the transcript.
2348      * </pre>
2349      *
2350      * <code>string language_code = 10;</code>
2351      *
2352      * @param value The bytes for languageCode to set.
2353      * @return This builder for chaining.
2354      */
2355     public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
2356       if (value == null) {
2357         throw new NullPointerException();
2358       }
2359       checkByteStringIsUtf8(value);
2360       languageCode_ = value;
2361       bitField0_ |= 0x00000080;
2362       onChanged();
2363       return this;
2364     }
2365 
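    /*
     * Editorial note (not generated code): a sketch of round-tripping the detected
     * language code, assuming `builder` is in scope. Values are language tags such
     * as "en-US"; the literal here is illustrative only.
     *
     *   builder.setLanguageCode("en-US");
     *   String lang = builder.getLanguageCode();  // "en-US"
     */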
2366     @java.lang.Override
2367     public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
2368       return super.setUnknownFields(unknownFields);
2369     }
2370 
2371     @java.lang.Override
2372     public final Builder mergeUnknownFields(
2373         final com.google.protobuf.UnknownFieldSet unknownFields) {
2374       return super.mergeUnknownFields(unknownFields);
2375     }
2376 
2377     // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
2378   }
2379 
2380   // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
2381   private static final com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult
2382       DEFAULT_INSTANCE;
2383 
2384   static {
2385     DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult();
2386   }
2387 
2388   public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult getDefaultInstance() {
2389     return DEFAULT_INSTANCE;
2390   }
2391 
2392   private static final com.google.protobuf.Parser<StreamingRecognitionResult> PARSER =
2393       new com.google.protobuf.AbstractParser<StreamingRecognitionResult>() {
2394         @java.lang.Override
2395         public StreamingRecognitionResult parsePartialFrom(
2396             com.google.protobuf.CodedInputStream input,
2397             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2398             throws com.google.protobuf.InvalidProtocolBufferException {
2399           Builder builder = newBuilder();
2400           try {
2401             builder.mergeFrom(input, extensionRegistry);
2402           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2403             throw e.setUnfinishedMessage(builder.buildPartial());
2404           } catch (com.google.protobuf.UninitializedMessageException e) {
2405             throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
2406           } catch (java.io.IOException e) {
2407             throw new com.google.protobuf.InvalidProtocolBufferException(e)
2408                 .setUnfinishedMessage(builder.buildPartial());
2409           }
2410           return builder.buildPartial();
2411         }
2412       };
2413 
2414   public static com.google.protobuf.Parser<StreamingRecognitionResult> parser() {
2415     return PARSER;
2416   }
2417 
2418   @java.lang.Override
2419   public com.google.protobuf.Parser<StreamingRecognitionResult> getParserForType() {
2420     return PARSER;
2421   }
2422 
2423   @java.lang.Override
2424   public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult getDefaultInstanceForType() {
2425     return DEFAULT_INSTANCE;
2426   }
2427 }
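/*
 * Editorial note (not generated code): a sketch of deserializing a message with
 * the generated parser, assuming `bytes` holds a serialized
 * StreamingRecognitionResult. getDefaultInstance() returns the shared immutable
 * instance with every field at its default value.
 *
 *   com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parsed =
 *       com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.parser().parseFrom(bytes);
 *
 * parseFrom throws com.google.protobuf.InvalidProtocolBufferException on a
 * malformed payload, as implemented by the AbstractParser above.
 */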
2428