1 /* 2 * Copyright 2020 Google LLC 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * https://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 // Generated by the protocol buffer compiler. DO NOT EDIT! 17 // source: google/cloud/speech/v1/cloud_speech.proto 18 19 package com.google.cloud.speech.v1; 20 21 /** 22 * 23 * 24 * <pre> 25 * Description of audio data to be recognized. 26 * </pre> 27 * 28 * Protobuf type {@code google.cloud.speech.v1.RecognitionMetadata} 29 */ 30 @java.lang.Deprecated 31 public final class RecognitionMetadata extends com.google.protobuf.GeneratedMessageV3 32 implements 33 // @@protoc_insertion_point(message_implements:google.cloud.speech.v1.RecognitionMetadata) 34 RecognitionMetadataOrBuilder { 35 private static final long serialVersionUID = 0L; 36 // Use RecognitionMetadata.newBuilder() to construct. RecognitionMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)37 private RecognitionMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { 38 super(builder); 39 } 40 RecognitionMetadata()41 private RecognitionMetadata() { 42 interactionType_ = 0; 43 microphoneDistance_ = 0; 44 originalMediaType_ = 0; 45 recordingDeviceType_ = 0; 46 recordingDeviceName_ = ""; 47 originalMimeType_ = ""; 48 audioTopic_ = ""; 49 } 50 51 @java.lang.Override 52 @SuppressWarnings({"unused"}) newInstance(UnusedPrivateParameter unused)53 protected java.lang.Object newInstance(UnusedPrivateParameter unused) { 54 return new RecognitionMetadata(); 55 } 56 57 @java.lang.Override getUnknownFields()58 public final com.google.protobuf.UnknownFieldSet getUnknownFields() { 59 return this.unknownFields; 60 } 61 getDescriptor()62 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 63 return com.google.cloud.speech.v1.SpeechProto 64 .internal_static_google_cloud_speech_v1_RecognitionMetadata_descriptor; 65 } 66 67 @java.lang.Override 68 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()69 internalGetFieldAccessorTable() { 70 return com.google.cloud.speech.v1.SpeechProto 71 .internal_static_google_cloud_speech_v1_RecognitionMetadata_fieldAccessorTable 72 .ensureFieldAccessorsInitialized( 73 com.google.cloud.speech.v1.RecognitionMetadata.class, 74 com.google.cloud.speech.v1.RecognitionMetadata.Builder.class); 75 } 76 77 /** 78 * 79 * 80 * <pre> 81 * Use case categories that the audio recognition request can be described 82 * by. 83 * </pre> 84 * 85 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.InteractionType} 86 */ 87 public enum InteractionType implements com.google.protobuf.ProtocolMessageEnum { 88 /** 89 * 90 * 91 * <pre> 92 * Use case is either unknown or is something other than one of the other 93 * values below. 94 * </pre> 95 * 96 * <code>INTERACTION_TYPE_UNSPECIFIED = 0;</code> 97 */ 98 INTERACTION_TYPE_UNSPECIFIED(0), 99 /** 100 * 101 * 102 * <pre> 103 * Multiple people in a conversation or discussion. For example in a 104 * meeting with two or more people actively participating. 
Typically 105 * all the primary people speaking would be in the same room (if not, 106 * see PHONE_CALL) 107 * </pre> 108 * 109 * <code>DISCUSSION = 1;</code> 110 */ 111 DISCUSSION(1), 112 /** 113 * 114 * 115 * <pre> 116 * One or more persons lecturing or presenting to others, mostly 117 * uninterrupted. 118 * </pre> 119 * 120 * <code>PRESENTATION = 2;</code> 121 */ 122 PRESENTATION(2), 123 /** 124 * 125 * 126 * <pre> 127 * A phone-call or video-conference in which two or more people, who are 128 * not in the same room, are actively participating. 129 * </pre> 130 * 131 * <code>PHONE_CALL = 3;</code> 132 */ 133 PHONE_CALL(3), 134 /** 135 * 136 * 137 * <pre> 138 * A recorded message intended for another person to listen to. 139 * </pre> 140 * 141 * <code>VOICEMAIL = 4;</code> 142 */ 143 VOICEMAIL(4), 144 /** 145 * 146 * 147 * <pre> 148 * Professionally produced audio (eg. TV Show, Podcast). 149 * </pre> 150 * 151 * <code>PROFESSIONALLY_PRODUCED = 5;</code> 152 */ 153 PROFESSIONALLY_PRODUCED(5), 154 /** 155 * 156 * 157 * <pre> 158 * Transcribe spoken questions and queries into text. 159 * </pre> 160 * 161 * <code>VOICE_SEARCH = 6;</code> 162 */ 163 VOICE_SEARCH(6), 164 /** 165 * 166 * 167 * <pre> 168 * Transcribe voice commands, such as for controlling a device. 169 * </pre> 170 * 171 * <code>VOICE_COMMAND = 7;</code> 172 */ 173 VOICE_COMMAND(7), 174 /** 175 * 176 * 177 * <pre> 178 * Transcribe speech to text to create a written document, such as a 179 * text-message, email or report. 180 * </pre> 181 * 182 * <code>DICTATION = 8;</code> 183 */ 184 DICTATION(8), 185 UNRECOGNIZED(-1), 186 ; 187 188 /** 189 * 190 * 191 * <pre> 192 * Use case is either unknown or is something other than one of the other 193 * values below. 194 * </pre> 195 * 196 * <code>INTERACTION_TYPE_UNSPECIFIED = 0;</code> 197 */ 198 public static final int INTERACTION_TYPE_UNSPECIFIED_VALUE = 0; 199 /** 200 * 201 * 202 * <pre> 203 * Multiple people in a conversation or discussion. For example in a 204 * meeting with two or more people actively participating. Typically 205 * all the primary people speaking would be in the same room (if not, 206 * see PHONE_CALL) 207 * </pre> 208 * 209 * <code>DISCUSSION = 1;</code> 210 */ 211 public static final int DISCUSSION_VALUE = 1; 212 /** 213 * 214 * 215 * <pre> 216 * One or more persons lecturing or presenting to others, mostly 217 * uninterrupted. 218 * </pre> 219 * 220 * <code>PRESENTATION = 2;</code> 221 */ 222 public static final int PRESENTATION_VALUE = 2; 223 /** 224 * 225 * 226 * <pre> 227 * A phone-call or video-conference in which two or more people, who are 228 * not in the same room, are actively participating. 229 * </pre> 230 * 231 * <code>PHONE_CALL = 3;</code> 232 */ 233 public static final int PHONE_CALL_VALUE = 3; 234 /** 235 * 236 * 237 * <pre> 238 * A recorded message intended for another person to listen to. 239 * </pre> 240 * 241 * <code>VOICEMAIL = 4;</code> 242 */ 243 public static final int VOICEMAIL_VALUE = 4; 244 /** 245 * 246 * 247 * <pre> 248 * Professionally produced audio (eg. TV Show, Podcast). 249 * </pre> 250 * 251 * <code>PROFESSIONALLY_PRODUCED = 5;</code> 252 */ 253 public static final int PROFESSIONALLY_PRODUCED_VALUE = 5; 254 /** 255 * 256 * 257 * <pre> 258 * Transcribe spoken questions and queries into text. 259 * </pre> 260 * 261 * <code>VOICE_SEARCH = 6;</code> 262 */ 263 public static final int VOICE_SEARCH_VALUE = 6; 264 /** 265 * 266 * 267 * <pre> 268 * Transcribe voice commands, such as for controlling a device. 
269 * </pre> 270 * 271 * <code>VOICE_COMMAND = 7;</code> 272 */ 273 public static final int VOICE_COMMAND_VALUE = 7; 274 /** 275 * 276 * 277 * <pre> 278 * Transcribe speech to text to create a written document, such as a 279 * text-message, email or report. 280 * </pre> 281 * 282 * <code>DICTATION = 8;</code> 283 */ 284 public static final int DICTATION_VALUE = 8; 285 getNumber()286 public final int getNumber() { 287 if (this == UNRECOGNIZED) { 288 throw new java.lang.IllegalArgumentException( 289 "Can't get the number of an unknown enum value."); 290 } 291 return value; 292 } 293 294 /** 295 * @param value The numeric wire value of the corresponding enum entry. 296 * @return The enum associated with the given numeric wire value. 297 * @deprecated Use {@link #forNumber(int)} instead. 298 */ 299 @java.lang.Deprecated valueOf(int value)300 public static InteractionType valueOf(int value) { 301 return forNumber(value); 302 } 303 304 /** 305 * @param value The numeric wire value of the corresponding enum entry. 306 * @return The enum associated with the given numeric wire value. 307 */ forNumber(int value)308 public static InteractionType forNumber(int value) { 309 switch (value) { 310 case 0: 311 return INTERACTION_TYPE_UNSPECIFIED; 312 case 1: 313 return DISCUSSION; 314 case 2: 315 return PRESENTATION; 316 case 3: 317 return PHONE_CALL; 318 case 4: 319 return VOICEMAIL; 320 case 5: 321 return PROFESSIONALLY_PRODUCED; 322 case 6: 323 return VOICE_SEARCH; 324 case 7: 325 return VOICE_COMMAND; 326 case 8: 327 return DICTATION; 328 default: 329 return null; 330 } 331 } 332 internalGetValueMap()333 public static com.google.protobuf.Internal.EnumLiteMap<InteractionType> internalGetValueMap() { 334 return internalValueMap; 335 } 336 337 private static final com.google.protobuf.Internal.EnumLiteMap<InteractionType> 338 internalValueMap = 339 new com.google.protobuf.Internal.EnumLiteMap<InteractionType>() { 340 public InteractionType findValueByNumber(int number) { 341 return InteractionType.forNumber(number); 342 } 343 }; 344 getValueDescriptor()345 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { 346 if (this == UNRECOGNIZED) { 347 throw new java.lang.IllegalStateException( 348 "Can't get the descriptor of an unrecognized enum value."); 349 } 350 return getDescriptor().getValues().get(ordinal()); 351 } 352 getDescriptorForType()353 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { 354 return getDescriptor(); 355 } 356 getDescriptor()357 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { 358 return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(0); 359 } 360 361 private static final InteractionType[] VALUES = values(); 362 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)363 public static InteractionType valueOf( 364 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 365 if (desc.getType() != getDescriptor()) { 366 throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); 367 } 368 if (desc.getIndex() == -1) { 369 return UNRECOGNIZED; 370 } 371 return VALUES[desc.getIndex()]; 372 } 373 374 private final int value; 375 InteractionType(int value)376 private InteractionType(int value) { 377 this.value = value; 378 } 379 380 // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.InteractionType) 381 } 382 383 /** 384 * 385 * 386 * <pre> 387 * Enumerates the types of capture 
settings describing an audio file. 388 * </pre> 389 * 390 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance} 391 */ 392 public enum MicrophoneDistance implements com.google.protobuf.ProtocolMessageEnum { 393 /** 394 * 395 * 396 * <pre> 397 * Audio type is not known. 398 * </pre> 399 * 400 * <code>MICROPHONE_DISTANCE_UNSPECIFIED = 0;</code> 401 */ 402 MICROPHONE_DISTANCE_UNSPECIFIED(0), 403 /** 404 * 405 * 406 * <pre> 407 * The audio was captured from a closely placed microphone. E.g. phone, 408 * dictaphone, or handheld microphone. Generally if the speaker is within 409 * 1 meter of the microphone. 410 * </pre> 411 * 412 * <code>NEARFIELD = 1;</code> 413 */ 414 NEARFIELD(1), 415 /** 416 * 417 * 418 * <pre> 419 * The speaker is within 3 meters of the microphone. 420 * </pre> 421 * 422 * <code>MIDFIELD = 2;</code> 423 */ 424 MIDFIELD(2), 425 /** 426 * 427 * 428 * <pre> 429 * The speaker is more than 3 meters away from the microphone. 430 * </pre> 431 * 432 * <code>FARFIELD = 3;</code> 433 */ 434 FARFIELD(3), 435 UNRECOGNIZED(-1), 436 ; 437 438 /** 439 * 440 * 441 * <pre> 442 * Audio type is not known. 443 * </pre> 444 * 445 * <code>MICROPHONE_DISTANCE_UNSPECIFIED = 0;</code> 446 */ 447 public static final int MICROPHONE_DISTANCE_UNSPECIFIED_VALUE = 0; 448 /** 449 * 450 * 451 * <pre> 452 * The audio was captured from a closely placed microphone. E.g. phone, 453 * dictaphone, or handheld microphone. Generally if the speaker is within 454 * 1 meter of the microphone. 455 * </pre> 456 * 457 * <code>NEARFIELD = 1;</code> 458 */ 459 public static final int NEARFIELD_VALUE = 1; 460 /** 461 * 462 * 463 * <pre> 464 * The speaker is within 3 meters of the microphone. 465 * </pre> 466 * 467 * <code>MIDFIELD = 2;</code> 468 */ 469 public static final int MIDFIELD_VALUE = 2; 470 /** 471 * 472 * 473 * <pre> 474 * The speaker is more than 3 meters away from the microphone. 475 * </pre> 476 * 477 * <code>FARFIELD = 3;</code> 478 */ 479 public static final int FARFIELD_VALUE = 3; 480 getNumber()481 public final int getNumber() { 482 if (this == UNRECOGNIZED) { 483 throw new java.lang.IllegalArgumentException( 484 "Can't get the number of an unknown enum value."); 485 } 486 return value; 487 } 488 489 /** 490 * @param value The numeric wire value of the corresponding enum entry. 491 * @return The enum associated with the given numeric wire value. 492 * @deprecated Use {@link #forNumber(int)} instead. 493 */ 494 @java.lang.Deprecated valueOf(int value)495 public static MicrophoneDistance valueOf(int value) { 496 return forNumber(value); 497 } 498 499 /** 500 * @param value The numeric wire value of the corresponding enum entry. 501 * @return The enum associated with the given numeric wire value.
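 * <p>Illustrative sketch (caller-side code, not part of the generated output): {@code forNumber}
 * returns {@code null} when the number has no matching constant, and the message accessors such as
 * {@code getMicrophoneDistance()} translate that {@code null} into {@code UNRECOGNIZED}.
 * <pre>
 * MicrophoneDistance.forNumber(2);   // MIDFIELD
 * MicrophoneDistance.forNumber(99);  // null, not UNRECOGNIZED
 * </pre>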
502 */ forNumber(int value)503 public static MicrophoneDistance forNumber(int value) { 504 switch (value) { 505 case 0: 506 return MICROPHONE_DISTANCE_UNSPECIFIED; 507 case 1: 508 return NEARFIELD; 509 case 2: 510 return MIDFIELD; 511 case 3: 512 return FARFIELD; 513 default: 514 return null; 515 } 516 } 517 518 public static com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance> internalGetValueMap()519 internalGetValueMap() { 520 return internalValueMap; 521 } 522 523 private static final com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance> 524 internalValueMap = 525 new com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>() { 526 public MicrophoneDistance findValueByNumber(int number) { 527 return MicrophoneDistance.forNumber(number); 528 } 529 }; 530 getValueDescriptor()531 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { 532 if (this == UNRECOGNIZED) { 533 throw new java.lang.IllegalStateException( 534 "Can't get the descriptor of an unrecognized enum value."); 535 } 536 return getDescriptor().getValues().get(ordinal()); 537 } 538 getDescriptorForType()539 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { 540 return getDescriptor(); 541 } 542 getDescriptor()543 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { 544 return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(1); 545 } 546 547 private static final MicrophoneDistance[] VALUES = values(); 548 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)549 public static MicrophoneDistance valueOf( 550 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 551 if (desc.getType() != getDescriptor()) { 552 throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); 553 } 554 if (desc.getIndex() == -1) { 555 return UNRECOGNIZED; 556 } 557 return VALUES[desc.getIndex()]; 558 } 559 560 private final int value; 561 MicrophoneDistance(int value)562 private MicrophoneDistance(int value) { 563 this.value = value; 564 } 565 566 // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance) 567 } 568 569 /** 570 * 571 * 572 * <pre> 573 * The original media the speech was recorded on. 574 * </pre> 575 * 576 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType} 577 */ 578 public enum OriginalMediaType implements com.google.protobuf.ProtocolMessageEnum { 579 /** 580 * 581 * 582 * <pre> 583 * Unknown original media type. 584 * </pre> 585 * 586 * <code>ORIGINAL_MEDIA_TYPE_UNSPECIFIED = 0;</code> 587 */ 588 ORIGINAL_MEDIA_TYPE_UNSPECIFIED(0), 589 /** 590 * 591 * 592 * <pre> 593 * The speech data is an audio recording. 594 * </pre> 595 * 596 * <code>AUDIO = 1;</code> 597 */ 598 AUDIO(1), 599 /** 600 * 601 * 602 * <pre> 603 * The speech data originally recorded on a video. 604 * </pre> 605 * 606 * <code>VIDEO = 2;</code> 607 */ 608 VIDEO(2), 609 UNRECOGNIZED(-1), 610 ; 611 612 /** 613 * 614 * 615 * <pre> 616 * Unknown original media type. 617 * </pre> 618 * 619 * <code>ORIGINAL_MEDIA_TYPE_UNSPECIFIED = 0;</code> 620 */ 621 public static final int ORIGINAL_MEDIA_TYPE_UNSPECIFIED_VALUE = 0; 622 /** 623 * 624 * 625 * <pre> 626 * The speech data is an audio recording. 627 * </pre> 628 * 629 * <code>AUDIO = 1;</code> 630 */ 631 public static final int AUDIO_VALUE = 1; 632 /** 633 * 634 * 635 * <pre> 636 * The speech data originally recorded on a video. 
637 * </pre> 638 * 639 * <code>VIDEO = 2;</code> 640 */ 641 public static final int VIDEO_VALUE = 2; 642 getNumber()643 public final int getNumber() { 644 if (this == UNRECOGNIZED) { 645 throw new java.lang.IllegalArgumentException( 646 "Can't get the number of an unknown enum value."); 647 } 648 return value; 649 } 650 651 /** 652 * @param value The numeric wire value of the corresponding enum entry. 653 * @return The enum associated with the given numeric wire value. 654 * @deprecated Use {@link #forNumber(int)} instead. 655 */ 656 @java.lang.Deprecated valueOf(int value)657 public static OriginalMediaType valueOf(int value) { 658 return forNumber(value); 659 } 660 661 /** 662 * @param value The numeric wire value of the corresponding enum entry. 663 * @return The enum associated with the given numeric wire value. 664 */ forNumber(int value)665 public static OriginalMediaType forNumber(int value) { 666 switch (value) { 667 case 0: 668 return ORIGINAL_MEDIA_TYPE_UNSPECIFIED; 669 case 1: 670 return AUDIO; 671 case 2: 672 return VIDEO; 673 default: 674 return null; 675 } 676 } 677 678 public static com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType> internalGetValueMap()679 internalGetValueMap() { 680 return internalValueMap; 681 } 682 683 private static final com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType> 684 internalValueMap = 685 new com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>() { 686 public OriginalMediaType findValueByNumber(int number) { 687 return OriginalMediaType.forNumber(number); 688 } 689 }; 690 getValueDescriptor()691 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { 692 if (this == UNRECOGNIZED) { 693 throw new java.lang.IllegalStateException( 694 "Can't get the descriptor of an unrecognized enum value."); 695 } 696 return getDescriptor().getValues().get(ordinal()); 697 } 698 getDescriptorForType()699 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { 700 return getDescriptor(); 701 } 702 getDescriptor()703 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { 704 return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(2); 705 } 706 707 private static final OriginalMediaType[] VALUES = values(); 708 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)709 public static OriginalMediaType valueOf( 710 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 711 if (desc.getType() != getDescriptor()) { 712 throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); 713 } 714 if (desc.getIndex() == -1) { 715 return UNRECOGNIZED; 716 } 717 return VALUES[desc.getIndex()]; 718 } 719 720 private final int value; 721 OriginalMediaType(int value)722 private OriginalMediaType(int value) { 723 this.value = value; 724 } 725 726 // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType) 727 } 728 729 /** 730 * 731 * 732 * <pre> 733 * The type of device the speech was recorded with. 734 * </pre> 735 * 736 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType} 737 */ 738 public enum RecordingDeviceType implements com.google.protobuf.ProtocolMessageEnum { 739 /** 740 * 741 * 742 * <pre> 743 * The recording device is unknown. 
744 * </pre> 745 * 746 * <code>RECORDING_DEVICE_TYPE_UNSPECIFIED = 0;</code> 747 */ 748 RECORDING_DEVICE_TYPE_UNSPECIFIED(0), 749 /** 750 * 751 * 752 * <pre> 753 * Speech was recorded on a smartphone. 754 * </pre> 755 * 756 * <code>SMARTPHONE = 1;</code> 757 */ 758 SMARTPHONE(1), 759 /** 760 * 761 * 762 * <pre> 763 * Speech was recorded using a personal computer or tablet. 764 * </pre> 765 * 766 * <code>PC = 2;</code> 767 */ 768 PC(2), 769 /** 770 * 771 * 772 * <pre> 773 * Speech was recorded over a phone line. 774 * </pre> 775 * 776 * <code>PHONE_LINE = 3;</code> 777 */ 778 PHONE_LINE(3), 779 /** 780 * 781 * 782 * <pre> 783 * Speech was recorded in a vehicle. 784 * </pre> 785 * 786 * <code>VEHICLE = 4;</code> 787 */ 788 VEHICLE(4), 789 /** 790 * 791 * 792 * <pre> 793 * Speech was recorded outdoors. 794 * </pre> 795 * 796 * <code>OTHER_OUTDOOR_DEVICE = 5;</code> 797 */ 798 OTHER_OUTDOOR_DEVICE(5), 799 /** 800 * 801 * 802 * <pre> 803 * Speech was recorded indoors. 804 * </pre> 805 * 806 * <code>OTHER_INDOOR_DEVICE = 6;</code> 807 */ 808 OTHER_INDOOR_DEVICE(6), 809 UNRECOGNIZED(-1), 810 ; 811 812 /** 813 * 814 * 815 * <pre> 816 * The recording device is unknown. 817 * </pre> 818 * 819 * <code>RECORDING_DEVICE_TYPE_UNSPECIFIED = 0;</code> 820 */ 821 public static final int RECORDING_DEVICE_TYPE_UNSPECIFIED_VALUE = 0; 822 /** 823 * 824 * 825 * <pre> 826 * Speech was recorded on a smartphone. 827 * </pre> 828 * 829 * <code>SMARTPHONE = 1;</code> 830 */ 831 public static final int SMARTPHONE_VALUE = 1; 832 /** 833 * 834 * 835 * <pre> 836 * Speech was recorded using a personal computer or tablet. 837 * </pre> 838 * 839 * <code>PC = 2;</code> 840 */ 841 public static final int PC_VALUE = 2; 842 /** 843 * 844 * 845 * <pre> 846 * Speech was recorded over a phone line. 847 * </pre> 848 * 849 * <code>PHONE_LINE = 3;</code> 850 */ 851 public static final int PHONE_LINE_VALUE = 3; 852 /** 853 * 854 * 855 * <pre> 856 * Speech was recorded in a vehicle. 857 * </pre> 858 * 859 * <code>VEHICLE = 4;</code> 860 */ 861 public static final int VEHICLE_VALUE = 4; 862 /** 863 * 864 * 865 * <pre> 866 * Speech was recorded outdoors. 867 * </pre> 868 * 869 * <code>OTHER_OUTDOOR_DEVICE = 5;</code> 870 */ 871 public static final int OTHER_OUTDOOR_DEVICE_VALUE = 5; 872 /** 873 * 874 * 875 * <pre> 876 * Speech was recorded indoors. 877 * </pre> 878 * 879 * <code>OTHER_INDOOR_DEVICE = 6;</code> 880 */ 881 public static final int OTHER_INDOOR_DEVICE_VALUE = 6; 882 getNumber()883 public final int getNumber() { 884 if (this == UNRECOGNIZED) { 885 throw new java.lang.IllegalArgumentException( 886 "Can't get the number of an unknown enum value."); 887 } 888 return value; 889 } 890 891 /** 892 * @param value The numeric wire value of the corresponding enum entry. 893 * @return The enum associated with the given numeric wire value. 894 * @deprecated Use {@link #forNumber(int)} instead. 895 */ 896 @java.lang.Deprecated valueOf(int value)897 public static RecordingDeviceType valueOf(int value) { 898 return forNumber(value); 899 } 900 901 /** 902 * @param value The numeric wire value of the corresponding enum entry. 903 * @return The enum associated with the given numeric wire value. 
904 */ forNumber(int value)905 public static RecordingDeviceType forNumber(int value) { 906 switch (value) { 907 case 0: 908 return RECORDING_DEVICE_TYPE_UNSPECIFIED; 909 case 1: 910 return SMARTPHONE; 911 case 2: 912 return PC; 913 case 3: 914 return PHONE_LINE; 915 case 4: 916 return VEHICLE; 917 case 5: 918 return OTHER_OUTDOOR_DEVICE; 919 case 6: 920 return OTHER_INDOOR_DEVICE; 921 default: 922 return null; 923 } 924 } 925 926 public static com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType> internalGetValueMap()927 internalGetValueMap() { 928 return internalValueMap; 929 } 930 931 private static final com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType> 932 internalValueMap = 933 new com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>() { 934 public RecordingDeviceType findValueByNumber(int number) { 935 return RecordingDeviceType.forNumber(number); 936 } 937 }; 938 getValueDescriptor()939 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { 940 if (this == UNRECOGNIZED) { 941 throw new java.lang.IllegalStateException( 942 "Can't get the descriptor of an unrecognized enum value."); 943 } 944 return getDescriptor().getValues().get(ordinal()); 945 } 946 getDescriptorForType()947 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { 948 return getDescriptor(); 949 } 950 getDescriptor()951 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { 952 return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(3); 953 } 954 955 private static final RecordingDeviceType[] VALUES = values(); 956 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)957 public static RecordingDeviceType valueOf( 958 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 959 if (desc.getType() != getDescriptor()) { 960 throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); 961 } 962 if (desc.getIndex() == -1) { 963 return UNRECOGNIZED; 964 } 965 return VALUES[desc.getIndex()]; 966 } 967 968 private final int value; 969 RecordingDeviceType(int value)970 private RecordingDeviceType(int value) { 971 this.value = value; 972 } 973 974 // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType) 975 } 976 977 public static final int INTERACTION_TYPE_FIELD_NUMBER = 1; 978 private int interactionType_ = 0; 979 /** 980 * 981 * 982 * <pre> 983 * The use case most closely describing the audio content to be recognized. 984 * </pre> 985 * 986 * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code> 987 * 988 * @return The enum numeric value on the wire for interactionType. 989 */ 990 @java.lang.Override getInteractionTypeValue()991 public int getInteractionTypeValue() { 992 return interactionType_; 993 } 994 /** 995 * 996 * 997 * <pre> 998 * The use case most closely describing the audio content to be recognized. 999 * </pre> 1000 * 1001 * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code> 1002 * 1003 * @return The interactionType. 1004 */ 1005 @java.lang.Override getInteractionType()1006 public com.google.cloud.speech.v1.RecognitionMetadata.InteractionType getInteractionType() { 1007 com.google.cloud.speech.v1.RecognitionMetadata.InteractionType result = 1008 com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.forNumber(interactionType_); 1009 return result == null 1010 ? 
com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.UNRECOGNIZED 1011 : result; 1012 } 1013 1014 public static final int INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER = 3; 1015 private int industryNaicsCodeOfAudio_ = 0; 1016 /** 1017 * 1018 * 1019 * <pre> 1020 * The industry vertical to which this speech recognition request most 1021 * closely applies. This is most indicative of the topics contained 1022 * in the audio. Use the 6-digit NAICS code to identify the industry 1023 * vertical - see https://www.naics.com/search/. 1024 * </pre> 1025 * 1026 * <code>uint32 industry_naics_code_of_audio = 3;</code> 1027 * 1028 * @return The industryNaicsCodeOfAudio. 1029 */ 1030 @java.lang.Override getIndustryNaicsCodeOfAudio()1031 public int getIndustryNaicsCodeOfAudio() { 1032 return industryNaicsCodeOfAudio_; 1033 } 1034 1035 public static final int MICROPHONE_DISTANCE_FIELD_NUMBER = 4; 1036 private int microphoneDistance_ = 0; 1037 /** 1038 * 1039 * 1040 * <pre> 1041 * The audio type that most closely describes the audio being recognized. 1042 * </pre> 1043 * 1044 * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4; 1045 * </code> 1046 * 1047 * @return The enum numeric value on the wire for microphoneDistance. 1048 */ 1049 @java.lang.Override getMicrophoneDistanceValue()1050 public int getMicrophoneDistanceValue() { 1051 return microphoneDistance_; 1052 } 1053 /** 1054 * 1055 * 1056 * <pre> 1057 * The audio type that most closely describes the audio being recognized. 1058 * </pre> 1059 * 1060 * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4; 1061 * </code> 1062 * 1063 * @return The microphoneDistance. 1064 */ 1065 @java.lang.Override getMicrophoneDistance()1066 public com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance getMicrophoneDistance() { 1067 com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance result = 1068 com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.forNumber( 1069 microphoneDistance_); 1070 return result == null 1071 ? com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED 1072 : result; 1073 } 1074 1075 public static final int ORIGINAL_MEDIA_TYPE_FIELD_NUMBER = 5; 1076 private int originalMediaType_ = 0; 1077 /** 1078 * 1079 * 1080 * <pre> 1081 * The original media the speech was recorded on. 1082 * </pre> 1083 * 1084 * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5; 1085 * </code> 1086 * 1087 * @return The enum numeric value on the wire for originalMediaType. 1088 */ 1089 @java.lang.Override getOriginalMediaTypeValue()1090 public int getOriginalMediaTypeValue() { 1091 return originalMediaType_; 1092 } 1093 /** 1094 * 1095 * 1096 * <pre> 1097 * The original media the speech was recorded on. 1098 * </pre> 1099 * 1100 * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5; 1101 * </code> 1102 * 1103 * @return The originalMediaType. 1104 */ 1105 @java.lang.Override getOriginalMediaType()1106 public com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType getOriginalMediaType() { 1107 com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType result = 1108 com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.forNumber( 1109 originalMediaType_); 1110 return result == null 1111 ? 
com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED 1112 : result; 1113 } 1114 1115 public static final int RECORDING_DEVICE_TYPE_FIELD_NUMBER = 6; 1116 private int recordingDeviceType_ = 0; 1117 /** 1118 * 1119 * 1120 * <pre> 1121 * The type of device the speech was recorded with. 1122 * </pre> 1123 * 1124 * <code> 1125 * .google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6; 1126 * </code> 1127 * 1128 * @return The enum numeric value on the wire for recordingDeviceType. 1129 */ 1130 @java.lang.Override getRecordingDeviceTypeValue()1131 public int getRecordingDeviceTypeValue() { 1132 return recordingDeviceType_; 1133 } 1134 /** 1135 * 1136 * 1137 * <pre> 1138 * The type of device the speech was recorded with. 1139 * </pre> 1140 * 1141 * <code> 1142 * .google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6; 1143 * </code> 1144 * 1145 * @return The recordingDeviceType. 1146 */ 1147 @java.lang.Override 1148 public com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType getRecordingDeviceType()1149 getRecordingDeviceType() { 1150 com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType result = 1151 com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.forNumber( 1152 recordingDeviceType_); 1153 return result == null 1154 ? com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED 1155 : result; 1156 } 1157 1158 public static final int RECORDING_DEVICE_NAME_FIELD_NUMBER = 7; 1159 1160 @SuppressWarnings("serial") 1161 private volatile java.lang.Object recordingDeviceName_ = ""; 1162 /** 1163 * 1164 * 1165 * <pre> 1166 * The device used to make the recording. Examples 'Nexus 5X' or 1167 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or 1168 * 'Cardioid Microphone'. 1169 * </pre> 1170 * 1171 * <code>string recording_device_name = 7;</code> 1172 * 1173 * @return The recordingDeviceName. 1174 */ 1175 @java.lang.Override getRecordingDeviceName()1176 public java.lang.String getRecordingDeviceName() { 1177 java.lang.Object ref = recordingDeviceName_; 1178 if (ref instanceof java.lang.String) { 1179 return (java.lang.String) ref; 1180 } else { 1181 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 1182 java.lang.String s = bs.toStringUtf8(); 1183 recordingDeviceName_ = s; 1184 return s; 1185 } 1186 } 1187 /** 1188 * 1189 * 1190 * <pre> 1191 * The device used to make the recording. Examples 'Nexus 5X' or 1192 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or 1193 * 'Cardioid Microphone'. 1194 * </pre> 1195 * 1196 * <code>string recording_device_name = 7;</code> 1197 * 1198 * @return The bytes for recordingDeviceName. 1199 */ 1200 @java.lang.Override getRecordingDeviceNameBytes()1201 public com.google.protobuf.ByteString getRecordingDeviceNameBytes() { 1202 java.lang.Object ref = recordingDeviceName_; 1203 if (ref instanceof java.lang.String) { 1204 com.google.protobuf.ByteString b = 1205 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 1206 recordingDeviceName_ = b; 1207 return b; 1208 } else { 1209 return (com.google.protobuf.ByteString) ref; 1210 } 1211 } 1212 1213 public static final int ORIGINAL_MIME_TYPE_FIELD_NUMBER = 8; 1214 1215 @SuppressWarnings("serial") 1216 private volatile java.lang.Object originalMimeType_ = ""; 1217 /** 1218 * 1219 * 1220 * <pre> 1221 * Mime type of the original audio file. For example `audio/m4a`, 1222 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`. 
1223 * A list of possible audio mime types is maintained at 1224 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio 1225 * </pre> 1226 * 1227 * <code>string original_mime_type = 8;</code> 1228 * 1229 * @return The originalMimeType. 1230 */ 1231 @java.lang.Override getOriginalMimeType()1232 public java.lang.String getOriginalMimeType() { 1233 java.lang.Object ref = originalMimeType_; 1234 if (ref instanceof java.lang.String) { 1235 return (java.lang.String) ref; 1236 } else { 1237 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 1238 java.lang.String s = bs.toStringUtf8(); 1239 originalMimeType_ = s; 1240 return s; 1241 } 1242 } 1243 /** 1244 * 1245 * 1246 * <pre> 1247 * Mime type of the original audio file. For example `audio/m4a`, 1248 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`. 1249 * A list of possible audio mime types is maintained at 1250 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio 1251 * </pre> 1252 * 1253 * <code>string original_mime_type = 8;</code> 1254 * 1255 * @return The bytes for originalMimeType. 1256 */ 1257 @java.lang.Override getOriginalMimeTypeBytes()1258 public com.google.protobuf.ByteString getOriginalMimeTypeBytes() { 1259 java.lang.Object ref = originalMimeType_; 1260 if (ref instanceof java.lang.String) { 1261 com.google.protobuf.ByteString b = 1262 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 1263 originalMimeType_ = b; 1264 return b; 1265 } else { 1266 return (com.google.protobuf.ByteString) ref; 1267 } 1268 } 1269 1270 public static final int AUDIO_TOPIC_FIELD_NUMBER = 10; 1271 1272 @SuppressWarnings("serial") 1273 private volatile java.lang.Object audioTopic_ = ""; 1274 /** 1275 * 1276 * 1277 * <pre> 1278 * Description of the content. Eg. "Recordings of federal supreme court 1279 * hearings from 2012". 1280 * </pre> 1281 * 1282 * <code>string audio_topic = 10;</code> 1283 * 1284 * @return The audioTopic. 1285 */ 1286 @java.lang.Override getAudioTopic()1287 public java.lang.String getAudioTopic() { 1288 java.lang.Object ref = audioTopic_; 1289 if (ref instanceof java.lang.String) { 1290 return (java.lang.String) ref; 1291 } else { 1292 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 1293 java.lang.String s = bs.toStringUtf8(); 1294 audioTopic_ = s; 1295 return s; 1296 } 1297 } 1298 /** 1299 * 1300 * 1301 * <pre> 1302 * Description of the content. Eg. "Recordings of federal supreme court 1303 * hearings from 2012". 1304 * </pre> 1305 * 1306 * <code>string audio_topic = 10;</code> 1307 * 1308 * @return The bytes for audioTopic. 
1309 */ 1310 @java.lang.Override getAudioTopicBytes()1311 public com.google.protobuf.ByteString getAudioTopicBytes() { 1312 java.lang.Object ref = audioTopic_; 1313 if (ref instanceof java.lang.String) { 1314 com.google.protobuf.ByteString b = 1315 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 1316 audioTopic_ = b; 1317 return b; 1318 } else { 1319 return (com.google.protobuf.ByteString) ref; 1320 } 1321 } 1322 1323 private byte memoizedIsInitialized = -1; 1324 1325 @java.lang.Override isInitialized()1326 public final boolean isInitialized() { 1327 byte isInitialized = memoizedIsInitialized; 1328 if (isInitialized == 1) return true; 1329 if (isInitialized == 0) return false; 1330 1331 memoizedIsInitialized = 1; 1332 return true; 1333 } 1334 1335 @java.lang.Override writeTo(com.google.protobuf.CodedOutputStream output)1336 public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 1337 if (interactionType_ 1338 != com.google.cloud.speech.v1.RecognitionMetadata.InteractionType 1339 .INTERACTION_TYPE_UNSPECIFIED 1340 .getNumber()) { 1341 output.writeEnum(1, interactionType_); 1342 } 1343 if (industryNaicsCodeOfAudio_ != 0) { 1344 output.writeUInt32(3, industryNaicsCodeOfAudio_); 1345 } 1346 if (microphoneDistance_ 1347 != com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance 1348 .MICROPHONE_DISTANCE_UNSPECIFIED 1349 .getNumber()) { 1350 output.writeEnum(4, microphoneDistance_); 1351 } 1352 if (originalMediaType_ 1353 != com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType 1354 .ORIGINAL_MEDIA_TYPE_UNSPECIFIED 1355 .getNumber()) { 1356 output.writeEnum(5, originalMediaType_); 1357 } 1358 if (recordingDeviceType_ 1359 != com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType 1360 .RECORDING_DEVICE_TYPE_UNSPECIFIED 1361 .getNumber()) { 1362 output.writeEnum(6, recordingDeviceType_); 1363 } 1364 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recordingDeviceName_)) { 1365 com.google.protobuf.GeneratedMessageV3.writeString(output, 7, recordingDeviceName_); 1366 } 1367 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(originalMimeType_)) { 1368 com.google.protobuf.GeneratedMessageV3.writeString(output, 8, originalMimeType_); 1369 } 1370 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(audioTopic_)) { 1371 com.google.protobuf.GeneratedMessageV3.writeString(output, 10, audioTopic_); 1372 } 1373 getUnknownFields().writeTo(output); 1374 } 1375 1376 @java.lang.Override getSerializedSize()1377 public int getSerializedSize() { 1378 int size = memoizedSize; 1379 if (size != -1) return size; 1380 1381 size = 0; 1382 if (interactionType_ 1383 != com.google.cloud.speech.v1.RecognitionMetadata.InteractionType 1384 .INTERACTION_TYPE_UNSPECIFIED 1385 .getNumber()) { 1386 size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, interactionType_); 1387 } 1388 if (industryNaicsCodeOfAudio_ != 0) { 1389 size += com.google.protobuf.CodedOutputStream.computeUInt32Size(3, industryNaicsCodeOfAudio_); 1390 } 1391 if (microphoneDistance_ 1392 != com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance 1393 .MICROPHONE_DISTANCE_UNSPECIFIED 1394 .getNumber()) { 1395 size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, microphoneDistance_); 1396 } 1397 if (originalMediaType_ 1398 != com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType 1399 .ORIGINAL_MEDIA_TYPE_UNSPECIFIED 1400 .getNumber()) { 1401 size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, 
originalMediaType_); 1402 } 1403 if (recordingDeviceType_ 1404 != com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType 1405 .RECORDING_DEVICE_TYPE_UNSPECIFIED 1406 .getNumber()) { 1407 size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, recordingDeviceType_); 1408 } 1409 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recordingDeviceName_)) { 1410 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, recordingDeviceName_); 1411 } 1412 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(originalMimeType_)) { 1413 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, originalMimeType_); 1414 } 1415 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(audioTopic_)) { 1416 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, audioTopic_); 1417 } 1418 size += getUnknownFields().getSerializedSize(); 1419 memoizedSize = size; 1420 return size; 1421 } 1422 1423 @java.lang.Override equals(final java.lang.Object obj)1424 public boolean equals(final java.lang.Object obj) { 1425 if (obj == this) { 1426 return true; 1427 } 1428 if (!(obj instanceof com.google.cloud.speech.v1.RecognitionMetadata)) { 1429 return super.equals(obj); 1430 } 1431 com.google.cloud.speech.v1.RecognitionMetadata other = 1432 (com.google.cloud.speech.v1.RecognitionMetadata) obj; 1433 1434 if (interactionType_ != other.interactionType_) return false; 1435 if (getIndustryNaicsCodeOfAudio() != other.getIndustryNaicsCodeOfAudio()) return false; 1436 if (microphoneDistance_ != other.microphoneDistance_) return false; 1437 if (originalMediaType_ != other.originalMediaType_) return false; 1438 if (recordingDeviceType_ != other.recordingDeviceType_) return false; 1439 if (!getRecordingDeviceName().equals(other.getRecordingDeviceName())) return false; 1440 if (!getOriginalMimeType().equals(other.getOriginalMimeType())) return false; 1441 if (!getAudioTopic().equals(other.getAudioTopic())) return false; 1442 if (!getUnknownFields().equals(other.getUnknownFields())) return false; 1443 return true; 1444 } 1445 1446 @java.lang.Override hashCode()1447 public int hashCode() { 1448 if (memoizedHashCode != 0) { 1449 return memoizedHashCode; 1450 } 1451 int hash = 41; 1452 hash = (19 * hash) + getDescriptor().hashCode(); 1453 hash = (37 * hash) + INTERACTION_TYPE_FIELD_NUMBER; 1454 hash = (53 * hash) + interactionType_; 1455 hash = (37 * hash) + INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER; 1456 hash = (53 * hash) + getIndustryNaicsCodeOfAudio(); 1457 hash = (37 * hash) + MICROPHONE_DISTANCE_FIELD_NUMBER; 1458 hash = (53 * hash) + microphoneDistance_; 1459 hash = (37 * hash) + ORIGINAL_MEDIA_TYPE_FIELD_NUMBER; 1460 hash = (53 * hash) + originalMediaType_; 1461 hash = (37 * hash) + RECORDING_DEVICE_TYPE_FIELD_NUMBER; 1462 hash = (53 * hash) + recordingDeviceType_; 1463 hash = (37 * hash) + RECORDING_DEVICE_NAME_FIELD_NUMBER; 1464 hash = (53 * hash) + getRecordingDeviceName().hashCode(); 1465 hash = (37 * hash) + ORIGINAL_MIME_TYPE_FIELD_NUMBER; 1466 hash = (53 * hash) + getOriginalMimeType().hashCode(); 1467 hash = (37 * hash) + AUDIO_TOPIC_FIELD_NUMBER; 1468 hash = (53 * hash) + getAudioTopic().hashCode(); 1469 hash = (29 * hash) + getUnknownFields().hashCode(); 1470 memoizedHashCode = hash; 1471 return hash; 1472 } 1473 parseFrom(java.nio.ByteBuffer data)1474 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(java.nio.ByteBuffer data) 1475 throws com.google.protobuf.InvalidProtocolBufferException { 1476 return PARSER.parseFrom(data); 1477 
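  // A minimal round-trip sketch (caller code, assuming the standard protobuf-java API; not part
  // of this generated file): serialize with toByteArray(), which comes from the
  // com.google.protobuf.MessageLite interface, and parse back with the parseFrom overloads
  // defined in this class.
  //   byte[] bytes = metadata.toByteArray();
  //   RecognitionMetadata copy = RecognitionMetadata.parseFrom(bytes);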
} 1478 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1479 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom( 1480 java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1481 throws com.google.protobuf.InvalidProtocolBufferException { 1482 return PARSER.parseFrom(data, extensionRegistry); 1483 } 1484 parseFrom( com.google.protobuf.ByteString data)1485 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom( 1486 com.google.protobuf.ByteString data) 1487 throws com.google.protobuf.InvalidProtocolBufferException { 1488 return PARSER.parseFrom(data); 1489 } 1490 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1491 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom( 1492 com.google.protobuf.ByteString data, 1493 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1494 throws com.google.protobuf.InvalidProtocolBufferException { 1495 return PARSER.parseFrom(data, extensionRegistry); 1496 } 1497 parseFrom(byte[] data)1498 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(byte[] data) 1499 throws com.google.protobuf.InvalidProtocolBufferException { 1500 return PARSER.parseFrom(data); 1501 } 1502 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1503 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom( 1504 byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1505 throws com.google.protobuf.InvalidProtocolBufferException { 1506 return PARSER.parseFrom(data, extensionRegistry); 1507 } 1508 parseFrom(java.io.InputStream input)1509 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(java.io.InputStream input) 1510 throws java.io.IOException { 1511 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 1512 } 1513 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1514 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom( 1515 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1516 throws java.io.IOException { 1517 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 1518 PARSER, input, extensionRegistry); 1519 } 1520 parseDelimitedFrom( java.io.InputStream input)1521 public static com.google.cloud.speech.v1.RecognitionMetadata parseDelimitedFrom( 1522 java.io.InputStream input) throws java.io.IOException { 1523 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); 1524 } 1525 parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1526 public static com.google.cloud.speech.v1.RecognitionMetadata parseDelimitedFrom( 1527 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1528 throws java.io.IOException { 1529 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( 1530 PARSER, input, extensionRegistry); 1531 } 1532 parseFrom( com.google.protobuf.CodedInputStream input)1533 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom( 1534 com.google.protobuf.CodedInputStream input) throws java.io.IOException { 1535 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 1536 } 1537 parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)1538 public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom( 1539 com.google.protobuf.CodedInputStream input, 1540 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1541 throws java.io.IOException { 1542 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 1543 PARSER, input, extensionRegistry); 1544 } 1545 1546 @java.lang.Override newBuilderForType()1547 public Builder newBuilderForType() { 1548 return newBuilder(); 1549 } 1550 newBuilder()1551 public static Builder newBuilder() { 1552 return DEFAULT_INSTANCE.toBuilder(); 1553 } 1554 newBuilder(com.google.cloud.speech.v1.RecognitionMetadata prototype)1555 public static Builder newBuilder(com.google.cloud.speech.v1.RecognitionMetadata prototype) { 1556 return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); 1557 } 1558 1559 @java.lang.Override toBuilder()1560 public Builder toBuilder() { 1561 return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); 1562 } 1563 1564 @java.lang.Override newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)1565 protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 1566 Builder builder = new Builder(parent); 1567 return builder; 1568 } 1569 /** 1570 * 1571 * 1572 * <pre> 1573 * Description of audio data to be recognized. 1574 * </pre> 1575 * 1576 * Protobuf type {@code google.cloud.speech.v1.RecognitionMetadata} 1577 */ 1578 public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> 1579 implements 1580 // @@protoc_insertion_point(builder_implements:google.cloud.speech.v1.RecognitionMetadata) 1581 com.google.cloud.speech.v1.RecognitionMetadataOrBuilder { getDescriptor()1582 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 1583 return com.google.cloud.speech.v1.SpeechProto 1584 .internal_static_google_cloud_speech_v1_RecognitionMetadata_descriptor; 1585 } 1586 1587 @java.lang.Override 1588 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()1589 internalGetFieldAccessorTable() { 1590 return com.google.cloud.speech.v1.SpeechProto 1591 .internal_static_google_cloud_speech_v1_RecognitionMetadata_fieldAccessorTable 1592 .ensureFieldAccessorsInitialized( 1593 com.google.cloud.speech.v1.RecognitionMetadata.class, 1594 com.google.cloud.speech.v1.RecognitionMetadata.Builder.class); 1595 } 1596 1597 // Construct using com.google.cloud.speech.v1.RecognitionMetadata.newBuilder() Builder()1598 private Builder() {} 1599 Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)1600 private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 1601 super(parent); 1602 } 1603 1604 @java.lang.Override clear()1605 public Builder clear() { 1606 super.clear(); 1607 bitField0_ = 0; 1608 interactionType_ = 0; 1609 industryNaicsCodeOfAudio_ = 0; 1610 microphoneDistance_ = 0; 1611 originalMediaType_ = 0; 1612 recordingDeviceType_ = 0; 1613 recordingDeviceName_ = ""; 1614 originalMimeType_ = ""; 1615 audioTopic_ = ""; 1616 return this; 1617 } 1618 1619 @java.lang.Override getDescriptorForType()1620 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 1621 return com.google.cloud.speech.v1.SpeechProto 1622 .internal_static_google_cloud_speech_v1_RecognitionMetadata_descriptor; 1623 } 1624 1625 @java.lang.Override getDefaultInstanceForType()1626 public 
com.google.cloud.speech.v1.RecognitionMetadata getDefaultInstanceForType() { 1627 return com.google.cloud.speech.v1.RecognitionMetadata.getDefaultInstance(); 1628 } 1629 1630 @java.lang.Override build()1631 public com.google.cloud.speech.v1.RecognitionMetadata build() { 1632 com.google.cloud.speech.v1.RecognitionMetadata result = buildPartial(); 1633 if (!result.isInitialized()) { 1634 throw newUninitializedMessageException(result); 1635 } 1636 return result; 1637 } 1638 1639 @java.lang.Override buildPartial()1640 public com.google.cloud.speech.v1.RecognitionMetadata buildPartial() { 1641 com.google.cloud.speech.v1.RecognitionMetadata result = 1642 new com.google.cloud.speech.v1.RecognitionMetadata(this); 1643 if (bitField0_ != 0) { 1644 buildPartial0(result); 1645 } 1646 onBuilt(); 1647 return result; 1648 } 1649 buildPartial0(com.google.cloud.speech.v1.RecognitionMetadata result)1650 private void buildPartial0(com.google.cloud.speech.v1.RecognitionMetadata result) { 1651 int from_bitField0_ = bitField0_; 1652 if (((from_bitField0_ & 0x00000001) != 0)) { 1653 result.interactionType_ = interactionType_; 1654 } 1655 if (((from_bitField0_ & 0x00000002) != 0)) { 1656 result.industryNaicsCodeOfAudio_ = industryNaicsCodeOfAudio_; 1657 } 1658 if (((from_bitField0_ & 0x00000004) != 0)) { 1659 result.microphoneDistance_ = microphoneDistance_; 1660 } 1661 if (((from_bitField0_ & 0x00000008) != 0)) { 1662 result.originalMediaType_ = originalMediaType_; 1663 } 1664 if (((from_bitField0_ & 0x00000010) != 0)) { 1665 result.recordingDeviceType_ = recordingDeviceType_; 1666 } 1667 if (((from_bitField0_ & 0x00000020) != 0)) { 1668 result.recordingDeviceName_ = recordingDeviceName_; 1669 } 1670 if (((from_bitField0_ & 0x00000040) != 0)) { 1671 result.originalMimeType_ = originalMimeType_; 1672 } 1673 if (((from_bitField0_ & 0x00000080) != 0)) { 1674 result.audioTopic_ = audioTopic_; 1675 } 1676 } 1677 1678 @java.lang.Override clone()1679 public Builder clone() { 1680 return super.clone(); 1681 } 1682 1683 @java.lang.Override setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)1684 public Builder setField( 1685 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 1686 return super.setField(field, value); 1687 } 1688 1689 @java.lang.Override clearField(com.google.protobuf.Descriptors.FieldDescriptor field)1690 public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 1691 return super.clearField(field); 1692 } 1693 1694 @java.lang.Override clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)1695 public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 1696 return super.clearOneof(oneof); 1697 } 1698 1699 @java.lang.Override setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value)1700 public Builder setRepeatedField( 1701 com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { 1702 return super.setRepeatedField(field, index, value); 1703 } 1704 1705 @java.lang.Override addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)1706 public Builder addRepeatedField( 1707 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 1708 return super.addRepeatedField(field, value); 1709 } 1710 1711 @java.lang.Override mergeFrom(com.google.protobuf.Message other)1712 public Builder mergeFrom(com.google.protobuf.Message other) { 1713 if (other 
instanceof com.google.cloud.speech.v1.RecognitionMetadata) { 1714 return mergeFrom((com.google.cloud.speech.v1.RecognitionMetadata) other); 1715 } else { 1716 super.mergeFrom(other); 1717 return this; 1718 } 1719 } 1720 mergeFrom(com.google.cloud.speech.v1.RecognitionMetadata other)1721 public Builder mergeFrom(com.google.cloud.speech.v1.RecognitionMetadata other) { 1722 if (other == com.google.cloud.speech.v1.RecognitionMetadata.getDefaultInstance()) return this; 1723 if (other.interactionType_ != 0) { 1724 setInteractionTypeValue(other.getInteractionTypeValue()); 1725 } 1726 if (other.getIndustryNaicsCodeOfAudio() != 0) { 1727 setIndustryNaicsCodeOfAudio(other.getIndustryNaicsCodeOfAudio()); 1728 } 1729 if (other.microphoneDistance_ != 0) { 1730 setMicrophoneDistanceValue(other.getMicrophoneDistanceValue()); 1731 } 1732 if (other.originalMediaType_ != 0) { 1733 setOriginalMediaTypeValue(other.getOriginalMediaTypeValue()); 1734 } 1735 if (other.recordingDeviceType_ != 0) { 1736 setRecordingDeviceTypeValue(other.getRecordingDeviceTypeValue()); 1737 } 1738 if (!other.getRecordingDeviceName().isEmpty()) { 1739 recordingDeviceName_ = other.recordingDeviceName_; 1740 bitField0_ |= 0x00000020; 1741 onChanged(); 1742 } 1743 if (!other.getOriginalMimeType().isEmpty()) { 1744 originalMimeType_ = other.originalMimeType_; 1745 bitField0_ |= 0x00000040; 1746 onChanged(); 1747 } 1748 if (!other.getAudioTopic().isEmpty()) { 1749 audioTopic_ = other.audioTopic_; 1750 bitField0_ |= 0x00000080; 1751 onChanged(); 1752 } 1753 this.mergeUnknownFields(other.getUnknownFields()); 1754 onChanged(); 1755 return this; 1756 } 1757 1758 @java.lang.Override isInitialized()1759 public final boolean isInitialized() { 1760 return true; 1761 } 1762 1763 @java.lang.Override mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1764 public Builder mergeFrom( 1765 com.google.protobuf.CodedInputStream input, 1766 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1767 throws java.io.IOException { 1768 if (extensionRegistry == null) { 1769 throw new java.lang.NullPointerException(); 1770 } 1771 try { 1772 boolean done = false; 1773 while (!done) { 1774 int tag = input.readTag(); 1775 switch (tag) { 1776 case 0: 1777 done = true; 1778 break; 1779 case 8: 1780 { 1781 interactionType_ = input.readEnum(); 1782 bitField0_ |= 0x00000001; 1783 break; 1784 } // case 8 1785 case 24: 1786 { 1787 industryNaicsCodeOfAudio_ = input.readUInt32(); 1788 bitField0_ |= 0x00000002; 1789 break; 1790 } // case 24 1791 case 32: 1792 { 1793 microphoneDistance_ = input.readEnum(); 1794 bitField0_ |= 0x00000004; 1795 break; 1796 } // case 32 1797 case 40: 1798 { 1799 originalMediaType_ = input.readEnum(); 1800 bitField0_ |= 0x00000008; 1801 break; 1802 } // case 40 1803 case 48: 1804 { 1805 recordingDeviceType_ = input.readEnum(); 1806 bitField0_ |= 0x00000010; 1807 break; 1808 } // case 48 1809 case 58: 1810 { 1811 recordingDeviceName_ = input.readStringRequireUtf8(); 1812 bitField0_ |= 0x00000020; 1813 break; 1814 } // case 58 1815 case 66: 1816 { 1817 originalMimeType_ = input.readStringRequireUtf8(); 1818 bitField0_ |= 0x00000040; 1819 break; 1820 } // case 66 1821 case 82: 1822 { 1823 audioTopic_ = input.readStringRequireUtf8(); 1824 bitField0_ |= 0x00000080; 1825 break; 1826 } // case 82 1827 default: 1828 { 1829 if (!super.parseUnknownField(input, extensionRegistry, tag)) { 1830 done = true; // was an endgroup tag 1831 } 1832 break; 1833 } // default: 1834 } // switch (tag) 
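            // Explanatory note on the case labels above (standard protobuf wire format, not
            // generated text): a tag is (field_number << 3) | wire_type. interaction_type is
            // field 1 with varint wire type 0, so its tag is (1 << 3) | 0 = 8;
            // recording_device_name is field 7 with length-delimited wire type 2, so its tag is
            // (7 << 3) | 2 = 58. A tag of 0 from readTag() means the end of the input was reached.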
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private int interactionType_ = 0;
    /**
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code>
     *
     * @return The enum numeric value on the wire for interactionType.
     */
    @java.lang.Override
    public int getInteractionTypeValue() {
      return interactionType_;
    }
    /**
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code>
     *
     * @param value The enum numeric value on the wire for interactionType to set.
     * @return This builder for chaining.
     */
    public Builder setInteractionTypeValue(int value) {
      interactionType_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code>
     *
     * @return The interactionType.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1.RecognitionMetadata.InteractionType getInteractionType() {
      com.google.cloud.speech.v1.RecognitionMetadata.InteractionType result =
          com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.forNumber(interactionType_);
      return result == null
          ? com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.UNRECOGNIZED
          : result;
    }
    /**
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code>
     *
     * @param value The interactionType to set.
     * @return This builder for chaining.
     */
    public Builder setInteractionType(
        com.google.cloud.speech.v1.RecognitionMetadata.InteractionType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      interactionType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInteractionType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      interactionType_ = 0;
      onChanged();
      return this;
    }

    private int industryNaicsCodeOfAudio_;
    /**
     * <pre>
     * The industry vertical to which this speech recognition request most
     * closely applies. This is most indicative of the topics contained
     * in the audio. Use the 6-digit NAICS code to identify the industry
     * vertical - see https://www.naics.com/search/.
     * </pre>
     *
     * <code>uint32 industry_naics_code_of_audio = 3;</code>
     *
     * @return The industryNaicsCodeOfAudio.
     */
    @java.lang.Override
    public int getIndustryNaicsCodeOfAudio() {
      return industryNaicsCodeOfAudio_;
    }
    /**
     * <pre>
     * The industry vertical to which this speech recognition request most
     * closely applies. This is most indicative of the topics contained
     * in the audio. Use the 6-digit NAICS code to identify the industry
     * vertical - see https://www.naics.com/search/.
     * </pre>
     *
     * <code>uint32 industry_naics_code_of_audio = 3;</code>
     *
     * @param value The industryNaicsCodeOfAudio to set.
     * @return This builder for chaining.
     */
    public Builder setIndustryNaicsCodeOfAudio(int value) {
      industryNaicsCodeOfAudio_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The industry vertical to which this speech recognition request most
     * closely applies. This is most indicative of the topics contained
     * in the audio. Use the 6-digit NAICS code to identify the industry
     * vertical - see https://www.naics.com/search/.
     * </pre>
     *
     * <code>uint32 industry_naics_code_of_audio = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearIndustryNaicsCodeOfAudio() {
      bitField0_ = (bitField0_ & ~0x00000002);
      industryNaicsCodeOfAudio_ = 0;
      onChanged();
      return this;
    }

    private int microphoneDistance_ = 0;
    /**
     * <pre>
     * The audio type that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;</code>
     *
     * @return The enum numeric value on the wire for microphoneDistance.
     */
    @java.lang.Override
    public int getMicrophoneDistanceValue() {
      return microphoneDistance_;
    }
    /**
     * <pre>
     * The audio type that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;</code>
     *
     * @param value The enum numeric value on the wire for microphoneDistance to set.
     * @return This builder for chaining.
     */
    public Builder setMicrophoneDistanceValue(int value) {
      microphoneDistance_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The audio type that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;</code>
     *
     * @return The microphoneDistance.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance getMicrophoneDistance() {
      com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance result =
          com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.forNumber(microphoneDistance_);
      return result == null
          ? com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED
          : result;
    }
    /**
     * <pre>
     * The audio type that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;</code>
     *
     * @param value The microphoneDistance to set.
     * @return This builder for chaining.
     */
    public Builder setMicrophoneDistance(
        com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      microphoneDistance_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The audio type that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMicrophoneDistance() {
      bitField0_ = (bitField0_ & ~0x00000004);
      microphoneDistance_ = 0;
      onChanged();
      return this;
    }

    private int originalMediaType_ = 0;
    /**
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;</code>
     *
     * @return The enum numeric value on the wire for originalMediaType.
     */
    @java.lang.Override
    public int getOriginalMediaTypeValue() {
      return originalMediaType_;
    }
    /**
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;</code>
     *
     * @param value The enum numeric value on the wire for originalMediaType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMediaTypeValue(int value) {
      originalMediaType_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;</code>
     *
     * @return The originalMediaType.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType getOriginalMediaType() {
      com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType result =
          com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.forNumber(originalMediaType_);
      return result == null
          ? com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED
          : result;
    }
    /**
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;</code>
     *
     * @param value The originalMediaType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMediaType(
        com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      originalMediaType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOriginalMediaType() {
      bitField0_ = (bitField0_ & ~0x00000008);
      originalMediaType_ = 0;
      onChanged();
      return this;
    }

    private int recordingDeviceType_ = 0;
    /**
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;</code>
     *
     * @return The enum numeric value on the wire for recordingDeviceType.
     */
    @java.lang.Override
    public int getRecordingDeviceTypeValue() {
      return recordingDeviceType_;
    }
    /**
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;</code>
     *
     * @param value The enum numeric value on the wire for recordingDeviceType to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceTypeValue(int value) {
      recordingDeviceType_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;</code>
     *
     * @return The recordingDeviceType.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType getRecordingDeviceType() {
      com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType result =
          com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.forNumber(recordingDeviceType_);
      return result == null
          ? com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED
          : result;
    }
    /**
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;</code>
     *
     * @param value The recordingDeviceType to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceType(
        com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000010;
      recordingDeviceType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRecordingDeviceType() {
      bitField0_ = (bitField0_ & ~0x00000010);
      recordingDeviceType_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object recordingDeviceName_ = "";
    /**
     * <pre>
     * The device used to make the recording. Examples 'Nexus 5X' or
     * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @return The recordingDeviceName.
     */
    public java.lang.String getRecordingDeviceName() {
      java.lang.Object ref = recordingDeviceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        recordingDeviceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The device used to make the recording. Examples 'Nexus 5X' or
     * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @return The bytes for recordingDeviceName.
     */
    public com.google.protobuf.ByteString getRecordingDeviceNameBytes() {
      java.lang.Object ref = recordingDeviceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        recordingDeviceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The device used to make the recording. Examples 'Nexus 5X' or
     * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @param value The recordingDeviceName to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      recordingDeviceName_ = value;
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The device used to make the recording. Examples 'Nexus 5X' or
     * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRecordingDeviceName() {
      recordingDeviceName_ = getDefaultInstance().getRecordingDeviceName();
      bitField0_ = (bitField0_ & ~0x00000020);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The device used to make the recording. Examples 'Nexus 5X' or
     * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @param value The bytes for recordingDeviceName to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      recordingDeviceName_ = value;
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }

    private java.lang.Object originalMimeType_ = "";
    /**
     * <pre>
     * Mime type of the original audio file. For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio mime types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @return The originalMimeType.
     */
    public java.lang.String getOriginalMimeType() {
      java.lang.Object ref = originalMimeType_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        originalMimeType_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Mime type of the original audio file. For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio mime types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @return The bytes for originalMimeType.
     */
    public com.google.protobuf.ByteString getOriginalMimeTypeBytes() {
      java.lang.Object ref = originalMimeType_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        originalMimeType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Mime type of the original audio file. For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio mime types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @param value The originalMimeType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMimeType(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      originalMimeType_ = value;
      bitField0_ |= 0x00000040;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Mime type of the original audio file. For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio mime types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOriginalMimeType() {
      originalMimeType_ = getDefaultInstance().getOriginalMimeType();
      bitField0_ = (bitField0_ & ~0x00000040);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Mime type of the original audio file. For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio mime types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @param value The bytes for originalMimeType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMimeTypeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      originalMimeType_ = value;
      bitField0_ |= 0x00000040;
      onChanged();
      return this;
    }

    private java.lang.Object audioTopic_ = "";
    /**
     * <pre>
     * Description of the content. E.g. "Recordings of federal supreme court
     * hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @return The audioTopic.
     */
    public java.lang.String getAudioTopic() {
      java.lang.Object ref = audioTopic_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        audioTopic_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Description of the content. E.g. "Recordings of federal supreme court
     * hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @return The bytes for audioTopic.
     */
    public com.google.protobuf.ByteString getAudioTopicBytes() {
      java.lang.Object ref = audioTopic_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        audioTopic_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Description of the content. E.g. "Recordings of federal supreme court
     * hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @param value The audioTopic to set.
     * @return This builder for chaining.
     */
    public Builder setAudioTopic(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      audioTopic_ = value;
      bitField0_ |= 0x00000080;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Description of the content. E.g. "Recordings of federal supreme court
     * hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAudioTopic() {
      audioTopic_ = getDefaultInstance().getAudioTopic();
      bitField0_ = (bitField0_ & ~0x00000080);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Description of the content. E.g. "Recordings of federal supreme court
     * hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @param value The bytes for audioTopic to set.
     * @return This builder for chaining.
     */
    public Builder setAudioTopicBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      audioTopic_ = value;
      bitField0_ |= 0x00000080;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.speech.v1.RecognitionMetadata)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognitionMetadata)
  private static final com.google.cloud.speech.v1.RecognitionMetadata DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.speech.v1.RecognitionMetadata();
  }

  public static com.google.cloud.speech.v1.RecognitionMetadata getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<RecognitionMetadata> PARSER =
      new com.google.protobuf.AbstractParser<RecognitionMetadata>() {
        @java.lang.Override
        public RecognitionMetadata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<RecognitionMetadata> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RecognitionMetadata> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.speech.v1.RecognitionMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
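
// A minimal usage sketch for this message. It is illustrative only: the field
// values are hypothetical, and build()/toByteArray() are the standard methods
// inherited from the generated builder and message base classes. Note that this
// message type is marked deprecated.
//
//   com.google.cloud.speech.v1.RecognitionMetadata metadata =
//       com.google.cloud.speech.v1.RecognitionMetadata.newBuilder()
//           .setInteractionType(
//               com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.PHONE_CALL)
//           .setRecordingDeviceName("Nexus 5X")
//           .setOriginalMimeType("audio/mp3")
//           .setAudioTopic("Customer support calls")
//           .build();
//
//   byte[] bytes = metadata.toByteArray();
//   // parseFrom throws com.google.protobuf.InvalidProtocolBufferException on malformed input.
//   com.google.cloud.speech.v1.RecognitionMetadata parsed =
//       com.google.cloud.speech.v1.RecognitionMetadata.parser().parseFrom(bytes);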