blob: fc0f00312797682b64e7bb80cce5427fea00db8a [file] [log] [blame]
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google_streaming_api.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include "google_streaming_api.pb.h"
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/port.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/stubs/starboard_poem.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
// @@protoc_insertion_point(includes)
namespace cobalt {
namespace speech {
namespace proto {
// Frees the heap-allocated default instances created by
// protobuf_AddDesc_google_5fstreaming_5fapi_2eproto(); registered with
// ::google::protobuf::internal::OnShutdown() below so it runs at library
// shutdown.
void protobuf_ShutdownFile_google_5fstreaming_5fapi_2eproto() {
delete SpeechRecognitionEvent::default_instance_;
delete SpeechRecognitionResult::default_instance_;
delete SpeechRecognitionAlternative::default_instance_;
}
// One-time registration of this file's default instances. Two build modes
// share the same body via the preprocessor:
//  - GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER: the work lives in *_impl() and is
//    invoked lazily through GoogleOnceInit() (wrapper further below).
//  - otherwise: the function itself is idempotent via a static bool and is
//    forced to run at static-initialization time by the struct below.
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto_impl() {
GOOGLE_PROTOBUF_VERIFY_VERSION;
#else
void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto() {
static bool already_here = false;
if (already_here) return;
already_here = true;
GOOGLE_PROTOBUF_VERIFY_VERSION;
#endif
// Allocate the default instance of each message, run its post-allocation
// initialization, and arrange for deletion at shutdown.
SpeechRecognitionEvent::default_instance_ = new SpeechRecognitionEvent();
SpeechRecognitionResult::default_instance_ = new SpeechRecognitionResult();
SpeechRecognitionAlternative::default_instance_ = new SpeechRecognitionAlternative();
SpeechRecognitionEvent::default_instance_->InitAsDefaultInstance();
SpeechRecognitionResult::default_instance_->InitAsDefaultInstance();
SpeechRecognitionAlternative::default_instance_->InitAsDefaultInstance();
::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_google_5fstreaming_5fapi_2eproto);
}
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
// Lazy-init mode: public entry point funnels through a once-guard.
GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AddDesc_google_5fstreaming_5fapi_2eproto_once_);
void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto() {
::google::protobuf::GoogleOnceInit(&protobuf_AddDesc_google_5fstreaming_5fapi_2eproto_once_,
&protobuf_AddDesc_google_5fstreaming_5fapi_2eproto_impl);
}
#else
// Force AddDescriptors() to be called at static initialization time.
struct StaticDescriptorInitializer_google_5fstreaming_5fapi_2eproto {
StaticDescriptorInitializer_google_5fstreaming_5fapi_2eproto() {
protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
}
} static_descriptor_initializer_google_5fstreaming_5fapi_2eproto_;
#endif
namespace {
// Aborts (GOOGLE_CHECK(false)) with this file's name and the given source
// line; called by MergeFrom() on self-merge. Kept out-of-line, cold, and
// non-inlined so the failure path does not bloat callers.
static void MergeFromFail(int line) GOOGLE_ATTRIBUTE_COLD;
GOOGLE_ATTRIBUTE_NOINLINE static void MergeFromFail(int line) {
GOOGLE_CHECK(false) << __FILE__ << ":" << line;
}
} // namespace
// ===================================================================
// Callback target used by MergePartialFromCodedStream() (via
// LazyStringOutputStream) to obtain the unknown-fields string on demand.
static ::std::string* MutableUnknownFieldsForSpeechRecognitionEvent(
SpeechRecognitionEvent* ptr) {
return ptr->mutable_unknown_fields();
}
// Returns true when |value| names a defined StatusCode enum value.
// StatusCode is a dense range: 0 (STATUS_SUCCESS) through
// 8 (STATUS_LANGUAGE_NOT_SUPPORTED), so a simple range check suffices.
bool SpeechRecognitionEvent_StatusCode_IsValid(int value) {
return value >= 0 && value <= 8;
}
// Out-of-class definitions for the in-class StatusCode constants so they can
// be ODR-used; skipped on pre-2015 MSVC, which rejects them.
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_SUCCESS;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_NO_SPEECH;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_ABORTED;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_AUDIO_CAPTURE;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_NETWORK;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_NOT_ALLOWED;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_SERVICE_NOT_ALLOWED;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_BAD_GRAMMAR;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::STATUS_LANGUAGE_NOT_SUPPORTED;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::StatusCode_MIN;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::StatusCode_MAX;
const int SpeechRecognitionEvent::StatusCode_ARRAYSIZE;
#endif  // !defined(_MSC_VER) || _MSC_VER >= 1900
// Returns true when |value| names a defined EndpointerEventType enum value.
// EndpointerEventType is a dense range: 0 (START_OF_SPEECH) through
// 3 (END_OF_UTTERANCE), so a simple range check suffices.
bool SpeechRecognitionEvent_EndpointerEventType_IsValid(int value) {
return 0 <= value && value <= 3;
}
// Out-of-class definitions for the EndpointerEventType constants (ODR-use);
// skipped on pre-2015 MSVC.
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::START_OF_SPEECH;
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::END_OF_SPEECH;
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::END_OF_AUDIO;
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::END_OF_UTTERANCE;
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::EndpointerEventType_MIN;
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::EndpointerEventType_MAX;
const int SpeechRecognitionEvent::EndpointerEventType_ARRAYSIZE;
#endif  // !defined(_MSC_VER) || _MSC_VER >= 1900
// Field-number constants for SpeechRecognitionEvent (same MSVC caveat).
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int SpeechRecognitionEvent::kStatusFieldNumber;
const int SpeechRecognitionEvent::kResultFieldNumber;
const int SpeechRecognitionEvent::kEndpointFieldNumber;
#endif  // !defined(_MSC_VER) || _MSC_VER >= 1900
// Default constructor: non-arena message; field initialization is shared
// with the copy constructor via SharedCtor().
SpeechRecognitionEvent::SpeechRecognitionEvent()
: ::google::protobuf::MessageLite(), _arena_ptr_(NULL) {
SharedCtor();
// @@protoc_insertion_point(constructor:cobalt.speech.proto.SpeechRecognitionEvent)
}
// No cross-message default sub-instances to wire up for this message.
void SpeechRecognitionEvent::InitAsDefaultInstance() {
}
// Copy constructor: default-initialize, then merge fields from |from|.
SpeechRecognitionEvent::SpeechRecognitionEvent(const SpeechRecognitionEvent& from)
: ::google::protobuf::MessageLite(),
_arena_ptr_(NULL) {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cobalt.speech.proto.SpeechRecognitionEvent)
}
// Shared constructor body: force-initialize the process-wide empty string,
// point the unknown-fields string at it, and zero scalars and has-bits.
void SpeechRecognitionEvent::SharedCtor() {
::google::protobuf::internal::GetEmptyString();
_cached_size_ = 0;
_unknown_fields_.UnsafeSetDefault(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
status_ = 0;
endpoint_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
SpeechRecognitionEvent::~SpeechRecognitionEvent() {
// @@protoc_insertion_point(destructor:cobalt.speech.proto.SpeechRecognitionEvent)
SharedDtor();
}
// Shared destructor body: release the unknown-fields string. The trailing
// default-instance check is generated boilerplate with an intentionally
// empty body — this message has no extra cleanup for non-default instances.
void SpeechRecognitionEvent::SharedDtor() {
_unknown_fields_.DestroyNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
if (this != &default_instance()) {
#else
if (this != default_instance_) {
#endif
}
}
// Stores a size computed by ByteSize(); const via mutable _cached_size_.
void SpeechRecognitionEvent::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns the singleton default instance, triggering file-level
// registration first if it has not happened yet.
const SpeechRecognitionEvent& SpeechRecognitionEvent::default_instance() {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#else
if (default_instance_ == NULL) protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#endif
return *default_instance_;
}
SpeechRecognitionEvent* SpeechRecognitionEvent::default_instance_ = NULL;
// Heap-allocates a new message; if |arena| is given, the arena takes
// ownership (this lite message is not itself arena-constructed).
SpeechRecognitionEvent* SpeechRecognitionEvent::New(::google::protobuf::Arena* arena) const {
SpeechRecognitionEvent* n = new SpeechRecognitionEvent;
if (arena != NULL) {
arena->Own(n);
}
return n;
}
// Resets all fields to their defaults. The ZR_ macros memset the contiguous
// member span from |first| to |last| in one call, using offsetof (or a
// fake-pointer offset computation on non-clang) to find the span's extent.
void SpeechRecognitionEvent::Clear() {
// @@protoc_insertion_point(message_clear_start:cobalt.speech.proto.SpeechRecognitionEvent)
#if defined(__clang__)
#define ZR_HELPER_(f) \
  _Pragma("clang diagnostic push") \
  _Pragma("clang diagnostic ignored \"-Winvalid-offsetof\"") \
  __builtin_offsetof(SpeechRecognitionEvent, f) \
  _Pragma("clang diagnostic pop")
#else
#define ZR_HELPER_(f) reinterpret_cast<char*>(\
  &reinterpret_cast<SpeechRecognitionEvent*>(16)->f)
#endif
#define ZR_(first, last) do {\
  ::memset(&first, 0,\
           ZR_HELPER_(last) - ZR_HELPER_(first) + sizeof(last));\
} while (0)
// Zeroes status_ through endpoint_ in one memset.
ZR_(status_, endpoint_);
#undef ZR_HELPER_
#undef ZR_
result_.Clear();
::memset(_has_bits_, 0, sizeof(_has_bits_));
_unknown_fields_.ClearToEmptyNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
// Wire-format parser (lite runtime: no reflection/descriptors). Unknown
// fields — and enum values outside the defined range — are written verbatim
// into the unknown-fields string so they survive a parse/serialize round
// trip. Returns false on a malformed stream.
bool SpeechRecognitionEvent::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// Lazily materializes mutable_unknown_fields() only if something is
// actually written to it.
::google::protobuf::io::LazyStringOutputStream unknown_fields_string(
::google::protobuf::internal::NewPermanentCallback(
&MutableUnknownFieldsForSpeechRecognitionEvent, this));
::google::protobuf::io::CodedOutputStream unknown_fields_stream(
&unknown_fields_string, false);
// @@protoc_insertion_point(parse_start:cobalt.speech.proto.SpeechRecognitionEvent)
for (;;) {
// Cutoff 127: tags above one varint byte (or end-of-stream) set
// p.second == false and go through handle_unusual.
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cobalt.speech.proto.SpeechRecognitionEvent.StatusCode status = 1 [default = STATUS_SUCCESS];
case 1: {
if (tag == 8) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode_IsValid(value)) {
set_status(static_cast< ::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode >(value));
} else {
// Out-of-range enum: preserve tag + value in unknown fields.
unknown_fields_stream.WriteVarint32(8);
unknown_fields_stream.WriteVarint32(value);
}
} else {
goto handle_unusual;
}
// Fast path: expect the next field in field-number order (result = 2).
if (input->ExpectTag(18)) goto parse_result;
break;
}
// repeated .cobalt.speech.proto.SpeechRecognitionResult result = 2;
case 2: {
if (tag == 18) {
parse_result:
DO_(input->IncrementRecursionDepth());
parse_loop_result:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtualNoRecursionDepth(
input, add_result()));
} else {
goto handle_unusual;
}
// Loop while consecutive 'result' elements follow; then restore depth.
if (input->ExpectTag(18)) goto parse_loop_result;
input->UnsafeDecrementRecursionDepth();
if (input->ExpectTag(32)) goto parse_endpoint;
break;
}
// optional .cobalt.speech.proto.SpeechRecognitionEvent.EndpointerEventType endpoint = 4;
case 4: {
if (tag == 32) {
parse_endpoint:
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
if (::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType_IsValid(value)) {
set_endpoint(static_cast< ::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType >(value));
} else {
// Out-of-range enum: preserve tag + value in unknown fields.
unknown_fields_stream.WriteVarint32(32);
unknown_fields_stream.WriteVarint32(value);
}
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
// Tag 0 or an end-group tag terminates the message.
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
// Anything else is skipped into the unknown-fields stream.
DO_(::google::protobuf::internal::WireFormatLite::SkipField(
input, tag, &unknown_fields_stream));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cobalt.speech.proto.SpeechRecognitionEvent)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cobalt.speech.proto.SpeechRecognitionEvent)
return false;
#undef DO_
}
// Serializes set fields in field-number order, then appends the raw bytes of
// any preserved unknown fields.
void SpeechRecognitionEvent::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cobalt.speech.proto.SpeechRecognitionEvent)
// optional .cobalt.speech.proto.SpeechRecognitionEvent.StatusCode status = 1 [default = STATUS_SUCCESS];
if (has_status()) {
::google::protobuf::internal::WireFormatLite::WriteEnum(
1, this->status(), output);
}
// repeated .cobalt.speech.proto.SpeechRecognitionResult result = 2;
for (unsigned int i = 0, n = this->result_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessage(
2, this->result(i), output);
}
// optional .cobalt.speech.proto.SpeechRecognitionEvent.EndpointerEventType endpoint = 4;
if (has_endpoint()) {
::google::protobuf::internal::WireFormatLite::WriteEnum(
4, this->endpoint(), output);
}
// Unknown fields were stored as already-encoded wire data; emit verbatim.
output->WriteRaw(unknown_fields().data(),
static_cast<int>(unknown_fields().size()));
// @@protoc_insertion_point(serialize_end:cobalt.speech.proto.SpeechRecognitionEvent)
}
// Computes the serialized size in bytes and caches it in _cached_size_ for
// SerializeWithCachedSizes().
int SpeechRecognitionEvent::ByteSize() const {
// @@protoc_insertion_point(message_byte_size_start:cobalt.speech.proto.SpeechRecognitionEvent)
int total_size = 0;
// Mask 5u = has-bits 0 (status) and 2 (endpoint); skip both if neither set.
if (_has_bits_[0 / 32] & 5u) {
// optional .cobalt.speech.proto.SpeechRecognitionEvent.StatusCode status = 1 [default = STATUS_SUCCESS];
if (has_status()) {
// 1 byte for the tag plus the enum's varint size.
total_size += 1 +
::google::protobuf::internal::WireFormatLite::EnumSize(this->status());
}
// optional .cobalt.speech.proto.SpeechRecognitionEvent.EndpointerEventType endpoint = 4;
if (has_endpoint()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::EnumSize(this->endpoint());
}
}
// repeated .cobalt.speech.proto.SpeechRecognitionResult result = 2;
total_size += 1 * this->result_size();
for (int i = 0; i < this->result_size(); i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->result(i));
}
// Unknown fields are stored pre-encoded; their size is just the byte count.
total_size += unknown_fields().size();
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
// MessageLite override: down-casts (unchecked in release builds) and merges.
void SpeechRecognitionEvent::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite& from) {
MergeFrom(*::google::protobuf::down_cast<const SpeechRecognitionEvent*>(&from));
}
// Field-by-field merge; self-merge is a fatal error (MergeFromFail).
void SpeechRecognitionEvent::MergeFrom(const SpeechRecognitionEvent& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:cobalt.speech.proto.SpeechRecognitionEvent)
if (GOOGLE_PREDICT_FALSE(&from == this)) MergeFromFail(__LINE__);
result_.MergeFrom(from.result_);
// Only inspect individual has-bits if any bit in the first word is set.
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_status()) {
set_status(from.status());
}
if (from.has_endpoint()) {
set_endpoint(from.endpoint());
}
}
// Unknown fields are raw wire bytes, so merging is plain concatenation.
if (!from.unknown_fields().empty()) {
mutable_unknown_fields()->append(from.unknown_fields());
}
}
// Copy = Clear + MergeFrom; self-copy is a no-op.
void SpeechRecognitionEvent::CopyFrom(const SpeechRecognitionEvent& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:cobalt.speech.proto.SpeechRecognitionEvent)
if (&from == this) return;
Clear();
MergeFrom(from);
}
// This message has no required fields, so it is always initialized.
bool SpeechRecognitionEvent::IsInitialized() const {
return true;
}
void SpeechRecognitionEvent::Swap(SpeechRecognitionEvent* other) {
if (other == this) return;
InternalSwap(other);
}
// Member-wise swap of all fields, has-bits, unknown fields and cached size.
void SpeechRecognitionEvent::InternalSwap(SpeechRecognitionEvent* other) {
std::swap(status_, other->status_);
result_.UnsafeArenaSwap(&other->result_);
std::swap(endpoint_, other->endpoint_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
// Lite runtime carries no descriptor, so the type name is hard-coded.
::std::string SpeechRecognitionEvent::GetTypeName() const {
return "cobalt.speech.proto.SpeechRecognitionEvent";
}
// Accessor definitions, emitted here (rather than inline in the header) when
// PROTOBUF_INLINE_NOT_IN_HEADERS is set.
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// SpeechRecognitionEvent
// optional .cobalt.speech.proto.SpeechRecognitionEvent.StatusCode status = 1 [default = STATUS_SUCCESS];
// 'status' presence is tracked by has-bit 0 (mask 0x1).
bool SpeechRecognitionEvent::has_status() const {
return (_has_bits_[0] & 0x00000001u) != 0;
}
void SpeechRecognitionEvent::set_has_status() {
_has_bits_[0] |= 0x00000001u;
}
void SpeechRecognitionEvent::clear_has_status() {
_has_bits_[0] &= ~0x00000001u;
}
void SpeechRecognitionEvent::clear_status() {
status_ = 0;
clear_has_status();
}
::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::status() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionEvent.status)
return static_cast< ::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode >(status_);
}
void SpeechRecognitionEvent::set_status(::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode value) {
// Debug-only guard: callers must pass a defined enum value.
assert(::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode_IsValid(value));
set_has_status();
status_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionEvent.status)
}
// repeated .cobalt.speech.proto.SpeechRecognitionResult result = 2;
// Repeated field: no has-bit; presence is simply size() > 0.
int SpeechRecognitionEvent::result_size() const {
return result_.size();
}
void SpeechRecognitionEvent::clear_result() {
result_.Clear();
}
const ::cobalt::speech::proto::SpeechRecognitionResult& SpeechRecognitionEvent::result(int index) const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_.Get(index);
}
::cobalt::speech::proto::SpeechRecognitionResult* SpeechRecognitionEvent::mutable_result(int index) {
// @@protoc_insertion_point(field_mutable:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_.Mutable(index);
}
// Appends a new default-initialized element and returns it for filling in.
::cobalt::speech::proto::SpeechRecognitionResult* SpeechRecognitionEvent::add_result() {
// @@protoc_insertion_point(field_add:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_.Add();
}
::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionResult >*
SpeechRecognitionEvent::mutable_result() {
// @@protoc_insertion_point(field_mutable_list:cobalt.speech.proto.SpeechRecognitionEvent.result)
return &result_;
}
const ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionResult >&
SpeechRecognitionEvent::result() const {
// @@protoc_insertion_point(field_list:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_;
}
// optional .cobalt.speech.proto.SpeechRecognitionEvent.EndpointerEventType endpoint = 4;
// 'endpoint' presence is tracked by has-bit 2 (mask 0x4).
bool SpeechRecognitionEvent::has_endpoint() const {
return (_has_bits_[0] & 0x00000004u) != 0;
}
void SpeechRecognitionEvent::set_has_endpoint() {
_has_bits_[0] |= 0x00000004u;
}
void SpeechRecognitionEvent::clear_has_endpoint() {
_has_bits_[0] &= ~0x00000004u;
}
void SpeechRecognitionEvent::clear_endpoint() {
endpoint_ = 0;
clear_has_endpoint();
}
::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::endpoint() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionEvent.endpoint)
return static_cast< ::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType >(endpoint_);
}
void SpeechRecognitionEvent::set_endpoint(::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType value) {
// Debug-only guard: callers must pass a defined enum value.
assert(::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType_IsValid(value));
set_has_endpoint();
endpoint_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionEvent.endpoint)
}
#endif  // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
// Callback target used by MergePartialFromCodedStream() (via
// LazyStringOutputStream) to obtain the unknown-fields string on demand.
static ::std::string* MutableUnknownFieldsForSpeechRecognitionResult(
SpeechRecognitionResult* ptr) {
return ptr->mutable_unknown_fields();
}
// Field-number constant definitions (ODR-use); skipped on pre-2015 MSVC.
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int SpeechRecognitionResult::kAlternativeFieldNumber;
const int SpeechRecognitionResult::kFinalFieldNumber;
const int SpeechRecognitionResult::kStabilityFieldNumber;
#endif  // !defined(_MSC_VER) || _MSC_VER >= 1900
// Default constructor: non-arena message; field initialization is shared
// with the copy constructor via SharedCtor().
SpeechRecognitionResult::SpeechRecognitionResult()
: ::google::protobuf::MessageLite(), _arena_ptr_(NULL) {
SharedCtor();
// @@protoc_insertion_point(constructor:cobalt.speech.proto.SpeechRecognitionResult)
}
// No cross-message default sub-instances to wire up for this message.
void SpeechRecognitionResult::InitAsDefaultInstance() {
}
// Copy constructor: default-initialize, then merge fields from |from|.
SpeechRecognitionResult::SpeechRecognitionResult(const SpeechRecognitionResult& from)
: ::google::protobuf::MessageLite(),
_arena_ptr_(NULL) {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cobalt.speech.proto.SpeechRecognitionResult)
}
// Shared constructor body: force-initialize the process-wide empty string,
// point the unknown-fields string at it, and zero scalars and has-bits.
void SpeechRecognitionResult::SharedCtor() {
::google::protobuf::internal::GetEmptyString();
_cached_size_ = 0;
_unknown_fields_.UnsafeSetDefault(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
final_ = false;
stability_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
SpeechRecognitionResult::~SpeechRecognitionResult() {
// @@protoc_insertion_point(destructor:cobalt.speech.proto.SpeechRecognitionResult)
SharedDtor();
}
// Shared destructor body: release the unknown-fields string. The trailing
// default-instance check is generated boilerplate with an intentionally
// empty body.
void SpeechRecognitionResult::SharedDtor() {
_unknown_fields_.DestroyNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
if (this != &default_instance()) {
#else
if (this != default_instance_) {
#endif
}
}
// Stores a size computed by ByteSize(); const via mutable _cached_size_.
void SpeechRecognitionResult::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns the singleton default instance, triggering file-level
// registration first if it has not happened yet.
const SpeechRecognitionResult& SpeechRecognitionResult::default_instance() {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#else
if (default_instance_ == NULL) protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#endif
return *default_instance_;
}
SpeechRecognitionResult* SpeechRecognitionResult::default_instance_ = NULL;
// Heap-allocates a new message; if |arena| is given, the arena takes
// ownership (this lite message is not itself arena-constructed).
SpeechRecognitionResult* SpeechRecognitionResult::New(::google::protobuf::Arena* arena) const {
SpeechRecognitionResult* n = new SpeechRecognitionResult;
if (arena != NULL) {
arena->Own(n);
}
return n;
}
// Resets all fields to their defaults. The ZR_ macros memset the contiguous
// member span from |first| to |last| in one call, using offsetof (or a
// fake-pointer offset computation on non-clang) to find the span's extent.
void SpeechRecognitionResult::Clear() {
// @@protoc_insertion_point(message_clear_start:cobalt.speech.proto.SpeechRecognitionResult)
#if defined(__clang__)
#define ZR_HELPER_(f) \
  _Pragma("clang diagnostic push") \
  _Pragma("clang diagnostic ignored \"-Winvalid-offsetof\"") \
  __builtin_offsetof(SpeechRecognitionResult, f) \
  _Pragma("clang diagnostic pop")
#else
#define ZR_HELPER_(f) reinterpret_cast<char*>(\
  &reinterpret_cast<SpeechRecognitionResult*>(16)->f)
#endif
#define ZR_(first, last) do {\
  ::memset(&first, 0,\
           ZR_HELPER_(last) - ZR_HELPER_(first) + sizeof(last));\
} while (0)
// Zeroes final_ through stability_ in one memset.
ZR_(final_, stability_);
#undef ZR_HELPER_
#undef ZR_
alternative_.Clear();
::memset(_has_bits_, 0, sizeof(_has_bits_));
_unknown_fields_.ClearToEmptyNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
// Wire-format parser (lite runtime: no reflection/descriptors). Unknown
// fields are preserved verbatim in the unknown-fields string. Returns false
// on a malformed stream.
bool SpeechRecognitionResult::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// Lazily materializes mutable_unknown_fields() only if something is
// actually written to it.
::google::protobuf::io::LazyStringOutputStream unknown_fields_string(
::google::protobuf::internal::NewPermanentCallback(
&MutableUnknownFieldsForSpeechRecognitionResult, this));
::google::protobuf::io::CodedOutputStream unknown_fields_stream(
&unknown_fields_string, false);
// @@protoc_insertion_point(parse_start:cobalt.speech.proto.SpeechRecognitionResult)
for (;;) {
// Cutoff 127: tags above one varint byte (or end-of-stream) set
// p.second == false and go through handle_unusual.
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated .cobalt.speech.proto.SpeechRecognitionAlternative alternative = 1;
case 1: {
if (tag == 10) {
DO_(input->IncrementRecursionDepth());
parse_loop_alternative:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtualNoRecursionDepth(
input, add_alternative()));
} else {
goto handle_unusual;
}
// Loop while consecutive 'alternative' elements follow, then restore
// recursion depth and try the next expected field.
if (input->ExpectTag(10)) goto parse_loop_alternative;
input->UnsafeDecrementRecursionDepth();
if (input->ExpectTag(16)) goto parse_final;
break;
}
// optional bool final = 2 [default = false];
case 2: {
if (tag == 16) {
parse_final:
// Reads directly into final_, then marks the field present.
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
input, &final_)));
set_has_final();
} else {
goto handle_unusual;
}
if (input->ExpectTag(29)) goto parse_stability;
break;
}
// optional float stability = 3;
case 3: {
if (tag == 29) {
parse_stability:
// Reads directly into stability_, then marks the field present.
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>(
input, &stability_)));
set_has_stability();
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
// Tag 0 or an end-group tag terminates the message.
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
// Anything else is skipped into the unknown-fields stream.
DO_(::google::protobuf::internal::WireFormatLite::SkipField(
input, tag, &unknown_fields_stream));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cobalt.speech.proto.SpeechRecognitionResult)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cobalt.speech.proto.SpeechRecognitionResult)
return false;
#undef DO_
}
// Serializes set fields in field-number order, then appends the raw bytes of
// any preserved unknown fields.
void SpeechRecognitionResult::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cobalt.speech.proto.SpeechRecognitionResult)
// repeated .cobalt.speech.proto.SpeechRecognitionAlternative alternative = 1;
for (unsigned int i = 0, n = this->alternative_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessage(
1, this->alternative(i), output);
}
// optional bool final = 2 [default = false];
if (has_final()) {
::google::protobuf::internal::WireFormatLite::WriteBool(2, this->final(), output);
}
// optional float stability = 3;
if (has_stability()) {
::google::protobuf::internal::WireFormatLite::WriteFloat(3, this->stability(), output);
}
// Unknown fields were stored as already-encoded wire data; emit verbatim.
output->WriteRaw(unknown_fields().data(),
static_cast<int>(unknown_fields().size()));
// @@protoc_insertion_point(serialize_end:cobalt.speech.proto.SpeechRecognitionResult)
}
// Computes the serialized size in bytes and caches it in _cached_size_ for
// SerializeWithCachedSizes().
int SpeechRecognitionResult::ByteSize() const {
// @@protoc_insertion_point(message_byte_size_start:cobalt.speech.proto.SpeechRecognitionResult)
int total_size = 0;
// Mask 6u = has-bits 1 (final) and 2 (stability); skip both if neither set.
if (_has_bits_[1 / 32] & 6u) {
// optional bool final = 2 [default = false];
if (has_final()) {
// 1 byte tag + 1 byte bool payload.
total_size += 1 + 1;
}
// optional float stability = 3;
if (has_stability()) {
// 1 byte tag + 4 byte fixed32 payload.
total_size += 1 + 4;
}
}
// repeated .cobalt.speech.proto.SpeechRecognitionAlternative alternative = 1;
total_size += 1 * this->alternative_size();
for (int i = 0; i < this->alternative_size(); i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->alternative(i));
}
// Unknown fields are stored pre-encoded; their size is just the byte count.
total_size += unknown_fields().size();
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
// MessageLite override: down-casts (unchecked in release builds) and merges.
void SpeechRecognitionResult::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite& from) {
MergeFrom(*::google::protobuf::down_cast<const SpeechRecognitionResult*>(&from));
}
// Field-by-field merge; self-merge is a fatal error (MergeFromFail).
void SpeechRecognitionResult::MergeFrom(const SpeechRecognitionResult& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:cobalt.speech.proto.SpeechRecognitionResult)
if (GOOGLE_PREDICT_FALSE(&from == this)) MergeFromFail(__LINE__);
alternative_.MergeFrom(from.alternative_);
// Only inspect individual has-bits if any relevant bit is set.
if (from._has_bits_[1 / 32] & (0xffu << (1 % 32))) {
if (from.has_final()) {
set_final(from.final());
}
if (from.has_stability()) {
set_stability(from.stability());
}
}
// Unknown fields are raw wire bytes, so merging is plain concatenation.
if (!from.unknown_fields().empty()) {
mutable_unknown_fields()->append(from.unknown_fields());
}
}
// Copy = Clear + MergeFrom; self-copy is a no-op.
void SpeechRecognitionResult::CopyFrom(const SpeechRecognitionResult& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:cobalt.speech.proto.SpeechRecognitionResult)
if (&from == this) return;
Clear();
MergeFrom(from);
}
// This message has no required fields, so it is always initialized.
bool SpeechRecognitionResult::IsInitialized() const {
return true;
}
void SpeechRecognitionResult::Swap(SpeechRecognitionResult* other) {
if (other == this) return;
InternalSwap(other);
}
// Member-wise swap of all fields, has-bits, unknown fields and cached size.
void SpeechRecognitionResult::InternalSwap(SpeechRecognitionResult* other) {
alternative_.UnsafeArenaSwap(&other->alternative_);
std::swap(final_, other->final_);
std::swap(stability_, other->stability_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
// Lite runtime carries no descriptor, so the type name is hard-coded.
::std::string SpeechRecognitionResult::GetTypeName() const {
return "cobalt.speech.proto.SpeechRecognitionResult";
}
// Accessor definitions, emitted here (rather than inline in the header) when
// PROTOBUF_INLINE_NOT_IN_HEADERS is set.
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// SpeechRecognitionResult
// repeated .cobalt.speech.proto.SpeechRecognitionAlternative alternative = 1;
// Repeated field: no has-bit; presence is simply size() > 0.
int SpeechRecognitionResult::alternative_size() const {
return alternative_.size();
}
void SpeechRecognitionResult::clear_alternative() {
alternative_.Clear();
}
const ::cobalt::speech::proto::SpeechRecognitionAlternative& SpeechRecognitionResult::alternative(int index) const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_.Get(index);
}
::cobalt::speech::proto::SpeechRecognitionAlternative* SpeechRecognitionResult::mutable_alternative(int index) {
// @@protoc_insertion_point(field_mutable:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_.Mutable(index);
}
// Appends a new default-initialized element and returns it for filling in.
::cobalt::speech::proto::SpeechRecognitionAlternative* SpeechRecognitionResult::add_alternative() {
// @@protoc_insertion_point(field_add:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_.Add();
}
::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionAlternative >*
SpeechRecognitionResult::mutable_alternative() {
// @@protoc_insertion_point(field_mutable_list:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return &alternative_;
}
const ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionAlternative >&
SpeechRecognitionResult::alternative() const {
// @@protoc_insertion_point(field_list:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_;
}
// optional bool final = 2 [default = false];
// 'final' presence is tracked by has-bit 1 (mask 0x2).
bool SpeechRecognitionResult::has_final() const {
return (_has_bits_[0] & 0x00000002u) != 0;
}
void SpeechRecognitionResult::set_has_final() {
_has_bits_[0] |= 0x00000002u;
}
void SpeechRecognitionResult::clear_has_final() {
_has_bits_[0] &= ~0x00000002u;
}
void SpeechRecognitionResult::clear_final() {
final_ = false;
clear_has_final();
}
bool SpeechRecognitionResult::final() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionResult.final)
return final_;
}
void SpeechRecognitionResult::set_final(bool value) {
set_has_final();
final_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionResult.final)
}
// optional float stability = 3;
// 'stability' presence is tracked by has-bit 2 (mask 0x4).
bool SpeechRecognitionResult::has_stability() const {
return (_has_bits_[0] & 0x00000004u) != 0;
}
void SpeechRecognitionResult::set_has_stability() {
_has_bits_[0] |= 0x00000004u;
}
void SpeechRecognitionResult::clear_has_stability() {
_has_bits_[0] &= ~0x00000004u;
}
void SpeechRecognitionResult::clear_stability() {
stability_ = 0;
clear_has_stability();
}
float SpeechRecognitionResult::stability() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionResult.stability)
return stability_;
}
void SpeechRecognitionResult::set_stability(float value) {
set_has_stability();
stability_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionResult.stability)
}
#endif  // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
// Callback target used by MergePartialFromCodedStream() (via
// LazyStringOutputStream) to obtain the unknown-fields string on demand.
static ::std::string* MutableUnknownFieldsForSpeechRecognitionAlternative(
SpeechRecognitionAlternative* ptr) {
return ptr->mutable_unknown_fields();
}
// Field-number constant definitions (ODR-use); skipped on pre-2015 MSVC.
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int SpeechRecognitionAlternative::kTranscriptFieldNumber;
const int SpeechRecognitionAlternative::kConfidenceFieldNumber;
#endif  // !defined(_MSC_VER) || _MSC_VER >= 1900
SpeechRecognitionAlternative::SpeechRecognitionAlternative()
: ::google::protobuf::MessageLite(), _arena_ptr_(NULL) {
SharedCtor();
// @@protoc_insertion_point(constructor:cobalt.speech.proto.SpeechRecognitionAlternative)
}
void SpeechRecognitionAlternative::InitAsDefaultInstance() {
}
SpeechRecognitionAlternative::SpeechRecognitionAlternative(const SpeechRecognitionAlternative& from)
: ::google::protobuf::MessageLite(),
_arena_ptr_(NULL) {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cobalt.speech.proto.SpeechRecognitionAlternative)
}
void SpeechRecognitionAlternative::SharedCtor() {
::google::protobuf::internal::GetEmptyString();
_cached_size_ = 0;
_unknown_fields_.UnsafeSetDefault(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
transcript_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
confidence_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
// Destructor: all cleanup is delegated to SharedDtor().
SpeechRecognitionAlternative::~SpeechRecognitionAlternative() {
// @@protoc_insertion_point(destructor:cobalt.speech.proto.SpeechRecognitionAlternative)
SharedDtor();
}
// Frees heap storage owned by the string fields; these calls are no-ops
// when the fields still point at the shared empty default.
void SpeechRecognitionAlternative::SharedDtor() {
_unknown_fields_.DestroyNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
transcript_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
// Generated guard for cleanup that only applies to non-default instances;
// the body is empty because this message owns no sub-message pointers.
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
if (this != &default_instance()) {
#else
if (this != default_instance_) {
#endif
}
}
// Stores a previously computed byte size for GetCachedSize(). The write to
// a member from a const method implies _cached_size_ is mutable; the macros
// bracket the write for protobuf's concurrent-write annotations.
void SpeechRecognitionAlternative::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns the shared immutable default instance, triggering the file-level
// AddDesc initializer on first use so default_instance_ is populated.
const SpeechRecognitionAlternative& SpeechRecognitionAlternative::default_instance() {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#else
if (default_instance_ == NULL) protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#endif
return *default_instance_;
}
// Allocated by protobuf_AddDesc_... and deleted by protobuf_ShutdownFile_...
SpeechRecognitionAlternative* SpeechRecognitionAlternative::default_instance_ = NULL;
// Heap-allocates a fresh default-constructed message. If an arena is
// supplied, deletion responsibility is transferred to it via Own().
SpeechRecognitionAlternative* SpeechRecognitionAlternative::New(::google::protobuf::Arena* arena) const {
  SpeechRecognitionAlternative* created = new SpeechRecognitionAlternative;
  if (arena != NULL) {
    arena->Own(created);
  }
  return created;
}
// Resets every field to its default and discards preserved unknown fields.
void SpeechRecognitionAlternative::Clear() {
// @@protoc_insertion_point(message_clear_start:cobalt.speech.proto.SpeechRecognitionAlternative)
// 3u masks the has-bits of both declared fields; skip the body entirely
// when neither field is set.
if (_has_bits_[0 / 32] & 3u) {
if (has_transcript()) {
transcript_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
confidence_ = 0;
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
_unknown_fields_.ClearToEmptyNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
// Parses wire-format fields from `input` until end-of-stream or an
// end-group tag, merging values into this message. Unrecognized fields are
// not dropped: they are copied verbatim into _unknown_fields_ through the
// lazy output stream below. Returns false on a malformed payload.
bool SpeechRecognitionAlternative::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// Lazily materializes the unknown-field string only if a field is
// actually skipped.
::google::protobuf::io::LazyStringOutputStream unknown_fields_string(
::google::protobuf::internal::NewPermanentCallback(
&MutableUnknownFieldsForSpeechRecognitionAlternative, this));
::google::protobuf::io::CodedOutputStream unknown_fields_stream(
&unknown_fields_string, false);
// @@protoc_insertion_point(parse_start:cobalt.speech.proto.SpeechRecognitionAlternative)
for (;;) {
// Cutoff 127: tags of both declared fields fit in a single byte.
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional string transcript = 1;
case 1: {
if (tag == 10) {  // field 1, wire type 2 (length-delimited)
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_transcript()));
} else {
goto handle_unusual;
}
// Fast path: confidence (tag 21) typically follows immediately.
if (input->ExpectTag(21)) goto parse_confidence;
break;
}
// optional float confidence = 2;
case 2: {
if (tag == 21) {  // field 2, wire type 5 (32-bit fixed)
parse_confidence:
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>(
input, &confidence_)));
set_has_confidence();
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
// Tag 0 (end of stream) or an end-group tag terminates the message;
// any other unrecognized field is preserved as raw unknown bytes.
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(
input, tag, &unknown_fields_stream));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cobalt.speech.proto.SpeechRecognitionAlternative)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cobalt.speech.proto.SpeechRecognitionAlternative)
return false;
#undef DO_
}
// Writes set fields in field-number order, followed by any preserved
// unknown-field bytes, to `output`.
void SpeechRecognitionAlternative::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cobalt.speech.proto.SpeechRecognitionAlternative)
// optional string transcript = 1;
if (has_transcript()) {
// MaybeAliased: the stream may reference the string's buffer directly
// instead of copying it.
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
1, this->transcript(), output);
}
// optional float confidence = 2;
if (has_confidence()) {
::google::protobuf::internal::WireFormatLite::WriteFloat(2, this->confidence(), output);
}
// Unknown fields were captured as raw wire-format bytes during parsing.
output->WriteRaw(unknown_fields().data(),
static_cast<int>(unknown_fields().size()));
// @@protoc_insertion_point(serialize_end:cobalt.speech.proto.SpeechRecognitionAlternative)
}
// Computes the serialized size in bytes and caches it in _cached_size_ for
// later use by the serializer.
int SpeechRecognitionAlternative::ByteSize() const {
// @@protoc_insertion_point(message_byte_size_start:cobalt.speech.proto.SpeechRecognitionAlternative)
int total_size = 0;
// 3u covers the has-bits of both declared fields; skip all per-field
// checks when neither is set.
if (_has_bits_[0 / 32] & 3u) {
// optional string transcript = 1;
if (has_transcript()) {
// 1 tag byte + varint length prefix + payload.
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->transcript());
}
// optional float confidence = 2;
if (has_confidence()) {
// 1 tag byte + 4-byte fixed32 float.
total_size += 1 + 4;
}
}
// Preserved unknown fields are emitted verbatim.
total_size += unknown_fields().size();
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
// Lite-runtime merge entry point: there is no reflection, so down_cast is
// used (type-checked in debug builds, unchecked in release builds) before
// delegating to the strongly typed MergeFrom().
void SpeechRecognitionAlternative::CheckTypeAndMergeFrom(
    const ::google::protobuf::MessageLite& from) {
  const SpeechRecognitionAlternative* typed =
      ::google::protobuf::down_cast<const SpeechRecognitionAlternative*>(&from);
  MergeFrom(*typed);
}
// Field-wise merge: fields set in `from` overwrite this message's values,
// and `from`'s unknown-field bytes are appended. Merging a message into
// itself is a fatal error (MergeFromFail).
void SpeechRecognitionAlternative::MergeFrom(const SpeechRecognitionAlternative& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:cobalt.speech.proto.SpeechRecognitionAlternative)
if (GOOGLE_PREDICT_FALSE(&from == this)) MergeFromFail(__LINE__);
// A single has-bits word test skips the per-field checks when `from` has
// no fields set.
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_transcript()) {
set_has_transcript();
transcript_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.transcript_);
}
if (from.has_confidence()) {
set_confidence(from.confidence());
}
}
if (!from.unknown_fields().empty()) {
mutable_unknown_fields()->append(from.unknown_fields());
}
}
// Assignment-style copy: clear this message, then merge everything from
// `from`. Copying from oneself is a no-op.
void SpeechRecognitionAlternative::CopyFrom(const SpeechRecognitionAlternative& from) {
  // @@protoc_insertion_point(class_specific_copy_from_start:cobalt.speech.proto.SpeechRecognitionAlternative)
  if (this == &from) return;
  Clear();
  MergeFrom(from);
}
// Always true: this message declares no required fields.
bool SpeechRecognitionAlternative::IsInitialized() const {
return true;
}
// Exchanges all contents with `other`; swapping with oneself is a no-op.
void SpeechRecognitionAlternative::Swap(SpeechRecognitionAlternative* other) {
  if (this != other) {
    InternalSwap(other);
  }
}
// Member-wise exchange of fields, has-bits, unknown-field buffer and the
// cached size. The swaps are independent, so their order is irrelevant.
void SpeechRecognitionAlternative::InternalSwap(SpeechRecognitionAlternative* other) {
  std::swap(_has_bits_[0], other->_has_bits_[0]);
  transcript_.Swap(&other->transcript_);
  std::swap(confidence_, other->confidence_);
  std::swap(_cached_size_, other->_cached_size_);
  _unknown_fields_.Swap(&other->_unknown_fields_);
}
// Fully-qualified proto message name; the lite runtime has no descriptor
// to derive this from, so it is emitted as a constant.
::std::string SpeechRecognitionAlternative::GetTypeName() const {
  static const char kTypeName[] =
      "cobalt.speech.proto.SpeechRecognitionAlternative";
  return kTypeName;
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// SpeechRecognitionAlternative
// optional string transcript = 1;
// Presence is tracked in bit 0 (mask 0x1) of _has_bits_[0]. The string is
// stored as an ArenaStringPtr sharing the process-wide empty default until
// a value is written.
bool SpeechRecognitionAlternative::has_transcript() const {
return (_has_bits_[0] & 0x00000001u) != 0;
}
void SpeechRecognitionAlternative::set_has_transcript() {
_has_bits_[0] |= 0x00000001u;
}
void SpeechRecognitionAlternative::clear_has_transcript() {
_has_bits_[0] &= ~0x00000001u;
}
// Resets the field to the shared empty default and clears its has-bit.
void SpeechRecognitionAlternative::clear_transcript() {
transcript_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
clear_has_transcript();
}
const ::std::string& SpeechRecognitionAlternative::transcript() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
return transcript_.GetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
void SpeechRecognitionAlternative::set_transcript(const ::std::string& value) {
set_has_transcript();
transcript_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
// Sets from a NUL-terminated C string (copied).
void SpeechRecognitionAlternative::set_transcript(const char* value) {
set_has_transcript();
transcript_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
// Sets from a raw buffer: exactly `size` bytes are copied, so embedded
// NULs are preserved.
void SpeechRecognitionAlternative::set_transcript(const char* value, size_t size) {
set_has_transcript();
transcript_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
// Marks the field present and exposes its owned string for in-place edits.
::std::string* SpeechRecognitionAlternative::mutable_transcript() {
set_has_transcript();
// @@protoc_insertion_point(field_mutable:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
return transcript_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
// Transfers ownership of the stored string to the caller and clears the
// has-bit; the caller must delete the returned string.
::std::string* SpeechRecognitionAlternative::release_transcript() {
// @@protoc_insertion_point(field_release:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
clear_has_transcript();
return transcript_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
// Takes ownership of `transcript`; passing NULL clears the field instead.
void SpeechRecognitionAlternative::set_allocated_transcript(::std::string* transcript) {
if (transcript != NULL) {
set_has_transcript();
} else {
clear_has_transcript();
}
transcript_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), transcript);
// @@protoc_insertion_point(field_set_allocated:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
// optional float confidence = 2;
// Presence is tracked in bit 1 (mask 0x2) of _has_bits_[0].
bool SpeechRecognitionAlternative::has_confidence() const {
  return (_has_bits_[0] & 0x00000002u) != 0;
}
void SpeechRecognitionAlternative::set_has_confidence() {
  _has_bits_[0] |= 0x00000002u;
}
void SpeechRecognitionAlternative::clear_has_confidence() {
  _has_bits_[0] = _has_bits_[0] & ~0x00000002u;
}
void SpeechRecognitionAlternative::clear_confidence() {
  // Reset both the presence bit and the value.
  clear_has_confidence();
  confidence_ = 0;
}
float SpeechRecognitionAlternative::confidence() const {
  // @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionAlternative.confidence)
  return confidence_;
}
void SpeechRecognitionAlternative::set_confidence(float value) {
  // Store the value and mark the field present.
  confidence_ = value;
  set_has_confidence();
  // @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionAlternative.confidence)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// @@protoc_insertion_point(namespace_scope)
} // namespace proto
} // namespace speech
} // namespace cobalt
// @@protoc_insertion_point(global_scope)