// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google_streaming_api.proto
#ifndef PROTOBUF_google_5fstreaming_5fapi_2eproto__INCLUDED
#define PROTOBUF_google_5fstreaming_5fapi_2eproto__INCLUDED
#include <string>
#include <google/protobuf/stubs/common.h>
#if GOOGLE_PROTOBUF_VERSION < 3000000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3000000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>
#include <google/protobuf/extension_set.h>
#include <google/protobuf/generated_enum_util.h>
// @@protoc_insertion_point(includes)
namespace cobalt {
namespace speech {
namespace proto {
// Internal implementation detail -- do not call these.
void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
void protobuf_AssignDesc_google_5fstreaming_5fapi_2eproto();
void protobuf_ShutdownFile_google_5fstreaming_5fapi_2eproto();
class SpeechRecognitionAlternative;
class SpeechRecognitionEvent;
class SpeechRecognitionResult;
enum SpeechRecognitionEvent_StatusCode {
SpeechRecognitionEvent_StatusCode_STATUS_SUCCESS = 0,
SpeechRecognitionEvent_StatusCode_STATUS_NO_SPEECH = 1,
SpeechRecognitionEvent_StatusCode_STATUS_ABORTED = 2,
SpeechRecognitionEvent_StatusCode_STATUS_AUDIO_CAPTURE = 3,
SpeechRecognitionEvent_StatusCode_STATUS_NETWORK = 4,
SpeechRecognitionEvent_StatusCode_STATUS_NOT_ALLOWED = 5,
SpeechRecognitionEvent_StatusCode_STATUS_SERVICE_NOT_ALLOWED = 6,
SpeechRecognitionEvent_StatusCode_STATUS_BAD_GRAMMAR = 7,
SpeechRecognitionEvent_StatusCode_STATUS_LANGUAGE_NOT_SUPPORTED = 8
};
bool SpeechRecognitionEvent_StatusCode_IsValid(int value);
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent_StatusCode_StatusCode_MIN = SpeechRecognitionEvent_StatusCode_STATUS_SUCCESS;
const SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent_StatusCode_StatusCode_MAX = SpeechRecognitionEvent_StatusCode_STATUS_LANGUAGE_NOT_SUPPORTED;
const int SpeechRecognitionEvent_StatusCode_StatusCode_ARRAYSIZE = SpeechRecognitionEvent_StatusCode_StatusCode_MAX + 1;
enum SpeechRecognitionEvent_EndpointerEventType {
SpeechRecognitionEvent_EndpointerEventType_START_OF_SPEECH = 0,
SpeechRecognitionEvent_EndpointerEventType_END_OF_SPEECH = 1,
SpeechRecognitionEvent_EndpointerEventType_END_OF_AUDIO = 2,
SpeechRecognitionEvent_EndpointerEventType_END_OF_UTTERANCE = 3
};
bool SpeechRecognitionEvent_EndpointerEventType_IsValid(int value);
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent_EndpointerEventType_EndpointerEventType_MIN = SpeechRecognitionEvent_EndpointerEventType_START_OF_SPEECH;
const SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent_EndpointerEventType_EndpointerEventType_MAX = SpeechRecognitionEvent_EndpointerEventType_END_OF_UTTERANCE;
const int SpeechRecognitionEvent_EndpointerEventType_EndpointerEventType_ARRAYSIZE = SpeechRecognitionEvent_EndpointerEventType_EndpointerEventType_MAX + 1;
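// Usage sketch (illustrative comment, not generated declarations): the
// *_IsValid() helpers and *_MIN/*_MAX/*_ARRAYSIZE constants above can be used
// to range-check a raw integer before casting it to one of these enums, e.g.:
//
//   int raw_status = 3;  // e.g. a value decoded from the wire
//   if (cobalt::speech::proto::SpeechRecognitionEvent_StatusCode_IsValid(
//           raw_status)) {
//     cobalt::speech::proto::SpeechRecognitionEvent_StatusCode status =
//         static_cast<
//             cobalt::speech::proto::SpeechRecognitionEvent_StatusCode>(
//             raw_status);
//     // ... handle status ...
//   }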
// ===================================================================
class SpeechRecognitionEvent : public ::google::protobuf::MessageLite {
public:
SpeechRecognitionEvent();
virtual ~SpeechRecognitionEvent();
SpeechRecognitionEvent(const SpeechRecognitionEvent& from);
inline SpeechRecognitionEvent& operator=(const SpeechRecognitionEvent& from) {
CopyFrom(from);
return *this;
}
inline const ::std::string& unknown_fields() const {
return _unknown_fields_.GetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
inline ::std::string* mutable_unknown_fields() {
return _unknown_fields_.MutableNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
static const SpeechRecognitionEvent& default_instance();
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
// Returns the internal default instance pointer. This function can
// return NULL, so it should not be used by user code; it is intended
// for Protobuf internal code. Please use default_instance(), declared
// above, instead.
static inline const SpeechRecognitionEvent* internal_default_instance() {
return default_instance_;
}
#endif
GOOGLE_ATTRIBUTE_NOINLINE void Swap(SpeechRecognitionEvent* other);
// implements Message ----------------------------------------------
inline SpeechRecognitionEvent* New() const { return New(NULL); }
SpeechRecognitionEvent* New(::google::protobuf::Arena* arena) const;
void CheckTypeAndMergeFrom(const ::google::protobuf::MessageLite& from);
void CopyFrom(const SpeechRecognitionEvent& from);
void MergeFrom(const SpeechRecognitionEvent& from);
void Clear();
bool IsInitialized() const;
int ByteSize() const;
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input);
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const;
void DiscardUnknownFields();
int GetCachedSize() const { return _cached_size_; }
private:
void SharedCtor();
void SharedDtor();
void SetCachedSize(int size) const;
void InternalSwap(SpeechRecognitionEvent* other);
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return _arena_ptr_;
}
inline ::google::protobuf::Arena* MaybeArenaPtr() const {
return _arena_ptr_;
}
public:
::std::string GetTypeName() const;
// nested types ----------------------------------------------------
typedef SpeechRecognitionEvent_StatusCode StatusCode;
static const StatusCode STATUS_SUCCESS =
SpeechRecognitionEvent_StatusCode_STATUS_SUCCESS;
static const StatusCode STATUS_NO_SPEECH =
SpeechRecognitionEvent_StatusCode_STATUS_NO_SPEECH;
static const StatusCode STATUS_ABORTED =
SpeechRecognitionEvent_StatusCode_STATUS_ABORTED;
static const StatusCode STATUS_AUDIO_CAPTURE =
SpeechRecognitionEvent_StatusCode_STATUS_AUDIO_CAPTURE;
static const StatusCode STATUS_NETWORK =
SpeechRecognitionEvent_StatusCode_STATUS_NETWORK;
static const StatusCode STATUS_NOT_ALLOWED =
SpeechRecognitionEvent_StatusCode_STATUS_NOT_ALLOWED;
static const StatusCode STATUS_SERVICE_NOT_ALLOWED =
SpeechRecognitionEvent_StatusCode_STATUS_SERVICE_NOT_ALLOWED;
static const StatusCode STATUS_BAD_GRAMMAR =
SpeechRecognitionEvent_StatusCode_STATUS_BAD_GRAMMAR;
static const StatusCode STATUS_LANGUAGE_NOT_SUPPORTED =
SpeechRecognitionEvent_StatusCode_STATUS_LANGUAGE_NOT_SUPPORTED;
static inline bool StatusCode_IsValid(int value) {
return SpeechRecognitionEvent_StatusCode_IsValid(value);
}
static const StatusCode StatusCode_MIN =
SpeechRecognitionEvent_StatusCode_StatusCode_MIN;
static const StatusCode StatusCode_MAX =
SpeechRecognitionEvent_StatusCode_StatusCode_MAX;
static const int StatusCode_ARRAYSIZE =
SpeechRecognitionEvent_StatusCode_StatusCode_ARRAYSIZE;
typedef SpeechRecognitionEvent_EndpointerEventType EndpointerEventType;
static const EndpointerEventType START_OF_SPEECH =
SpeechRecognitionEvent_EndpointerEventType_START_OF_SPEECH;
static const EndpointerEventType END_OF_SPEECH =
SpeechRecognitionEvent_EndpointerEventType_END_OF_SPEECH;
static const EndpointerEventType END_OF_AUDIO =
SpeechRecognitionEvent_EndpointerEventType_END_OF_AUDIO;
static const EndpointerEventType END_OF_UTTERANCE =
SpeechRecognitionEvent_EndpointerEventType_END_OF_UTTERANCE;
static inline bool EndpointerEventType_IsValid(int value) {
return SpeechRecognitionEvent_EndpointerEventType_IsValid(value);
}
static const EndpointerEventType EndpointerEventType_MIN =
SpeechRecognitionEvent_EndpointerEventType_EndpointerEventType_MIN;
static const EndpointerEventType EndpointerEventType_MAX =
SpeechRecognitionEvent_EndpointerEventType_EndpointerEventType_MAX;
static const int EndpointerEventType_ARRAYSIZE =
SpeechRecognitionEvent_EndpointerEventType_EndpointerEventType_ARRAYSIZE;
// accessors -------------------------------------------------------
// optional .cobalt.speech.proto.SpeechRecognitionEvent.StatusCode status = 1 [default = STATUS_SUCCESS];
bool has_status() const;
void clear_status();
static const int kStatusFieldNumber = 1;
::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode status() const;
void set_status(::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode value);
// repeated .cobalt.speech.proto.SpeechRecognitionResult result = 2;
int result_size() const;
void clear_result();
static const int kResultFieldNumber = 2;
const ::cobalt::speech::proto::SpeechRecognitionResult& result(int index) const;
::cobalt::speech::proto::SpeechRecognitionResult* mutable_result(int index);
::cobalt::speech::proto::SpeechRecognitionResult* add_result();
::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionResult >*
mutable_result();
const ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionResult >&
result() const;
// optional .cobalt.speech.proto.SpeechRecognitionEvent.EndpointerEventType endpoint = 4;
bool has_endpoint() const;
void clear_endpoint();
static const int kEndpointFieldNumber = 4;
::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType endpoint() const;
void set_endpoint(::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType value);
// @@protoc_insertion_point(class_scope:cobalt.speech.proto.SpeechRecognitionEvent)
private:
inline void set_has_status();
inline void clear_has_status();
inline void set_has_endpoint();
inline void clear_has_endpoint();
::google::protobuf::internal::ArenaStringPtr _unknown_fields_;
::google::protobuf::Arena* _arena_ptr_;
::google::protobuf::uint32 _has_bits_[1];
mutable int _cached_size_;
::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionResult > result_;
int status_;
int endpoint_;
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
friend void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto_impl();
#else
friend void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#endif
friend void protobuf_AssignDesc_google_5fstreaming_5fapi_2eproto();
friend void protobuf_ShutdownFile_google_5fstreaming_5fapi_2eproto();
void InitAsDefaultInstance();
static SpeechRecognitionEvent* default_instance_;
};
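// Usage sketch (illustrative comment, not generated declarations; assumes the
// generated header is included as "google_streaming_api.pb.h" and <string> is
// available): building and serializing an event with the accessors above.
//
//   cobalt::speech::proto::SpeechRecognitionEvent event;
//   event.set_status(
//       cobalt::speech::proto::SpeechRecognitionEvent::STATUS_SUCCESS);
//   cobalt::speech::proto::SpeechRecognitionResult* result =
//       event.add_result();
//   result->set_final(true);
//   std::string wire_bytes;
//   event.SerializeToString(&wire_bytes);  // inherited from MessageLite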
// -------------------------------------------------------------------
class SpeechRecognitionResult : public ::google::protobuf::MessageLite {
public:
SpeechRecognitionResult();
virtual ~SpeechRecognitionResult();
SpeechRecognitionResult(const SpeechRecognitionResult& from);
inline SpeechRecognitionResult& operator=(const SpeechRecognitionResult& from) {
CopyFrom(from);
return *this;
}
inline const ::std::string& unknown_fields() const {
return _unknown_fields_.GetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
inline ::std::string* mutable_unknown_fields() {
return _unknown_fields_.MutableNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
static const SpeechRecognitionResult& default_instance();
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
// Returns the internal default instance pointer. This function can
// return NULL, so it should not be used by user code; it is intended
// for Protobuf internal code. Please use default_instance(), declared
// above, instead.
static inline const SpeechRecognitionResult* internal_default_instance() {
return default_instance_;
}
#endif
GOOGLE_ATTRIBUTE_NOINLINE void Swap(SpeechRecognitionResult* other);
// implements Message ----------------------------------------------
inline SpeechRecognitionResult* New() const { return New(NULL); }
SpeechRecognitionResult* New(::google::protobuf::Arena* arena) const;
void CheckTypeAndMergeFrom(const ::google::protobuf::MessageLite& from);
void CopyFrom(const SpeechRecognitionResult& from);
void MergeFrom(const SpeechRecognitionResult& from);
void Clear();
bool IsInitialized() const;
int ByteSize() const;
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input);
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const;
void DiscardUnknownFields();
int GetCachedSize() const { return _cached_size_; }
private:
void SharedCtor();
void SharedDtor();
void SetCachedSize(int size) const;
void InternalSwap(SpeechRecognitionResult* other);
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return _arena_ptr_;
}
inline ::google::protobuf::Arena* MaybeArenaPtr() const {
return _arena_ptr_;
}
public:
::std::string GetTypeName() const;
// nested types ----------------------------------------------------
// accessors -------------------------------------------------------
// repeated .cobalt.speech.proto.SpeechRecognitionAlternative alternative = 1;
int alternative_size() const;
void clear_alternative();
static const int kAlternativeFieldNumber = 1;
const ::cobalt::speech::proto::SpeechRecognitionAlternative& alternative(int index) const;
::cobalt::speech::proto::SpeechRecognitionAlternative* mutable_alternative(int index);
::cobalt::speech::proto::SpeechRecognitionAlternative* add_alternative();
::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionAlternative >*
mutable_alternative();
const ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionAlternative >&
alternative() const;
// optional bool final = 2 [default = false];
bool has_final() const;
void clear_final();
static const int kFinalFieldNumber = 2;
bool final() const;
void set_final(bool value);
// optional float stability = 3;
bool has_stability() const;
void clear_stability();
static const int kStabilityFieldNumber = 3;
float stability() const;
void set_stability(float value);
// @@protoc_insertion_point(class_scope:cobalt.speech.proto.SpeechRecognitionResult)
private:
inline void set_has_final();
inline void clear_has_final();
inline void set_has_stability();
inline void clear_has_stability();
::google::protobuf::internal::ArenaStringPtr _unknown_fields_;
::google::protobuf::Arena* _arena_ptr_;
::google::protobuf::uint32 _has_bits_[1];
mutable int _cached_size_;
::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionAlternative > alternative_;
bool final_;
float stability_;
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
friend void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto_impl();
#else
friend void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#endif
friend void protobuf_AssignDesc_google_5fstreaming_5fapi_2eproto();
friend void protobuf_ShutdownFile_google_5fstreaming_5fapi_2eproto();
void InitAsDefaultInstance();
static SpeechRecognitionResult* default_instance_;
};
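// Usage sketch (illustrative comment, not generated declarations; the helper
// name PrintResult and the use of <cstdio> are assumptions): reading the
// repeated alternative field and the optional final/stability fields.
//
//   void PrintResult(
//       const cobalt::speech::proto::SpeechRecognitionResult& result) {
//     for (int i = 0; i < result.alternative_size(); ++i) {
//       const cobalt::speech::proto::SpeechRecognitionAlternative& alt =
//           result.alternative(i);
//       printf("%s (confidence %.2f)\n", alt.transcript().c_str(),
//              alt.confidence());
//     }
//     if (result.has_final() && result.final()) {
//       printf("final result, stability %.2f\n",
//              result.has_stability() ? result.stability() : 0.0f);
//     }
//   }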
// -------------------------------------------------------------------
class SpeechRecognitionAlternative : public ::google::protobuf::MessageLite {
public:
SpeechRecognitionAlternative();
virtual ~SpeechRecognitionAlternative();
SpeechRecognitionAlternative(const SpeechRecognitionAlternative& from);
inline SpeechRecognitionAlternative& operator=(const SpeechRecognitionAlternative& from) {
CopyFrom(from);
return *this;
}
inline const ::std::string& unknown_fields() const {
return _unknown_fields_.GetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
inline ::std::string* mutable_unknown_fields() {
return _unknown_fields_.MutableNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
static const SpeechRecognitionAlternative& default_instance();
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
// Returns the internal default instance pointer. This function can
// return NULL, so it should not be used by user code; it is intended
// for Protobuf internal code. Please use default_instance(), declared
// above, instead.
static inline const SpeechRecognitionAlternative* internal_default_instance() {
return default_instance_;
}
#endif
GOOGLE_ATTRIBUTE_NOINLINE void Swap(SpeechRecognitionAlternative* other);
// implements Message ----------------------------------------------
inline SpeechRecognitionAlternative* New() const { return New(NULL); }
SpeechRecognitionAlternative* New(::google::protobuf::Arena* arena) const;
void CheckTypeAndMergeFrom(const ::google::protobuf::MessageLite& from);
void CopyFrom(const SpeechRecognitionAlternative& from);
void MergeFrom(const SpeechRecognitionAlternative& from);
void Clear();
bool IsInitialized() const;
int ByteSize() const;
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input);
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const;
void DiscardUnknownFields();
int GetCachedSize() const { return _cached_size_; }
private:
void SharedCtor();
void SharedDtor();
void SetCachedSize(int size) const;
void InternalSwap(SpeechRecognitionAlternative* other);
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return _arena_ptr_;
}
inline ::google::protobuf::Arena* MaybeArenaPtr() const {
return _arena_ptr_;
}
public:
::std::string GetTypeName() const;
// nested types ----------------------------------------------------
// accessors -------------------------------------------------------
// optional string transcript = 1;
bool has_transcript() const;
void clear_transcript();
static const int kTranscriptFieldNumber = 1;
const ::std::string& transcript() const;
void set_transcript(const ::std::string& value);
void set_transcript(const char* value);
void set_transcript(const char* value, size_t size);
::std::string* mutable_transcript();
::std::string* release_transcript();
void set_allocated_transcript(::std::string* transcript);
// optional float confidence = 2;
bool has_confidence() const;
void clear_confidence();
static const int kConfidenceFieldNumber = 2;
float confidence() const;
void set_confidence(float value);
// @@protoc_insertion_point(class_scope:cobalt.speech.proto.SpeechRecognitionAlternative)
private:
inline void set_has_transcript();
inline void clear_has_transcript();
inline void set_has_confidence();
inline void clear_has_confidence();
::google::protobuf::internal::ArenaStringPtr _unknown_fields_;
::google::protobuf::Arena* _arena_ptr_;
::google::protobuf::uint32 _has_bits_[1];
mutable int _cached_size_;
::google::protobuf::internal::ArenaStringPtr transcript_;
float confidence_;
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
friend void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto_impl();
#else
friend void protobuf_AddDesc_google_5fstreaming_5fapi_2eproto();
#endif
friend void protobuf_AssignDesc_google_5fstreaming_5fapi_2eproto();
friend void protobuf_ShutdownFile_google_5fstreaming_5fapi_2eproto();
void InitAsDefaultInstance();
static SpeechRecognitionAlternative* default_instance_;
};
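// Usage sketch (illustrative comment, not generated declarations): populating
// an alternative through the string and float setters declared above.
//
//   cobalt::speech::proto::SpeechRecognitionAlternative alt;
//   alt.set_transcript("hello world");
//   alt.set_confidence(0.87f);
//   if (alt.has_transcript()) {
//     const ::std::string& text = alt.transcript();  // "hello world"
//   }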
// ===================================================================
// ===================================================================
#if !PROTOBUF_INLINE_NOT_IN_HEADERS
// SpeechRecognitionEvent
// optional .cobalt.speech.proto.SpeechRecognitionEvent.StatusCode status = 1 [default = STATUS_SUCCESS];
inline bool SpeechRecognitionEvent::has_status() const {
return (_has_bits_[0] & 0x00000001u) != 0;
}
inline void SpeechRecognitionEvent::set_has_status() {
_has_bits_[0] |= 0x00000001u;
}
inline void SpeechRecognitionEvent::clear_has_status() {
_has_bits_[0] &= ~0x00000001u;
}
inline void SpeechRecognitionEvent::clear_status() {
status_ = 0;
clear_has_status();
}
inline ::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode SpeechRecognitionEvent::status() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionEvent.status)
return static_cast< ::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode >(status_);
}
inline void SpeechRecognitionEvent::set_status(::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode value) {
assert(::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode_IsValid(value));
set_has_status();
status_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionEvent.status)
}
// repeated .cobalt.speech.proto.SpeechRecognitionResult result = 2;
inline int SpeechRecognitionEvent::result_size() const {
return result_.size();
}
inline void SpeechRecognitionEvent::clear_result() {
result_.Clear();
}
inline const ::cobalt::speech::proto::SpeechRecognitionResult& SpeechRecognitionEvent::result(int index) const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_.Get(index);
}
inline ::cobalt::speech::proto::SpeechRecognitionResult* SpeechRecognitionEvent::mutable_result(int index) {
// @@protoc_insertion_point(field_mutable:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_.Mutable(index);
}
inline ::cobalt::speech::proto::SpeechRecognitionResult* SpeechRecognitionEvent::add_result() {
// @@protoc_insertion_point(field_add:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_.Add();
}
inline ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionResult >*
SpeechRecognitionEvent::mutable_result() {
// @@protoc_insertion_point(field_mutable_list:cobalt.speech.proto.SpeechRecognitionEvent.result)
return &result_;
}
inline const ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionResult >&
SpeechRecognitionEvent::result() const {
// @@protoc_insertion_point(field_list:cobalt.speech.proto.SpeechRecognitionEvent.result)
return result_;
}
// optional .cobalt.speech.proto.SpeechRecognitionEvent.EndpointerEventType endpoint = 4;
inline bool SpeechRecognitionEvent::has_endpoint() const {
return (_has_bits_[0] & 0x00000004u) != 0;
}
inline void SpeechRecognitionEvent::set_has_endpoint() {
_has_bits_[0] |= 0x00000004u;
}
inline void SpeechRecognitionEvent::clear_has_endpoint() {
_has_bits_[0] &= ~0x00000004u;
}
inline void SpeechRecognitionEvent::clear_endpoint() {
endpoint_ = 0;
clear_has_endpoint();
}
inline ::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType SpeechRecognitionEvent::endpoint() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionEvent.endpoint)
return static_cast< ::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType >(endpoint_);
}
inline void SpeechRecognitionEvent::set_endpoint(::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType value) {
assert(::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType_IsValid(value));
set_has_endpoint();
endpoint_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionEvent.endpoint)
}
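// Usage sketch (illustrative comment, not generated definitions; the helper
// name HandleEndpoint is an assumption): consuming the endpoint accessors
// defined above by switching on the endpointer event type.
//
//   void HandleEndpoint(
//       const cobalt::speech::proto::SpeechRecognitionEvent& event) {
//     if (!event.has_endpoint()) return;
//     switch (event.endpoint()) {
//       case cobalt::speech::proto::SpeechRecognitionEvent::START_OF_SPEECH:
//         // Speech detected; start consuming interim results.
//         break;
//       case cobalt::speech::proto::SpeechRecognitionEvent::END_OF_UTTERANCE:
//         // Utterance finished; expect a final result.
//         break;
//       default:
//         break;
//     }
//   }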
// -------------------------------------------------------------------
// SpeechRecognitionResult
// repeated .cobalt.speech.proto.SpeechRecognitionAlternative alternative = 1;
inline int SpeechRecognitionResult::alternative_size() const {
return alternative_.size();
}
inline void SpeechRecognitionResult::clear_alternative() {
alternative_.Clear();
}
inline const ::cobalt::speech::proto::SpeechRecognitionAlternative& SpeechRecognitionResult::alternative(int index) const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_.Get(index);
}
inline ::cobalt::speech::proto::SpeechRecognitionAlternative* SpeechRecognitionResult::mutable_alternative(int index) {
// @@protoc_insertion_point(field_mutable:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_.Mutable(index);
}
inline ::cobalt::speech::proto::SpeechRecognitionAlternative* SpeechRecognitionResult::add_alternative() {
// @@protoc_insertion_point(field_add:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_.Add();
}
inline ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionAlternative >*
SpeechRecognitionResult::mutable_alternative() {
// @@protoc_insertion_point(field_mutable_list:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return &alternative_;
}
inline const ::google::protobuf::RepeatedPtrField< ::cobalt::speech::proto::SpeechRecognitionAlternative >&
SpeechRecognitionResult::alternative() const {
// @@protoc_insertion_point(field_list:cobalt.speech.proto.SpeechRecognitionResult.alternative)
return alternative_;
}
// optional bool final = 2 [default = false];
inline bool SpeechRecognitionResult::has_final() const {
return (_has_bits_[0] & 0x00000002u) != 0;
}
inline void SpeechRecognitionResult::set_has_final() {
_has_bits_[0] |= 0x00000002u;
}
inline void SpeechRecognitionResult::clear_has_final() {
_has_bits_[0] &= ~0x00000002u;
}
inline void SpeechRecognitionResult::clear_final() {
final_ = false;
clear_has_final();
}
inline bool SpeechRecognitionResult::final() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionResult.final)
return final_;
}
inline void SpeechRecognitionResult::set_final(bool value) {
set_has_final();
final_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionResult.final)
}
// optional float stability = 3;
inline bool SpeechRecognitionResult::has_stability() const {
return (_has_bits_[0] & 0x00000004u) != 0;
}
inline void SpeechRecognitionResult::set_has_stability() {
_has_bits_[0] |= 0x00000004u;
}
inline void SpeechRecognitionResult::clear_has_stability() {
_has_bits_[0] &= ~0x00000004u;
}
inline void SpeechRecognitionResult::clear_stability() {
stability_ = 0;
clear_has_stability();
}
inline float SpeechRecognitionResult::stability() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionResult.stability)
return stability_;
}
inline void SpeechRecognitionResult::set_stability(float value) {
set_has_stability();
stability_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionResult.stability)
}
// -------------------------------------------------------------------
// SpeechRecognitionAlternative
// optional string transcript = 1;
inline bool SpeechRecognitionAlternative::has_transcript() const {
return (_has_bits_[0] & 0x00000001u) != 0;
}
inline void SpeechRecognitionAlternative::set_has_transcript() {
_has_bits_[0] |= 0x00000001u;
}
inline void SpeechRecognitionAlternative::clear_has_transcript() {
_has_bits_[0] &= ~0x00000001u;
}
inline void SpeechRecognitionAlternative::clear_transcript() {
transcript_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
clear_has_transcript();
}
inline const ::std::string& SpeechRecognitionAlternative::transcript() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
return transcript_.GetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
inline void SpeechRecognitionAlternative::set_transcript(const ::std::string& value) {
set_has_transcript();
transcript_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
inline void SpeechRecognitionAlternative::set_transcript(const char* value) {
set_has_transcript();
transcript_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
inline void SpeechRecognitionAlternative::set_transcript(const char* value, size_t size) {
set_has_transcript();
transcript_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
inline ::std::string* SpeechRecognitionAlternative::mutable_transcript() {
set_has_transcript();
// @@protoc_insertion_point(field_mutable:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
return transcript_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
inline ::std::string* SpeechRecognitionAlternative::release_transcript() {
// @@protoc_insertion_point(field_release:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
clear_has_transcript();
return transcript_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
inline void SpeechRecognitionAlternative::set_allocated_transcript(::std::string* transcript) {
if (transcript != NULL) {
set_has_transcript();
} else {
clear_has_transcript();
}
transcript_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), transcript);
// @@protoc_insertion_point(field_set_allocated:cobalt.speech.proto.SpeechRecognitionAlternative.transcript)
}
// optional float confidence = 2;
inline bool SpeechRecognitionAlternative::has_confidence() const {
return (_has_bits_[0] & 0x00000002u) != 0;
}
inline void SpeechRecognitionAlternative::set_has_confidence() {
_has_bits_[0] |= 0x00000002u;
}
inline void SpeechRecognitionAlternative::clear_has_confidence() {
_has_bits_[0] &= ~0x00000002u;
}
inline void SpeechRecognitionAlternative::clear_confidence() {
confidence_ = 0;
clear_has_confidence();
}
inline float SpeechRecognitionAlternative::confidence() const {
// @@protoc_insertion_point(field_get:cobalt.speech.proto.SpeechRecognitionAlternative.confidence)
return confidence_;
}
inline void SpeechRecognitionAlternative::set_confidence(float value) {
set_has_confidence();
confidence_ = value;
// @@protoc_insertion_point(field_set:cobalt.speech.proto.SpeechRecognitionAlternative.confidence)
}
#endif // !PROTOBUF_INLINE_NOT_IN_HEADERS
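// Round-trip sketch (illustrative comment, not generated code), using the
// SerializeToString()/ParseFromString() helpers inherited from MessageLite:
//
//   cobalt::speech::proto::SpeechRecognitionEvent in, out;
//   in.set_endpoint(
//       cobalt::speech::proto::SpeechRecognitionEvent::END_OF_AUDIO);
//   std::string bytes;
//   in.SerializeToString(&bytes);
//   if (out.ParseFromString(bytes)) {
//     // out.endpoint() == SpeechRecognitionEvent::END_OF_AUDIO
//   }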
// -------------------------------------------------------------------
// -------------------------------------------------------------------
// @@protoc_insertion_point(namespace_scope)
} // namespace proto
} // namespace speech
} // namespace cobalt
#ifndef SWIG
namespace google {
namespace protobuf {
template <> struct is_proto_enum< ::cobalt::speech::proto::SpeechRecognitionEvent_StatusCode> : ::google::protobuf::internal::true_type {};
template <> struct is_proto_enum< ::cobalt::speech::proto::SpeechRecognitionEvent_EndpointerEventType> : ::google::protobuf::internal::true_type {};
} // namespace protobuf
} // namespace google
#endif // SWIG
// @@protoc_insertion_point(global_scope)
#endif // PROTOBUF_google_5fstreaming_5fapi_2eproto__INCLUDED