// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.33.0
// protoc v4.25.3
// source: google/ai/generativelanguage/v1beta2/text_service.proto
package generativelanguagepb
import (
context "context"
reflect "reflect"
sync "sync"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Request to generate a text completion response from the model.
type GenerateTextRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The model name to use with the format name=models/{model}.
Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"`
// Required. The free-form input text given to the model as a prompt.
//
// Given a prompt, the model will generate a TextCompletion response it
// predicts as the completion of the input text.
Prompt *TextPrompt `protobuf:"bytes,2,opt,name=prompt,proto3" json:"prompt,omitempty"`
// Controls the randomness of the output.
// Note: The default value varies by model; see the `Model.temperature`
// attribute of the `Model` returned from the `getModel` function.
//
// Values can range from [0.0,1.0],
// inclusive. A value closer to 1.0 will produce responses that are more
// varied and creative, while a value closer to 0.0 will typically result in
// more straightforward responses from the model.
Temperature *float32 `protobuf:"fixed32,3,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"`
// Number of generated responses to return.
//
// This value must be between [1, 8], inclusive. If unset, this will default
// to 1.
CandidateCount *int32 `protobuf:"varint,4,opt,name=candidate_count,json=candidateCount,proto3,oneof" json:"candidate_count,omitempty"`
// The maximum number of tokens to include in a candidate.
//
// If unset, this will default to 64.
MaxOutputTokens *int32 `protobuf:"varint,5,opt,name=max_output_tokens,json=maxOutputTokens,proto3,oneof" json:"max_output_tokens,omitempty"`
// The maximum cumulative probability of tokens to consider when sampling.
//
// The model uses combined Top-k and nucleus sampling.
//
// Tokens are sorted based on their assigned probabilities so that only the
// most likely tokens are considered. Top-k sampling directly limits the
// maximum number of tokens to consider, while nucleus sampling limits the
// number of tokens based on the cumulative probability.
//
// Note: The default value varies by model; see the `Model.top_p`
// attribute of the `Model` returned from the `getModel` function.
TopP *float32 `protobuf:"fixed32,6,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"`
// The maximum number of tokens to consider when sampling.
//
// The model uses combined Top-k and nucleus sampling.
//
// Top-k sampling considers the set of `top_k` most probable tokens.
// Defaults to 40.
//
// Note: The default value varies by model; see the `Model.top_k`
// attribute of the `Model` returned from the `getModel` function.
TopK *int32 `protobuf:"varint,7,opt,name=top_k,json=topK,proto3,oneof" json:"top_k,omitempty"`
// A list of unique `SafetySetting` instances for blocking unsafe content
// that will be enforced on the `GenerateTextRequest.prompt` and
// `GenerateTextResponse.candidates`. There should not be more than one
// setting for each `SafetyCategory` type. The API will block any prompts and
// responses that fail to meet the thresholds set by these settings. This list
// overrides the default settings for each `SafetyCategory` specified in the
// safety_settings. If there is no `SafetySetting` for a given
// `SafetyCategory` provided in the list, the API will use the default safety
// setting for that category.
SafetySettings []*SafetySetting `protobuf:"bytes,8,rep,name=safety_settings,json=safetySettings,proto3" json:"safety_settings,omitempty"`
// The set of character sequences (up to 5) that will stop output generation.
// If specified, the API will stop at the first appearance of a stop
// sequence. The stop sequence will not be included as part of the response.
StopSequences []string `protobuf:"bytes,9,rep,name=stop_sequences,json=stopSequences,proto3" json:"stop_sequences,omitempty"`
}
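
// The helper below is an illustrative sketch added for documentation purposes
// and is not part of the generated API. It shows how a caller might populate
// the optional (pointer-typed) sampling fields of GenerateTextRequest; the
// model name and literal values are placeholders, not recommendations.
func exampleGenerateTextRequest() *GenerateTextRequest {
	temperature := float32(0.7) // placeholder; the default varies by model
	topK := int32(40)
	maxTokens := int32(256)
	return &GenerateTextRequest{
		Model:           "models/your-model-id", // resource name in the form models/{model}
		Prompt:          &TextPrompt{Text: "Write a haiku about the sea."},
		Temperature:     &temperature,
		TopK:            &topK,
		MaxOutputTokens: &maxTokens,
		StopSequences:   []string{"\n\n"},
	}
}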
func (x *GenerateTextRequest) Reset() {
*x = GenerateTextRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GenerateTextRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GenerateTextRequest) ProtoMessage() {}
func (x *GenerateTextRequest) ProtoReflect() protoreflect.Message {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GenerateTextRequest.ProtoReflect.Descriptor instead.
func (*GenerateTextRequest) Descriptor() ([]byte, []int) {
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP(), []int{0}
}
func (x *GenerateTextRequest) GetModel() string {
if x != nil {
return x.Model
}
return ""
}
func (x *GenerateTextRequest) GetPrompt() *TextPrompt {
if x != nil {
return x.Prompt
}
return nil
}
func (x *GenerateTextRequest) GetTemperature() float32 {
if x != nil && x.Temperature != nil {
return *x.Temperature
}
return 0
}
func (x *GenerateTextRequest) GetCandidateCount() int32 {
if x != nil && x.CandidateCount != nil {
return *x.CandidateCount
}
return 0
}
func (x *GenerateTextRequest) GetMaxOutputTokens() int32 {
if x != nil && x.MaxOutputTokens != nil {
return *x.MaxOutputTokens
}
return 0
}
func (x *GenerateTextRequest) GetTopP() float32 {
if x != nil && x.TopP != nil {
return *x.TopP
}
return 0
}
func (x *GenerateTextRequest) GetTopK() int32 {
if x != nil && x.TopK != nil {
return *x.TopK
}
return 0
}
func (x *GenerateTextRequest) GetSafetySettings() []*SafetySetting {
if x != nil {
return x.SafetySettings
}
return nil
}
func (x *GenerateTextRequest) GetStopSequences() []string {
if x != nil {
return x.StopSequences
}
return nil
}
// The response from the model, including candidate completions.
type GenerateTextResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Candidate responses from the model.
Candidates []*TextCompletion `protobuf:"bytes,1,rep,name=candidates,proto3" json:"candidates,omitempty"`
// A set of content filtering metadata for the prompt and response
// text.
//
// This indicates which `SafetyCategory`(s) blocked a
// candidate from this response, the lowest `HarmProbability`
// that triggered a block, and the `HarmThreshold` setting for that category.
// This indicates the smallest change to the `SafetySettings` that would be
// necessary to unblock at least 1 response.
//
// The blocking is configured by the `SafetySettings` in the request (or the
// default `SafetySettings` of the API).
Filters []*ContentFilter `protobuf:"bytes,3,rep,name=filters,proto3" json:"filters,omitempty"`
// Returns any safety feedback related to content filtering.
SafetyFeedback []*SafetyFeedback `protobuf:"bytes,4,rep,name=safety_feedback,json=safetyFeedback,proto3" json:"safety_feedback,omitempty"`
}
func (x *GenerateTextResponse) Reset() {
*x = GenerateTextResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GenerateTextResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GenerateTextResponse) ProtoMessage() {}
func (x *GenerateTextResponse) ProtoReflect() protoreflect.Message {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GenerateTextResponse.ProtoReflect.Descriptor instead.
func (*GenerateTextResponse) Descriptor() ([]byte, []int) {
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP(), []int{1}
}
func (x *GenerateTextResponse) GetCandidates() []*TextCompletion {
if x != nil {
return x.Candidates
}
return nil
}
func (x *GenerateTextResponse) GetFilters() []*ContentFilter {
if x != nil {
return x.Filters
}
return nil
}
func (x *GenerateTextResponse) GetSafetyFeedback() []*SafetyFeedback {
if x != nil {
return x.SafetyFeedback
}
return nil
}
// Text given to the model as a prompt.
//
// The model will use this `TextPrompt` to generate a text completion.
type TextPrompt struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The prompt text.
Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
}
func (x *TextPrompt) Reset() {
*x = TextPrompt{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *TextPrompt) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*TextPrompt) ProtoMessage() {}
func (x *TextPrompt) ProtoReflect() protoreflect.Message {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use TextPrompt.ProtoReflect.Descriptor instead.
func (*TextPrompt) Descriptor() ([]byte, []int) {
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP(), []int{2}
}
func (x *TextPrompt) GetText() string {
if x != nil {
return x.Text
}
return ""
}
// Output text returned from a model.
type TextCompletion struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Output only. The generated text returned from the model.
Output string `protobuf:"bytes,1,opt,name=output,proto3" json:"output,omitempty"`
// Ratings for the safety of a response.
//
// There is at most one rating per category.
SafetyRatings []*SafetyRating `protobuf:"bytes,2,rep,name=safety_ratings,json=safetyRatings,proto3" json:"safety_ratings,omitempty"`
// Output only. Citation information for model-generated `output` in this
// `TextCompletion`.
//
// This field may be populated with attribution information for any text
// included in the `output`.
CitationMetadata *CitationMetadata `protobuf:"bytes,3,opt,name=citation_metadata,json=citationMetadata,proto3,oneof" json:"citation_metadata,omitempty"`
}
func (x *TextCompletion) Reset() {
*x = TextCompletion{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *TextCompletion) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*TextCompletion) ProtoMessage() {}
func (x *TextCompletion) ProtoReflect() protoreflect.Message {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use TextCompletion.ProtoReflect.Descriptor instead.
func (*TextCompletion) Descriptor() ([]byte, []int) {
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP(), []int{3}
}
func (x *TextCompletion) GetOutput() string {
if x != nil {
return x.Output
}
return ""
}
func (x *TextCompletion) GetSafetyRatings() []*SafetyRating {
if x != nil {
return x.SafetyRatings
}
return nil
}
func (x *TextCompletion) GetCitationMetadata() *CitationMetadata {
if x != nil {
return x.CitationMetadata
}
return nil
}
// Request to get a text embedding from the model.
type EmbedTextRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The model name to use with the format model=models/{model}.
Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"`
// Required. The free-form input text that the model will turn into an
// embedding.
Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"`
}
func (x *EmbedTextRequest) Reset() {
*x = EmbedTextRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *EmbedTextRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*EmbedTextRequest) ProtoMessage() {}
func (x *EmbedTextRequest) ProtoReflect() protoreflect.Message {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use EmbedTextRequest.ProtoReflect.Descriptor instead.
func (*EmbedTextRequest) Descriptor() ([]byte, []int) {
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP(), []int{4}
}
func (x *EmbedTextRequest) GetModel() string {
if x != nil {
return x.Model
}
return ""
}
func (x *EmbedTextRequest) GetText() string {
if x != nil {
return x.Text
}
return ""
}
// The response to an EmbedTextRequest.
type EmbedTextResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Output only. The embedding generated from the input text.
Embedding *Embedding `protobuf:"bytes,1,opt,name=embedding,proto3,oneof" json:"embedding,omitempty"`
}
func (x *EmbedTextResponse) Reset() {
*x = EmbedTextResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *EmbedTextResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*EmbedTextResponse) ProtoMessage() {}
func (x *EmbedTextResponse) ProtoReflect() protoreflect.Message {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use EmbedTextResponse.ProtoReflect.Descriptor instead.
func (*EmbedTextResponse) Descriptor() ([]byte, []int) {
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP(), []int{5}
}
func (x *EmbedTextResponse) GetEmbedding() *Embedding {
if x != nil {
return x.Embedding
}
return nil
}
// A list of floats representing the embedding.
type Embedding struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// The embedding values.
Value []float32 `protobuf:"fixed32,1,rep,packed,name=value,proto3" json:"value,omitempty"`
}
func (x *Embedding) Reset() {
*x = Embedding{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Embedding) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Embedding) ProtoMessage() {}
func (x *Embedding) ProtoReflect() protoreflect.Message {
mi := &file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Embedding.ProtoReflect.Descriptor instead.
func (*Embedding) Descriptor() ([]byte, []int) {
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP(), []int{6}
}
func (x *Embedding) GetValue() []float32 {
if x != nil {
return x.Value
}
return nil
}
var File_google_ai_generativelanguage_v1beta2_text_service_proto protoreflect.FileDescriptor
var file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDesc = []byte{
0x0a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65,
0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76,
0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2f, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x24, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c,
0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x1a,
0x33, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72,
0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76, 0x31,
0x62, 0x65, 0x74, 0x61, 0x32, 0x2f, 0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x31, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f,
0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61,
0x67, 0x65, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2f, 0x73, 0x61, 0x66, 0x65, 0x74,
0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70,
0x69, 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64,
0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xb8, 0x04, 0x0a, 0x13, 0x47,
0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65,
0x73, 0x74, 0x12, 0x45, 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x42, 0x2f, 0xe0, 0x41, 0x02, 0xfa, 0x41, 0x29, 0x0a, 0x27, 0x67, 0x65, 0x6e, 0x65, 0x72,
0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4d, 0x6f, 0x64,
0x65, 0x6c, 0x52, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x4d, 0x0a, 0x06, 0x70, 0x72, 0x6f,
0x6d, 0x70, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65,
0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32,
0x2e, 0x54, 0x65, 0x78, 0x74, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x02,
0x52, 0x06, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x12, 0x25, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70,
0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x48, 0x00, 0x52,
0x0b, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x88, 0x01, 0x01, 0x12,
0x2c, 0x0a, 0x0f, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75,
0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x0e, 0x63, 0x61, 0x6e, 0x64,
0x69, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a,
0x11, 0x6d, 0x61, 0x78, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65,
0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x02, 0x52, 0x0f, 0x6d, 0x61, 0x78, 0x4f,
0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x12, 0x18,
0x0a, 0x05, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x18, 0x06, 0x20, 0x01, 0x28, 0x02, 0x48, 0x03, 0x52,
0x04, 0x74, 0x6f, 0x70, 0x50, 0x88, 0x01, 0x01, 0x12, 0x18, 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x5f,
0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x48, 0x04, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x4b, 0x88,
0x01, 0x01, 0x12, 0x5c, 0x0a, 0x0f, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x5f, 0x73, 0x65, 0x74,
0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69,
0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74,
0x61, 0x32, 0x2e, 0x53, 0x61, 0x66, 0x65, 0x74, 0x79, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67,
0x52, 0x0e, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73,
0x12, 0x25, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63,
0x65, 0x73, 0x18, 0x09, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x73, 0x74, 0x6f, 0x70, 0x53, 0x65,
0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x74, 0x65, 0x6d, 0x70,
0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x63, 0x61, 0x6e, 0x64,
0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x14, 0x0a, 0x12, 0x5f,
0x6d, 0x61, 0x78, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e,
0x73, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x42, 0x08, 0x0a, 0x06, 0x5f,
0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, 0x9a, 0x02, 0x0a, 0x14, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61,
0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54,
0x0a, 0x0a, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x34, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67,
0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67,
0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2e, 0x54, 0x65, 0x78, 0x74, 0x43, 0x6f,
0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64,
0x61, 0x74, 0x65, 0x73, 0x12, 0x4d, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x18,
0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67,
0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2e, 0x43, 0x6f, 0x6e,
0x74, 0x65, 0x6e, 0x74, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x74,
0x65, 0x72, 0x73, 0x12, 0x5d, 0x0a, 0x0f, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x5f, 0x66, 0x65,
0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74,
0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65,
0x74, 0x61, 0x32, 0x2e, 0x53, 0x61, 0x66, 0x65, 0x74, 0x79, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61,
0x63, 0x6b, 0x52, 0x0e, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61,
0x63, 0x6b, 0x22, 0x25, 0x0a, 0x0a, 0x54, 0x65, 0x78, 0x74, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74,
0x12, 0x17, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03,
0xe0, 0x41, 0x02, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x22, 0x8d, 0x02, 0x0a, 0x0e, 0x54, 0x65,
0x78, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x06,
0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41,
0x03, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x59, 0x0a, 0x0e, 0x73, 0x61, 0x66,
0x65, 0x74, 0x79, 0x5f, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
0x0b, 0x32, 0x32, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65,
0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65,
0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2e, 0x53, 0x61, 0x66, 0x65, 0x74, 0x79, 0x52,
0x61, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x0d, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x52, 0x61, 0x74,
0x69, 0x6e, 0x67, 0x73, 0x12, 0x6d, 0x0a, 0x11, 0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x36, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65,
0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76,
0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2e, 0x43, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x48, 0x00, 0x52, 0x10,
0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x72, 0x0a, 0x10, 0x45, 0x6d, 0x62,
0x65, 0x64, 0x54, 0x65, 0x78, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x45, 0x0a,
0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x2f, 0xe0, 0x41,
0x02, 0xfa, 0x41, 0x29, 0x0a, 0x27, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65,
0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x05, 0x6d,
0x6f, 0x64, 0x65, 0x6c, 0x12, 0x17, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01,
0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x22, 0x7a, 0x0a,
0x11, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x54, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x12, 0x57, 0x0a, 0x09, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x18,
0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67,
0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2e, 0x45, 0x6d, 0x62,
0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x48, 0x00, 0x52, 0x09, 0x65,
0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f,
0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x21, 0x0a, 0x09, 0x45, 0x6d, 0x62,
0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18,
0x01, 0x20, 0x03, 0x28, 0x02, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0xf3, 0x03, 0x0a,
0x0b, 0x54, 0x65, 0x78, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x81, 0x02, 0x0a,
0x0c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x12, 0x39, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61,
0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62,
0x65, 0x74, 0x61, 0x32, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78,
0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x3a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c,
0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2e,
0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x22, 0x7a, 0xda, 0x41, 0x46, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, 0x70,
0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72,
0x65, 0x2c, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e,
0x74, 0x2c, 0x6d, 0x61, 0x78, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, 0x6b,
0x65, 0x6e, 0x73, 0x2c, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x2c, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x82,
0xd3, 0xe4, 0x93, 0x02, 0x2b, 0x3a, 0x01, 0x2a, 0x22, 0x26, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74,
0x61, 0x32, 0x2f, 0x7b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73,
0x2f, 0x2a, 0x7d, 0x3a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74,
0x12, 0xb9, 0x01, 0x0a, 0x09, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x54, 0x65, 0x78, 0x74, 0x12, 0x36,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72,
0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31,
0x62, 0x65, 0x74, 0x61, 0x32, 0x2e, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x54, 0x65, 0x78, 0x74, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x37, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e,
0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2e, 0x45, 0x6d,
0x62, 0x65, 0x64, 0x54, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
0x3b, 0xda, 0x41, 0x0a, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, 0x74, 0x65, 0x78, 0x74, 0x82, 0xd3,
0xe4, 0x93, 0x02, 0x28, 0x3a, 0x01, 0x2a, 0x22, 0x23, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x32, 0x2f, 0x7b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73, 0x2f,
0x2a, 0x7d, 0x3a, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x54, 0x65, 0x78, 0x74, 0x1a, 0x24, 0xca, 0x41,
0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75,
0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63,
0x6f, 0x6d, 0x42, 0x9e, 0x01, 0x0a, 0x28, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c,
0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x42,
0x10, 0x54, 0x65, 0x78, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74,
0x6f, 0x50, 0x01, 0x5a, 0x5e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65,
0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x61,
0x70, 0x69, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x32, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61,
0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x70, 0x62, 0x3b, 0x67,
0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67,
0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescOnce sync.Once
file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescData = file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDesc
)
func file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescGZIP() []byte {
file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescOnce.Do(func() {
file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescData)
})
return file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDescData
}
var file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
var file_google_ai_generativelanguage_v1beta2_text_service_proto_goTypes = []interface{}{
(*GenerateTextRequest)(nil), // 0: google.ai.generativelanguage.v1beta2.GenerateTextRequest
(*GenerateTextResponse)(nil), // 1: google.ai.generativelanguage.v1beta2.GenerateTextResponse
(*TextPrompt)(nil), // 2: google.ai.generativelanguage.v1beta2.TextPrompt
(*TextCompletion)(nil), // 3: google.ai.generativelanguage.v1beta2.TextCompletion
(*EmbedTextRequest)(nil), // 4: google.ai.generativelanguage.v1beta2.EmbedTextRequest
(*EmbedTextResponse)(nil), // 5: google.ai.generativelanguage.v1beta2.EmbedTextResponse
(*Embedding)(nil), // 6: google.ai.generativelanguage.v1beta2.Embedding
(*SafetySetting)(nil), // 7: google.ai.generativelanguage.v1beta2.SafetySetting
(*ContentFilter)(nil), // 8: google.ai.generativelanguage.v1beta2.ContentFilter
(*SafetyFeedback)(nil), // 9: google.ai.generativelanguage.v1beta2.SafetyFeedback
(*SafetyRating)(nil), // 10: google.ai.generativelanguage.v1beta2.SafetyRating
(*CitationMetadata)(nil), // 11: google.ai.generativelanguage.v1beta2.CitationMetadata
}
var file_google_ai_generativelanguage_v1beta2_text_service_proto_depIdxs = []int32{
2, // 0: google.ai.generativelanguage.v1beta2.GenerateTextRequest.prompt:type_name -> google.ai.generativelanguage.v1beta2.TextPrompt
7, // 1: google.ai.generativelanguage.v1beta2.GenerateTextRequest.safety_settings:type_name -> google.ai.generativelanguage.v1beta2.SafetySetting
3, // 2: google.ai.generativelanguage.v1beta2.GenerateTextResponse.candidates:type_name -> google.ai.generativelanguage.v1beta2.TextCompletion
8, // 3: google.ai.generativelanguage.v1beta2.GenerateTextResponse.filters:type_name -> google.ai.generativelanguage.v1beta2.ContentFilter
9, // 4: google.ai.generativelanguage.v1beta2.GenerateTextResponse.safety_feedback:type_name -> google.ai.generativelanguage.v1beta2.SafetyFeedback
10, // 5: google.ai.generativelanguage.v1beta2.TextCompletion.safety_ratings:type_name -> google.ai.generativelanguage.v1beta2.SafetyRating
11, // 6: google.ai.generativelanguage.v1beta2.TextCompletion.citation_metadata:type_name -> google.ai.generativelanguage.v1beta2.CitationMetadata
6, // 7: google.ai.generativelanguage.v1beta2.EmbedTextResponse.embedding:type_name -> google.ai.generativelanguage.v1beta2.Embedding
0, // 8: google.ai.generativelanguage.v1beta2.TextService.GenerateText:input_type -> google.ai.generativelanguage.v1beta2.GenerateTextRequest
4, // 9: google.ai.generativelanguage.v1beta2.TextService.EmbedText:input_type -> google.ai.generativelanguage.v1beta2.EmbedTextRequest
1, // 10: google.ai.generativelanguage.v1beta2.TextService.GenerateText:output_type -> google.ai.generativelanguage.v1beta2.GenerateTextResponse
5, // 11: google.ai.generativelanguage.v1beta2.TextService.EmbedText:output_type -> google.ai.generativelanguage.v1beta2.EmbedTextResponse
10, // [10:12] is the sub-list for method output_type
8, // [8:10] is the sub-list for method input_type
8, // [8:8] is the sub-list for extension type_name
8, // [8:8] is the sub-list for extension extendee
0, // [0:8] is the sub-list for field type_name
}
func init() { file_google_ai_generativelanguage_v1beta2_text_service_proto_init() }
func file_google_ai_generativelanguage_v1beta2_text_service_proto_init() {
if File_google_ai_generativelanguage_v1beta2_text_service_proto != nil {
return
}
file_google_ai_generativelanguage_v1beta2_citation_proto_init()
file_google_ai_generativelanguage_v1beta2_safety_proto_init()
if !protoimpl.UnsafeEnabled {
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GenerateTextRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GenerateTextResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*TextPrompt); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*TextCompletion); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*EmbedTextRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*EmbedTextResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Embedding); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[0].OneofWrappers = []interface{}{}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[3].OneofWrappers = []interface{}{}
file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes[5].OneofWrappers = []interface{}{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDesc,
NumEnums: 0,
NumMessages: 7,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_google_ai_generativelanguage_v1beta2_text_service_proto_goTypes,
DependencyIndexes: file_google_ai_generativelanguage_v1beta2_text_service_proto_depIdxs,
MessageInfos: file_google_ai_generativelanguage_v1beta2_text_service_proto_msgTypes,
}.Build()
File_google_ai_generativelanguage_v1beta2_text_service_proto = out.File
file_google_ai_generativelanguage_v1beta2_text_service_proto_rawDesc = nil
file_google_ai_generativelanguage_v1beta2_text_service_proto_goTypes = nil
file_google_ai_generativelanguage_v1beta2_text_service_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// TextServiceClient is the client API for TextService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type TextServiceClient interface {
// Generates a response from the model given an input message.
GenerateText(ctx context.Context, in *GenerateTextRequest, opts ...grpc.CallOption) (*GenerateTextResponse, error)
// Generates an embedding from the model given an input message.
EmbedText(ctx context.Context, in *EmbedTextRequest, opts ...grpc.CallOption) (*EmbedTextResponse, error)
}
type textServiceClient struct {
cc grpc.ClientConnInterface
}
func NewTextServiceClient(cc grpc.ClientConnInterface) TextServiceClient {
return &textServiceClient{cc}
}
func (c *textServiceClient) GenerateText(ctx context.Context, in *GenerateTextRequest, opts ...grpc.CallOption) (*GenerateTextResponse, error) {
out := new(GenerateTextResponse)
err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1beta2.TextService/GenerateText", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
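
// exampleGenerateText is an illustrative sketch (not generated code) of calling
// the GenerateText RPC through the client above. It assumes the caller already
// holds an authenticated grpc.ClientConnInterface; transport security and API
// credentials are outside the scope of this file.
func exampleGenerateText(ctx context.Context, cc grpc.ClientConnInterface) ([]string, error) {
	client := NewTextServiceClient(cc)
	resp, err := client.GenerateText(ctx, &GenerateTextRequest{
		Model:  "models/your-model-id", // placeholder resource name
		Prompt: &TextPrompt{Text: "Say hello."},
	})
	if err != nil {
		return nil, err
	}
	var outputs []string
	for _, candidate := range resp.GetCandidates() {
		outputs = append(outputs, candidate.GetOutput())
	}
	return outputs, nil
}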
func (c *textServiceClient) EmbedText(ctx context.Context, in *EmbedTextRequest, opts ...grpc.CallOption) (*EmbedTextResponse, error) {
out := new(EmbedTextResponse)
err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1beta2.TextService/EmbedText", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
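
// exampleEmbedText is an illustrative sketch (not generated code) of calling the
// EmbedText RPC and reading the resulting embedding values. As above, the
// connection is assumed to be already established and authenticated.
func exampleEmbedText(ctx context.Context, cc grpc.ClientConnInterface, text string) ([]float32, error) {
	client := NewTextServiceClient(cc)
	resp, err := client.EmbedText(ctx, &EmbedTextRequest{
		Model: "models/your-model-id", // placeholder resource name
		Text:  text,
	})
	if err != nil {
		return nil, err
	}
	return resp.GetEmbedding().GetValue(), nil
}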
// TextServiceServer is the server API for TextService service.
type TextServiceServer interface {
// Generates a response from the model given an input message.
GenerateText(context.Context, *GenerateTextRequest) (*GenerateTextResponse, error)
// Generates an embedding from the model given an input message.
EmbedText(context.Context, *EmbedTextRequest) (*EmbedTextResponse, error)
}
// UnimplementedTextServiceServer can be embedded to have forward compatible implementations.
type UnimplementedTextServiceServer struct {
}
func (*UnimplementedTextServiceServer) GenerateText(context.Context, *GenerateTextRequest) (*GenerateTextResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GenerateText not implemented")
}
func (*UnimplementedTextServiceServer) EmbedText(context.Context, *EmbedTextRequest) (*EmbedTextResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method EmbedText not implemented")
}
func RegisterTextServiceServer(s *grpc.Server, srv TextServiceServer) {
s.RegisterService(&_TextService_serviceDesc, srv)
}
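
// exampleTextServiceServer is an illustrative sketch (not generated code) of a
// minimal TextService implementation. Embedding UnimplementedTextServiceServer
// keeps the type forward compatible; this GenerateText simply echoes the prompt
// text back as a single candidate.
type exampleTextServiceServer struct {
	UnimplementedTextServiceServer
}

func (s *exampleTextServiceServer) GenerateText(ctx context.Context, req *GenerateTextRequest) (*GenerateTextResponse, error) {
	return &GenerateTextResponse{
		Candidates: []*TextCompletion{{Output: req.GetPrompt().GetText()}},
	}, nil
}

// Registration with a grpc.Server would then look like:
//
//	s := grpc.NewServer()
//	RegisterTextServiceServer(s, &exampleTextServiceServer{})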
func _TextService_GenerateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GenerateTextRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(TextServiceServer).GenerateText(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.ai.generativelanguage.v1beta2.TextService/GenerateText",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(TextServiceServer).GenerateText(ctx, req.(*GenerateTextRequest))
}
return interceptor(ctx, in, info, handler)
}
func _TextService_EmbedText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(EmbedTextRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(TextServiceServer).EmbedText(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.ai.generativelanguage.v1beta2.TextService/EmbedText",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(TextServiceServer).EmbedText(ctx, req.(*EmbedTextRequest))
}
return interceptor(ctx, in, info, handler)
}
var _TextService_serviceDesc = grpc.ServiceDesc{
ServiceName: "google.ai.generativelanguage.v1beta2.TextService",
HandlerType: (*TextServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GenerateText",
Handler: _TextService_GenerateText_Handler,
},
{
MethodName: "EmbedText",
Handler: _TextService_EmbedText_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "google/ai/generativelanguage/v1beta2/text_service.proto",
}