| // Copyright 2023 Google LLC |
| // |
| // Licensed under the Apache License, Version 2.0 (the "License"); |
| // you may not use this file except in compliance with the License. |
| // You may obtain a copy of the License at |
| // |
| // http://www.apache.org/licenses/LICENSE-2.0 |
| // |
| // Unless required by applicable law or agreed to in writing, software |
| // distributed under the License is distributed on an "AS IS" BASIS, |
| // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| // See the License for the specific language governing permissions and |
| // limitations under the License. |
| |
| // Code generated by protoc-gen-go. DO NOT EDIT. |
| // versions: |
| // protoc-gen-go v1.33.0 |
| // protoc v4.25.3 |
| // source: google/ai/generativelanguage/v1/generative_service.proto |
| |
| package generativelanguagepb |
| |
| import ( |
| context "context" |
| reflect "reflect" |
| sync "sync" |
| |
| _ "google.golang.org/genproto/googleapis/api/annotations" |
| grpc "google.golang.org/grpc" |
| codes "google.golang.org/grpc/codes" |
| status "google.golang.org/grpc/status" |
| protoreflect "google.golang.org/protobuf/reflect/protoreflect" |
| protoimpl "google.golang.org/protobuf/runtime/protoimpl" |
| ) |
| |
const (
	// Verify that this generated code is sufficiently up-to-date.
	// Fails to compile if runtime/protoimpl is older than the version this
	// file was generated against.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	// Fails to compile if runtime/protoimpl is newer than this file supports.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
| |
// TaskType is the type of task for which the embedding will be used.
type TaskType int32

const (
	// Unset value, which will default to one of the other enum values.
	TaskType_TASK_TYPE_UNSPECIFIED TaskType = 0
	// Specifies the given text is a query in a search/retrieval setting.
	TaskType_RETRIEVAL_QUERY TaskType = 1
	// Specifies the given text is a document from the corpus being searched.
	TaskType_RETRIEVAL_DOCUMENT TaskType = 2
	// Specifies the given text will be used for STS.
	TaskType_SEMANTIC_SIMILARITY TaskType = 3
	// Specifies that the given text will be classified.
	TaskType_CLASSIFICATION TaskType = 4
	// Specifies that the embeddings will be used for clustering.
	TaskType_CLUSTERING TaskType = 5
	// Specifies that the given text will be used for question answering.
	TaskType_QUESTION_ANSWERING TaskType = 6
	// Specifies that the given text will be used for fact verification.
	TaskType_FACT_VERIFICATION TaskType = 7
)

// Enum value maps for TaskType.
var (
	TaskType_name = map[int32]string{
		0: "TASK_TYPE_UNSPECIFIED",
		1: "RETRIEVAL_QUERY",
		2: "RETRIEVAL_DOCUMENT",
		3: "SEMANTIC_SIMILARITY",
		4: "CLASSIFICATION",
		5: "CLUSTERING",
		6: "QUESTION_ANSWERING",
		7: "FACT_VERIFICATION",
	}
	TaskType_value = map[string]int32{
		"TASK_TYPE_UNSPECIFIED": 0,
		"RETRIEVAL_QUERY":       1,
		"RETRIEVAL_DOCUMENT":    2,
		"SEMANTIC_SIMILARITY":   3,
		"CLASSIFICATION":        4,
		"CLUSTERING":            5,
		"QUESTION_ANSWERING":    6,
		"FACT_VERIFICATION":     7,
	}
)

// Enum returns a pointer to a new TaskType set to x, for use in
// optional (pointer-typed) proto fields.
func (x TaskType) Enum() *TaskType {
	p := new(TaskType)
	*p = x
	return p
}

// String returns the proto name of the enum value.
func (x TaskType) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

// Descriptor returns the protoreflect descriptor for the TaskType enum.
func (TaskType) Descriptor() protoreflect.EnumDescriptor {
	return file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes[0].Descriptor()
}

// Type returns the protoreflect enum type for TaskType.
func (TaskType) Type() protoreflect.EnumType {
	return &file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes[0]
}

// Number returns x as a protoreflect.EnumNumber (its wire value).
func (x TaskType) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use TaskType.Descriptor instead.
func (TaskType) EnumDescriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{0}
}
| |
// GenerateContentResponse_PromptFeedback_BlockReason specifies the reason
// why the prompt was blocked.
type GenerateContentResponse_PromptFeedback_BlockReason int32

const (
	// Default value. This value is unused.
	GenerateContentResponse_PromptFeedback_BLOCK_REASON_UNSPECIFIED GenerateContentResponse_PromptFeedback_BlockReason = 0
	// Prompt was blocked due to safety reasons. You can inspect
	// `safety_ratings` to understand which safety category blocked it.
	GenerateContentResponse_PromptFeedback_SAFETY GenerateContentResponse_PromptFeedback_BlockReason = 1
	// Prompt was blocked due to unknown reasons.
	GenerateContentResponse_PromptFeedback_OTHER GenerateContentResponse_PromptFeedback_BlockReason = 2
)

// Enum value maps for GenerateContentResponse_PromptFeedback_BlockReason.
var (
	GenerateContentResponse_PromptFeedback_BlockReason_name = map[int32]string{
		0: "BLOCK_REASON_UNSPECIFIED",
		1: "SAFETY",
		2: "OTHER",
	}
	GenerateContentResponse_PromptFeedback_BlockReason_value = map[string]int32{
		"BLOCK_REASON_UNSPECIFIED": 0,
		"SAFETY":                   1,
		"OTHER":                    2,
	}
)

// Enum returns a pointer to a new BlockReason set to x, for use in
// optional (pointer-typed) proto fields.
func (x GenerateContentResponse_PromptFeedback_BlockReason) Enum() *GenerateContentResponse_PromptFeedback_BlockReason {
	p := new(GenerateContentResponse_PromptFeedback_BlockReason)
	*p = x
	return p
}

// String returns the proto name of the enum value.
func (x GenerateContentResponse_PromptFeedback_BlockReason) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

// Descriptor returns the protoreflect descriptor for the BlockReason enum.
func (GenerateContentResponse_PromptFeedback_BlockReason) Descriptor() protoreflect.EnumDescriptor {
	return file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes[1].Descriptor()
}

// Type returns the protoreflect enum type for BlockReason.
func (GenerateContentResponse_PromptFeedback_BlockReason) Type() protoreflect.EnumType {
	return &file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes[1]
}

// Number returns x as a protoreflect.EnumNumber (its wire value).
func (x GenerateContentResponse_PromptFeedback_BlockReason) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use GenerateContentResponse_PromptFeedback_BlockReason.Descriptor instead.
func (GenerateContentResponse_PromptFeedback_BlockReason) EnumDescriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{2, 0, 0}
}
| |
// Candidate_FinishReason defines the reason why the model stopped generating
// tokens.
type Candidate_FinishReason int32

const (
	// Default value. This value is unused.
	Candidate_FINISH_REASON_UNSPECIFIED Candidate_FinishReason = 0
	// Natural stop point of the model or provided stop sequence.
	Candidate_STOP Candidate_FinishReason = 1
	// The maximum number of tokens as specified in the request was reached.
	Candidate_MAX_TOKENS Candidate_FinishReason = 2
	// The candidate content was flagged for safety reasons.
	Candidate_SAFETY Candidate_FinishReason = 3
	// The candidate content was flagged for recitation reasons.
	Candidate_RECITATION Candidate_FinishReason = 4
	// Unknown reason.
	Candidate_OTHER Candidate_FinishReason = 5
)

// Enum value maps for Candidate_FinishReason.
var (
	Candidate_FinishReason_name = map[int32]string{
		0: "FINISH_REASON_UNSPECIFIED",
		1: "STOP",
		2: "MAX_TOKENS",
		3: "SAFETY",
		4: "RECITATION",
		5: "OTHER",
	}
	Candidate_FinishReason_value = map[string]int32{
		"FINISH_REASON_UNSPECIFIED": 0,
		"STOP":                      1,
		"MAX_TOKENS":                2,
		"SAFETY":                    3,
		"RECITATION":                4,
		"OTHER":                     5,
	}
)

// Enum returns a pointer to a new Candidate_FinishReason set to x, for use
// in optional (pointer-typed) proto fields.
func (x Candidate_FinishReason) Enum() *Candidate_FinishReason {
	p := new(Candidate_FinishReason)
	*p = x
	return p
}

// String returns the proto name of the enum value.
func (x Candidate_FinishReason) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

// Descriptor returns the protoreflect descriptor for the FinishReason enum.
func (Candidate_FinishReason) Descriptor() protoreflect.EnumDescriptor {
	return file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes[2].Descriptor()
}

// Type returns the protoreflect enum type for FinishReason.
func (Candidate_FinishReason) Type() protoreflect.EnumType {
	return &file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes[2]
}

// Number returns x as a protoreflect.EnumNumber (its wire value).
func (x Candidate_FinishReason) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use Candidate_FinishReason.Descriptor instead.
func (Candidate_FinishReason) EnumDescriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{3, 0}
}
| |
// GenerateContentRequest is the request to generate a completion from the
// model.
type GenerateContentRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Required. The name of the `Model` to use for generating the completion.
	//
	// Format: `name=models/{model}`.
	Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"`
	// Required. The content of the current conversation with the model.
	//
	// For single-turn queries, this is a single instance. For multi-turn queries,
	// this is a repeated field that contains conversation history + latest
	// request.
	Contents []*Content `protobuf:"bytes,2,rep,name=contents,proto3" json:"contents,omitempty"`
	// Optional. A list of unique `SafetySetting` instances for blocking unsafe
	// content.
	//
	// This will be enforced on the `GenerateContentRequest.contents` and
	// `GenerateContentResponse.candidates`. There should not be more than one
	// setting for each `SafetyCategory` type. The API will block any contents and
	// responses that fail to meet the thresholds set by these settings. This list
	// overrides the default settings for each `SafetyCategory` specified in the
	// safety_settings. If there is no `SafetySetting` for a given
	// `SafetyCategory` provided in the list, the API will use the default safety
	// setting for that category. Harm categories HARM_CATEGORY_HATE_SPEECH,
	// HARM_CATEGORY_SEXUALLY_EXPLICIT, HARM_CATEGORY_DANGEROUS_CONTENT,
	// HARM_CATEGORY_HARASSMENT are supported.
	SafetySettings []*SafetySetting `protobuf:"bytes,3,rep,name=safety_settings,json=safetySettings,proto3" json:"safety_settings,omitempty"`
	// Optional. Configuration options for model generation and outputs.
	GenerationConfig *GenerationConfig `protobuf:"bytes,4,opt,name=generation_config,json=generationConfig,proto3,oneof" json:"generation_config,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *GenerateContentRequest) Reset() {
	*x = GenerateContentRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *GenerateContentRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks GenerateContentRequest as a protobuf message.
func (*GenerateContentRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *GenerateContentRequest) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GenerateContentRequest.ProtoReflect.Descriptor instead.
func (*GenerateContentRequest) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{0}
}

// GetModel returns the model name, or "" if x is nil.
func (x *GenerateContentRequest) GetModel() string {
	if x != nil {
		return x.Model
	}
	return ""
}

// GetContents returns the conversation contents, or nil if x is nil.
func (x *GenerateContentRequest) GetContents() []*Content {
	if x != nil {
		return x.Contents
	}
	return nil
}

// GetSafetySettings returns the safety settings, or nil if x is nil.
func (x *GenerateContentRequest) GetSafetySettings() []*SafetySetting {
	if x != nil {
		return x.SafetySettings
	}
	return nil
}

// GetGenerationConfig returns the generation config, or nil if x is nil or
// the field is unset.
func (x *GenerateContentRequest) GetGenerationConfig() *GenerationConfig {
	if x != nil {
		return x.GenerationConfig
	}
	return nil
}
| |
// GenerationConfig holds configuration options for model generation and
// outputs. Not all parameters may be configurable for every model.
type GenerationConfig struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Optional. Number of generated responses to return.
	//
	// Currently, this value can only be set to 1. If unset, this will default
	// to 1.
	CandidateCount *int32 `protobuf:"varint,1,opt,name=candidate_count,json=candidateCount,proto3,oneof" json:"candidate_count,omitempty"`
	// Optional. The set of character sequences (up to 5) that will stop output
	// generation. If specified, the API will stop at the first appearance of a
	// stop sequence. The stop sequence will not be included as part of the
	// response.
	StopSequences []string `protobuf:"bytes,2,rep,name=stop_sequences,json=stopSequences,proto3" json:"stop_sequences,omitempty"`
	// Optional. The maximum number of tokens to include in a candidate.
	//
	// Note: The default value varies by model, see the `Model.output_token_limit`
	// attribute of the `Model` returned from the `getModel` function.
	MaxOutputTokens *int32 `protobuf:"varint,4,opt,name=max_output_tokens,json=maxOutputTokens,proto3,oneof" json:"max_output_tokens,omitempty"`
	// Optional. Controls the randomness of the output.
	//
	// Note: The default value varies by model, see the `Model.temperature`
	// attribute of the `Model` returned from the `getModel` function.
	//
	// Values can range from [0.0, 2.0].
	Temperature *float32 `protobuf:"fixed32,5,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"`
	// Optional. The maximum cumulative probability of tokens to consider when
	// sampling.
	//
	// The model uses combined Top-k and nucleus sampling.
	//
	// Tokens are sorted based on their assigned probabilities so that only the
	// most likely tokens are considered. Top-k sampling directly limits the
	// maximum number of tokens to consider, while Nucleus sampling limits number
	// of tokens based on the cumulative probability.
	//
	// Note: The default value varies by model, see the `Model.top_p`
	// attribute of the `Model` returned from the `getModel` function.
	TopP *float32 `protobuf:"fixed32,6,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"`
	// Optional. The maximum number of tokens to consider when sampling.
	//
	// Models use nucleus sampling or combined Top-k and nucleus sampling.
	// Top-k sampling considers the set of `top_k` most probable tokens.
	// Models running with nucleus sampling don't allow top_k setting.
	//
	// Note: The default value varies by model, see the `Model.top_k`
	// attribute of the `Model` returned from the `getModel` function. Empty
	// `top_k` field in `Model` indicates the model doesn't apply top-k sampling
	// and doesn't allow setting `top_k` on requests.
	TopK *int32 `protobuf:"varint,7,opt,name=top_k,json=topK,proto3,oneof" json:"top_k,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *GenerationConfig) Reset() {
	*x = GenerationConfig{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *GenerationConfig) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks GenerationConfig as a protobuf message.
func (*GenerationConfig) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *GenerationConfig) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GenerationConfig.ProtoReflect.Descriptor instead.
func (*GenerationConfig) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{1}
}

// GetCandidateCount returns the candidate count, or 0 if x is nil or the
// field is unset.
func (x *GenerationConfig) GetCandidateCount() int32 {
	if x != nil && x.CandidateCount != nil {
		return *x.CandidateCount
	}
	return 0
}

// GetStopSequences returns the stop sequences, or nil if x is nil.
func (x *GenerationConfig) GetStopSequences() []string {
	if x != nil {
		return x.StopSequences
	}
	return nil
}

// GetMaxOutputTokens returns the max output tokens, or 0 if x is nil or the
// field is unset.
func (x *GenerationConfig) GetMaxOutputTokens() int32 {
	if x != nil && x.MaxOutputTokens != nil {
		return *x.MaxOutputTokens
	}
	return 0
}

// GetTemperature returns the temperature, or 0 if x is nil or the field is
// unset.
func (x *GenerationConfig) GetTemperature() float32 {
	if x != nil && x.Temperature != nil {
		return *x.Temperature
	}
	return 0
}

// GetTopP returns top_p, or 0 if x is nil or the field is unset.
func (x *GenerationConfig) GetTopP() float32 {
	if x != nil && x.TopP != nil {
		return *x.TopP
	}
	return 0
}

// GetTopK returns top_k, or 0 if x is nil or the field is unset.
func (x *GenerationConfig) GetTopK() int32 {
	if x != nil && x.TopK != nil {
		return *x.TopK
	}
	return 0
}
| |
// GenerateContentResponse is the response from the model supporting multiple
// candidates.
//
// Note on safety ratings and content filtering. They are reported for both
// prompt in `GenerateContentResponse.prompt_feedback` and for each candidate
// in `finish_reason` and in `safety_ratings`. The API contract is that:
//   - either all requested candidates are returned or no candidates at all
//   - no candidates are returned only if there was something wrong with the
//     prompt (see `prompt_feedback`)
//   - feedback on each candidate is reported on `finish_reason` and
//     `safety_ratings`.
type GenerateContentResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Candidate responses from the model.
	Candidates []*Candidate `protobuf:"bytes,1,rep,name=candidates,proto3" json:"candidates,omitempty"`
	// Returns the prompt's feedback related to the content filters.
	PromptFeedback *GenerateContentResponse_PromptFeedback `protobuf:"bytes,2,opt,name=prompt_feedback,json=promptFeedback,proto3" json:"prompt_feedback,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *GenerateContentResponse) Reset() {
	*x = GenerateContentResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *GenerateContentResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks GenerateContentResponse as a protobuf message.
func (*GenerateContentResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *GenerateContentResponse) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GenerateContentResponse.ProtoReflect.Descriptor instead.
func (*GenerateContentResponse) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{2}
}

// GetCandidates returns the candidate responses, or nil if x is nil.
func (x *GenerateContentResponse) GetCandidates() []*Candidate {
	if x != nil {
		return x.Candidates
	}
	return nil
}

// GetPromptFeedback returns the prompt feedback, or nil if x is nil.
func (x *GenerateContentResponse) GetPromptFeedback() *GenerateContentResponse_PromptFeedback {
	if x != nil {
		return x.PromptFeedback
	}
	return nil
}
| |
// Candidate is a response candidate generated from the model.
type Candidate struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Output only. Index of the candidate in the list of candidates.
	Index *int32 `protobuf:"varint,3,opt,name=index,proto3,oneof" json:"index,omitempty"`
	// Output only. Generated content returned from the model.
	Content *Content `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"`
	// Optional. Output only. The reason why the model stopped generating tokens.
	//
	// If empty, the model has not stopped generating the tokens.
	FinishReason Candidate_FinishReason `protobuf:"varint,2,opt,name=finish_reason,json=finishReason,proto3,enum=google.ai.generativelanguage.v1.Candidate_FinishReason" json:"finish_reason,omitempty"`
	// List of ratings for the safety of a response candidate.
	//
	// There is at most one rating per category.
	SafetyRatings []*SafetyRating `protobuf:"bytes,5,rep,name=safety_ratings,json=safetyRatings,proto3" json:"safety_ratings,omitempty"`
	// Output only. Citation information for model-generated candidate.
	//
	// This field may be populated with recitation information for any text
	// included in the `content`. These are passages that are "recited" from
	// copyrighted material in the foundational LLM's training data.
	CitationMetadata *CitationMetadata `protobuf:"bytes,6,opt,name=citation_metadata,json=citationMetadata,proto3" json:"citation_metadata,omitempty"`
	// Output only. Token count for this candidate.
	TokenCount int32 `protobuf:"varint,7,opt,name=token_count,json=tokenCount,proto3" json:"token_count,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *Candidate) Reset() {
	*x = Candidate{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *Candidate) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks Candidate as a protobuf message.
func (*Candidate) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *Candidate) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Candidate.ProtoReflect.Descriptor instead.
func (*Candidate) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{3}
}

// GetIndex returns the candidate index, or 0 if x is nil or the field is
// unset.
func (x *Candidate) GetIndex() int32 {
	if x != nil && x.Index != nil {
		return *x.Index
	}
	return 0
}

// GetContent returns the generated content, or nil if x is nil.
func (x *Candidate) GetContent() *Content {
	if x != nil {
		return x.Content
	}
	return nil
}

// GetFinishReason returns the finish reason, or FINISH_REASON_UNSPECIFIED if
// x is nil.
func (x *Candidate) GetFinishReason() Candidate_FinishReason {
	if x != nil {
		return x.FinishReason
	}
	return Candidate_FINISH_REASON_UNSPECIFIED
}

// GetSafetyRatings returns the safety ratings, or nil if x is nil.
func (x *Candidate) GetSafetyRatings() []*SafetyRating {
	if x != nil {
		return x.SafetyRatings
	}
	return nil
}

// GetCitationMetadata returns the citation metadata, or nil if x is nil.
func (x *Candidate) GetCitationMetadata() *CitationMetadata {
	if x != nil {
		return x.CitationMetadata
	}
	return nil
}

// GetTokenCount returns the token count, or 0 if x is nil.
func (x *Candidate) GetTokenCount() int32 {
	if x != nil {
		return x.TokenCount
	}
	return 0
}
| |
// EmbedContentRequest is the request containing the `Content` for the model
// to embed.
type EmbedContentRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Required. The model's resource name. This serves as an ID for the Model to
	// use.
	//
	// This name should match a model name returned by the `ListModels` method.
	//
	// Format: `models/{model}`
	Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"`
	// Required. The content to embed. Only the `parts.text` fields will be
	// counted.
	Content *Content `protobuf:"bytes,2,opt,name=content,proto3" json:"content,omitempty"`
	// Optional. Optional task type for which the embeddings will be used. Can
	// only be set for `models/embedding-001`.
	TaskType *TaskType `protobuf:"varint,3,opt,name=task_type,json=taskType,proto3,enum=google.ai.generativelanguage.v1.TaskType,oneof" json:"task_type,omitempty"`
	// Optional. An optional title for the text. Only applicable when TaskType is
	// `RETRIEVAL_DOCUMENT`.
	//
	// Note: Specifying a `title` for `RETRIEVAL_DOCUMENT` provides better quality
	// embeddings for retrieval.
	Title *string `protobuf:"bytes,4,opt,name=title,proto3,oneof" json:"title,omitempty"`
	// Optional. Optional reduced dimension for the output embedding. If set,
	// excessive values in the output embedding are truncated from the end.
	// Supported by `models/text-embedding-latest`.
	OutputDimensionality *int32 `protobuf:"varint,5,opt,name=output_dimensionality,json=outputDimensionality,proto3,oneof" json:"output_dimensionality,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *EmbedContentRequest) Reset() {
	*x = EmbedContentRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *EmbedContentRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks EmbedContentRequest as a protobuf message.
func (*EmbedContentRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *EmbedContentRequest) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EmbedContentRequest.ProtoReflect.Descriptor instead.
func (*EmbedContentRequest) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{4}
}

// GetModel returns the model resource name, or "" if x is nil.
func (x *EmbedContentRequest) GetModel() string {
	if x != nil {
		return x.Model
	}
	return ""
}

// GetContent returns the content to embed, or nil if x is nil.
func (x *EmbedContentRequest) GetContent() *Content {
	if x != nil {
		return x.Content
	}
	return nil
}

// GetTaskType returns the task type, or TASK_TYPE_UNSPECIFIED if x is nil or
// the field is unset.
func (x *EmbedContentRequest) GetTaskType() TaskType {
	if x != nil && x.TaskType != nil {
		return *x.TaskType
	}
	return TaskType_TASK_TYPE_UNSPECIFIED
}

// GetTitle returns the title, or "" if x is nil or the field is unset.
func (x *EmbedContentRequest) GetTitle() string {
	if x != nil && x.Title != nil {
		return *x.Title
	}
	return ""
}

// GetOutputDimensionality returns the output dimensionality, or 0 if x is
// nil or the field is unset.
func (x *EmbedContentRequest) GetOutputDimensionality() int32 {
	if x != nil && x.OutputDimensionality != nil {
		return *x.OutputDimensionality
	}
	return 0
}
| |
// ContentEmbedding is a list of floats representing an embedding.
type ContentEmbedding struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The embedding values.
	Values []float32 `protobuf:"fixed32,1,rep,packed,name=values,proto3" json:"values,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *ContentEmbedding) Reset() {
	*x = ContentEmbedding{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *ContentEmbedding) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks ContentEmbedding as a protobuf message.
func (*ContentEmbedding) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *ContentEmbedding) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ContentEmbedding.ProtoReflect.Descriptor instead.
func (*ContentEmbedding) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{5}
}

// GetValues returns the embedding values, or nil if x is nil.
func (x *ContentEmbedding) GetValues() []float32 {
	if x != nil {
		return x.Values
	}
	return nil
}
| |
// EmbedContentResponse is the response to an `EmbedContentRequest`.
type EmbedContentResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Output only. The embedding generated from the input content.
	Embedding *ContentEmbedding `protobuf:"bytes,1,opt,name=embedding,proto3" json:"embedding,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *EmbedContentResponse) Reset() {
	*x = EmbedContentResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *EmbedContentResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks EmbedContentResponse as a protobuf message.
func (*EmbedContentResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *EmbedContentResponse) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EmbedContentResponse.ProtoReflect.Descriptor instead.
func (*EmbedContentResponse) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{6}
}

// GetEmbedding returns the generated embedding, or nil if x is nil.
func (x *EmbedContentResponse) GetEmbedding() *ContentEmbedding {
	if x != nil {
		return x.Embedding
	}
	return nil
}
| |
// BatchEmbedContentsRequest is a batch request to get embeddings from the
// model for a list of prompts.
type BatchEmbedContentsRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Required. The model's resource name. This serves as an ID for the Model to
	// use.
	//
	// This name should match a model name returned by the `ListModels` method.
	//
	// Format: `models/{model}`
	Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"`
	// Required. Embed requests for the batch. The model in each of these requests
	// must match the model specified `BatchEmbedContentsRequest.model`.
	Requests []*EmbedContentRequest `protobuf:"bytes,2,rep,name=requests,proto3" json:"requests,omitempty"`
}

// Reset clears x to its zero state, re-attaching the message info when the
// protoimpl unsafe fast path is enabled.
func (x *BatchEmbedContentsRequest) Reset() {
	*x = BatchEmbedContentsRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x in the prototext format.
func (x *BatchEmbedContentsRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks BatchEmbedContentsRequest as a protobuf message.
func (*BatchEmbedContentsRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of x.
func (x *BatchEmbedContentsRequest) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use BatchEmbedContentsRequest.ProtoReflect.Descriptor instead.
func (*BatchEmbedContentsRequest) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{7}
}

// GetModel returns the model resource name, or "" if x is nil.
func (x *BatchEmbedContentsRequest) GetModel() string {
	if x != nil {
		return x.Model
	}
	return ""
}

// GetRequests returns the batch's embed requests, or nil if x is nil.
func (x *BatchEmbedContentsRequest) GetRequests() []*EmbedContentRequest {
	if x != nil {
		return x.Requests
	}
	return nil
}
| |
// The response to a `BatchEmbedContentsRequest`.
type BatchEmbedContentsResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Output only. The embeddings for each request, in the same order as provided
	// in the batch request.
	Embeddings []*ContentEmbedding `protobuf:"bytes,1,rep,name=embeddings,proto3" json:"embeddings,omitempty"`
}

// Reset clears the message to its zero value. When the runtime's unsafe fast
// path is enabled, it also stores the message info on the freshly zeroed
// state so later reflective operations avoid a lookup.
func (x *BatchEmbedContentsResponse) Reset() {
	*x = BatchEmbedContentsResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable representation of the message, rendered by
// the protobuf runtime.
func (x *BatchEmbedContentsResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage is a marker method identifying *BatchEmbedContentsResponse as
// a protobuf message.
func (*BatchEmbedContentsResponse) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message. On the unsafe fast
// path it lazily caches the message info in the per-message state on first
// use.
func (x *BatchEmbedContentsResponse) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use BatchEmbedContentsResponse.ProtoReflect.Descriptor instead.
func (*BatchEmbedContentsResponse) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{8}
}

// GetEmbeddings returns the Embeddings field. It is safe to call on a nil
// receiver, in which case it returns nil.
func (x *BatchEmbedContentsResponse) GetEmbeddings() []*ContentEmbedding {
	if x != nil {
		return x.Embeddings
	}
	return nil
}
| |
// Counts the number of tokens in the `prompt` sent to a model.
//
// Models may tokenize text differently, so each model may return a different
// `token_count`.
type CountTokensRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Required. The model's resource name. This serves as an ID for the Model to
	// use.
	//
	// This name should match a model name returned by the `ListModels` method.
	//
	// Format: `models/{model}`
	Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"`
	// Required. The input given to the model as a prompt.
	Contents []*Content `protobuf:"bytes,2,rep,name=contents,proto3" json:"contents,omitempty"`
}

// Reset clears the message to its zero value. When the runtime's unsafe fast
// path is enabled, it also stores the message info on the freshly zeroed
// state so later reflective operations avoid a lookup.
func (x *CountTokensRequest) Reset() {
	*x = CountTokensRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable representation of the message, rendered by
// the protobuf runtime.
func (x *CountTokensRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage is a marker method identifying *CountTokensRequest as a
// protobuf message.
func (*CountTokensRequest) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message. On the unsafe fast
// path it lazily caches the message info in the per-message state on first
// use.
func (x *CountTokensRequest) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CountTokensRequest.ProtoReflect.Descriptor instead.
func (*CountTokensRequest) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{9}
}

// GetModel returns the Model field. It is safe to call on a nil receiver, in
// which case it returns the empty string.
func (x *CountTokensRequest) GetModel() string {
	if x != nil {
		return x.Model
	}
	return ""
}

// GetContents returns the Contents field. It is safe to call on a nil
// receiver, in which case it returns nil.
func (x *CountTokensRequest) GetContents() []*Content {
	if x != nil {
		return x.Contents
	}
	return nil
}
| |
// A response from `CountTokens`.
//
// It returns the model's `token_count` for the `prompt`.
type CountTokensResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The number of tokens that the `model` tokenizes the `prompt` into.
	//
	// Always non-negative.
	TotalTokens int32 `protobuf:"varint,1,opt,name=total_tokens,json=totalTokens,proto3" json:"total_tokens,omitempty"`
}

// Reset clears the message to its zero value. When the runtime's unsafe fast
// path is enabled, it also stores the message info on the freshly zeroed
// state so later reflective operations avoid a lookup.
func (x *CountTokensResponse) Reset() {
	*x = CountTokensResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[10]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable representation of the message, rendered by
// the protobuf runtime.
func (x *CountTokensResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage is a marker method identifying *CountTokensResponse as a
// protobuf message.
func (*CountTokensResponse) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message. On the unsafe fast
// path it lazily caches the message info in the per-message state on first
// use.
func (x *CountTokensResponse) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[10]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CountTokensResponse.ProtoReflect.Descriptor instead.
func (*CountTokensResponse) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{10}
}

// GetTotalTokens returns the TotalTokens field. It is safe to call on a nil
// receiver, in which case it returns 0.
func (x *CountTokensResponse) GetTotalTokens() int32 {
	if x != nil {
		return x.TotalTokens
	}
	return 0
}
| |
// A set of the feedback metadata the prompt specified in
// `GenerateContentRequest.content`.
type GenerateContentResponse_PromptFeedback struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Optional. If set, the prompt was blocked and no candidates are returned.
	// Rephrase your prompt.
	BlockReason GenerateContentResponse_PromptFeedback_BlockReason `protobuf:"varint,1,opt,name=block_reason,json=blockReason,proto3,enum=google.ai.generativelanguage.v1.GenerateContentResponse_PromptFeedback_BlockReason" json:"block_reason,omitempty"`
	// Ratings for safety of the prompt.
	// There is at most one rating per category.
	SafetyRatings []*SafetyRating `protobuf:"bytes,2,rep,name=safety_ratings,json=safetyRatings,proto3" json:"safety_ratings,omitempty"`
}

// Reset clears the message to its zero value. When the runtime's unsafe fast
// path is enabled, it also stores the message info on the freshly zeroed
// state so later reflective operations avoid a lookup.
func (x *GenerateContentResponse_PromptFeedback) Reset() {
	*x = GenerateContentResponse_PromptFeedback{}
	if protoimpl.UnsafeeEnabled {
		mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[11]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String returns a human-readable representation of the message, rendered by
// the protobuf runtime.
func (x *GenerateContentResponse_PromptFeedback) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage is a marker method identifying
// *GenerateContentResponse_PromptFeedback as a protobuf message.
func (*GenerateContentResponse_PromptFeedback) ProtoMessage() {}

// ProtoReflect returns a reflective view of the message. On the unsafe fast
// path it lazily caches the message info in the per-message state on first
// use.
func (x *GenerateContentResponse_PromptFeedback) ProtoReflect() protoreflect.Message {
	mi := &file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[11]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GenerateContentResponse_PromptFeedback.ProtoReflect.Descriptor instead.
func (*GenerateContentResponse_PromptFeedback) Descriptor() ([]byte, []int) {
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP(), []int{2, 0}
}

// GetBlockReason returns the BlockReason field. It is safe to call on a nil
// receiver, in which case it returns BLOCK_REASON_UNSPECIFIED.
func (x *GenerateContentResponse_PromptFeedback) GetBlockReason() GenerateContentResponse_PromptFeedback_BlockReason {
	if x != nil {
		return x.BlockReason
	}
	return GenerateContentResponse_PromptFeedback_BLOCK_REASON_UNSPECIFIED
}

// GetSafetyRatings returns the SafetyRatings field. It is safe to call on a
// nil receiver, in which case it returns nil.
func (x *GenerateContentResponse_PromptFeedback) GetSafetyRatings() []*SafetyRating {
	if x != nil {
		return x.SafetyRatings
	}
	return nil
}
| |
// File_google_ai_generativelanguage_v1_generative_service_proto is the
// reflective descriptor for this generated file; presumably assigned by this
// file's generated initialization code (not visible in this chunk).
var File_google_ai_generativelanguage_v1_generative_service_proto protoreflect.FileDescriptor
| |
| var file_google_ai_generativelanguage_v1_generative_service_proto_rawDesc = []byte{ |
| 0x0a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, |
| 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76, |
| 0x31, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x73, 0x65, 0x72, |
| 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1f, 0x67, 0x6f, 0x6f, 0x67, |
| 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, |
| 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, |
| 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, |
| 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x69, 0x74, |
| 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2d, 0x67, 0x6f, 0x6f, |
| 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, |
| 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, |
| 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x67, 0x6f, 0x6f, 0x67, |
| 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, |
| 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x73, 0x61, 0x66, 0x65, |
| 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, |
| 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, |
| 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, |
| 0x70, 0x69, 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, |
| 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, |
| 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, |
| 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, |
| 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x88, 0x03, 0x0a, 0x16, |
| 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, |
| 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x45, 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, |
| 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x2f, 0xe0, 0x41, 0x02, 0xfa, 0x41, 0x29, 0x0a, 0x27, 0x67, |
| 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, |
| 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, |
| 0x2f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x49, 0x0a, |
| 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, |
| 0x28, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, |
| 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, |
| 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, |
| 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x5c, 0x0a, 0x0f, 0x73, 0x61, 0x66, 0x65, |
| 0x74, 0x79, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, |
| 0x0b, 0x32, 0x2e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, |
| 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x61, 0x66, 0x65, 0x74, 0x79, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, |
| 0x67, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x52, 0x0e, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x53, 0x65, |
| 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x68, 0x0a, 0x11, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, |
| 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, |
| 0x0b, 0x32, 0x31, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, |
| 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, |
| 0x6e, 0x66, 0x69, 0x67, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, 0x00, 0x52, 0x10, 0x67, 0x65, 0x6e, |
| 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x88, 0x01, 0x01, |
| 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, |
| 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xdf, 0x02, 0x0a, 0x10, 0x47, 0x65, 0x6e, 0x65, 0x72, |
| 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x0f, 0x63, |
| 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, |
| 0x20, 0x01, 0x28, 0x05, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x61, 0x6e, |
| 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x12, 0x2a, |
| 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, |
| 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x52, 0x0d, 0x73, 0x74, 0x6f, |
| 0x70, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x34, 0x0a, 0x11, 0x6d, 0x61, |
| 0x78, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, |
| 0x04, 0x20, 0x01, 0x28, 0x05, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, 0x01, 0x52, 0x0f, 0x6d, 0x61, |
| 0x78, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x88, 0x01, 0x01, |
| 0x12, 0x2a, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, |
| 0x05, 0x20, 0x01, 0x28, 0x02, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, 0x02, 0x52, 0x0b, 0x74, 0x65, |
| 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x05, |
| 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x18, 0x06, 0x20, 0x01, 0x28, 0x02, 0x42, 0x03, 0xe0, 0x41, 0x01, |
| 0x48, 0x03, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x50, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x05, 0x74, |
| 0x6f, 0x70, 0x5f, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, |
| 0x04, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x4b, 0x88, 0x01, 0x01, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x63, |
| 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x14, |
| 0x0a, 0x12, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, |
| 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, |
| 0x74, 0x75, 0x72, 0x65, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x42, 0x08, |
| 0x0a, 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, 0x81, 0x04, 0x0a, 0x17, 0x47, 0x65, 0x6e, |
| 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, |
| 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4a, 0x0a, 0x0a, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, |
| 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, |
| 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x61, 0x6e, 0x64, 0x69, |
| 0x64, 0x61, 0x74, 0x65, 0x52, 0x0a, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x73, |
| 0x12, 0x70, 0x0a, 0x0f, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5f, 0x66, 0x65, 0x65, 0x64, 0x62, |
| 0x61, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, |
| 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, |
| 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, |
| 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, |
| 0x6e, 0x73, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, |
| 0x63, 0x6b, 0x52, 0x0e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, |
| 0x63, 0x6b, 0x1a, 0xa7, 0x02, 0x0a, 0x0e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x46, 0x65, 0x65, |
| 0x64, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x7b, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x72, |
| 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x53, 0x2e, 0x67, 0x6f, |
| 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, |
| 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, |
| 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x46, 0x65, 0x65, 0x64, |
| 0x62, 0x61, 0x63, 0x6b, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, |
| 0x42, 0x03, 0xe0, 0x41, 0x01, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x65, 0x61, 0x73, |
| 0x6f, 0x6e, 0x12, 0x54, 0x0a, 0x0e, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x5f, 0x72, 0x61, 0x74, |
| 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x67, 0x6f, 0x6f, |
| 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, |
| 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x61, 0x66, |
| 0x65, 0x74, 0x79, 0x52, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x0d, 0x73, 0x61, 0x66, 0x65, 0x74, |
| 0x79, 0x52, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x42, 0x0a, 0x0b, 0x42, 0x6c, 0x6f, 0x63, |
| 0x6b, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x18, 0x42, 0x4c, 0x4f, 0x43, 0x4b, |
| 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, |
| 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x41, 0x46, 0x45, 0x54, 0x59, 0x10, |
| 0x01, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x10, 0x02, 0x22, 0xb5, 0x04, 0x0a, |
| 0x09, 0x43, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x12, 0x1e, 0x0a, 0x05, 0x69, 0x6e, |
| 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x48, 0x00, |
| 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x47, 0x0a, 0x07, 0x63, 0x6f, |
| 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x67, 0x6f, |
| 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, |
| 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, |
| 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, |
| 0x65, 0x6e, 0x74, 0x12, 0x64, 0x0a, 0x0d, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x5f, 0x72, 0x65, |
| 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x37, 0x2e, 0x67, 0x6f, 0x6f, |
| 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, |
| 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x61, 0x6e, |
| 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x52, 0x65, 0x61, |
| 0x73, 0x6f, 0x6e, 0x42, 0x06, 0xe0, 0x41, 0x01, 0xe0, 0x41, 0x03, 0x52, 0x0c, 0x66, 0x69, 0x6e, |
| 0x69, 0x73, 0x68, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x54, 0x0a, 0x0e, 0x73, 0x61, 0x66, |
| 0x65, 0x74, 0x79, 0x5f, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, |
| 0x0b, 0x32, 0x2d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, |
| 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x61, 0x66, 0x65, 0x74, 0x79, 0x52, 0x61, 0x74, 0x69, 0x6e, 0x67, |
| 0x52, 0x0d, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, 0x52, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, |
| 0x63, 0x0a, 0x11, 0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x74, 0x61, |
| 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x67, 0x6f, 0x6f, |
| 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, |
| 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x69, 0x74, |
| 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x03, 0xe0, |
| 0x41, 0x03, 0x52, 0x10, 0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, |
| 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0b, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x63, 0x6f, |
| 0x75, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x0a, |
| 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x6e, 0x0a, 0x0c, 0x46, 0x69, |
| 0x6e, 0x69, 0x73, 0x68, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x1d, 0x0a, 0x19, 0x46, 0x49, |
| 0x4e, 0x49, 0x53, 0x48, 0x5f, 0x52, 0x45, 0x41, 0x53, 0x4f, 0x4e, 0x5f, 0x55, 0x4e, 0x53, 0x50, |
| 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, |
| 0x50, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x4d, 0x41, 0x58, 0x5f, 0x54, 0x4f, 0x4b, 0x45, 0x4e, |
| 0x53, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x41, 0x46, 0x45, 0x54, 0x59, 0x10, 0x03, 0x12, |
| 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x43, 0x49, 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x04, 0x12, |
| 0x09, 0x0a, 0x05, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x10, 0x05, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x69, |
| 0x6e, 0x64, 0x65, 0x78, 0x22, 0x88, 0x03, 0x0a, 0x13, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, |
| 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x45, 0x0a, 0x05, |
| 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x2f, 0xe0, 0x41, 0x02, |
| 0xfa, 0x41, 0x29, 0x0a, 0x27, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, |
| 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x05, 0x6d, 0x6f, |
| 0x64, 0x65, 0x6c, 0x12, 0x47, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, |
| 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, |
| 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, |
| 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x42, 0x03, |
| 0xe0, 0x41, 0x02, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x50, 0x0a, 0x09, |
| 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, |
| 0x29, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, |
| 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, |
| 0x31, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, |
| 0x00, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x88, 0x01, 0x01, 0x12, 0x1e, |
| 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, |
| 0x41, 0x01, 0x48, 0x01, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x88, 0x01, 0x01, 0x12, 0x3d, |
| 0x0a, 0x15, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, |
| 0x6f, 0x6e, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x42, 0x03, 0xe0, |
| 0x41, 0x01, 0x48, 0x02, 0x52, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 0x69, 0x6d, 0x65, |
| 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, |
| 0x0a, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x08, 0x0a, 0x06, 0x5f, |
| 0x74, 0x69, 0x74, 0x6c, 0x65, 0x42, 0x18, 0x0a, 0x16, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, |
| 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x22, |
| 0x2a, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, |
| 0x69, 0x6e, 0x67, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, |
| 0x03, 0x28, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x6c, 0x0a, 0x14, 0x45, |
| 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, |
| 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x09, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, |
| 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, |
| 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, |
| 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, |
| 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x09, |
| 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x22, 0xb9, 0x01, 0x0a, 0x19, 0x42, 0x61, |
| 0x74, 0x63, 0x68, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, |
| 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x45, 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, |
| 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x2f, 0xe0, 0x41, 0x02, 0xfa, 0x41, 0x29, 0x0a, 0x27, |
| 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, |
| 0x67, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, |
| 0x6d, 0x2f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x55, |
| 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, |
| 0x32, 0x34, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, |
| 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, |
| 0x76, 0x31, 0x2e, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, |
| 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x72, 0x65, 0x71, |
| 0x75, 0x65, 0x73, 0x74, 0x73, 0x22, 0x74, 0x0a, 0x1a, 0x42, 0x61, 0x74, 0x63, 0x68, 0x45, 0x6d, |
| 0x62, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, |
| 0x6e, 0x73, 0x65, 0x12, 0x56, 0x0a, 0x0a, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, |
| 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, |
| 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, |
| 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, |
| 0x74, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, |
| 0x0a, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x22, 0xa6, 0x01, 0x0a, 0x12, |
| 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, |
| 0x73, 0x74, 0x12, 0x45, 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, |
| 0x09, 0x42, 0x2f, 0xe0, 0x41, 0x02, 0xfa, 0x41, 0x29, 0x0a, 0x27, 0x67, 0x65, 0x6e, 0x65, 0x72, |
| 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f, |
| 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4d, 0x6f, 0x64, |
| 0x65, 0x6c, 0x52, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x49, 0x0a, 0x08, 0x63, 0x6f, 0x6e, |
| 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x67, 0x6f, |
| 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, |
| 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, |
| 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, |
| 0x65, 0x6e, 0x74, 0x73, 0x22, 0x38, 0x0a, 0x13, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x54, 0x6f, 0x6b, |
| 0x65, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x74, |
| 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, |
| 0x05, 0x52, 0x0b, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x2a, 0xbe, |
| 0x01, 0x0a, 0x08, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x54, |
| 0x41, 0x53, 0x4b, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, |
| 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x52, 0x45, 0x54, 0x52, 0x49, 0x45, |
| 0x56, 0x41, 0x4c, 0x5f, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x01, 0x12, 0x16, 0x0a, 0x12, 0x52, |
| 0x45, 0x54, 0x52, 0x49, 0x45, 0x56, 0x41, 0x4c, 0x5f, 0x44, 0x4f, 0x43, 0x55, 0x4d, 0x45, 0x4e, |
| 0x54, 0x10, 0x02, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x45, 0x4d, 0x41, 0x4e, 0x54, 0x49, 0x43, 0x5f, |
| 0x53, 0x49, 0x4d, 0x49, 0x4c, 0x41, 0x52, 0x49, 0x54, 0x59, 0x10, 0x03, 0x12, 0x12, 0x0a, 0x0e, |
| 0x43, 0x4c, 0x41, 0x53, 0x53, 0x49, 0x46, 0x49, 0x43, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x04, |
| 0x12, 0x0e, 0x0a, 0x0a, 0x43, 0x4c, 0x55, 0x53, 0x54, 0x45, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x05, |
| 0x12, 0x16, 0x0a, 0x12, 0x51, 0x55, 0x45, 0x53, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x41, 0x4e, 0x53, |
| 0x57, 0x45, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x06, 0x12, 0x15, 0x0a, 0x11, 0x46, 0x41, 0x43, 0x54, |
| 0x5f, 0x56, 0x45, 0x52, 0x49, 0x46, 0x49, 0x43, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x07, 0x32, |
| 0xd3, 0x08, 0x0a, 0x11, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x53, 0x65, |
| 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xf6, 0x01, 0x0a, 0x0f, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, |
| 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x37, 0x2e, 0x67, 0x6f, 0x6f, 0x67, |
| 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, |
| 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, |
| 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, |
| 0x73, 0x74, 0x1a, 0x38, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, |
| 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, |
| 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, |
| 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x70, 0xda, 0x41, |
| 0x0e, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x82, |
| 0xd3, 0xe4, 0x93, 0x02, 0x59, 0x3a, 0x01, 0x2a, 0x5a, 0x2e, 0x3a, 0x01, 0x2a, 0x22, 0x29, 0x2f, |
| 0x76, 0x31, 0x2f, 0x7b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x74, 0x75, 0x6e, 0x65, 0x64, 0x4d, |
| 0x6f, 0x64, 0x65, 0x6c, 0x73, 0x2f, 0x2a, 0x7d, 0x3a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, |
| 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x24, 0x2f, 0x76, 0x31, 0x2f, 0x7b, 0x6d, |
| 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73, 0x2f, 0x2a, 0x7d, 0x3a, 0x67, |
| 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0xd4, |
| 0x01, 0x0a, 0x15, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, |
| 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x37, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, |
| 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, |
| 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, |
| 0x74, 0x1a, 0x38, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, |
| 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, |
| 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x46, 0xda, 0x41, 0x0e, |
| 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x82, 0xd3, |
| 0xe4, 0x93, 0x02, 0x2f, 0x3a, 0x01, 0x2a, 0x22, 0x2a, 0x2f, 0x76, 0x31, 0x2f, 0x7b, 0x6d, 0x6f, |
| 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73, 0x2f, 0x2a, 0x7d, 0x3a, 0x73, 0x74, |
| 0x72, 0x65, 0x61, 0x6d, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, |
| 0x65, 0x6e, 0x74, 0x30, 0x01, 0x12, 0xb9, 0x01, 0x0a, 0x0c, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x43, |
| 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x34, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, |
| 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, |
| 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, |
| 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x35, 0x2e, 0x67, |
| 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, |
| 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x45, |
| 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, |
| 0x6e, 0x73, 0x65, 0x22, 0x3c, 0xda, 0x41, 0x0d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, 0x63, 0x6f, |
| 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x26, 0x3a, 0x01, 0x2a, 0x22, 0x21, |
| 0x2f, 0x76, 0x31, 0x2f, 0x7b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, |
| 0x73, 0x2f, 0x2a, 0x7d, 0x3a, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, |
| 0x74, 0x12, 0xd2, 0x01, 0x0a, 0x12, 0x42, 0x61, 0x74, 0x63, 0x68, 0x45, 0x6d, 0x62, 0x65, 0x64, |
| 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x3a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, |
| 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, |
| 0x45, 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, |
| 0x75, 0x65, 0x73, 0x74, 0x1a, 0x3b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, |
| 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, |
| 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x45, 0x6d, 0x62, 0x65, |
| 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, |
| 0x65, 0x22, 0x43, 0xda, 0x41, 0x0e, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, 0x72, 0x65, 0x71, 0x75, |
| 0x65, 0x73, 0x74, 0x73, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2c, 0x3a, 0x01, 0x2a, 0x22, 0x27, 0x2f, |
| 0x76, 0x31, 0x2f, 0x7b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73, |
| 0x2f, 0x2a, 0x7d, 0x3a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x43, 0x6f, |
| 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0xb6, 0x01, 0x0a, 0x0b, 0x43, 0x6f, 0x75, 0x6e, 0x74, |
| 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x33, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, |
| 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, |
| 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x54, 0x6f, |
| 0x6b, 0x65, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x34, 0x2e, 0x67, 0x6f, |
| 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, |
| 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, |
| 0x75, 0x6e, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, |
| 0x65, 0x22, 0x3c, 0xda, 0x41, 0x0e, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x74, |
| 0x65, 0x6e, 0x74, 0x73, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x25, 0x3a, 0x01, 0x2a, 0x22, 0x20, 0x2f, |
| 0x76, 0x31, 0x2f, 0x7b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73, |
| 0x2f, 0x2a, 0x7d, 0x3a, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x1a, |
| 0x24, 0xca, 0x41, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, |
| 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, |
| 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x42, 0x9a, 0x01, 0x0a, 0x23, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, |
| 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, |
| 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x16, 0x47, |
| 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, |
| 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x59, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x67, |
| 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x2f, 0x61, 0x69, 0x2f, |
| 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, |
| 0x67, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x76, 0x31, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, |
| 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x70, 0x62, 0x3b, 0x67, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, |
| 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, |
| } |
| |
var (
	// rawDescOnce guards the one-time gzip compression performed in
	// rawDescGZIP below.
	file_google_ai_generativelanguage_v1_generative_service_proto_rawDescOnce sync.Once
	// rawDescData starts as the uncompressed raw descriptor bytes and is
	// replaced in place by its gzipped form on first use.
	file_google_ai_generativelanguage_v1_generative_service_proto_rawDescData = file_google_ai_generativelanguage_v1_generative_service_proto_rawDesc
)
| |
// file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP
// returns the gzip-compressed serialized file descriptor for
// generative_service.proto, compressing it exactly once and caching the
// result in rawDescData for all subsequent calls.
func file_google_ai_generativelanguage_v1_generative_service_proto_rawDescGZIP() []byte {
	file_google_ai_generativelanguage_v1_generative_service_proto_rawDescOnce.Do(func() {
		// Overwrites the package-level slice; safe because Do runs at most once.
		file_google_ai_generativelanguage_v1_generative_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ai_generativelanguage_v1_generative_service_proto_rawDescData)
	})
	return file_google_ai_generativelanguage_v1_generative_service_proto_rawDescData
}
| |
// Runtime type metadata for the 3 enums and 12 messages declared in this
// file; populated by the TypeBuilder in the proto_init function below.
var file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes = make([]protoimpl.EnumInfo, 3)
var file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes = make([]protoimpl.MessageInfo, 12)
// goTypes lists every Go type referenced by this file's descriptor, in the
// index order used by depIdxs below: enums first (0-2), then messages (3-14),
// then types imported from sibling descriptor files (15-18).
var file_google_ai_generativelanguage_v1_generative_service_proto_goTypes = []interface{}{
	(TaskType)(0),                                          // 0: google.ai.generativelanguage.v1.TaskType
	(GenerateContentResponse_PromptFeedback_BlockReason)(0), // 1: google.ai.generativelanguage.v1.GenerateContentResponse.PromptFeedback.BlockReason
	(Candidate_FinishReason)(0),                            // 2: google.ai.generativelanguage.v1.Candidate.FinishReason
	(*GenerateContentRequest)(nil),                         // 3: google.ai.generativelanguage.v1.GenerateContentRequest
	(*GenerationConfig)(nil),                               // 4: google.ai.generativelanguage.v1.GenerationConfig
	(*GenerateContentResponse)(nil),                        // 5: google.ai.generativelanguage.v1.GenerateContentResponse
	(*Candidate)(nil),                                      // 6: google.ai.generativelanguage.v1.Candidate
	(*EmbedContentRequest)(nil),                            // 7: google.ai.generativelanguage.v1.EmbedContentRequest
	(*ContentEmbedding)(nil),                               // 8: google.ai.generativelanguage.v1.ContentEmbedding
	(*EmbedContentResponse)(nil),                           // 9: google.ai.generativelanguage.v1.EmbedContentResponse
	(*BatchEmbedContentsRequest)(nil),                      // 10: google.ai.generativelanguage.v1.BatchEmbedContentsRequest
	(*BatchEmbedContentsResponse)(nil),                     // 11: google.ai.generativelanguage.v1.BatchEmbedContentsResponse
	(*CountTokensRequest)(nil),                             // 12: google.ai.generativelanguage.v1.CountTokensRequest
	(*CountTokensResponse)(nil),                            // 13: google.ai.generativelanguage.v1.CountTokensResponse
	(*GenerateContentResponse_PromptFeedback)(nil),         // 14: google.ai.generativelanguage.v1.GenerateContentResponse.PromptFeedback
	(*Content)(nil),                                        // 15: google.ai.generativelanguage.v1.Content
	(*SafetySetting)(nil),                                  // 16: google.ai.generativelanguage.v1.SafetySetting
	(*SafetyRating)(nil),                                   // 17: google.ai.generativelanguage.v1.SafetyRating
	(*CitationMetadata)(nil),                               // 18: google.ai.generativelanguage.v1.CitationMetadata
}
// depIdxs maps every type reference in the descriptor (message field types,
// then RPC input types, then RPC output types) to an index in goTypes above.
// The trailing five entries are offsets delimiting those sub-lists, as
// annotated inline.
var file_google_ai_generativelanguage_v1_generative_service_proto_depIdxs = []int32{
	15, // 0: google.ai.generativelanguage.v1.GenerateContentRequest.contents:type_name -> google.ai.generativelanguage.v1.Content
	16, // 1: google.ai.generativelanguage.v1.GenerateContentRequest.safety_settings:type_name -> google.ai.generativelanguage.v1.SafetySetting
	4,  // 2: google.ai.generativelanguage.v1.GenerateContentRequest.generation_config:type_name -> google.ai.generativelanguage.v1.GenerationConfig
	6,  // 3: google.ai.generativelanguage.v1.GenerateContentResponse.candidates:type_name -> google.ai.generativelanguage.v1.Candidate
	14, // 4: google.ai.generativelanguage.v1.GenerateContentResponse.prompt_feedback:type_name -> google.ai.generativelanguage.v1.GenerateContentResponse.PromptFeedback
	15, // 5: google.ai.generativelanguage.v1.Candidate.content:type_name -> google.ai.generativelanguage.v1.Content
	2,  // 6: google.ai.generativelanguage.v1.Candidate.finish_reason:type_name -> google.ai.generativelanguage.v1.Candidate.FinishReason
	17, // 7: google.ai.generativelanguage.v1.Candidate.safety_ratings:type_name -> google.ai.generativelanguage.v1.SafetyRating
	18, // 8: google.ai.generativelanguage.v1.Candidate.citation_metadata:type_name -> google.ai.generativelanguage.v1.CitationMetadata
	15, // 9: google.ai.generativelanguage.v1.EmbedContentRequest.content:type_name -> google.ai.generativelanguage.v1.Content
	0,  // 10: google.ai.generativelanguage.v1.EmbedContentRequest.task_type:type_name -> google.ai.generativelanguage.v1.TaskType
	8,  // 11: google.ai.generativelanguage.v1.EmbedContentResponse.embedding:type_name -> google.ai.generativelanguage.v1.ContentEmbedding
	7,  // 12: google.ai.generativelanguage.v1.BatchEmbedContentsRequest.requests:type_name -> google.ai.generativelanguage.v1.EmbedContentRequest
	8,  // 13: google.ai.generativelanguage.v1.BatchEmbedContentsResponse.embeddings:type_name -> google.ai.generativelanguage.v1.ContentEmbedding
	15, // 14: google.ai.generativelanguage.v1.CountTokensRequest.contents:type_name -> google.ai.generativelanguage.v1.Content
	1,  // 15: google.ai.generativelanguage.v1.GenerateContentResponse.PromptFeedback.block_reason:type_name -> google.ai.generativelanguage.v1.GenerateContentResponse.PromptFeedback.BlockReason
	17, // 16: google.ai.generativelanguage.v1.GenerateContentResponse.PromptFeedback.safety_ratings:type_name -> google.ai.generativelanguage.v1.SafetyRating
	3,  // 17: google.ai.generativelanguage.v1.GenerativeService.GenerateContent:input_type -> google.ai.generativelanguage.v1.GenerateContentRequest
	3,  // 18: google.ai.generativelanguage.v1.GenerativeService.StreamGenerateContent:input_type -> google.ai.generativelanguage.v1.GenerateContentRequest
	7,  // 19: google.ai.generativelanguage.v1.GenerativeService.EmbedContent:input_type -> google.ai.generativelanguage.v1.EmbedContentRequest
	10, // 20: google.ai.generativelanguage.v1.GenerativeService.BatchEmbedContents:input_type -> google.ai.generativelanguage.v1.BatchEmbedContentsRequest
	12, // 21: google.ai.generativelanguage.v1.GenerativeService.CountTokens:input_type -> google.ai.generativelanguage.v1.CountTokensRequest
	5,  // 22: google.ai.generativelanguage.v1.GenerativeService.GenerateContent:output_type -> google.ai.generativelanguage.v1.GenerateContentResponse
	5,  // 23: google.ai.generativelanguage.v1.GenerativeService.StreamGenerateContent:output_type -> google.ai.generativelanguage.v1.GenerateContentResponse
	9,  // 24: google.ai.generativelanguage.v1.GenerativeService.EmbedContent:output_type -> google.ai.generativelanguage.v1.EmbedContentResponse
	11, // 25: google.ai.generativelanguage.v1.GenerativeService.BatchEmbedContents:output_type -> google.ai.generativelanguage.v1.BatchEmbedContentsResponse
	13, // 26: google.ai.generativelanguage.v1.GenerativeService.CountTokens:output_type -> google.ai.generativelanguage.v1.CountTokensResponse
	22, // [22:27] is the sub-list for method output_type
	17, // [17:22] is the sub-list for method input_type
	17, // [17:17] is the sub-list for extension type_name
	17, // [17:17] is the sub-list for extension extendee
	0,  // [0:17] is the sub-list for field type_name
}
| |
// init registers this file's descriptors with the protobuf runtime at
// package load time.
func init() { file_google_ai_generativelanguage_v1_generative_service_proto_init() }

// file_google_ai_generativelanguage_v1_generative_service_proto_init builds
// and registers the file, enum, message, and service metadata for
// generative_service.proto. It is idempotent: once File_... is non-nil,
// subsequent calls return immediately.
func file_google_ai_generativelanguage_v1_generative_service_proto_init() {
	if File_google_ai_generativelanguage_v1_generative_service_proto != nil {
		return
	}
	// Initialize the descriptor files this one imports first, so the
	// cross-file types referenced in goTypes (Content, SafetySetting,
	// SafetyRating, CitationMetadata) are resolvable.
	file_google_ai_generativelanguage_v1_citation_proto_init()
	file_google_ai_generativelanguage_v1_content_proto_init()
	file_google_ai_generativelanguage_v1_safety_proto_init()
	// When the runtime cannot use package unsafe, it needs exporter
	// functions to access each message's unexported state, sizeCache, and
	// unknownFields bookkeeping fields. One exporter per message, indexed
	// to match msgTypes.
	if !protoimpl.UnsafeEnabled {
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GenerateContentRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GenerationConfig); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GenerateContentResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Candidate); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EmbedContentRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*ContentEmbedding); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EmbedContentResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*BatchEmbedContentsRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*BatchEmbedContentsResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CountTokensRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CountTokensResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GenerateContentResponse_PromptFeedback); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	// NOTE(review): the empty wrapper lists below mark messages containing
	// oneof machinery with no named wrapper types — presumably proto3
	// optional (synthetic oneof) fields; confirm against the .proto source.
	file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[0].OneofWrappers = []interface{}{}
	file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[1].OneofWrappers = []interface{}{}
	file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[3].OneofWrappers = []interface{}{}
	file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes[4].OneofWrappers = []interface{}{}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_google_ai_generativelanguage_v1_generative_service_proto_rawDesc,
			NumEnums:      3,
			NumMessages:   12,
			NumExtensions: 0,
			NumServices:   1,
		},
		GoTypes:           file_google_ai_generativelanguage_v1_generative_service_proto_goTypes,
		DependencyIndexes: file_google_ai_generativelanguage_v1_generative_service_proto_depIdxs,
		EnumInfos:         file_google_ai_generativelanguage_v1_generative_service_proto_enumTypes,
		MessageInfos:      file_google_ai_generativelanguage_v1_generative_service_proto_msgTypes,
	}.Build()
	File_google_ai_generativelanguage_v1_generative_service_proto = out.File
	// Drop package-level references now owned by the registered file so
	// the backing data can be garbage collected.
	file_google_ai_generativelanguage_v1_generative_service_proto_rawDesc = nil
	file_google_ai_generativelanguage_v1_generative_service_proto_goTypes = nil
	file_google_ai_generativelanguage_v1_generative_service_proto_depIdxs = nil
}
| |
// Reference imports to suppress errors if they are not otherwise used.
// (Kept even though the stubs below do use them, so the file stays valid
// if parts of the generated code are elided.)
var _ context.Context
var _ grpc.ClientConnInterface

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// It fails to compile against grpc-go releases older than support level 6.
const _ = grpc.SupportPackageIsVersion6
| |
// GenerativeServiceClient is the client API for GenerativeService service.
// Obtain an implementation with NewGenerativeServiceClient.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type GenerativeServiceClient interface {
	// Generates a response from the model given an input
	// `GenerateContentRequest`.
	GenerateContent(ctx context.Context, in *GenerateContentRequest, opts ...grpc.CallOption) (*GenerateContentResponse, error)
	// Generates a streamed response from the model given an input
	// `GenerateContentRequest`. Responses are read from the returned
	// server-streaming client via Recv.
	StreamGenerateContent(ctx context.Context, in *GenerateContentRequest, opts ...grpc.CallOption) (GenerativeService_StreamGenerateContentClient, error)
	// Generates an embedding from the model given an input `Content`.
	EmbedContent(ctx context.Context, in *EmbedContentRequest, opts ...grpc.CallOption) (*EmbedContentResponse, error)
	// Generates multiple embeddings from the model given input text in a
	// synchronous call.
	BatchEmbedContents(ctx context.Context, in *BatchEmbedContentsRequest, opts ...grpc.CallOption) (*BatchEmbedContentsResponse, error)
	// Runs a model's tokenizer on input content and returns the token count.
	CountTokens(ctx context.Context, in *CountTokensRequest, opts ...grpc.CallOption) (*CountTokensResponse, error)
}
| |
// generativeServiceClient implements GenerativeServiceClient by issuing
// RPCs over the wrapped gRPC connection.
type generativeServiceClient struct {
	cc grpc.ClientConnInterface
}
| |
| func NewGenerativeServiceClient(cc grpc.ClientConnInterface) GenerativeServiceClient { |
| return &generativeServiceClient{cc} |
| } |
| |
| func (c *generativeServiceClient) GenerateContent(ctx context.Context, in *GenerateContentRequest, opts ...grpc.CallOption) (*GenerateContentResponse, error) { |
| out := new(GenerateContentResponse) |
| err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1.GenerativeService/GenerateContent", in, out, opts...) |
| if err != nil { |
| return nil, err |
| } |
| return out, nil |
| } |
| |
| func (c *generativeServiceClient) StreamGenerateContent(ctx context.Context, in *GenerateContentRequest, opts ...grpc.CallOption) (GenerativeService_StreamGenerateContentClient, error) { |
| stream, err := c.cc.NewStream(ctx, &_GenerativeService_serviceDesc.Streams[0], "/google.ai.generativelanguage.v1.GenerativeService/StreamGenerateContent", opts...) |
| if err != nil { |
| return nil, err |
| } |
| x := &generativeServiceStreamGenerateContentClient{stream} |
| if err := x.ClientStream.SendMsg(in); err != nil { |
| return nil, err |
| } |
| if err := x.ClientStream.CloseSend(); err != nil { |
| return nil, err |
| } |
| return x, nil |
| } |
| |
// GenerativeService_StreamGenerateContentClient is the client-side view of
// the StreamGenerateContent server stream; Recv returns io.EOF when the
// stream ends.
type GenerativeService_StreamGenerateContentClient interface {
	Recv() (*GenerateContentResponse, error)
	grpc.ClientStream
}

// generativeServiceStreamGenerateContentClient adapts a raw grpc.ClientStream
// to the typed Recv method above.
type generativeServiceStreamGenerateContentClient struct {
	grpc.ClientStream
}
| |
| func (x *generativeServiceStreamGenerateContentClient) Recv() (*GenerateContentResponse, error) { |
| m := new(GenerateContentResponse) |
| if err := x.ClientStream.RecvMsg(m); err != nil { |
| return nil, err |
| } |
| return m, nil |
| } |
| |
| func (c *generativeServiceClient) EmbedContent(ctx context.Context, in *EmbedContentRequest, opts ...grpc.CallOption) (*EmbedContentResponse, error) { |
| out := new(EmbedContentResponse) |
| err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1.GenerativeService/EmbedContent", in, out, opts...) |
| if err != nil { |
| return nil, err |
| } |
| return out, nil |
| } |
| |
| func (c *generativeServiceClient) BatchEmbedContents(ctx context.Context, in *BatchEmbedContentsRequest, opts ...grpc.CallOption) (*BatchEmbedContentsResponse, error) { |
| out := new(BatchEmbedContentsResponse) |
| err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1.GenerativeService/BatchEmbedContents", in, out, opts...) |
| if err != nil { |
| return nil, err |
| } |
| return out, nil |
| } |
| |
| func (c *generativeServiceClient) CountTokens(ctx context.Context, in *CountTokensRequest, opts ...grpc.CallOption) (*CountTokensResponse, error) { |
| out := new(CountTokensResponse) |
| err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1.GenerativeService/CountTokens", in, out, opts...) |
| if err != nil { |
| return nil, err |
| } |
| return out, nil |
| } |
| |
// GenerativeServiceServer is the server API for GenerativeService service.
// Implementations may embed UnimplementedGenerativeServiceServer for forward
// compatibility.
type GenerativeServiceServer interface {
	// Generates a response from the model given an input
	// `GenerateContentRequest`.
	GenerateContent(context.Context, *GenerateContentRequest) (*GenerateContentResponse, error)
	// Generates a streamed response from the model given an input
	// `GenerateContentRequest`. Responses are written to the provided
	// server stream via Send.
	StreamGenerateContent(*GenerateContentRequest, GenerativeService_StreamGenerateContentServer) error
	// Generates an embedding from the model given an input `Content`.
	EmbedContent(context.Context, *EmbedContentRequest) (*EmbedContentResponse, error)
	// Generates multiple embeddings from the model given input text in a
	// synchronous call.
	BatchEmbedContents(context.Context, *BatchEmbedContentsRequest) (*BatchEmbedContentsResponse, error)
	// Runs a model's tokenizer on input content and returns the token count.
	CountTokens(context.Context, *CountTokensRequest) (*CountTokensResponse, error)
}
| |
// UnimplementedGenerativeServiceServer can be embedded to have forward compatible implementations.
// Every method returns a codes.Unimplemented gRPC status, so embedders only
// need to override the RPCs they actually support.
type UnimplementedGenerativeServiceServer struct {
}

func (*UnimplementedGenerativeServiceServer) GenerateContent(context.Context, *GenerateContentRequest) (*GenerateContentResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GenerateContent not implemented")
}
func (*UnimplementedGenerativeServiceServer) StreamGenerateContent(*GenerateContentRequest, GenerativeService_StreamGenerateContentServer) error {
	return status.Errorf(codes.Unimplemented, "method StreamGenerateContent not implemented")
}
func (*UnimplementedGenerativeServiceServer) EmbedContent(context.Context, *EmbedContentRequest) (*EmbedContentResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method EmbedContent not implemented")
}
func (*UnimplementedGenerativeServiceServer) BatchEmbedContents(context.Context, *BatchEmbedContentsRequest) (*BatchEmbedContentsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method BatchEmbedContents not implemented")
}
func (*UnimplementedGenerativeServiceServer) CountTokens(context.Context, *CountTokensRequest) (*CountTokensResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CountTokens not implemented")
}
| |
// RegisterGenerativeServiceServer registers srv's handlers with the gRPC
// server s, using the service descriptor defined at the bottom of this file.
func RegisterGenerativeServiceServer(s *grpc.Server, srv GenerativeServiceServer) {
	s.RegisterService(&_GenerativeService_serviceDesc, srv)
}
| |
| func _GenerativeService_GenerateContent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { |
| in := new(GenerateContentRequest) |
| if err := dec(in); err != nil { |
| return nil, err |
| } |
| if interceptor == nil { |
| return srv.(GenerativeServiceServer).GenerateContent(ctx, in) |
| } |
| info := &grpc.UnaryServerInfo{ |
| Server: srv, |
| FullMethod: "/google.ai.generativelanguage.v1.GenerativeService/GenerateContent", |
| } |
| handler := func(ctx context.Context, req interface{}) (interface{}, error) { |
| return srv.(GenerativeServiceServer).GenerateContent(ctx, req.(*GenerateContentRequest)) |
| } |
| return interceptor(ctx, in, info, handler) |
| } |
| |
| func _GenerativeService_StreamGenerateContent_Handler(srv interface{}, stream grpc.ServerStream) error { |
| m := new(GenerateContentRequest) |
| if err := stream.RecvMsg(m); err != nil { |
| return err |
| } |
| return srv.(GenerativeServiceServer).StreamGenerateContent(m, &generativeServiceStreamGenerateContentServer{stream}) |
| } |
| |
// GenerativeService_StreamGenerateContentServer is the server-side view of
// the StreamGenerateContent stream; Send writes one response message.
type GenerativeService_StreamGenerateContentServer interface {
	Send(*GenerateContentResponse) error
	grpc.ServerStream
}

// generativeServiceStreamGenerateContentServer adapts a raw grpc.ServerStream
// to the typed Send method above.
type generativeServiceStreamGenerateContentServer struct {
	grpc.ServerStream
}

// Send writes m to the underlying server stream.
func (x *generativeServiceStreamGenerateContentServer) Send(m *GenerateContentResponse) error {
	return x.ServerStream.SendMsg(m)
}
| |
| func _GenerativeService_EmbedContent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { |
| in := new(EmbedContentRequest) |
| if err := dec(in); err != nil { |
| return nil, err |
| } |
| if interceptor == nil { |
| return srv.(GenerativeServiceServer).EmbedContent(ctx, in) |
| } |
| info := &grpc.UnaryServerInfo{ |
| Server: srv, |
| FullMethod: "/google.ai.generativelanguage.v1.GenerativeService/EmbedContent", |
| } |
| handler := func(ctx context.Context, req interface{}) (interface{}, error) { |
| return srv.(GenerativeServiceServer).EmbedContent(ctx, req.(*EmbedContentRequest)) |
| } |
| return interceptor(ctx, in, info, handler) |
| } |
| |
| func _GenerativeService_BatchEmbedContents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { |
| in := new(BatchEmbedContentsRequest) |
| if err := dec(in); err != nil { |
| return nil, err |
| } |
| if interceptor == nil { |
| return srv.(GenerativeServiceServer).BatchEmbedContents(ctx, in) |
| } |
| info := &grpc.UnaryServerInfo{ |
| Server: srv, |
| FullMethod: "/google.ai.generativelanguage.v1.GenerativeService/BatchEmbedContents", |
| } |
| handler := func(ctx context.Context, req interface{}) (interface{}, error) { |
| return srv.(GenerativeServiceServer).BatchEmbedContents(ctx, req.(*BatchEmbedContentsRequest)) |
| } |
| return interceptor(ctx, in, info, handler) |
| } |
| |
| func _GenerativeService_CountTokens_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { |
| in := new(CountTokensRequest) |
| if err := dec(in); err != nil { |
| return nil, err |
| } |
| if interceptor == nil { |
| return srv.(GenerativeServiceServer).CountTokens(ctx, in) |
| } |
| info := &grpc.UnaryServerInfo{ |
| Server: srv, |
| FullMethod: "/google.ai.generativelanguage.v1.GenerativeService/CountTokens", |
| } |
| handler := func(ctx context.Context, req interface{}) (interface{}, error) { |
| return srv.(GenerativeServiceServer).CountTokens(ctx, req.(*CountTokensRequest)) |
| } |
| return interceptor(ctx, in, info, handler) |
| } |
| |
// _GenerativeService_serviceDesc is the grpc.ServiceDesc for the
// GenerativeService service. It maps the four unary methods and one
// server-streaming method to the handler functions above, and is consumed
// by RegisterGenerativeServiceServer.
var _GenerativeService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.ai.generativelanguage.v1.GenerativeService",
	HandlerType: (*GenerativeServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "GenerateContent",
			Handler:    _GenerativeService_GenerateContent_Handler,
		},
		{
			MethodName: "EmbedContent",
			Handler:    _GenerativeService_EmbedContent_Handler,
		},
		{
			MethodName: "BatchEmbedContents",
			Handler:    _GenerativeService_BatchEmbedContents_Handler,
		},
		{
			MethodName: "CountTokens",
			Handler:    _GenerativeService_CountTokens_Handler,
		},
	},
	Streams: []grpc.StreamDesc{
		{
			StreamName:    "StreamGenerateContent",
			Handler:       _GenerativeService_StreamGenerateContent_Handler,
			ServerStreams: true,
		},
	},
	Metadata: "google/ai/generativelanguage/v1/generative_service.proto",
}