1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
|
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.contactcenterinsights.v1;
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
option csharp_namespace = "Google.Cloud.ContactCenterInsights.V1";
option go_package = "google.golang.org/genproto/googleapis/cloud/contactcenterinsights/v1;contactcenterinsights";
option java_multiple_files = true;
option java_outer_classname = "ResourcesProto";
option java_package = "com.google.cloud.contactcenterinsights.v1";
option php_namespace = "Google\\Cloud\\ContactCenterInsights\\V1";
option ruby_package = "Google::Cloud::ContactCenterInsights::V1";
option (google.api.resource_definition) = {
type: "dialogflow.googleapis.com/Participant"
pattern: "projects/{project}/conversations/{conversation}/participants/{participant}"
pattern: "projects/{project}/locations/{location}/conversations/{conversation}/participants/{participant}"
};
// The conversation resource.
message Conversation {
option (google.api.resource) = {
type: "contactcenterinsights.googleapis.com/Conversation"
pattern: "projects/{project}/locations/{location}/conversations/{conversation}"
};
// Call-specific metadata.
message CallMetadata {
// The audio channel that contains the customer.
int32 customer_channel = 1;
// The audio channel that contains the agent.
int32 agent_channel = 2;
}
// A message representing the transcript of a conversation.
message Transcript {
// A segment of a full transcript.
message TranscriptSegment {
// Word-level info for words in a transcript.
message WordInfo {
// Time offset of the start of this word relative to the beginning of
// the total conversation.
google.protobuf.Duration start_offset = 1;
// Time offset of the end of this word relative to the beginning of the
// total conversation.
google.protobuf.Duration end_offset = 2;
// The word itself. Includes punctuation marks that surround the word.
string word = 3;
// A confidence estimate between 0.0 and 1.0 of the fidelity of this
// word. A default value of 0.0 indicates that the value is unset.
float confidence = 4;
}
// Metadata from Dialogflow relating to the current transcript segment.
message DialogflowSegmentMetadata {
// Whether the transcript segment was covered under the configured smart
// reply allowlist in Agent Assist.
bool smart_reply_allowlist_covered = 1;
}
// The time that the message occurred, if provided.
google.protobuf.Timestamp message_time = 6;
// The text of this segment.
string text = 1;
// A confidence estimate between 0.0 and 1.0 of the fidelity of this
// segment. A default value of 0.0 indicates that the value is unset.
float confidence = 2;
// A list of the word-specific information for each word in the segment.
repeated WordInfo words = 3;
// The language code of this segment as a
// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
// Example: "en-US".
string language_code = 4;
// For conversations derived from multi-channel audio, this is the channel
// number corresponding to the audio from that channel. For
// audioChannelCount = N, its output values can range from '1' to 'N'. A
// channel tag of 0 indicates that the audio is mono.
int32 channel_tag = 5;
// The participant of this segment.
ConversationParticipant segment_participant = 9;
// CCAI metadata relating to the current transcript segment.
DialogflowSegmentMetadata dialogflow_segment_metadata = 10;
// The sentiment for this transcript segment.
SentimentData sentiment = 11;
}
// A list of sequential transcript segments that comprise the conversation.
repeated TranscriptSegment transcript_segments = 1;
}
// Possible media for the conversation.
enum Medium {
// Default value, if unspecified will default to PHONE_CALL.
MEDIUM_UNSPECIFIED = 0;
// The format for conversations that took place over the phone.
PHONE_CALL = 1;
// The format for conversations that took place over chat.
CHAT = 2;
}
// Metadata that applies to the conversation. Currently only call-specific
// metadata is supported.
oneof metadata {
// Call-specific metadata.
CallMetadata call_metadata = 7;
}
// A time to live expiration setting, can be either a specified timestamp or a
// duration from the time that the conversation creation request was received.
// Conversations with an expiration set will be removed up to 24 hours after
// the specified time.
oneof expiration {
// The time at which this conversation should expire. After this time, the
// conversation data and any associated analyses will be deleted.
google.protobuf.Timestamp expire_time = 15;
// Input only. The TTL for this resource. If specified, then this TTL will
// be used to calculate the expire time.
google.protobuf.Duration ttl = 16 [(google.api.field_behavior) = INPUT_ONLY];
}
// Immutable. The resource name of the conversation.
// Format:
// projects/{project}/locations/{location}/conversations/{conversation}
string name = 1 [(google.api.field_behavior) = IMMUTABLE];
// The source of the audio and transcription for the conversation.
ConversationDataSource data_source = 2;
// Output only. The time at which the conversation was created.
google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The most recent time at which the conversation was updated.
google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
// The time at which the conversation started.
google.protobuf.Timestamp start_time = 17;
// A user-specified language code for the conversation.
string language_code = 14;
// An opaque, user-specified string representing the human agent who handled
// the conversation.
string agent_id = 5;
// A map for the user to specify any custom fields. A maximum of 20 labels per
// conversation is allowed, with a maximum of 256 characters per entry.
map<string, string> labels = 6;
// Output only. The conversation transcript.
Transcript transcript = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
// Immutable. The conversation medium, if unspecified will default to PHONE_CALL.
Medium medium = 9 [(google.api.field_behavior) = IMMUTABLE];
// Output only. The duration of the conversation.
google.protobuf.Duration duration = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The number of turns in the conversation.
int32 turn_count = 11 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The conversation's latest analysis, if one exists.
Analysis latest_analysis = 12 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The annotations that were generated during the customer and agent
// interaction.
repeated RuntimeAnnotation runtime_annotations = 13 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. All the matched Dialogflow intents in the call. The key corresponds to a
// Dialogflow intent, format:
// projects/{project}/agent/{agent}/intents/{intent}
map<string, DialogflowIntent> dialogflow_intents = 18 [(google.api.field_behavior) = OUTPUT_ONLY];
// Obfuscated user ID which the customer sent to us.
string obfuscated_user_id = 21;
}
// The analysis resource.
message Analysis {
option (google.api.resource) = {
type: "contactcenterinsights.googleapis.com/Analysis"
pattern: "projects/{project}/locations/{location}/conversations/{conversation}/analyses/{analysis}"
};
// Immutable. The resource name of the analysis.
// Format:
// projects/{project}/locations/{location}/conversations/{conversation}/analyses/{analysis}
string name = 1 [(google.api.field_behavior) = IMMUTABLE];
// Output only. The time at which the analysis was requested.
google.protobuf.Timestamp request_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The time at which the analysis was created, which occurs when the
// long-running operation completes.
google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The result of the analysis, which is populated when the analysis
// finishes.
AnalysisResult analysis_result = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
}
// The conversation source, which is a combination of transcript and audio.
message ConversationDataSource {
// The source of the conversation. Exactly one of the member fields may be
// set.
oneof source {
// A Cloud Storage location specification for the audio and transcript.
GcsSource gcs_source = 1;
// The source when the conversation comes from Dialogflow.
DialogflowSource dialogflow_source = 3;
}
}
// A Cloud Storage source of conversation data.
message GcsSource {
// Cloud Storage URI that points to a file that contains the conversation
// audio.
string audio_uri = 1;
// Immutable. Cloud Storage URI that points to a file that contains the conversation
// transcript.
string transcript_uri = 2 [(google.api.field_behavior) = IMMUTABLE];
}
// A Dialogflow source of conversation data.
message DialogflowSource {
// Output only. The name of the Dialogflow conversation that this conversation
// resource is derived from. Format:
// projects/{project}/locations/{location}/conversations/{conversation}
string dialogflow_conversation = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
// Cloud Storage URI that points to a file that contains the conversation
// audio.
string audio_uri = 3;
}
// The result of an analysis.
message AnalysisResult {
// Call-specific metadata created during analysis.
message CallAnalysisMetadata {
// A list of call annotations that apply to this call.
repeated CallAnnotation annotations = 2;
// All the entities in the call. Keyed by each entity's unique ID.
map<string, Entity> entities = 3;
// Overall conversation-level sentiment for each channel of the call.
repeated ConversationLevelSentiment sentiments = 4;
// All the matched intents in the call. Keyed by each intent's unique ID.
map<string, Intent> intents = 6;
// All the matched phrase matchers in the call. Keyed by each phrase
// matcher's resource name.
map<string, PhraseMatchData> phrase_matchers = 7;
// Overall conversation-level issue modeling result.
IssueModelResult issue_model_result = 8;
}
// Metadata discovered during analysis. Currently only call-specific
// metadata is supported.
oneof metadata {
// Call-specific metadata created by the analysis.
CallAnalysisMetadata call_analysis_metadata = 2;
}
// The time at which the analysis ended.
google.protobuf.Timestamp end_time = 1;
}
// Issue Modeling result on a conversation.
message IssueModelResult {
// Issue model that generates the result.
// Format: projects/{project}/locations/{location}/issueModels/{issue_model}
string issue_model = 1 [(google.api.resource_reference) = {
type: "contactcenterinsights.googleapis.com/IssueModel"
}];
// All the matched issues.
repeated IssueAssignment issues = 2;
}
// One channel of conversation-level sentiment data.
message ConversationLevelSentiment {
// The channel of the audio that the data applies to.
int32 channel_tag = 1;
// Data specifying sentiment.
SentimentData sentiment_data = 2;
}
// Information about the issue.
message IssueAssignment {
// Resource name of the assigned issue.
string issue = 1;
// Score indicating the likelihood of the issue assignment.
// currently bounded on [0,1].
double score = 2;
// Immutable. Display name of the assigned issue. This field is set at time of analysis
// and immutable since then.
string display_name = 3 [(google.api.field_behavior) = IMMUTABLE];
}
// A piece of metadata that applies to a window of a call.
message CallAnnotation {
// The data in the annotation. Exactly one of the member fields may be set.
oneof data {
// Data specifying an interruption.
InterruptionData interruption_data = 10;
// Data specifying sentiment.
SentimentData sentiment_data = 11;
// Data specifying silence.
SilenceData silence_data = 12;
// Data specifying a hold.
HoldData hold_data = 13;
// Data specifying an entity mention.
EntityMentionData entity_mention_data = 15;
// Data specifying an intent match.
IntentMatchData intent_match_data = 16;
// Data specifying a phrase match.
PhraseMatchData phrase_match_data = 17;
}
// The channel of the audio where the annotation occurs. For single-channel
// audio, this field is not populated.
int32 channel_tag = 1;
// The boundary in the conversation where the annotation starts, inclusive.
AnnotationBoundary annotation_start_boundary = 4;
// The boundary in the conversation where the annotation ends, inclusive.
AnnotationBoundary annotation_end_boundary = 5;
}
// A point in a conversation that marks the start or the end of an annotation.
message AnnotationBoundary {
// A detailed boundary, which describes a more specific point.
oneof detailed_boundary {
// The word index of this boundary with respect to the first word in the
// transcript piece. This index starts at zero.
int32 word_index = 3;
}
// The index in the sequence of transcribed pieces of the conversation where
// the boundary is located. This index starts at zero.
int32 transcript_index = 1;
}
// The data for an entity annotation.
// Represents a phrase in the conversation that is a known entity, such
// as a person, an organization, or location.
message Entity {
// The type of the entity. For most entity types, the associated metadata is a
// Wikipedia URL (`wikipedia_url`) and Knowledge Graph MID (`mid`). The table
// below lists the associated fields for entities that have different
// metadata.
enum Type {
// Unspecified.
TYPE_UNSPECIFIED = 0;
// Person.
PERSON = 1;
// Location.
LOCATION = 2;
// Organization.
ORGANIZATION = 3;
// Event.
EVENT = 4;
// Artwork.
WORK_OF_ART = 5;
// Consumer product.
CONSUMER_GOOD = 6;
// Other types of entities.
OTHER = 7;
// Phone number.
//
// The metadata lists the phone number (formatted according to local
// convention), plus whichever additional elements appear in the text:
//
// * `number` - The actual number, broken down into sections according to
// local convention.
// * `national_prefix` - Country code, if detected.
// * `area_code` - Region or area code, if detected.
// * `extension` - Phone extension (to be dialed after connection), if
// detected.
PHONE_NUMBER = 9;
// Address.
//
// The metadata identifies the street number and locality plus whichever
// additional elements appear in the text:
//
// * `street_number` - Street number.
// * `locality` - City or town.
// * `street_name` - Street/route name, if detected.
// * `postal_code` - Postal code, if detected.
// * `country` - Country, if detected.
// * `broad_region` - Administrative area, such as the state, if detected.
// * `narrow_region` - Smaller administrative area, such as county, if
// detected.
// * `sublocality` - Used in Asian addresses to demark a district within a
// city, if detected.
ADDRESS = 10;
// Date.
//
// The metadata identifies the components of the date:
//
// * `year` - Four digit year, if detected.
// * `month` - Two digit month number, if detected.
// * `day` - Two digit day number, if detected.
DATE = 11;
// Number.
//
// The metadata is the number itself.
NUMBER = 12;
// Price.
//
// The metadata identifies the `value` and `currency`.
PRICE = 13;
}
// The representative name for the entity.
string display_name = 1;
// The entity type.
Type type = 2;
// Metadata associated with the entity.
//
// For most entity types, the metadata is a Wikipedia URL (`wikipedia_url`)
// and Knowledge Graph MID (`mid`), if they are available. For the metadata
// associated with other entity types, see the `Type` enum above.
map<string, string> metadata = 3;
// The salience score associated with the entity in the [0, 1.0] range.
//
// The salience score for an entity provides information about the
// importance or centrality of that entity to the entire document text.
// Scores closer to 0 are less salient, while scores closer to 1.0 are highly
// salient.
float salience = 4;
// The aggregate sentiment expressed for this entity in the conversation.
SentimentData sentiment = 5;
}
// The data for an intent.
// Represents a detected intent in the conversation, for example MAKES_PROMISE.
message Intent {
// The unique identifier of the intent.
string id = 1;
// The human-readable name of the intent.
string display_name = 2;
}
// The data for a matched phrase matcher.
// Represents information identifying a phrase matcher for a given match.
message PhraseMatchData {
// The unique identifier (the resource name) of the phrase matcher.
string phrase_matcher = 1;
// The human-readable name of the phrase matcher.
string display_name = 2;
}
// The data for a Dialogflow intent.
// Represents a detected intent in the conversation, e.g. MAKES_PROMISE.
message DialogflowIntent {
// The human-readable name of the intent.
string display_name = 1;
}
// The data for an interruption annotation.
// Intentionally empty: the annotation's boundaries carry all the information.
message InterruptionData {
}
// The data for a silence annotation.
// Intentionally empty: the annotation's boundaries carry all the information.
message SilenceData {
}
// The data for a hold annotation.
// Intentionally empty: the annotation's boundaries carry all the information.
message HoldData {
}
// The data for an entity mention annotation.
// This represents a mention of an `Entity` in the conversation.
message EntityMentionData {
// The supported types of mentions.
enum MentionType {
// Unspecified.
MENTION_TYPE_UNSPECIFIED = 0;
// Proper noun.
PROPER = 1;
// Common noun (or noun compound).
COMMON = 2;
}
// The key of this entity in conversation entities.
// Can be used to retrieve the exact `Entity` this mention is attached to.
string entity_unique_id = 1;
// The type of the entity mention.
MentionType type = 2;
// Sentiment expressed for this mention of the entity.
SentimentData sentiment = 3;
}
// The data for an intent match.
// Represents an intent match for a text segment in the conversation. A text
// segment can be part of a sentence, a complete sentence, or an utterance
// with multiple sentences.
message IntentMatchData {
// The id of the matched intent.
// Can be used to retrieve the corresponding intent information.
string intent_unique_id = 1;
}
// The data for a sentiment annotation.
message SentimentData {
// A non-negative number from 0 to infinity which represents the absolute
// magnitude of sentiment regardless of score.
float magnitude = 1;
// The sentiment score between -1.0 (negative) and 1.0 (positive).
float score = 2;
}
// The issue model resource.
message IssueModel {
option (google.api.resource) = {
type: "contactcenterinsights.googleapis.com/IssueModel"
pattern: "projects/{project}/locations/{location}/issueModels/{issue_model}"
};
// Configs for the input data used to create the issue model.
message InputDataConfig {
// Medium of conversations used in training data. This field is being
// deprecated. To specify the medium to be used in training a new issue
// model, set the `medium` field on `filter`.
Conversation.Medium medium = 1 [deprecated = true];
// Output only. Number of conversations used in training.
int64 training_conversations_count = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
// A filter to reduce the conversations used for training the model to a
// specific subset.
string filter = 3;
}
// State of the model.
enum State {
// Unspecified.
STATE_UNSPECIFIED = 0;
// Model is not deployed but is ready to deploy.
UNDEPLOYED = 1;
// Model is being deployed.
DEPLOYING = 2;
// Model is deployed and is ready to be used. A model can only be used in
// analysis if it's in this state.
DEPLOYED = 3;
// Model is being undeployed.
UNDEPLOYING = 4;
// Model is being deleted.
DELETING = 5;
}
// Immutable. The resource name of the issue model.
// Format:
// projects/{project}/locations/{location}/issueModels/{issue_model}
string name = 1 [(google.api.field_behavior) = IMMUTABLE];
// The representative name for the issue model.
string display_name = 2;
// Output only. The time at which this issue model was created.
google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The most recent time at which the issue model was updated.
google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. State of the model.
State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
// Configs for the input data that was used to create the issue model.
InputDataConfig input_data_config = 6;
// Output only. Immutable. The issue model's label statistics on its training data.
IssueModelLabelStats training_stats = 7 [
(google.api.field_behavior) = OUTPUT_ONLY,
(google.api.field_behavior) = IMMUTABLE
];
}
// The issue resource.
message Issue {
option (google.api.resource) = {
type: "contactcenterinsights.googleapis.com/Issue"
pattern: "projects/{project}/locations/{location}/issueModels/{issue_model}/issues/{issue}"
};
// Immutable. The resource name of the issue.
// Format:
// projects/{project}/locations/{location}/issueModels/{issue_model}/issues/{issue}
string name = 1 [(google.api.field_behavior) = IMMUTABLE];
// The representative name for the issue.
string display_name = 2;
// Output only. The time at which this issue was created.
google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The most recent time that this issue was updated.
google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
}
// Aggregated statistics about an issue model.
message IssueModelLabelStats {
// Aggregated statistics about an issue.
message IssueStats {
// Issue resource.
// Format:
// projects/{project}/locations/{location}/issueModels/{issue_model}/issues/{issue}
string issue = 1;
// Number of conversations attached to the issue at this point in time.
int64 labeled_conversations_count = 2;
// Display name of the issue.
string display_name = 3;
}
// Number of conversations the issue model has analyzed at this point in time.
int64 analyzed_conversations_count = 1;
// Number of analyzed conversations for which no issue was applicable at this
// point in time.
int64 unclassified_conversations_count = 2;
// Statistics on each issue. Key is the issue's resource name.
map<string, IssueStats> issue_stats = 3;
}
// The phrase matcher resource.
message PhraseMatcher {
option (google.api.resource) = {
type: "contactcenterinsights.googleapis.com/PhraseMatcher"
pattern: "projects/{project}/locations/{location}/phraseMatchers/{phrase_matcher}"
};
// Specifies how to combine each phrase match rule group to determine whether
// there is a match.
enum PhraseMatcherType {
// Unspecified.
PHRASE_MATCHER_TYPE_UNSPECIFIED = 0;
// Must meet all phrase match rule groups or there is no match.
ALL_OF = 1;
// If any of the phrase match rule groups are met, there is a match.
ANY_OF = 2;
}
// The resource name of the phrase matcher.
// Format:
// projects/{project}/locations/{location}/phraseMatchers/{phrase_matcher}
string name = 1;
// Output only. Immutable. The revision ID of the phrase matcher.
// A new revision is committed whenever the matcher is changed, except when it
// is activated or deactivated. A server generated random ID will be used.
// Example: locations/global/phraseMatchers/my-first-matcher@1234567
string revision_id = 2 [
(google.api.field_behavior) = IMMUTABLE,
(google.api.field_behavior) = OUTPUT_ONLY
];
// The customized version tag to use for the phrase matcher. If not specified,
// it will default to `revision_id`.
string version_tag = 3;
// Output only. The timestamp of when the revision was created. It is also the create time
// when a new matcher is added.
google.protobuf.Timestamp revision_create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
// The human-readable name of the phrase matcher.
string display_name = 5;
// Required. The type of this phrase matcher.
PhraseMatcherType type = 6 [(google.api.field_behavior) = REQUIRED];
// Applies the phrase matcher only when it is active.
bool active = 7;
// A list of phrase match rule groups that are included in this matcher.
repeated PhraseMatchRuleGroup phrase_match_rule_groups = 8;
// Output only. The most recent time at which the activation status was updated.
google.protobuf.Timestamp activation_update_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
// The role whose utterances the phrase matcher should be matched
// against. If the role is ROLE_UNSPECIFIED it will be matched against any
// utterances in the transcript.
ConversationParticipant.Role role_match = 10;
// Output only. The most recent time at which the phrase matcher was updated.
google.protobuf.Timestamp update_time = 11 [(google.api.field_behavior) = OUTPUT_ONLY];
}
// A message representing a rule in the phrase matcher.
message PhraseMatchRuleGroup {
// Specifies how to combine each phrase match rule for whether there is a
// match.
enum PhraseMatchRuleGroupType {
// Unspecified.
PHRASE_MATCH_RULE_GROUP_TYPE_UNSPECIFIED = 0;
// Must meet all phrase match rules or there is no match.
ALL_OF = 1;
// If any of the phrase match rules are met, there is a match.
ANY_OF = 2;
}
// Required. The type of this phrase match rule group.
PhraseMatchRuleGroupType type = 1 [(google.api.field_behavior) = REQUIRED];
// A list of phrase match rules that are included in this group.
repeated PhraseMatchRule phrase_match_rules = 2;
}
// The data for a phrase match rule.
message PhraseMatchRule {
// Required. The phrase to be matched.
string query = 1 [(google.api.field_behavior) = REQUIRED];
// Specifies whether the phrase must be missing from the transcript segment or
// present in the transcript segment.
bool negated = 2;
// Provides additional information about the rule that specifies how to apply
// the rule.
PhraseMatchRuleConfig config = 3;
}
// Configuration information of a phrase match rule.
message PhraseMatchRuleConfig {
// The configuration of the phrase match rule. Currently only exact-match
// configuration is supported.
oneof config {
// The configuration for the exact match rule.
ExactMatchConfig exact_match_config = 1;
}
}
// Exact match configuration.
message ExactMatchConfig {
// Whether to consider case sensitivity when performing an exact match.
bool case_sensitive = 1;
}
// The settings resource.
message Settings {
option (google.api.resource) = {
type: "contactcenterinsights.googleapis.com/Settings"
pattern: "projects/{project}/locations/{location}/settings"
};
// Default configuration when creating Analyses in Insights.
message AnalysisConfig {
// Percentage of conversations created using Dialogflow runtime integration
// to analyze automatically, between [0, 100].
double runtime_integration_analysis_percentage = 1;
}
// Immutable. The resource name of the settings resource.
// Format:
// projects/{project}/locations/{location}/settings
string name = 1 [(google.api.field_behavior) = IMMUTABLE];
// Output only. The time at which the settings was created.
google.protobuf.Timestamp create_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The time at which the settings were last updated.
google.protobuf.Timestamp update_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
// A language code to be applied to each transcript segment unless the segment
// already specifies a language code. Language code defaults to "en-US" if it
// is neither specified on the segment nor here.
string language_code = 4;
// The default TTL for newly-created conversations. If a conversation has a
// specified expiration, that value will be used instead. Changing this
// value will not change the expiration of existing conversations.
// Conversations with no expire time persist until they are deleted.
google.protobuf.Duration conversation_ttl = 5;
// A map that maps a notification trigger to a Pub/Sub topic. Each time a
// specified trigger occurs, Insights will notify the corresponding Pub/Sub
// topic.
//
// Keys are notification triggers. Supported keys are:
//
// * "all-triggers": Notify each time any of the supported triggers occurs.
// * "create-analysis": Notify each time an analysis is created.
// * "create-conversation": Notify each time a conversation is created.
// * "export-insights-data": Notify each time an export is complete.
// * "update-conversation": Notify each time a conversation is updated via
// UpdateConversation.
//
// Values are Pub/Sub topics. The format of each Pub/Sub topic is:
// projects/{project}/topics/{topic}
map<string, string> pubsub_notification_settings = 6;
// Default analysis settings.
AnalysisConfig analysis_config = 7;
}
// An annotation that was generated during the customer and agent interaction.
message RuntimeAnnotation {
  // The payload of the annotation. At most one variant is set; setting one
  // clears the others. Field numbers 6-10 are used here because 1-5 belong
  // to the sibling metadata fields below.
  oneof data {
    // Agent Assist Article Suggestion data.
    ArticleSuggestionData article_suggestion = 6;

    // Agent Assist FAQ answer data.
    FaqAnswerData faq_answer = 7;

    // Agent Assist Smart Reply data.
    SmartReplyData smart_reply = 8;

    // Agent Assist Smart Compose suggestion data.
    SmartComposeSuggestionData smart_compose_suggestion = 9;

    // Dialogflow interaction data.
    DialogflowInteractionData dialogflow_interaction = 10;
  }

  // The unique identifier of the annotation.
  // Format:
  // projects/{project}/locations/{location}/conversationDatasets/{dataset}/conversationDataItems/{data_item}/conversationAnnotations/{annotation}
  string annotation_id = 1;

  // The time at which this annotation was created.
  google.protobuf.Timestamp create_time = 2;

  // The boundary in the conversation where the annotation starts, inclusive.
  AnnotationBoundary start_boundary = 3;

  // The boundary in the conversation where the annotation ends, inclusive.
  AnnotationBoundary end_boundary = 4;

  // The feedback that the customer has about the answer in `data`.
  AnswerFeedback answer_feedback = 5;
}
// The feedback that the customer has about a certain answer in the
// conversation.
message AnswerFeedback {
  // The correctness level of an answer.
  enum CorrectnessLevel {
    // Correctness level unspecified. This is the default value and carries
    // no business meaning; it indicates no rating was given.
    CORRECTNESS_LEVEL_UNSPECIFIED = 0;

    // Answer is totally wrong.
    NOT_CORRECT = 1;

    // Answer is partially correct.
    PARTIALLY_CORRECT = 2;

    // Answer is fully correct.
    FULLY_CORRECT = 3;
  }

  // The correctness level of an answer.
  CorrectnessLevel correctness_level = 1;

  // Indicates whether an answer or item was clicked by the human agent.
  bool clicked = 2;

  // Indicates whether an answer or item was displayed to the human agent in the
  // agent desktop UI.
  bool displayed = 3;
}
// Agent Assist Article Suggestion data.
message ArticleSuggestionData {
  // Article title.
  string title = 1;

  // Article URI.
  string uri = 2;

  // The system's confidence score that this article is a good match for this
  // conversation, ranging from 0.0 (completely uncertain) to 1.0 (completely
  // certain).
  float confidence_score = 3;

  // Map that contains metadata about the Article Suggestion and the document
  // that it originates from. Key/value semantics are defined by the producer;
  // iteration order is undefined.
  map<string, string> metadata = 4;

  // The name of the answer record.
  // Format:
  // projects/{project}/locations/{location}/answerRecords/{answer_record}
  string query_record = 5;

  // The knowledge document that this answer was extracted from.
  // Format:
  // projects/{project}/knowledgeBases/{knowledge_base}/documents/{document}
  string source = 6;
}
// Agent Assist frequently-asked-question answer data.
message FaqAnswerData {
  // The piece of text from the `source` knowledge base document.
  string answer = 1;

  // The system's confidence score that this answer is a good match for this
  // conversation, ranging from 0.0 (completely uncertain) to 1.0 (completely
  // certain).
  float confidence_score = 2;

  // The corresponding FAQ question.
  string question = 3;

  // Map that contains metadata about the FAQ answer and the document that
  // it originates from. Key/value semantics are defined by the producer;
  // iteration order is undefined.
  map<string, string> metadata = 4;

  // The name of the answer record.
  // Format:
  // projects/{project}/locations/{location}/answerRecords/{answer_record}
  string query_record = 5;

  // The knowledge document that this answer was extracted from.
  // Format:
  // projects/{project}/knowledgeBases/{knowledge_base}/documents/{document}.
  string source = 6;
}
// Agent Assist Smart Reply data.
message SmartReplyData {
  // The content of the reply.
  string reply = 1;

  // The system's confidence score that this reply is a good match for this
  // conversation, ranging from 0.0 (completely uncertain) to 1.0 (completely
  // certain). Note: declared as double here, unlike the float used by
  // ArticleSuggestionData/FaqAnswerData; the width cannot be changed without
  // breaking wire compatibility.
  double confidence_score = 2;

  // Map that contains metadata about the Smart Reply and the document from
  // which it originates. Iteration order is undefined.
  map<string, string> metadata = 3;

  // The name of the answer record.
  // Format:
  // projects/{project}/locations/{location}/answerRecords/{answer_record}
  string query_record = 4;
}
// Agent Assist Smart Compose suggestion data.
message SmartComposeSuggestionData {
  // The content of the suggestion.
  string suggestion = 1;

  // The system's confidence score that this suggestion is a good match for this
  // conversation, ranging from 0.0 (completely uncertain) to 1.0 (completely
  // certain).
  double confidence_score = 2;

  // Map that contains metadata about the Smart Compose suggestion and the
  // document from which it originates. Iteration order is undefined.
  map<string, string> metadata = 3;

  // The name of the answer record.
  // Format:
  // projects/{project}/locations/{location}/answerRecords/{answer_record}
  string query_record = 4;
}
// Dialogflow interaction data.
message DialogflowInteractionData {
  // The Dialogflow intent resource path. Format:
  // projects/{project}/agent/{agent}/intents/{intent}
  string dialogflow_intent_id = 1;

  // The confidence of the match ranging from 0.0 (completely uncertain) to 1.0
  // (completely certain).
  float confidence = 2;
}
// The call participant speaking for a given utterance.
message ConversationParticipant {
  // The role of the participant.
  enum Role {
    // Participant's role is not set. Default value; never a real role.
    ROLE_UNSPECIFIED = 0;

    // Participant is a human agent.
    HUMAN_AGENT = 1;

    // Participant is an automated agent.
    AUTOMATED_AGENT = 2;

    // Participant is an end user who conversed with the contact center.
    END_USER = 3;

    // Participant is either a human or automated agent.
    ANY_AGENT = 4;
  }

  // The identity of the participant. At most one variant is set. Field
  // numbers 5-6 are used because 1-3 are taken by older sibling fields.
  oneof participant {
    // The name of the participant provided by Dialogflow. Format:
    // projects/{project}/locations/{location}/conversations/{conversation}/participants/{participant}
    string dialogflow_participant_name = 5 [(google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Participant"
    }];

    // A user-specified ID representing the participant.
    string user_id = 6;
  }

  // Deprecated. Use `dialogflow_participant_name` instead.
  // The name of the Dialogflow participant. Format:
  // projects/{project}/locations/{location}/conversations/{conversation}/participants/{participant}
  // Retained (rather than deleted) so that old writers remain wire-compatible.
  string dialogflow_participant = 1 [deprecated = true];

  // Obfuscated user ID from Dialogflow.
  string obfuscated_external_user_id = 3;

  // The role of the participant.
  Role role = 2;
}
// The View resource.
message View {
  option (google.api.resource) = {
    type: "contactcenterinsights.googleapis.com/View"
    pattern: "projects/{project}/locations/{location}/views/{view}"
  };

  // Immutable. The resource name of the view.
  // Format:
  // projects/{project}/locations/{location}/views/{view}
  string name = 1 [(google.api.field_behavior) = IMMUTABLE];

  // The human-readable display name of the view.
  string display_name = 2;

  // Output only. The time at which this view was created.
  google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The most recent time at which the view was updated.
  google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];

  // String with specific view properties.
  // NOTE(review): the expected syntax of this value (e.g. a filter
  // expression) is not evident from this file — confirm against the
  // service documentation before relying on it.
  string value = 5;
}
|