data.proto

// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.events.cloud.video.transcoder.v1;
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
option csharp_namespace = "Google.Events.Protobuf.Cloud.Video.Transcoder.V1";
option php_namespace = "Google\\Events\\Cloud\\Video\\Transcoder\\V1";
option ruby_package = "Google::Events::Cloud::Video::Transcoder::V1";
// Transcoding job resource.
message Job {
// The current state of the job.
enum ProcessingState {
// The processing state is not specified.
PROCESSING_STATE_UNSPECIFIED = 0;
// The job is enqueued and will be picked up for processing soon.
PENDING = 1;
// The job is being processed.
RUNNING = 2;
// The job has been completed successfully.
SUCCEEDED = 3;
// The job has failed. For additional information, see `failure_reason` and
// `failure_details`
FAILED = 4;
}
// The processing mode of the job.
enum ProcessingMode {
// The job processing mode is not specified.
PROCESSING_MODE_UNSPECIFIED = 0;
// The job processing mode is interactive mode.
// An interactive job will either be run or rejected if quota does not
// allow for it.
PROCESSING_MODE_INTERACTIVE = 1;
// The job processing mode is batch mode.
// Batch mode allows queuing of jobs.
PROCESSING_MODE_BATCH = 2;
}
// The resource name of the job.
// Format: `projects/{project_number}/locations/{location}/jobs/{job}`
string name = 1;
// Specify the `job_config` for the transcoding job. If you don't specify the
// `job_config`, the API selects `templateId`; this template ID is set to
// `preset/web-hd` by default. When you use a `template_id` to create a job,
// the `Job.config` is populated by the `JobTemplate.config`.
oneof job_config {
// The configuration for this job.
JobConfig config = 5;
}
// Output only. The current state of the job.
ProcessingState state = 8;
// Output only. The time the job was created.
google.protobuf.Timestamp create_time = 12;
// Output only. The time the transcoding started.
google.protobuf.Timestamp start_time = 13;
// Output only. The time the transcoding finished.
google.protobuf.Timestamp end_time = 14;
// Job time to live value in days, which will be effective after job
// completion. Job should be deleted automatically after the given TTL. Enter
// a value between 1 and 90. The default is 30.
int32 ttl_after_completion_days = 15;
// The labels associated with this job. You can use these to organize and
// group your jobs.
map<string, string> labels = 16;
// Output only. An error object that describes the reason for the failure.
// This property is always present when `state` is `FAILED`.
google.rpc.Status error = 17;
// The processing mode of the job.
// The default is `PROCESSING_MODE_INTERACTIVE`.
ProcessingMode mode = 20;
}
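
// Illustrative sketch, not part of the schema: a hypothetical Job resource in
// protobuf text format, using the fields defined above. The resource name and
// label values are made up, and `config` is omitted (see `JobConfig` below).
//
//   name: "projects/123/locations/us-central1/jobs/example-job"
//   state: SUCCEEDED
//   ttl_after_completion_days: 30
//   labels { key: "env" value: "test" }
//   mode: PROCESSING_MODE_BATCH
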
// Transcoding job template resource.
message JobTemplate {
// The resource name of the job template.
// Format:
// `projects/{project_number}/locations/{location}/jobTemplates/{job_template}`
string name = 1;
// The configuration for this template.
JobConfig config = 2;
// The labels associated with this job template. You can use these to organize
// and group your job templates.
map<string, string> labels = 3;
}
// Job configuration
message JobConfig {
// List of input assets stored in Cloud Storage.
repeated Input inputs = 1;
// List of `Edit atom`s. Defines the ultimate timeline of the resulting
// file or manifest.
repeated EditAtom edit_list = 2;
// List of elementary streams.
repeated ElementaryStream elementary_streams = 3;
// List of multiplexing settings for output streams.
repeated MuxStream mux_streams = 4;
// List of output manifests.
repeated Manifest manifests = 5;
// Output configuration.
Output output = 6;
// List of ad breaks. Specifies where to insert ad break tags in the output
// manifests.
repeated AdBreak ad_breaks = 7;
// Destination on Pub/Sub.
PubsubDestination pubsub_destination = 8;
// List of output sprite sheets.
// Sprite sheets require at least one VideoStream in the JobConfig.
repeated SpriteSheet sprite_sheets = 9;
// List of overlays on the output video, in descending Z-order.
repeated Overlay overlays = 10;
}
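
// Illustrative sketch, not part of the schema: a minimal JobConfig in protobuf
// text format that wires one input through one H.264 video stream into an MP4
// output. Bucket paths and keys are hypothetical, and the `h264` field of
// `VideoStream` is an assumption, since the `VideoStream` oneof is not shown
// in this excerpt.
//
//   inputs { key: "input0" uri: "gs://my-bucket/inputs/file.mp4" }
//   elementary_streams {
//     key: "video0"
//     video_stream { h264 { frame_rate: 30 bitrate_bps: 2500000 } }
//   }
//   mux_streams { key: "sd" container: "mp4" elementary_streams: "video0" }
//   output { uri: "gs://my-bucket/outputs/" }
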
// Input asset.
message Input {
// A unique key for this input. Must be specified when using advanced
// mapping and edit lists.
string key = 1;
// URI of the media. Input files must be at least 5 seconds in duration and
// stored in Cloud Storage (for example, `gs://bucket/inputs/file.mp4`).
// If empty, the value is populated from `Job.input_uri`. See
// [Supported input and output
// formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
string uri = 2;
// Preprocessing configurations.
PreprocessingConfig preprocessing_config = 3;
}
// Location of output file(s) in a Cloud Storage bucket.
message Output {
// URI for the output file(s). For example, `gs://my-bucket/outputs/`.
// If empty, the value is populated from `Job.output_uri`. See
// [Supported input and output
// formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
string uri = 1;
}
// Edit atom.
message EditAtom {
// A unique key for this atom. Must be specified when using advanced
// mapping.
string key = 1;
// List of `Input.key`s identifying files that should be used in this atom.
// The listed `inputs` must have the same timeline.
repeated string inputs = 2;
// End time in seconds for the atom, relative to the input file timeline.
// When `end_time_offset` is not specified, the `inputs` are used until
// the end of the atom.
google.protobuf.Duration end_time_offset = 3;
// Start time in seconds for the atom, relative to the input file timeline.
// The default is `0s`.
google.protobuf.Duration start_time_offset = 4;
}
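
// Illustrative sketch, not part of the schema: an edit_list entry in protobuf
// text format that keeps only seconds 10 through 40 of a hypothetical input
// with key "input0".
//
//   edit_list {
//     key: "atom0"
//     inputs: "input0"
//     start_time_offset { seconds: 10 }
//     end_time_offset { seconds: 40 }
//   }
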
// Ad break.
message AdBreak {
// Start time in seconds for the ad break, relative to the output file
// timeline. The default is `0s`.
google.protobuf.Duration start_time_offset = 1;
}
// Encoding of an input file such as an audio, video, or text track.
// Elementary streams must be packaged before
// mapping and sharing between different output formats.
message ElementaryStream {
// A unique key for this elementary stream.
string key = 4;
// Encoding of an audio, video, or text track.
oneof elementary_stream {
// Encoding of a video stream.
VideoStream video_stream = 1;
// Encoding of an audio stream.
AudioStream audio_stream = 2;
// Encoding of a text stream. For example, closed captions or subtitles.
TextStream text_stream = 3;
}
}
// Multiplexing settings for output stream.
message MuxStream {
// A unique key for this multiplexed stream. HLS media manifests will be
// named `MuxStream.key` with the `.m3u8` extension suffix.
string key = 1;
// The name of the generated file. The default is `MuxStream.key` with the
// extension suffix corresponding to the `MuxStream.container`.
//
// Individual segments also have an incremental 10-digit zero-padded suffix
// starting from 0 before the extension, such as `mux_stream0000000123.ts`.
string file_name = 2;
// The container format. The default is `mp4`.
//
// Supported container formats:
//
// - `ts`
// - `fmp4` - the corresponding file extension is `.m4s`
// - `mp4`
// - `vtt`
//
// See also:
// [Supported input and output
// formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats)
string container = 3;
// List of `ElementaryStream.key`s multiplexed in this stream.
repeated string elementary_streams = 4;
// Segment settings for `ts`, `fmp4` and `vtt`.
SegmentSettings segment_settings = 5;
}
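
// Illustrative sketch, not part of the schema: an HLS-oriented mux_streams
// entry in protobuf text format. The key and stream names are hypothetical,
// and the `segment_duration` field is assumed because `SegmentSettings` is
// defined outside this excerpt.
//
//   mux_streams {
//     key: "hls-video"
//     container: "ts"
//     elementary_streams: "video0"
//     segment_settings { segment_duration { seconds: 6 } }
//   }
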
// Manifest configuration.
message Manifest {
// The manifest type, which corresponds to the adaptive streaming format used.
enum ManifestType {
// The manifest type is not specified.
MANIFEST_TYPE_UNSPECIFIED = 0;
// Create an HLS manifest. The corresponding file extension is `.m3u8`.
HLS = 1;
// Create an MPEG-DASH manifest. The corresponding file extension is `.mpd`.
DASH = 2;
}
// The name of the generated file. The default is `manifest` with the
// extension suffix corresponding to the `Manifest.type`.
string file_name = 1;
// Required. Type of the manifest.
ManifestType type = 2;
// Required. List of user-given `MuxStream.key`s that should appear in this
// manifest.
//
// When `Manifest.type` is `HLS`, a media manifest with name `MuxStream.key`
// and `.m3u8` extension is generated for each element of the
// `Manifest.mux_streams`.
repeated string mux_streams = 3;
}
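
// Illustrative sketch, not part of the schema: an HLS manifest entry in
// protobuf text format referencing two hypothetical mux stream keys.
//
//   manifests {
//     file_name: "manifest.m3u8"
//     type: HLS
//     mux_streams: "hls-video-sd"
//     mux_streams: "hls-video-hd"
//   }
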
// A Pub/Sub destination.
message PubsubDestination {
// The name of the Pub/Sub topic to publish job completion notification
// to. For example: `projects/{project}/topics/{topic}`.
string topic = 1;
}
// Sprite sheet configuration.
message SpriteSheet {
// Format type. The default is `jpeg`.
//
// Supported formats:
//
// - `jpeg`
string format = 1;
// Required. File name prefix for the generated sprite sheets.
//
// Each sprite sheet has an incremental 10-digit zero-padded suffix starting
// from 0 before the extension, such as `sprite_sheet0000000123.jpeg`.
string file_prefix = 2;
// Required. The width of sprite in pixels. Must be an even integer. To
// preserve the source aspect ratio, set the
// [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
// field or the
// [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
// field, but not both (the API will automatically calculate the missing
// field).
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the width, in pixels, per the horizontal ASR. The API calculates
// the height per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 sprite_width_pixels = 3;
// Required. The height of sprite in pixels. Must be an even integer. To
// preserve the source aspect ratio, set the
// [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
// field or the
// [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
// field, but not both (the API will automatically calculate the missing
// field).
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the height, in pixels, per the horizontal ASR. The API calculates
// the width per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 sprite_height_pixels = 4;
// The maximum number of sprites per row in a sprite sheet. The default is 0,
// which indicates no maximum limit.
int32 column_count = 5;
// The maximum number of rows per sprite sheet. When the sprite sheet is full,
// a new sprite sheet is created. The default is 0, which indicates no maximum
// limit.
int32 row_count = 6;
// Start time in seconds, relative to the output file timeline. Determines the
// first sprite to pick. The default is `0s`.
google.protobuf.Duration start_time_offset = 7;
// End time in seconds, relative to the output file timeline. When
// `end_time_offset` is not specified, the sprites are generated until the end
// of the output file.
google.protobuf.Duration end_time_offset = 8;
// Specify either total number of sprites or interval to create sprites.
oneof extraction_strategy {
// Total number of sprites. Create the specified number of sprites
// distributed evenly across the timeline of the output media. The default
// is 100.
int32 total_count = 9;
// Starting from `0s`, create sprites at regular intervals. Specify the
// interval value in seconds.
google.protobuf.Duration interval = 10;
}
// The quality of the generated sprite sheet. Enter a value between 1
// and 100, where 1 is the lowest quality and 100 is the highest quality.
// The default is 100. A high quality value corresponds to a low image data
// compression ratio.
int32 quality = 11;
}
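
// Illustrative sketch, not part of the schema: a sprite_sheets entry in
// protobuf text format that extracts one 128x72 sprite every 10 seconds.
// The file prefix is hypothetical.
//
//   sprite_sheets {
//     file_prefix: "sprite"
//     sprite_width_pixels: 128
//     sprite_height_pixels: 72
//     interval { seconds: 10 }
//   }
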
// Overlay configuration.
message Overlay {
// 2D normalized coordinates. Default: `{0.0, 0.0}`
message NormalizedCoordinate {
// Normalized x coordinate.
double x = 1;
// Normalized y coordinate.
double y = 2;
}
// Overlaid image.
message Image {
// Required. URI of the image in Cloud Storage. For example,
// `gs://bucket/inputs/image.png`. Only PNG and JPEG images are supported.
string uri = 1;
// Normalized image resolution, based on output video resolution. Valid
// values: `0.0`–`1.0`. To respect the original image aspect ratio, set
// either `x` or `y` to `0.0`. To use the original image resolution, set
// both `x` and `y` to `0.0`.
NormalizedCoordinate resolution = 2;
// Target image opacity. Valid values are from `1.0` (solid, default) to
// `0.0` (transparent), exclusive. Set this to a value greater than `0.0`.
double alpha = 3;
}
// Display static overlay object.
message AnimationStatic {
// Normalized coordinates based on output video resolution. Valid
// values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
// object. For example, use the x and y coordinates {0,0} to position the
// top-left corner of the overlay animation in the top-left corner of the
// output video.
NormalizedCoordinate xy = 1;
// The time to start displaying the overlay object, in seconds. Default: 0
google.protobuf.Duration start_time_offset = 2;
}
// Display overlay object with fade animation.
message AnimationFade {
// Required. Type of fade animation: `FADE_IN` or `FADE_OUT`.
FadeType fade_type = 1;
// Normalized coordinates based on output video resolution. Valid
// values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
// object. For example, use the x and y coordinates {0,0} to position the
// top-left corner of the overlay animation in the top-left corner of the
// output video.
NormalizedCoordinate xy = 2;
// The time to start the fade animation, in seconds. Default: 0
google.protobuf.Duration start_time_offset = 3;
// The time to end the fade animation, in seconds. Default:
// `start_time_offset` + 1s
google.protobuf.Duration end_time_offset = 4;
}
// End previous overlay animation from the video. Without AnimationEnd, the
// overlay object will keep the state of previous animation until the end of
// the video.
message AnimationEnd {
// The time to end the overlay object, in seconds. Default: 0
google.protobuf.Duration start_time_offset = 1;
}
// Animation types.
message Animation {
// Animations can be static or fade, or they can end the previous animation.
oneof animation_type {
// Display static overlay object.
AnimationStatic animation_static = 1;
// Display overlay object with fade animation.
AnimationFade animation_fade = 2;
// End previous animation.
AnimationEnd animation_end = 3;
}
}
// Fade type for the overlay: `FADE_IN` or `FADE_OUT`.
enum FadeType {
// The fade type is not specified.
FADE_TYPE_UNSPECIFIED = 0;
// Fade the overlay object into view.
FADE_IN = 1;
// Fade the overlay object out of view.
FADE_OUT = 2;
}
// Image overlay.
Image image = 1;
// List of Animations. The list should be chronological, without any time
// overlap.
repeated Animation animations = 2;
}
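
// Illustrative sketch, not part of the schema: an overlays entry in protobuf
// text format that fades a hypothetical logo in at 5 seconds and removes it
// at 20 seconds.
//
//   overlays {
//     image {
//       uri: "gs://my-bucket/inputs/logo.png"
//       resolution { x: 0.2 y: 0.0 }
//     }
//     animations {
//       animation_fade {
//         fade_type: FADE_IN
//         xy { x: 0.05 y: 0.05 }
//         start_time_offset { seconds: 5 }
//         end_time_offset { seconds: 6 }
//       }
//     }
//     animations { animation_end { start_time_offset { seconds: 20 } } }
//   }
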
// Preprocessing configurations.
message PreprocessingConfig {
// Color preprocessing configuration.
//
// **Note:** This configuration is not supported.
message Color {
// Control color saturation of the video. Enter a value between -1 and 1,
// where -1 is fully desaturated and 1 is maximum saturation. 0 is no
// change. The default is 0.
double saturation = 1;
// Control black and white contrast of the video. Enter a value between -1
// and 1, where -1 is minimum contrast and 1 is maximum contrast. 0 is no
// change. The default is 0.
double contrast = 2;
// Control brightness of the video. Enter a value between -1 and 1, where -1
// is minimum brightness and 1 is maximum brightness. 0 is no change. The
// default is 0.
double brightness = 3;
}
// Denoise preprocessing configuration.
//
// **Note:** This configuration is not supported.
message Denoise {
// Set strength of the denoise. Enter a value between 0 and 1. The higher
// the value, the smoother the image. 0 is no denoising. The default is 0.
double strength = 1;
// Set the denoiser mode. The default is `standard`.
//
// Supported denoiser modes:
//
// - `standard`
// - `grain`
string tune = 2;
}
// Deblock preprocessing configuration.
//
// **Note:** This configuration is not supported.
message Deblock {
// Set strength of the deblocker. Enter a value between 0 and 1. The higher
// the value, the stronger the block removal. 0 is no deblocking. The
// default is 0.
double strength = 1;
// Enable deblocker. The default is `false`.
bool enabled = 2;
}
// Audio preprocessing configuration.
message Audio {
// Specify audio loudness normalization in loudness units relative to full
// scale (LUFS). Enter a value between -24 and 0 (the default), where:
//
// * -24 is the Advanced Television Systems Committee (ATSC A/85) standard
// * -23 is the EU R128 broadcast standard
// * -19 is the prior standard for online mono audio
// * -18 is the ReplayGain standard
// * -16 is the prior standard for stereo audio
// * -14 is the new online audio standard recommended by Spotify, as well
// as Amazon Echo
// * 0 disables normalization
double lufs = 1;
// Enable boosting high frequency components. The default is `false`.
//
// **Note:** This field is not supported.
bool high_boost = 2;
// Enable boosting low frequency components. The default is `false`.
//
// **Note:** This field is not supported.
bool low_boost = 3;
}
// Video cropping configuration for the input video. The cropped input video
// is scaled to match the output resolution.
message Crop {
// The number of pixels to crop from the top. The default is 0.
int32 top_pixels = 1;
// The number of pixels to crop from the bottom. The default is 0.
int32 bottom_pixels = 2;
// The number of pixels to crop from the left. The default is 0.
int32 left_pixels = 3;
// The number of pixels to crop from the right. The default is 0.
int32 right_pixels = 4;
}
// Pad filter configuration for the input video. The padded input video
// is scaled after padding with black to match the output resolution.
message Pad {
// The number of pixels to add to the top. The default is 0.
int32 top_pixels = 1;
// The number of pixels to add to the bottom. The default is 0.
int32 bottom_pixels = 2;
// The number of pixels to add to the left. The default is 0.
int32 left_pixels = 3;
// The number of pixels to add to the right. The default is 0.
int32 right_pixels = 4;
}
// Deinterlace configuration for input video.
message Deinterlace {
// Yet Another Deinterlacing Filter Configuration.
message YadifConfig {
// Specifies the deinterlacing mode to adopt.
// The default is `send_frame`.
// Supported values:
//
// - `send_frame`: Output one frame for each frame
// - `send_field`: Output one frame for each field
string mode = 1;
// Disable spatial interlacing.
// The default is `false`.
bool disable_spatial_interlacing = 2;
// The picture field parity assumed for the input interlaced video.
// The default is `auto`.
// Supported values:
//
// - `tff`: Assume the top field is first
// - `bff`: Assume the bottom field is first
// - `auto`: Enable automatic detection of field parity
string parity = 3;
// Deinterlace all frames rather than just the frames identified as
// interlaced. The default is `false`.
bool deinterlace_all_frames = 4;
}
// Bob Weaver Deinterlacing Filter Configuration.
message BwdifConfig {
// Specifies the deinterlacing mode to adopt.
// The default is `send_frame`.
// Supported values:
//
// - `send_frame`: Output one frame for each frame
// - `send_field`: Output one frame for each field
string mode = 1;
// The picture field parity assumed for the input interlaced video.
// The default is `auto`.
// Supported values:
//
// - `tff`: Assume the top field is first
// - `bff`: Assume the bottom field is first
// - `auto`: Enable automatic detection of field parity
string parity = 2;
// Deinterlace all frames rather than just the frames identified as
// interlaced. The default is `false`.
bool deinterlace_all_frames = 3;
}
// Specify the video deinterlacing filter. The default is `yadif`.
oneof deinterlacing_filter {
// Specifies the Yet Another Deinterlacing Filter Configuration.
YadifConfig yadif = 1;
// Specifies the Bob Weaver Deinterlacing Filter Configuration.
BwdifConfig bwdif = 2;
}
}
// Color preprocessing configuration.
Color color = 1;
// Denoise preprocessing configuration.
Denoise denoise = 2;
// Deblock preprocessing configuration.
Deblock deblock = 3;
// Audio preprocessing configuration.
Audio audio = 4;
// Specify the video cropping configuration.
Crop crop = 5;
// Specify the video pad filter configuration.
Pad pad = 6;
// Specify the video deinterlace configuration.
Deinterlace deinterlace = 7;
}
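
// Illustrative sketch, not part of the schema: a preprocessing_config in
// protobuf text format that crops 40 pixels of letterboxing from the top and
// bottom of the input and deinterlaces with the default yadif filter.
//
//   preprocessing_config {
//     crop { top_pixels: 40 bottom_pixels: 40 }
//     deinterlace { yadif { mode: "send_frame" parity: "auto" } }
//   }
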
// Video stream resource.
message VideoStream {
// H264 codec settings.
message H264CodecSettings {
// The width of the video in pixels. Must be an even integer.
// When not specified, the width is adjusted to match the specified height
// and input aspect ratio. If both are omitted, the input width is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the width, in pixels, per the horizontal ASR. The API calculates
// the height per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 width_pixels = 1;
// The height of the video in pixels. Must be an even integer.
// When not specified, the height is adjusted to match the specified width
// and input aspect ratio. If both are omitted, the input height is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the height, in pixels, per the horizontal ASR. The API calculates
// the width per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 height_pixels = 2;
// Required. The target video frame rate in frames per second (FPS). Must be
// less than or equal to 120. Defaults to the input frame rate if it is
// larger than the input frame rate. The API generates an output FPS that is
// divisible by the input FPS and less than or equal to the target FPS. See
// [Calculating frame
// rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
// more information.
double frame_rate = 3;
// Required. The video bitrate in bits per second. The minimum value is
// 1,000. The maximum value is 800,000,000.
int32 bitrate_bps = 4;
// Pixel format to use. The default is `yuv420p`.
//
// Supported pixel formats:
//
// - `yuv420p` pixel format
// - `yuv422p` pixel format
// - `yuv444p` pixel format
// - `yuv420p10` 10-bit HDR pixel format
// - `yuv422p10` 10-bit HDR pixel format
// - `yuv444p10` 10-bit HDR pixel format
// - `yuv420p12` 12-bit HDR pixel format
// - `yuv422p12` 12-bit HDR pixel format
// - `yuv444p12` 12-bit HDR pixel format
string pixel_format = 5;
// Specify the `rate_control_mode`. The default is `vbr`.
//
// Supported rate control modes:
//
// - `vbr` - variable bitrate
// - `crf` - constant rate factor
string rate_control_mode = 6;
// Target CRF level. Must be between 10 and 36, where 10 is the highest
// quality and 36 is the most efficient compression. The default is 21.
int32 crf_level = 7;
// Specifies whether an open Group of Pictures (GOP) structure should be
// allowed or not. The default is `false`.
bool allow_open_gop = 8;
// GOP mode can be either by frame count or duration.
oneof gop_mode {
// Select the GOP size based on the specified frame count. Must be greater
// than zero.
int32 gop_frame_count = 9;
// Select the GOP size based on the specified duration. The default is
// `3s`. Note that `gopDuration` must be less than or equal to
// [`segmentDuration`](#SegmentSettings), and
// [`segmentDuration`](#SegmentSettings) must be divisible by
// `gopDuration`.
google.protobuf.Duration gop_duration = 10;
}
// Use two-pass encoding strategy to achieve better video quality.
// `VideoStream.rate_control_mode` must be `vbr`. The default is `false`.
bool enable_two_pass = 11;
// Size of the Video Buffering Verifier (VBV) buffer in bits. Must be
// greater than zero. The default is equal to `VideoStream.bitrate_bps`.
int32 vbv_size_bits = 12;
// Initial fullness of the Video Buffering Verifier (VBV) buffer in bits.
// Must be greater than zero. The default is equal to 90% of
// `VideoStream.vbv_size_bits`.
int32 vbv_fullness_bits = 13;
// The entropy coder to use. The default is `cabac`.
//
// Supported entropy coders:
//
// - `cavlc`
// - `cabac`
string entropy_coder = 14;
// Allow B-pyramid for reference frame selection. This may not be supported
// on all decoders. The default is `false`.
bool b_pyramid = 15;
// The number of consecutive B-frames. Must be greater than or equal to
// zero. Must be less than `VideoStream.gop_frame_count` if set. The default
// is 0.
int32 b_frame_count = 16;
// Specify the intensity of the adaptive quantizer (AQ). Must be between 0
// and 1, where 0 disables the quantizer and 1 maximizes the quantizer. A
// higher value equals a lower bitrate but smoother image. The default is 0.
double aq_strength = 17;
// Enforces the specified codec profile. The following profiles are
// supported:
//
// * `baseline`
// * `main`
// * `high` (default)
//
// The available options are
// [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H264CodecSettings`
// message.
string profile = 18;
// Enforces the specified codec tune. The available options are
// [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H264CodecSettings`
// message.
string tune = 19;
// Enforces the specified codec preset. The default is `veryfast`. The
// available options are
// [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Preset).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H264CodecSettings`
// message.
string preset = 20;
}
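
// Illustrative sketch, not part of the schema: H264CodecSettings in protobuf
// text format for a 720p stream encoded with constant rate factor. The
// enclosing `h264` field name is an assumption, since the `VideoStream` oneof
// is not shown in this excerpt.
//
//   h264 {
//     height_pixels: 720
//     frame_rate: 30
//     bitrate_bps: 3000000
//     rate_control_mode: "crf"
//     crf_level: 21
//     profile: "high"
//     preset: "veryfast"
//   }
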
// H265 codec settings.
message H265CodecSettings {
// The width of the video in pixels. Must be an even integer.
// When not specified, the width is adjusted to match the specified height
// and input aspect ratio. If both are omitted, the input width is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the width, in pixels, per the horizontal ASR. The API calculates
// the height per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 width_pixels = 1;
// The height of the video in pixels. Must be an even integer.
// When not specified, the height is adjusted to match the specified width
// and input aspect ratio. If both are omitted, the input height is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the height, in pixels, per the horizontal ASR. The API calculates
// the width per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 height_pixels = 2;
// Required. The target video frame rate in frames per second (FPS). Must be
// less than or equal to 120. Defaults to the input frame rate if it is
// larger than the input frame rate. The API generates an output FPS that is
// divisible by the input FPS and less than or equal to the target FPS. See
// [Calculating frame
// rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
// more information.
double frame_rate = 3;
// Required. The video bitrate in bits per second. The minimum value is
// 1,000. The maximum value is 800,000,000.
int32 bitrate_bps = 4;
// Pixel format to use. The default is `yuv420p`.
//
// Supported pixel formats:
//
// - `yuv420p` pixel format
// - `yuv422p` pixel format
// - `yuv444p` pixel format
// - `yuv420p10` 10-bit HDR pixel format
// - `yuv422p10` 10-bit HDR pixel format
// - `yuv444p10` 10-bit HDR pixel format
// - `yuv420p12` 12-bit HDR pixel format
// - `yuv422p12` 12-bit HDR pixel format
// - `yuv444p12` 12-bit HDR pixel format
string pixel_format = 5;
// Specify the `rate_control_mode`. The default is `vbr`.
//
// Supported rate control modes:
//
// - `vbr` - variable bitrate
// - `crf` - constant rate factor
string rate_control_mode = 6;
// Target CRF level. Must be between 10 and 36, where 10 is the highest
// quality and 36 is the most efficient compression. The default is 21.
int32 crf_level = 7;
// Specifies whether an open Group of Pictures (GOP) structure should be
// allowed or not. The default is `false`.
bool allow_open_gop = 8;
// GOP mode can be either by frame count or duration.
oneof gop_mode {
// Select the GOP size based on the specified frame count. Must be greater
// than zero.
int32 gop_frame_count = 9;
// Select the GOP size based on the specified duration. The default is
// `3s`. Note that `gopDuration` must be less than or equal to
// [`segmentDuration`](#SegmentSettings), and
// [`segmentDuration`](#SegmentSettings) must be divisible by
// `gopDuration`.
google.protobuf.Duration gop_duration = 10;
}
// Use two-pass encoding strategy to achieve better video quality.
// `VideoStream.rate_control_mode` must be `vbr`. The default is `false`.
bool enable_two_pass = 11;
// Size of the Video Buffering Verifier (VBV) buffer in bits. Must be
// greater than zero. The default is equal to `VideoStream.bitrate_bps`.
int32 vbv_size_bits = 12;
// Initial fullness of the Video Buffering Verifier (VBV) buffer in bits.
// Must be greater than zero. The default is equal to 90% of
// `VideoStream.vbv_size_bits`.
int32 vbv_fullness_bits = 13;
// Allow B-pyramid for reference frame selection. This may not be supported
// on all decoders. The default is `false`.
bool b_pyramid = 14;
// The number of consecutive B-frames. Must be greater than or equal to
// zero. Must be less than `VideoStream.gop_frame_count` if set. The default
// is 0.
int32 b_frame_count = 15;
// Specify the intensity of the adaptive quantizer (AQ). Must be between 0
// and 1, where 0 disables the quantizer and 1 maximizes the quantizer. A
// higher value equals a lower bitrate but smoother image. The default is 0.
double aq_strength = 16;
// Enforces the specified codec profile. The following profiles are
// supported:
//
// * 8-bit profiles
// * `main` (default)
// * `main-intra`
// * `mainstillpicture`
// * 10-bit profiles
// * `main10` (default)
// * `main10-intra`
// * `main422-10`
// * `main422-10-intra`
// * `main444-10`
// * `main444-10-intra`
// * 12-bit profiles
// * `main12` (default)
// * `main12-intra`
// * `main422-12`
// * `main422-12-intra`
// * `main444-12`
// * `main444-12-intra`
//
// The available options are
// [FFmpeg-compatible](https://x265.readthedocs.io/).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H265CodecSettings`
// message.
string profile = 17;
// Enforces the specified codec tune. The available options are
// [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.265).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H265CodecSettings`
// message.
string tune = 18;
// Enforces the specified codec preset. The default is `veryfast`. The
// available options are
// [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.265).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H265CodecSettings`
// message.
string preset = 19;
}
// VP9 codec settings.
message Vp9CodecSettings {
// The width of the video in pixels. Must be an even integer.
// When not specified, the width is adjusted to match the specified height
// and input aspect ratio. If both are omitted, the input width is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the width, in pixels, per the horizontal ASR. The API calculates
// the height per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 width_pixels = 1;
// The height of the video in pixels. Must be an even integer.
// When not specified, the height is adjusted to match the specified width
// and input aspect ratio. If both are omitted, the input height is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the height, in pixels, per the horizontal ASR. The API calculates
// the width per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 height_pixels = 2;