// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// **** DO NOT EDIT - this .proto was automatically generated. ****
syntax = "proto3";

package cast.media;

import "google/protobuf/duration.proto";
import "google/protobuf/empty.proto";

option optimize_for = LITE_RUNTIME;

enum PipelineState {
  PIPELINE_STATE_UNINITIALIZED = 0;
  PIPELINE_STATE_STOPPED = 1;
  PIPELINE_STATE_PLAYING = 2;
  PIPELINE_STATE_PAUSED = 3;
}

enum CastAudioDecoderMode {
  // Both multiroom and audio rendering are enabled.
  CAST_AUDIO_DECODER_MODE_ALL = 0;

  // Only multiroom is enabled and audio rendering is disabled.  This should
  // be used if the runtime is taking over responsibility for rendering audio.
  CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY = 1;

  // Only audio rendering is enabled and multiroom is disabled.
  CAST_AUDIO_DECODER_MODE_AUDIO_ONLY = 2;
}

message AudioConfiguration {
  enum AudioCodec {
    AUDIO_CODEC_UNKNOWN = 0;
    AUDIO_CODEC_AAC = 1;
    AUDIO_CODEC_MP3 = 2;
    AUDIO_CODEC_PCM = 3;
    AUDIO_CODEC_PCM_S16BE = 4;
    AUDIO_CODEC_VORBIS = 5;
    AUDIO_CODEC_OPUS = 6;
    AUDIO_CODEC_EAC3 = 7;
    AUDIO_CODEC_AC3 = 8;
    AUDIO_CODEC_DTS = 9;
    AUDIO_CODEC_FLAC = 10;
    AUDIO_CODEC_MPEG_H_AUDIO = 11;
  }

  enum ChannelLayout {
    CHANNEL_LAYOUT_UNSUPPORTED = 0;

    // Front C
    CHANNEL_LAYOUT_MONO = 1;

    // Front L, Front R
    CHANNEL_LAYOUT_STEREO = 2;

    // Front L, Front R, Front C, LFE, Side L, Side R
    CHANNEL_LAYOUT_SURROUND_5_1 = 3;

    // Actual channel layout is specified in the bitstream and the actual
    // channel count is unknown at Chromium media pipeline level (useful for
    // audio pass-through mode).
    CHANNEL_LAYOUT_BITSTREAM = 4;

    // Channels are not explicitly mapped to speakers.
    CHANNEL_LAYOUT_DISCRETE = 5;
  }

  enum SampleFormat {
    SAMPLE_FORMAT_UNKNOWN = 0;
    SAMPLE_FORMAT_U8 = 1;          // Unsigned 8-bit w/ bias of 128.
    SAMPLE_FORMAT_S16 = 2;         // Signed 16-bit.
    SAMPLE_FORMAT_S32 = 3;         // Signed 32-bit.
    SAMPLE_FORMAT_F32 = 4;         // Float 32-bit.
    SAMPLE_FORMAT_PLANAR_S16 = 5;  // Signed 16-bit planar.
    SAMPLE_FORMAT_PLANAR_F32 = 6;  // Float 32-bit planar.
    SAMPLE_FORMAT_PLANAR_S32 = 7;  // Signed 32-bit planar.
    SAMPLE_FORMAT_S24 = 8;         // Signed 24-bit.
  }

  // Audio codec.
  AudioCodec codec = 1;

  // Audio channel layout.
  ChannelLayout channel_layout = 2;

  // The format of each audio sample.
  SampleFormat sample_format = 3;

  // Number of bytes in each channel.
  int64 bytes_per_channel = 4;

  // Number of channels in this audio stream.
  int32 channel_number = 5;

  // Number of audio samples per second.
  int64 samples_per_second = 6;

  // Extra data buffer for certain codec initialization.
  bytes extra_data = 7;
}
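
// Illustrative example (hypothetical values, not taken from this file): a
// stereo 16-bit PCM stream at 48 kHz could be described roughly as
//   codec: AUDIO_CODEC_PCM
//   channel_layout: CHANNEL_LAYOUT_STEREO
//   sample_format: SAMPLE_FORMAT_S16
//   bytes_per_channel: 2
//   channel_number: 2
//   samples_per_second: 48000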

// The data buffer associated with a single frame of audio data.
message AudioDecoderBuffer {
  // The PTS of the frame in microseconds. This is a property of the audio frame
  // and is used by the receiver to correctly order the audio frames and to
  // determine when they should be decoded.
  int64 pts_micros = 1;

  // A single frame of audio data as a byte array.
  bytes data = 2;

  // Indicates whether this is a special frame that marks the end of the
  // stream. If true, functions to access the frame content cannot be called.
  bool end_of_stream = 3;

  // Unique identifier.  This field should be greater than or equal to 0 and
  // incremented by one for each PushBufferRequest.
  int64 id = 4;
}

message MediaTime {
  // The current PTS that has been rendered.
  int64 current_pts_micros = 1;

  // Whether the end of stream has been rendered.
  bool end_of_stream = 2;

  // Capture time with respect to CLOCK_MONOTONIC_RAW at which the delay
  // measurement was taken.
  google.protobuf.Duration capture_time = 3;
}

message TimestampInfo {
  // System timestamp with respect to CLOCK_MONOTONIC_RAW at which the
  // corresponding buffer is expected to be rendered.
  google.protobuf.Duration system_timestamp = 1;

  // AudioDecoderBuffer.id associated with the |system_timestamp|.
  int64 buffer_id = 2;
}

message InitializeRequest {
  // Cast session ID.
  string cast_session_id = 1;

  // Configures how the server should operate.
  CastAudioDecoderMode mode = 2;
}

message GetMinimumBufferingDelayResponse {
  // The minimum buffering delay in microseconds.
  int64 delay_micros = 1;
}

message StartRequest {
  // The start presentation timestamp in microseconds.
  int64 pts_micros = 1;

  // Timestamp information associated with the request.
  // This field is optional and only used when this service is configured
  // for CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY.
  TimestampInfo timestamp_info = 2;
}

message StopRequest {}

message PauseRequest {}

message ResumeRequest {
  // Timestamp information associated with the request.
  // This field is optional and only used when this service is configured
  // for CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY.
  TimestampInfo resume_timestamp_info = 1;
}

message TimestampUpdateRequest {
  TimestampInfo timestamp_info = 1;
}

message StateChangeRequest {
  oneof request {
    StartRequest start = 1;
    StopRequest stop = 2;
    PauseRequest pause = 3;
    ResumeRequest resume = 4;
    TimestampUpdateRequest timestamp_update = 5;
  }
}

message StateChangeResponse {
  // Pipeline state after state change.
  PipelineState state = 1;
}

message PushBufferRequest {
  AudioDecoderBuffer buffer = 1;

  // Audio configuration for this buffer and all subsequent buffers. This
  // field must be populated for the first request or if there is an audio
  // configuration change.
  AudioConfiguration audio_config = 2;
}

message PushBufferResponse {
  // The total number of decoded bytes.
  int64 decoded_bytes = 1;
}

message SetVolumeRequest {
  // The multiplier is in the range [0.0, 1.0].
  float multiplier = 1;
}

message SetPlaybackRateRequest {
  // Playback rate greater than 0.
  double rate = 1;
}

message GetMediaTimeResponse {
  // The current media time that has been rendered.
  MediaTime media_time = 1;
}

// Cast audio service hosted by Cast Core.
//
// It defines a state machine with the following states:
// - Uninitialized
// - Playing
// - Stopped
// - Paused
//
// Note that the order in which the server receives RPC calls is not
// guaranteed to match the order in which they were sent.
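//
// In summary, the transitions documented for Initialize and StateChange
// below are:
//   Uninitialized   -> Stopped  (Initialize)
//   Stopped         -> Playing  (StartRequest)
//   Playing         -> Paused   (PauseRequest)
//   Paused          -> Playing  (ResumeRequest)
//   Playing/Paused  -> Stopped  (StopRequest)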
service CastRuntimeAudioChannel {
  // Initializes the service and places the pipeline into the 'Stopped' state.
  // This must be the first call received by the server, and no other calls
  // may be sent prior to receiving this call's response.
  rpc Initialize(InitializeRequest) returns (google.protobuf.Empty);

  // Returns the minimum buffering delay (min_delay) required by Cast.  This is
  // a constant value and only needs to be queried once for each service.
  // During a StartRequest or ResumeRequest, the system timestamp must exceed
  // the current time by at least this delay in order for the buffer to be
  // successfully rendered on remote devices.
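  //
  // For example (hypothetical numbers): if delay_micros is 50000 and the
  // current CLOCK_MONOTONIC_RAW time is T, a StartRequest whose
  // TimestampInfo.system_timestamp is at least T + 50ms can be rendered on
  // remote devices, while a timestamp closer to the current time cannot.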
  rpc GetMinimumBufferDelay(google.protobuf.Empty)
      returns (GetMinimumBufferingDelayResponse);

  // Update the pipeline state.
  //
  // StartRequest:
  //   Places pipeline into 'Playing' state. Playback will start at the
  //   specified buffer and system timestamp.
  //
  //   May only be called in the 'Stopped' state, and following this call the
  //   state machine will be in the 'Playing' state.
  //
  // StopRequest
  //   Stops media playback and drops all pushed buffers which have not yet been
  //   played.
  //
  //   May only be called in the 'Playing' or 'Paused' states, and following
  //   this call the state machine will be in the 'Stopped' state.
  //
  // PauseRequest
  //   Pauses media playback.
  //
  //   May only be called in the 'Playing' state, and following this call the
  //   state machine will be in the 'Paused' state.
  //
  // ResumeRequest
  //   Resumes media playback at the specified buffer and system timestamp.
  //
  //   May only be called in the 'Paused' state, and following this call the
  //   state machine will be in the 'Playing' state.
  //
  // TimestampUpdateRequest
  //   Sends a timestamp update for a specified buffer for audio
  //   synchronization. This should be called when operating in
  //   CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY when the runtime has detected a
  //   discrepancy in the system clock or pipeline delay from the original
  //   playback schedule.  See example below:
  //
  //   Assume all buffers have a duration of 100us.
  //
  //   StartRequest(id=1, system_timestamp=0);
  //   -> Cast expects id=1 to play at 0us, id=2 at 100us, id=3 at 200us...
  //
  //   TimestampUpdateRequest(id=4, system_timestamp=405us);
  //   -> Cast expects id=4 to play at 405us, id=5 at 505us, id=6 at 605us...
  //
  //   May be called from any state.
  //
  // A state transition may only occur after a successful PushBuffer()
  // call has been made with a valid configuration.
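  //
  // Illustrative request sequence (hypothetical values; field names are those
  // of StateChangeRequest's oneof, and a successful PushBuffer() with a valid
  // configuration is assumed to have occurred as required above):
  //   StateChange(start: StartRequest { pts_micros: 0 })   // Stopped -> Playing
  //   StateChange(pause: PauseRequest {})                  // Playing -> Paused
  //   StateChange(resume: ResumeRequest {})                // Paused  -> Playing
  //   StateChange(stop: StopRequest {})                    // Playing -> Stopped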
  rpc StateChange(StateChangeRequest) returns (StateChangeResponse);

  // Sets the volume multiplier for this audio stream.
  // The multiplier is in the range [0.0, 1.0].  If not called, a default
  // multiplier of 1.0 is assumed.
  //
  // May be called in any state, and following this call the state machine
  // will be in the same state.
  rpc SetVolume(SetVolumeRequest) returns (google.protobuf.Empty);

  // Sets the playback rate for this audio stream.
  //
  // May be called in any state, and following this call the state machine
  // will be in the same state.
  rpc SetPlayback(SetPlaybackRateRequest) returns (google.protobuf.Empty);

  // Pushes a buffer of audio data to the audio service. The client must
  // wait for a response from the server before sending another
  // PushBufferRequest.
  //
  // May only be called in the 'Playing' or 'Paused' states, and following
  // this call the state machine will remain in the same state.
  //
  // TODO(b/178523159): validate that this isn't a performance bottleneck as a
  // non-streaming API. If it is, we should make this a bidirectional stream.
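  //
  // Illustrative exchange (hypothetical values): the first request carries the
  // audio configuration; later requests may omit it while it is unchanged, and
  // buffer ids increase by one per request.
  //   -> PushBufferRequest { buffer { id: 0  pts_micros: 0 } audio_config { ... } }
  //   <- PushBufferResponse { decoded_bytes: ... }
  //   -> PushBufferRequest { buffer { id: 1  pts_micros: 10000 } }
  //   <- PushBufferResponse { decoded_bytes: ... }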
  rpc PushBuffer(PushBufferRequest) returns (PushBufferResponse);

  // Returns the current media time that has been rendered.
  rpc GetMediaTime(google.protobuf.Empty) returns (GetMediaTimeResponse);
}