• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1<html><body>
2<style>
3
4body, h1, h2, h3, div, span, p, pre, a {
5  margin: 0;
6  padding: 0;
7  border: 0;
8  font-weight: inherit;
9  font-style: inherit;
10  font-size: 100%;
11  font-family: inherit;
12  vertical-align: baseline;
13}
14
15body {
16  font-size: 13px;
17  padding: 1em;
18}
19
20h1 {
21  font-size: 26px;
22  margin-bottom: 1em;
23}
24
25h2 {
26  font-size: 24px;
27  margin-bottom: 1em;
28}
29
30h3 {
31  font-size: 20px;
32  margin-bottom: 1em;
33  margin-top: 1em;
34}
35
36pre, code {
37  line-height: 1.5;
38  font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
39}
40
41pre {
42  margin-top: 0.5em;
43}
44
45h1, h2, h3, p {
46  font-family: Arial, sans-serif;
47}
48
49h1, h2, h3 {
50  border-bottom: solid #CCC 1px;
51}
52
53.toc_element {
54  margin-top: 0.5em;
55}
56
57.firstline {
58  margin-left: 2em;
59}
60
61.method  {
62  margin-top: 1em;
63  border: solid 1px #CCC;
64  padding: 1em;
65  background: #EEE;
66}
67
68.details {
69  font-weight: bold;
70  font-size: 14px;
71}
72
73</style>
74
75<h1><a href="bigquerydatatransfer_v1.html">BigQuery Data Transfer Service API</a> . <a href="bigquerydatatransfer_v1.projects.html">projects</a> . <a href="bigquerydatatransfer_v1.projects.transferConfigs.html">transferConfigs</a></h1>
76<h2>Instance Methods</h2>
77<p class="toc_element">
78  <code><a href="bigquerydatatransfer_v1.projects.transferConfigs.runs.html">runs()</a></code>
79</p>
80<p class="firstline">Returns the runs Resource.</p>
81
82<p class="toc_element">
83  <code><a href="#create">create(parent, body, authorizationCode=None, x__xgafv=None)</a></code></p>
84<p class="firstline">Creates a new data transfer configuration.</p>
85<p class="toc_element">
86  <code><a href="#delete">delete(name, x__xgafv=None)</a></code></p>
87<p class="firstline">Deletes a data transfer configuration.</p>
88<p class="toc_element">
89  <code><a href="#get">get(name, x__xgafv=None)</a></code></p>
90<p class="firstline">Returns information about a data transfer config.</p>
91<p class="toc_element">
92  <code><a href="#list">list(parent, pageSize=None, dataSourceIds=None, pageToken=None, x__xgafv=None)</a></code></p>
93<p class="firstline">Returns information about all data transfers in the project.</p>
94<p class="toc_element">
95  <code><a href="#list_next">list_next(previous_request, previous_response)</a></code></p>
96<p class="firstline">Retrieves the next page of results.</p>
97<p class="toc_element">
98  <code><a href="#patch">patch(name, body, authorizationCode=None, updateMask=None, x__xgafv=None)</a></code></p>
99<p class="firstline">Updates a data transfer configuration.</p>
100<p class="toc_element">
101  <code><a href="#scheduleRuns">scheduleRuns(parent, body, x__xgafv=None)</a></code></p>
102<p class="firstline">Creates transfer runs for a time range [range_start_time, range_end_time].</p>
103<h3>Method Details</h3>
104<div class="method">
105    <code class="details" id="create">create(parent, body, authorizationCode=None, x__xgafv=None)</code>
106  <pre>Creates a new data transfer configuration.
107
108Args:
109  parent: string, The BigQuery project id where the transfer configuration should be created. (required)
110  body: object, The request body. (required)
111    The object takes the form of:
112
113{ # Represents a data transfer configuration. A transfer configuration
114    # contains all metadata needed to perform a data transfer. For example,
115    # `destination_dataset_id` specifies where data should be stored.
116    # When a new transfer configuration is created, the specified
117    # `destination_dataset_id` is created when needed and shared with the
118    # appropriate data source service account.
119  "status": "A String", # Status of the most recently updated transfer run.
120      # @OutputOnly
121  "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
122      # For example, if `data_refresh_window_days = 10`, then every day
123      # BigQuery reingests data for [today-10, today-1], rather than ingesting data
124      # for just [today-1].
125      # Only valid if the data source supports the feature. Set the value to  0
126      # to use the default value.
127  "updateTime": "A String", # Data transfer modification time. Ignored by server on input.
128      # @OutputOnly
129  "destinationDatasetId": "A String", # The BigQuery target dataset id.
130  "displayName": "A String", # User specified display name for the data transfer.
131  "name": "A String", # The resource name of the transfer run.
132      # Transfer run names have the form
133      # `projects/{project_id}/transferConfigs/{config_id}`.
134      # Where `config_id` is usually a uuid, even though it is not
135      # guaranteed or required. The name is ignored when creating a transfer run.
136  "schedule": "A String", # Data transfer schedule in GROC format.
137      # If the data source does not support a custom schedule, this should be
138      # empty. If it is empty, the default value for the data source will be
139      # used.
140      # The specified times are in UTC.
141      # Examples of valid GROC include:
142      # `1st,3rd monday of month 15:30`,
143      # `every wed,fri of jan,jun 13:15`, and
144      # `first sunday of quarter 00:00`.
145  "userId": "A String", # GaiaID of the user on whose behalf transfer is done. Applicable only
146      # to data sources that do not support service accounts. When set to 0,
147      # the data source service account credentials are used.
148      # @OutputOnly
149  "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
150      # for a given transfer.
151  "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
152  "nextRunTime": "A String", # Next time when data transfer will run. Output only. Applicable
153      # only for batch data transfers.
154      # @OutputOnly
155  "params": { # Data transfer specific parameters.
156    "a_key": "", # Properties of the object.
157  },
158}
159
160  authorizationCode: string, Optional OAuth2 authorization code to use with this transfer configuration.
161This is required if new credentials are needed, as indicated by
162`CheckValidCreds`.
163In order to obtain authorization_code, please make a
164request to
165https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
166
167* client_id should be OAuth client_id of BigQuery DTS API for the given
168  data source returned by ListDataSources method.
169* data_source_scopes are the scopes returned by ListDataSources method.
170* redirect_uri is an optional parameter. If not specified, then
171  authorization code is posted to the opener of authorization flow window.
172  Otherwise it will be sent to the redirect uri. A special value of
173  urn:ietf:wg:oauth:2.0:oob means that authorization code should be
174  returned in the title bar of the browser, with the page text prompting
175  the user to copy the code and paste it in the application.
176  x__xgafv: string, V1 error format.
177    Allowed values
178      1 - v1 error format
179      2 - v2 error format
180
181Returns:
182  An object of the form:
183
184    { # Represents a data transfer configuration. A transfer configuration
185      # contains all metadata needed to perform a data transfer. For example,
186      # `destination_dataset_id` specifies where data should be stored.
187      # When a new transfer configuration is created, the specified
188      # `destination_dataset_id` is created when needed and shared with the
189      # appropriate data source service account.
190    "status": "A String", # Status of the most recently updated transfer run.
191        # @OutputOnly
192    "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
193        # For example, if `data_refresh_window_days = 10`, then every day
194        # BigQuery reingests data for [today-10, today-1], rather than ingesting data
195        # for just [today-1].
196        # Only valid if the data source supports the feature. Set the value to  0
197        # to use the default value.
198    "updateTime": "A String", # Data transfer modification time. Ignored by server on input.
199        # @OutputOnly
200    "destinationDatasetId": "A String", # The BigQuery target dataset id.
201    "displayName": "A String", # User specified display name for the data transfer.
202    "name": "A String", # The resource name of the transfer run.
203        # Transfer run names have the form
204        # `projects/{project_id}/transferConfigs/{config_id}`.
205        # Where `config_id` is usually a uuid, even though it is not
206        # guaranteed or required. The name is ignored when creating a transfer run.
207    "schedule": "A String", # Data transfer schedule in GROC format.
208        # If the data source does not support a custom schedule, this should be
209        # empty. If it is empty, the default value for the data source will be
210        # used.
211        # The specified times are in UTC.
212        # Examples of valid GROC include:
213        # `1st,3rd monday of month 15:30`,
214        # `every wed,fri of jan,jun 13:15`, and
215        # `first sunday of quarter 00:00`.
216    "userId": "A String", # GaiaID of the user on whose behalf transfer is done. Applicable only
217        # to data sources that do not support service accounts. When set to 0,
218        # the data source service account credentials are used.
219        # @OutputOnly
220    "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
221        # for a given transfer.
222    "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
223    "nextRunTime": "A String", # Next time when data transfer will run. Output only. Applicable
224        # only for batch data transfers.
225        # @OutputOnly
226    "params": { # Data transfer specific parameters.
227      "a_key": "", # Properties of the object.
228    },
229  }</pre>
230</div>
231
232<div class="method">
233    <code class="details" id="delete">delete(name, x__xgafv=None)</code>
234  <pre>Deletes a data transfer configuration,
235including any associated transfer runs and logs.
236
237Args:
238  name: string, The field will contain name of the resource requested, for example:
239`projects/{project_id}/transferConfigs/{config_id}` (required)
240  x__xgafv: string, V1 error format.
241    Allowed values
242      1 - v1 error format
243      2 - v2 error format
244
245Returns:
246  An object of the form:
247
248    { # A generic empty message that you can re-use to avoid defining duplicated
249      # empty messages in your APIs. A typical example is to use it as the request
250      # or the response type of an API method. For instance:
251      #
252      #     service Foo {
253      #       rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
254      #     }
255      #
256      # The JSON representation for `Empty` is empty JSON object `{}`.
257  }</pre>
258</div>
259
260<div class="method">
261    <code class="details" id="get">get(name, x__xgafv=None)</code>
262  <pre>Returns information about a data transfer config.
263
264Args:
265  name: string, The field will contain name of the resource requested, for example:
266`projects/{project_id}/transferConfigs/{config_id}` (required)
267  x__xgafv: string, V1 error format.
268    Allowed values
269      1 - v1 error format
270      2 - v2 error format
271
272Returns:
273  An object of the form:
274
275    { # Represents a data transfer configuration. A transfer configuration
276      # contains all metadata needed to perform a data transfer. For example,
277      # `destination_dataset_id` specifies where data should be stored.
278      # When a new transfer configuration is created, the specified
279      # `destination_dataset_id` is created when needed and shared with the
280      # appropriate data source service account.
281    "status": "A String", # Status of the most recently updated transfer run.
282        # @OutputOnly
283    "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
284        # For example, if `data_refresh_window_days = 10`, then every day
285        # BigQuery reingests data for [today-10, today-1], rather than ingesting data
286        # for just [today-1].
287        # Only valid if the data source supports the feature. Set the value to  0
288        # to use the default value.
289    "updateTime": "A String", # Data transfer modification time. Ignored by server on input.
290        # @OutputOnly
291    "destinationDatasetId": "A String", # The BigQuery target dataset id.
292    "displayName": "A String", # User specified display name for the data transfer.
293    "name": "A String", # The resource name of the transfer run.
294        # Transfer run names have the form
295        # `projects/{project_id}/transferConfigs/{config_id}`.
296        # Where `config_id` is usually a uuid, even though it is not
297        # guaranteed or required. The name is ignored when creating a transfer run.
298    "schedule": "A String", # Data transfer schedule in GROC format.
299        # If the data source does not support a custom schedule, this should be
300        # empty. If it is empty, the default value for the data source will be
301        # used.
302        # The specified times are in UTC.
303        # Examples of valid GROC include:
304        # `1st,3rd monday of month 15:30`,
305        # `every wed,fri of jan,jun 13:15`, and
306        # `first sunday of quarter 00:00`.
307    "userId": "A String", # GaiaID of the user on whose behalf transfer is done. Applicable only
308        # to data sources that do not support service accounts. When set to 0,
309        # the data source service account credentials are used.
310        # @OutputOnly
311    "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
312        # for a given transfer.
313    "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
314    "nextRunTime": "A String", # Next time when data transfer will run. Output only. Applicable
315        # only for batch data transfers.
316        # @OutputOnly
317    "params": { # Data transfer specific parameters.
318      "a_key": "", # Properties of the object.
319    },
320  }</pre>
321</div>
322
323<div class="method">
324    <code class="details" id="list">list(parent, pageSize=None, dataSourceIds=None, pageToken=None, x__xgafv=None)</code>
325  <pre>Returns information about all data transfers in the project.
326
327Args:
328  parent: string, The BigQuery project id for which data sources
329should be returned: `projects/{project_id}`. (required)
330  pageSize: integer, Page size. The default page size is the maximum value of 1000 results.
331  dataSourceIds: string, When specified, only configurations of requested data sources are returned. (repeated)
332  pageToken: string, Pagination token, which can be used to request a specific page
333of `ListTransfersRequest` list results. For multiple-page
334results, `ListTransfersResponse` outputs
335a `next_page` token, which can be used as the
336`page_token` value to request the next page of list results.
337  x__xgafv: string, V1 error format.
338    Allowed values
339      1 - v1 error format
340      2 - v2 error format
341
342Returns:
343  An object of the form:
344
345    { # The returned list of pipelines in the project.
346    "nextPageToken": "A String", # The next-page token. For multiple-page list results,
347        # this token can be used as the
348        # `ListTransferConfigsRequest.page_token`
349        # to request the next page of list results.
350        # @OutputOnly
351    "transferConfigs": [ # The stored pipeline transfer configurations.
352        # @OutputOnly
353      { # Represents a data transfer configuration. A transfer configuration
354          # contains all metadata needed to perform a data transfer. For example,
355          # `destination_dataset_id` specifies where data should be stored.
356          # When a new transfer configuration is created, the specified
357          # `destination_dataset_id` is created when needed and shared with the
358          # appropriate data source service account.
359        "status": "A String", # Status of the most recently updated transfer run.
360            # @OutputOnly
361        "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
362            # For example, if `data_refresh_window_days = 10`, then every day
363            # BigQuery reingests data for [today-10, today-1], rather than ingesting data
364            # for just [today-1].
365            # Only valid if the data source supports the feature. Set the value to  0
366            # to use the default value.
367        "updateTime": "A String", # Data transfer modification time. Ignored by server on input.
368            # @OutputOnly
369        "destinationDatasetId": "A String", # The BigQuery target dataset id.
370        "displayName": "A String", # User specified display name for the data transfer.
371        "name": "A String", # The resource name of the transfer run.
372            # Transfer run names have the form
373            # `projects/{project_id}/transferConfigs/{config_id}`.
374            # Where `config_id` is usually a uuid, even though it is not
375            # guaranteed or required. The name is ignored when creating a transfer run.
376        "schedule": "A String", # Data transfer schedule in GROC format.
377            # If the data source does not support a custom schedule, this should be
378            # empty. If it is empty, the default value for the data source will be
379            # used.
380            # The specified times are in UTC.
381            # Examples of valid GROC include:
382            # `1st,3rd monday of month 15:30`,
383            # `every wed,fri of jan,jun 13:15`, and
384            # `first sunday of quarter 00:00`.
385        "userId": "A String", # GaiaID of the user on whose behalf transfer is done. Applicable only
386            # to data sources that do not support service accounts. When set to 0,
387            # the data source service account credentials are used.
388            # @OutputOnly
389        "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
390            # for a given transfer.
391        "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
392        "nextRunTime": "A String", # Next time when data transfer will run. Output only. Applicable
393            # only for batch data transfers.
394            # @OutputOnly
395        "params": { # Data transfer specific parameters.
396          "a_key": "", # Properties of the object.
397        },
398      },
399    ],
400  }</pre>
401</div>
402
403<div class="method">
404    <code class="details" id="list_next">list_next(previous_request, previous_response)</code>
405  <pre>Retrieves the next page of results.
406
407Args:
408  previous_request: The request for the previous page. (required)
409  previous_response: The response from the request for the previous page. (required)
410
411Returns:
412  A request object that you can call 'execute()' on to request the next
413  page. Returns None if there are no more items in the collection.
414    </pre>
415</div>
416
417<div class="method">
418    <code class="details" id="patch">patch(name, body, authorizationCode=None, updateMask=None, x__xgafv=None)</code>
419  <pre>Updates a data transfer configuration.
420All fields must be set, even if they are not updated.
421
422Args:
423  name: string, The resource name of the transfer run.
424Transfer run names have the form
425`projects/{project_id}/transferConfigs/{config_id}`.
426Where `config_id` is usually a uuid, even though it is not
427guaranteed or required. The name is ignored when creating a transfer run. (required)
428  body: object, The request body. (required)
429    The object takes the form of:
430
431{ # Represents a data transfer configuration. A transfer configuration
432    # contains all metadata needed to perform a data transfer. For example,
433    # `destination_dataset_id` specifies where data should be stored.
434    # When a new transfer configuration is created, the specified
435    # `destination_dataset_id` is created when needed and shared with the
436    # appropriate data source service account.
437  "status": "A String", # Status of the most recently updated transfer run.
438      # @OutputOnly
439  "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
440      # For example, if `data_refresh_window_days = 10`, then every day
441      # BigQuery reingests data for [today-10, today-1], rather than ingesting data
442      # for just [today-1].
443      # Only valid if the data source supports the feature. Set the value to  0
444      # to use the default value.
445  "updateTime": "A String", # Data transfer modification time. Ignored by server on input.
446      # @OutputOnly
447  "destinationDatasetId": "A String", # The BigQuery target dataset id.
448  "displayName": "A String", # User specified display name for the data transfer.
449  "name": "A String", # The resource name of the transfer run.
450      # Transfer run names have the form
451      # `projects/{project_id}/transferConfigs/{config_id}`.
452      # Where `config_id` is usually a uuid, even though it is not
453      # guaranteed or required. The name is ignored when creating a transfer run.
454  "schedule": "A String", # Data transfer schedule in GROC format.
455      # If the data source does not support a custom schedule, this should be
456      # empty. If it is empty, the default value for the data source will be
457      # used.
458      # The specified times are in UTC.
459      # Examples of valid GROC include:
460      # `1st,3rd monday of month 15:30`,
461      # `every wed,fri of jan,jun 13:15`, and
462      # `first sunday of quarter 00:00`.
463  "userId": "A String", # GaiaID of the user on whose behalf transfer is done. Applicable only
464      # to data sources that do not support service accounts. When set to 0,
465      # the data source service account credentials are used.
466      # @OutputOnly
467  "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
468      # for a given transfer.
469  "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
470  "nextRunTime": "A String", # Next time when data transfer will run. Output only. Applicable
471      # only for batch data transfers.
472      # @OutputOnly
473  "params": { # Data transfer specific parameters.
474    "a_key": "", # Properties of the object.
475  },
476}
477
478  authorizationCode: string, Optional OAuth2 authorization code to use with this transfer configuration.
479If it is provided, the transfer configuration will be associated with the
480gaia id of the authorizing user.
481In order to obtain authorization_code, please make a
482request to
483https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
484
485* client_id should be OAuth client_id of BigQuery DTS API for the given
486  data source returned by ListDataSources method.
487* data_source_scopes are the scopes returned by ListDataSources method.
488* redirect_uri is an optional parameter. If not specified, then
489  authorization code is posted to the opener of authorization flow window.
490  Otherwise it will be sent to the redirect uri. A special value of
491  urn:ietf:wg:oauth:2.0:oob means that authorization code should be
492  returned in the title bar of the browser, with the page text prompting
493  the user to copy the code and paste it in the application.
494  updateMask: string, Required list of fields to be updated in this request.
495  x__xgafv: string, V1 error format.
496    Allowed values
497      1 - v1 error format
498      2 - v2 error format
499
500Returns:
501  An object of the form:
502
503    { # Represents a data transfer configuration. A transfer configuration
504      # contains all metadata needed to perform a data transfer. For example,
505      # `destination_dataset_id` specifies where data should be stored.
506      # When a new transfer configuration is created, the specified
507      # `destination_dataset_id` is created when needed and shared with the
508      # appropriate data source service account.
509    "status": "A String", # Status of the most recently updated transfer run.
510        # @OutputOnly
511    "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
512        # For example, if `data_refresh_window_days = 10`, then every day
513        # BigQuery reingests data for [today-10, today-1], rather than ingesting data
514        # for just [today-1].
515        # Only valid if the data source supports the feature. Set the value to  0
516        # to use the default value.
517    "updateTime": "A String", # Data transfer modification time. Ignored by server on input.
518        # @OutputOnly
519    "destinationDatasetId": "A String", # The BigQuery target dataset id.
520    "displayName": "A String", # User specified display name for the data transfer.
521    "name": "A String", # The resource name of the transfer run.
522        # Transfer run names have the form
523        # `projects/{project_id}/transferConfigs/{config_id}`.
524        # Where `config_id` is usually a uuid, even though it is not
525        # guaranteed or required. The name is ignored when creating a transfer run.
526    "schedule": "A String", # Data transfer schedule in GROC format.
527        # If the data source does not support a custom schedule, this should be
528        # empty. If it is empty, the default value for the data source will be
529        # used.
530        # The specified times are in UTC.
531        # Examples of valid GROC include:
532        # `1st,3rd monday of month 15:30`,
533        # `every wed,fri of jan,jun 13:15`, and
534        # `first sunday of quarter 00:00`.
535    "userId": "A String", # GaiaID of the user on whose behalf transfer is done. Applicable only
536        # to data sources that do not support service accounts. When set to 0,
537        # the data source service account credentials are used.
538        # @OutputOnly
539    "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
540        # for a given transfer.
541    "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
542    "nextRunTime": "A String", # Next time when data transfer will run. Output only. Applicable
543        # only for batch data transfers.
544        # @OutputOnly
545    "params": { # Data transfer specific parameters.
546      "a_key": "", # Properties of the object.
547    },
548  }</pre>
549</div>
550
551<div class="method">
552    <code class="details" id="scheduleRuns">scheduleRuns(parent, body, x__xgafv=None)</code>
553  <pre>Creates transfer runs for a time range [range_start_time, range_end_time].
554For each date - or whatever granularity the data source supports - in the
555range, one transfer run is created.
556Note that runs are created per UTC time in the time range.
557
558Args:
559  parent: string, Transfer configuration name in the form:
560`projects/{project_id}/transferConfigs/{config_id}`. (required)
561  body: object, The request body. (required)
562    The object takes the form of:
563
564{ # A request to schedule transfer runs for a time range.
565    "rangeStartTime": "A String", # Start time of the range of transfer runs.
566    "rangeEndTime": "A String", # End time of the range of transfer runs.
567  }
568
569  x__xgafv: string, V1 error format.
570    Allowed values
571      1 - v1 error format
572      2 - v2 error format
573
574Returns:
575  An object of the form:
576
577    { # A response to schedule transfer runs for a time range.
578    "createdRuns": [ # The transfer runs that were created.
579      { # Represents a data transfer run.
580        "status": "A String", # Data transfer run status. Ignored for input requests.
581            # @OutputOnly
582        "updateTime": "A String", # Last time the data transfer run status was updated.
583            # @OutputOnly
584        "destinationDatasetId": "A String", # The BigQuery target dataset id.
585        "name": "A String", # The resource name of the transfer run.
586            # Transfer run names have the form
587            # `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`.
588            # The name is ignored when creating a transfer run.
589        "schedule": "A String", # Describes the schedule of this transfer run if it was created as part of
590            # a regular schedule. For batch transfer runs that are directly created,
591            # this is empty.
592            # NOTE: the system might choose to delay the schedule depending on the
593            # current load, so `schedule_time` doesn't always match this.
594            # @OutputOnly
595        "scheduleTime": "A String", # Minimum time after which a transfer run can be started.
596        "userId": "A String", # The user id for this transfer run.
597            # @OutputOnly
598        "dataSourceId": "A String", # Data source id.
599            # @OutputOnly
600        "startTime": "A String", # Time when transfer run was started. Parameter ignored by server for input
601            # requests.
602            # @OutputOnly
603        "params": { # Data transfer specific parameters.
604          "a_key": "", # Properties of the object.
605        },
606        "runTime": "A String", # For batch transfer runs, specifies the date and time that
607            # data should be ingested.
608        "endTime": "A String", # Time when transfer run ended. Parameter ignored by server for input
609            # requests.
610            # @OutputOnly
611      },
612    ],
613  }</pre>
614</div>
615
616</body></html>