• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1<html><body>
2<style>
3
4body, h1, h2, h3, div, span, p, pre, a {
5  margin: 0;
6  padding: 0;
7  border: 0;
8  font-weight: inherit;
9  font-style: inherit;
10  font-size: 100%;
11  font-family: inherit;
12  vertical-align: baseline;
13}
14
15body {
16  font-size: 13px;
17  padding: 1em;
18}
19
20h1 {
21  font-size: 26px;
22  margin-bottom: 1em;
23}
24
25h2 {
26  font-size: 24px;
27  margin-bottom: 1em;
28}
29
30h3 {
31  font-size: 20px;
32  margin-bottom: 1em;
33  margin-top: 1em;
34}
35
36pre, code {
37  line-height: 1.5;
38  font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
39}
40
41pre {
42  margin-top: 0.5em;
43}
44
45h1, h2, h3, p {
46  font-family: Arial, sans-serif;
47}
48
49h1, h2, h3 {
50  border-bottom: solid #CCC 1px;
51}
52
53.toc_element {
54  margin-top: 0.5em;
55}
56
57.firstline {
58  margin-left: 2em;
59}
60
61.method  {
62  margin-top: 1em;
63  border: solid 1px #CCC;
64  padding: 1em;
65  background: #EEE;
66}
67
68.details {
69  font-weight: bold;
70  font-size: 14px;
71}
72
73</style>
74
75<h1><a href="bigquerydatatransfer_v1.html">BigQuery Data Transfer API</a> . <a href="bigquerydatatransfer_v1.projects.html">projects</a> . <a href="bigquerydatatransfer_v1.projects.locations.html">locations</a> . <a href="bigquerydatatransfer_v1.projects.locations.transferConfigs.html">transferConfigs</a></h1>
76<h2>Instance Methods</h2>
77<p class="toc_element">
78  <code><a href="bigquerydatatransfer_v1.projects.locations.transferConfigs.runs.html">runs()</a></code>
79</p>
80<p class="firstline">Returns the runs Resource.</p>
81
82<p class="toc_element">
83  <code><a href="#create">create(parent, body, authorizationCode=None, versionInfo=None, x__xgafv=None)</a></code></p>
84<p class="firstline">Creates a new data transfer configuration.</p>
85<p class="toc_element">
86  <code><a href="#delete">delete(name, x__xgafv=None)</a></code></p>
87<p class="firstline">Deletes a data transfer configuration,</p>
88<p class="toc_element">
89  <code><a href="#get">get(name, x__xgafv=None)</a></code></p>
90<p class="firstline">Returns information about a data transfer config.</p>
91<p class="toc_element">
92  <code><a href="#list">list(parent, pageSize=None, dataSourceIds=None, pageToken=None, x__xgafv=None)</a></code></p>
93<p class="firstline">Returns information about all data transfers in the project.</p>
94<p class="toc_element">
95  <code><a href="#list_next">list_next(previous_request, previous_response)</a></code></p>
96<p class="firstline">Retrieves the next page of results.</p>
97<p class="toc_element">
98  <code><a href="#patch">patch(name, body, authorizationCode=None, updateMask=None, versionInfo=None, x__xgafv=None)</a></code></p>
99<p class="firstline">Updates a data transfer configuration.</p>
100<p class="toc_element">
101  <code><a href="#scheduleRuns">scheduleRuns(parent, body, x__xgafv=None)</a></code></p>
102<p class="firstline">Creates transfer runs for a time range [start_time, end_time].</p>
103<p class="toc_element">
104  <code><a href="#startManualRuns">startManualRuns(parent, body, x__xgafv=None)</a></code></p>
105<p class="firstline">Start manual transfer runs to be executed now with schedule_time equal to</p>
106<h3>Method Details</h3>
107<div class="method">
108    <code class="details" id="create">create(parent, body, authorizationCode=None, versionInfo=None, x__xgafv=None)</code>
109  <pre>Creates a new data transfer configuration.
110
111Args:
112  parent: string, The BigQuery project id where the transfer configuration should be created.
113Must be in the format projects/{project_id}/locations/{location_id}
114If specified location and location of the destination bigquery dataset
115do not match - the request will fail. (required)
116  body: object, The request body. (required)
117    The object takes the form of:
118
119{ # Represents a data transfer configuration. A transfer configuration
120    # contains all metadata needed to perform a data transfer. For example,
121    # `destination_dataset_id` specifies where data should be stored.
122    # When a new transfer configuration is created, the specified
123    # `destination_dataset_id` is created when needed and shared with the
124    # appropriate data source service account.
125  "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
126      # For example, if `data_refresh_window_days = 10`, then every day
127      # BigQuery reingests data for [today-10, today-1], rather than ingesting data
128      # for just [today-1].
129      # Only valid if the data source supports the feature. Set the value to  0
130      # to use the default value.
131  "updateTime": "A String", # Output only. Data transfer modification time. Ignored by server on input.
132  "destinationDatasetId": "A String", # The BigQuery target dataset id.
133  "displayName": "A String", # User specified display name for the data transfer.
134  "name": "A String", # The resource name of the transfer config.
135      # Transfer config names have the form of
136      # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
137      # The name is automatically generated based on the config_id specified in
138      # CreateTransferConfigRequest along with project_id and region. If config_id
139      # is not provided, usually a uuid, even though it is not guaranteed or
140      # required, will be generated for config_id.
141  "schedule": "A String", # Data transfer schedule.
142      # If the data source does not support a custom schedule, this should be
143      # empty. If it is empty, the default value for the data source will be
144      # used.
145      # The specified times are in UTC.
146      # Examples of valid format:
147      # `1st,3rd monday of month 15:30`,
148      # `every wed,fri of jan,jun 13:15`, and
149      # `first sunday of quarter 00:00`.
150      # See more explanation about the format here:
151      # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
152      # NOTE: the granularity should be at least 8 hours, or less frequent.
153  "datasetRegion": "A String", # Output only. Region in which BigQuery dataset is located.
154  "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
155      # for a given transfer.
156  "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
157  "scheduleOptions": { # Options customizing the data transfer schedule. # Options customizing the data transfer schedule.
158    "disableAutoScheduling": True or False, # If true, automatic scheduling of data transfer runs for this configuration
159        # will be disabled. The runs can be started on an ad-hoc basis using
160        # StartManualTransferRuns API. When automatic scheduling is disabled, the
161        # TransferConfig.schedule field will be ignored.
162    "endTime": "A String", # Defines time to stop scheduling transfer runs. A transfer run cannot be
163        # scheduled at or after the end time. The end time can be changed at any
164        # moment. The time when a data transfer can be triggered manually is not
165        # limited by this option.
166    "startTime": "A String", # Specifies time to start scheduling transfer runs. The first run will be
167        # scheduled at or after the start time according to a recurrence pattern
168        # defined in the schedule string. The start time can be changed at any
169        # moment. The time when a data transfer can be triggered manually is not
170        # limited by this option.
171  },
172  "state": "A String", # Output only. State of the most recently updated transfer run.
173  "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
174  "nextRunTime": "A String", # Output only. Next time when data transfer will run.
175  "params": { # Data transfer specific parameters.
176    "a_key": "", # Properties of the object.
177  },
178}
179
180  authorizationCode: string, Optional OAuth2 authorization code to use with this transfer configuration.
181This is required if new credentials are needed, as indicated by
182`CheckValidCreds`.
183In order to obtain authorization_code, please make a
184request to
185https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
186
187* client_id should be OAuth client_id of BigQuery DTS API for the given
188  data source returned by ListDataSources method.
189* data_source_scopes are the scopes returned by ListDataSources method.
190* redirect_uri is an optional parameter. If not specified, then
191  authorization code is posted to the opener of authorization flow window.
192  Otherwise it will be sent to the redirect uri. A special value of
193  urn:ietf:wg:oauth:2.0:oob means that authorization code should be
194  returned in the title bar of the browser, with the page text prompting
195  the user to copy the code and paste it in the application.
196  versionInfo: string, Optional version info. If users want to find a very recent access token,
197that is, immediately after approving access, users have to set the
198version_info claim in the token request. To obtain the version_info, users
199must use the “none+gsession” response type, which returns a
200version_info back in the authorization response, which should be put in a JWT
201claim in the token request.
202  x__xgafv: string, V1 error format.
203    Allowed values
204      1 - v1 error format
205      2 - v2 error format
206
207Returns:
208  An object of the form:
209
210    { # Represents a data transfer configuration. A transfer configuration
211      # contains all metadata needed to perform a data transfer. For example,
212      # `destination_dataset_id` specifies where data should be stored.
213      # When a new transfer configuration is created, the specified
214      # `destination_dataset_id` is created when needed and shared with the
215      # appropriate data source service account.
216    "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
217        # For example, if `data_refresh_window_days = 10`, then every day
218        # BigQuery reingests data for [today-10, today-1], rather than ingesting data
219        # for just [today-1].
220        # Only valid if the data source supports the feature. Set the value to  0
221        # to use the default value.
222    "updateTime": "A String", # Output only. Data transfer modification time. Ignored by server on input.
223    "destinationDatasetId": "A String", # The BigQuery target dataset id.
224    "displayName": "A String", # User specified display name for the data transfer.
225    "name": "A String", # The resource name of the transfer config.
226        # Transfer config names have the form of
227        # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
228        # The name is automatically generated based on the config_id specified in
229        # CreateTransferConfigRequest along with project_id and region. If config_id
230        # is not provided, usually a uuid, even though it is not guaranteed or
231        # required, will be generated for config_id.
232    "schedule": "A String", # Data transfer schedule.
233        # If the data source does not support a custom schedule, this should be
234        # empty. If it is empty, the default value for the data source will be
235        # used.
236        # The specified times are in UTC.
237        # Examples of valid format:
238        # `1st,3rd monday of month 15:30`,
239        # `every wed,fri of jan,jun 13:15`, and
240        # `first sunday of quarter 00:00`.
241        # See more explanation about the format here:
242        # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
243        # NOTE: the granularity should be at least 8 hours, or less frequent.
244    "datasetRegion": "A String", # Output only. Region in which BigQuery dataset is located.
245    "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
246        # for a given transfer.
247    "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
248    "scheduleOptions": { # Options customizing the data transfer schedule. # Options customizing the data transfer schedule.
249      "disableAutoScheduling": True or False, # If true, automatic scheduling of data transfer runs for this configuration
250          # will be disabled. The runs can be started on an ad-hoc basis using
251          # StartManualTransferRuns API. When automatic scheduling is disabled, the
252          # TransferConfig.schedule field will be ignored.
253      "endTime": "A String", # Defines time to stop scheduling transfer runs. A transfer run cannot be
254          # scheduled at or after the end time. The end time can be changed at any
255          # moment. The time when a data transfer can be triggered manually is not
256          # limited by this option.
257      "startTime": "A String", # Specifies time to start scheduling transfer runs. The first run will be
258          # scheduled at or after the start time according to a recurrence pattern
259          # defined in the schedule string. The start time can be changed at any
260          # moment. The time when a data transfer can be triggered manually is not
261          # limited by this option.
262    },
263    "state": "A String", # Output only. State of the most recently updated transfer run.
264    "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
265    "nextRunTime": "A String", # Output only. Next time when data transfer will run.
266    "params": { # Data transfer specific parameters.
267      "a_key": "", # Properties of the object.
268    },
269  }</pre>
270</div>
271
272<div class="method">
273    <code class="details" id="delete">delete(name, x__xgafv=None)</code>
274  <pre>Deletes a data transfer configuration,
275including any associated transfer runs and logs.
276
277Args:
278  name: string, The field will contain name of the resource requested, for example:
279`projects/{project_id}/transferConfigs/{config_id}` (required)
280  x__xgafv: string, V1 error format.
281    Allowed values
282      1 - v1 error format
283      2 - v2 error format
284
285Returns:
286  An object of the form:
287
288    { # A generic empty message that you can re-use to avoid defining duplicated
289      # empty messages in your APIs. A typical example is to use it as the request
290      # or the response type of an API method. For instance:
291      #
292      #     service Foo {
293      #       rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
294      #     }
295      #
296      # The JSON representation for `Empty` is empty JSON object `{}`.
297  }</pre>
298</div>
299
300<div class="method">
301    <code class="details" id="get">get(name, x__xgafv=None)</code>
302  <pre>Returns information about a data transfer config.
303
304Args:
305  name: string, The field will contain name of the resource requested, for example:
306`projects/{project_id}/transferConfigs/{config_id}` (required)
307  x__xgafv: string, V1 error format.
308    Allowed values
309      1 - v1 error format
310      2 - v2 error format
311
312Returns:
313  An object of the form:
314
315    { # Represents a data transfer configuration. A transfer configuration
316      # contains all metadata needed to perform a data transfer. For example,
317      # `destination_dataset_id` specifies where data should be stored.
318      # When a new transfer configuration is created, the specified
319      # `destination_dataset_id` is created when needed and shared with the
320      # appropriate data source service account.
321    "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
322        # For example, if `data_refresh_window_days = 10`, then every day
323        # BigQuery reingests data for [today-10, today-1], rather than ingesting data
324        # for just [today-1].
325        # Only valid if the data source supports the feature. Set the value to  0
326        # to use the default value.
327    "updateTime": "A String", # Output only. Data transfer modification time. Ignored by server on input.
328    "destinationDatasetId": "A String", # The BigQuery target dataset id.
329    "displayName": "A String", # User specified display name for the data transfer.
330    "name": "A String", # The resource name of the transfer config.
331        # Transfer config names have the form of
332        # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
333        # The name is automatically generated based on the config_id specified in
334        # CreateTransferConfigRequest along with project_id and region. If config_id
335        # is not provided, usually a uuid, even though it is not guaranteed or
336        # required, will be generated for config_id.
337    "schedule": "A String", # Data transfer schedule.
338        # If the data source does not support a custom schedule, this should be
339        # empty. If it is empty, the default value for the data source will be
340        # used.
341        # The specified times are in UTC.
342        # Examples of valid format:
343        # `1st,3rd monday of month 15:30`,
344        # `every wed,fri of jan,jun 13:15`, and
345        # `first sunday of quarter 00:00`.
346        # See more explanation about the format here:
347        # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
348        # NOTE: the granularity should be at least 8 hours, or less frequent.
349    "datasetRegion": "A String", # Output only. Region in which BigQuery dataset is located.
350    "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
351        # for a given transfer.
352    "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
353    "scheduleOptions": { # Options customizing the data transfer schedule. # Options customizing the data transfer schedule.
354      "disableAutoScheduling": True or False, # If true, automatic scheduling of data transfer runs for this configuration
355          # will be disabled. The runs can be started on an ad-hoc basis using
356          # StartManualTransferRuns API. When automatic scheduling is disabled, the
357          # TransferConfig.schedule field will be ignored.
358      "endTime": "A String", # Defines time to stop scheduling transfer runs. A transfer run cannot be
359          # scheduled at or after the end time. The end time can be changed at any
360          # moment. The time when a data transfer can be triggered manually is not
361          # limited by this option.
362      "startTime": "A String", # Specifies time to start scheduling transfer runs. The first run will be
363          # scheduled at or after the start time according to a recurrence pattern
364          # defined in the schedule string. The start time can be changed at any
365          # moment. The time when a data transfer can be triggered manually is not
366          # limited by this option.
367    },
368    "state": "A String", # Output only. State of the most recently updated transfer run.
369    "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
370    "nextRunTime": "A String", # Output only. Next time when data transfer will run.
371    "params": { # Data transfer specific parameters.
372      "a_key": "", # Properties of the object.
373    },
374  }</pre>
375</div>
376
377<div class="method">
378    <code class="details" id="list">list(parent, pageSize=None, dataSourceIds=None, pageToken=None, x__xgafv=None)</code>
379  <pre>Returns information about all data transfers in the project.
380
381Args:
382  parent: string, The BigQuery project id for which data sources
383should be returned: `projects/{project_id}`. (required)
384  pageSize: integer, Page size. The default page size is the maximum value of 1000 results.
385  dataSourceIds: string, When specified, only configurations of requested data sources are returned. (repeated)
386  pageToken: string, Pagination token, which can be used to request a specific page
387of `ListTransfersRequest` list results. For multiple-page
388results, `ListTransfersResponse` outputs
389a `next_page` token, which can be used as the
390`page_token` value to request the next page of list results.
391  x__xgafv: string, V1 error format.
392    Allowed values
393      1 - v1 error format
394      2 - v2 error format
395
396Returns:
397  An object of the form:
398
399    { # The returned list of pipelines in the project.
400    "nextPageToken": "A String", # Output only. The next-pagination token. For multiple-page list results,
401        # this token can be used as the
402        # `ListTransferConfigsRequest.page_token`
403        # to request the next page of list results.
404    "transferConfigs": [ # Output only. The stored pipeline transfer configurations.
405      { # Represents a data transfer configuration. A transfer configuration
406          # contains all metadata needed to perform a data transfer. For example,
407          # `destination_dataset_id` specifies where data should be stored.
408          # When a new transfer configuration is created, the specified
409          # `destination_dataset_id` is created when needed and shared with the
410          # appropriate data source service account.
411        "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
412            # For example, if `data_refresh_window_days = 10`, then every day
413            # BigQuery reingests data for [today-10, today-1], rather than ingesting data
414            # for just [today-1].
415            # Only valid if the data source supports the feature. Set the value to  0
416            # to use the default value.
417        "updateTime": "A String", # Output only. Data transfer modification time. Ignored by server on input.
418        "destinationDatasetId": "A String", # The BigQuery target dataset id.
419        "displayName": "A String", # User specified display name for the data transfer.
420        "name": "A String", # The resource name of the transfer config.
421            # Transfer config names have the form of
422            # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
423            # The name is automatically generated based on the config_id specified in
424            # CreateTransferConfigRequest along with project_id and region. If config_id
425            # is not provided, usually a uuid, even though it is not guaranteed or
426            # required, will be generated for config_id.
427        "schedule": "A String", # Data transfer schedule.
428            # If the data source does not support a custom schedule, this should be
429            # empty. If it is empty, the default value for the data source will be
430            # used.
431            # The specified times are in UTC.
432            # Examples of valid format:
433            # `1st,3rd monday of month 15:30`,
434            # `every wed,fri of jan,jun 13:15`, and
435            # `first sunday of quarter 00:00`.
436            # See more explanation about the format here:
437            # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
438            # NOTE: the granularity should be at least 8 hours, or less frequent.
439        "datasetRegion": "A String", # Output only. Region in which BigQuery dataset is located.
440        "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
441            # for a given transfer.
442        "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
443        "scheduleOptions": { # Options customizing the data transfer schedule. # Options customizing the data transfer schedule.
444          "disableAutoScheduling": True or False, # If true, automatic scheduling of data transfer runs for this configuration
445              # will be disabled. The runs can be started on an ad-hoc basis using
446              # StartManualTransferRuns API. When automatic scheduling is disabled, the
447              # TransferConfig.schedule field will be ignored.
448          "endTime": "A String", # Defines time to stop scheduling transfer runs. A transfer run cannot be
449              # scheduled at or after the end time. The end time can be changed at any
450              # moment. The time when a data transfer can be triggered manually is not
451              # limited by this option.
452          "startTime": "A String", # Specifies time to start scheduling transfer runs. The first run will be
453              # scheduled at or after the start time according to a recurrence pattern
454              # defined in the schedule string. The start time can be changed at any
455              # moment. The time when a data transfer can be triggered manually is not
456              # limited by this option.
457        },
458        "state": "A String", # Output only. State of the most recently updated transfer run.
459        "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
460        "nextRunTime": "A String", # Output only. Next time when data transfer will run.
461        "params": { # Data transfer specific parameters.
462          "a_key": "", # Properties of the object.
463        },
464      },
465    ],
466  }</pre>
467</div>
468
469<div class="method">
470    <code class="details" id="list_next">list_next(previous_request, previous_response)</code>
471  <pre>Retrieves the next page of results.
472
473Args:
474  previous_request: The request for the previous page. (required)
475  previous_response: The response from the request for the previous page. (required)
476
477Returns:
478  A request object that you can call 'execute()' on to request the next
479  page. Returns None if there are no more items in the collection.
480    </pre>
481</div>
482
483<div class="method">
484    <code class="details" id="patch">patch(name, body, authorizationCode=None, updateMask=None, versionInfo=None, x__xgafv=None)</code>
485  <pre>Updates a data transfer configuration.
486All fields must be set, even if they are not updated.
487
488Args:
489  name: string, The resource name of the transfer config.
490Transfer config names have the form of
491`projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
492The name is automatically generated based on the config_id specified in
493CreateTransferConfigRequest along with project_id and region. If config_id
494is not provided, usually a uuid, even though it is not guaranteed or
495required, will be generated for config_id. (required)
496  body: object, The request body. (required)
497    The object takes the form of:
498
499{ # Represents a data transfer configuration. A transfer configuration
500    # contains all metadata needed to perform a data transfer. For example,
501    # `destination_dataset_id` specifies where data should be stored.
502    # When a new transfer configuration is created, the specified
503    # `destination_dataset_id` is created when needed and shared with the
504    # appropriate data source service account.
505  "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
506      # For example, if `data_refresh_window_days = 10`, then every day
507      # BigQuery reingests data for [today-10, today-1], rather than ingesting data
508      # for just [today-1].
509      # Only valid if the data source supports the feature. Set the value to  0
510      # to use the default value.
511  "updateTime": "A String", # Output only. Data transfer modification time. Ignored by server on input.
512  "destinationDatasetId": "A String", # The BigQuery target dataset id.
513  "displayName": "A String", # User specified display name for the data transfer.
514  "name": "A String", # The resource name of the transfer config.
515      # Transfer config names have the form of
516      # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
517      # The name is automatically generated based on the config_id specified in
518      # CreateTransferConfigRequest along with project_id and region. If config_id
519      # is not provided, usually a uuid, even though it is not guaranteed or
520      # required, will be generated for config_id.
521  "schedule": "A String", # Data transfer schedule.
522      # If the data source does not support a custom schedule, this should be
523      # empty. If it is empty, the default value for the data source will be
524      # used.
525      # The specified times are in UTC.
526      # Examples of valid format:
527      # `1st,3rd monday of month 15:30`,
528      # `every wed,fri of jan,jun 13:15`, and
529      # `first sunday of quarter 00:00`.
530      # See more explanation about the format here:
531      # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
532      # NOTE: the granularity should be at least 8 hours, or less frequent.
533  "datasetRegion": "A String", # Output only. Region in which BigQuery dataset is located.
534  "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
535      # for a given transfer.
536  "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
537  "scheduleOptions": { # Options customizing the data transfer schedule. # Options customizing the data transfer schedule.
538    "disableAutoScheduling": True or False, # If true, automatic scheduling of data transfer runs for this configuration
539        # will be disabled. The runs can be started on an ad-hoc basis using
540        # StartManualTransferRuns API. When automatic scheduling is disabled, the
541        # TransferConfig.schedule field will be ignored.
542    "endTime": "A String", # Defines time to stop scheduling transfer runs. A transfer run cannot be
543        # scheduled at or after the end time. The end time can be changed at any
544        # moment. The time when a data transfer can be triggered manually is not
545        # limited by this option.
546    "startTime": "A String", # Specifies time to start scheduling transfer runs. The first run will be
547        # scheduled at or after the start time according to a recurrence pattern
548        # defined in the schedule string. The start time can be changed at any
549        # moment. The time when a data transfer can be triggered manually is not
550        # limited by this option.
551  },
552  "state": "A String", # Output only. State of the most recently updated transfer run.
553  "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
554  "nextRunTime": "A String", # Output only. Next time when data transfer will run.
555  "params": { # Data transfer specific parameters.
556    "a_key": "", # Properties of the object.
557  },
558}
559
560  authorizationCode: string, Optional OAuth2 authorization code to use with this transfer configuration.
561If it is provided, the transfer configuration will be associated with the
562authorizing user.
563In order to obtain authorization_code, please make a
564request to
565https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
566
567* client_id should be OAuth client_id of BigQuery DTS API for the given
568  data source returned by ListDataSources method.
569* data_source_scopes are the scopes returned by ListDataSources method.
570* redirect_uri is an optional parameter. If not specified, then
571  authorization code is posted to the opener of authorization flow window.
572  Otherwise it will be sent to the redirect uri. A special value of
573  urn:ietf:wg:oauth:2.0:oob means that authorization code should be
574  returned in the title bar of the browser, with the page text prompting
575  the user to copy the code and paste it in the application.
576  updateMask: string, Required list of fields to be updated in this request.
577  versionInfo: string, Optional version info. If users want to find a very recent access token,
578that is, immediately after approving access, users have to set the
579version_info claim in the token request. To obtain the version_info, users
must use the “none+gsession” response type, which returns a
version_info in the authorization response, which must be put in a JWT
claim in the token request.
583  x__xgafv: string, V1 error format.
584    Allowed values
585      1 - v1 error format
586      2 - v2 error format
587
588Returns:
589  An object of the form:
590
591    { # Represents a data transfer configuration. A transfer configuration
592      # contains all metadata needed to perform a data transfer. For example,
593      # `destination_dataset_id` specifies where data should be stored.
594      # When a new transfer configuration is created, the specified
595      # `destination_dataset_id` is created when needed and shared with the
596      # appropriate data source service account.
597    "dataRefreshWindowDays": 42, # The number of days to look back to automatically refresh the data.
598        # For example, if `data_refresh_window_days = 10`, then every day
599        # BigQuery reingests data for [today-10, today-1], rather than ingesting data
600        # for just [today-1].
601        # Only valid if the data source supports the feature. Set the value to  0
602        # to use the default value.
603    "updateTime": "A String", # Output only. Data transfer modification time. Ignored by server on input.
604    "destinationDatasetId": "A String", # The BigQuery target dataset id.
605    "displayName": "A String", # User specified display name for the data transfer.
606    "name": "A String", # The resource name of the transfer config.
607        # Transfer config names have the form of
608        # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
609        # The name is automatically generated based on the config_id specified in
610        # CreateTransferConfigRequest along with project_id and region. If config_id
611        # is not provided, usually a uuid, even though it is not guaranteed or
612        # required, will be generated for config_id.
613    "schedule": "A String", # Data transfer schedule.
614        # If the data source does not support a custom schedule, this should be
615        # empty. If it is empty, the default value for the data source will be
616        # used.
617        # The specified times are in UTC.
618        # Examples of valid format:
619        # `1st,3rd monday of month 15:30`,
620        # `every wed,fri of jan,jun 13:15`, and
621        # `first sunday of quarter 00:00`.
622        # See more explanation about the format here:
623        # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
624        # NOTE: the granularity should be at least 8 hours, or less frequent.
625    "datasetRegion": "A String", # Output only. Region in which BigQuery dataset is located.
626    "disabled": True or False, # Is this config disabled. When set to true, no runs are scheduled
627        # for a given transfer.
628    "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
629    "scheduleOptions": { # Options customizing the data transfer schedule. # Options customizing the data transfer schedule.
630      "disableAutoScheduling": True or False, # If true, automatic scheduling of data transfer runs for this configuration
631          # will be disabled. The runs can be started on ad-hoc basis using
632          # StartManualTransferRuns API. When automatic scheduling is disabled, the
633          # TransferConfig.schedule field will be ignored.
634      "endTime": "A String", # Defines time to stop scheduling transfer runs. A transfer run cannot be
635          # scheduled at or after the end time. The end time can be changed at any
          # moment. The time when a data transfer can be triggered manually is not
637          # limited by this option.
638      "startTime": "A String", # Specifies time to start scheduling transfer runs. The first run will be
639          # scheduled at or after the start time according to a recurrence pattern
640          # defined in the schedule string. The start time can be changed at any
          # moment. The time when a data transfer can be triggered manually is not
642          # limited by this option.
643    },
644    "state": "A String", # Output only. State of the most recently updated transfer run.
645    "dataSourceId": "A String", # Data source id. Cannot be changed once data transfer is created.
646    "nextRunTime": "A String", # Output only. Next time when data transfer will run.
647    "params": { # Data transfer specific parameters.
648      "a_key": "", # Properties of the object.
649    },
650  }</pre>
651</div>
652
653<div class="method">
654    <code class="details" id="scheduleRuns">scheduleRuns(parent, body, x__xgafv=None)</code>
655  <pre>Creates transfer runs for a time range [start_time, end_time].
656For each date - or whatever granularity the data source supports - in the
657range, one transfer run is created.
658Note that runs are created per UTC time in the time range.
659DEPRECATED: use StartManualTransferRuns instead.
660
661Args:
662  parent: string, Transfer configuration name in the form:
663`projects/{project_id}/transferConfigs/{config_id}`. (required)
664  body: object, The request body. (required)
665    The object takes the form of:
666
667{ # A request to schedule transfer runs for a time range.
668    "endTime": "A String", # End time of the range of transfer runs. For example,
669        # `"2017-05-30T00:00:00+00:00"`.
670    "startTime": "A String", # Start time of the range of transfer runs. For example,
671        # `"2017-05-25T00:00:00+00:00"`.
672  }
673
674  x__xgafv: string, V1 error format.
675    Allowed values
676      1 - v1 error format
677      2 - v2 error format
678
679Returns:
680  An object of the form:
681
682    { # A response to schedule transfer runs for a time range.
683    "runs": [ # The transfer runs that were scheduled.
684      { # Represents a data transfer run.
685        "updateTime": "A String", # Output only. Last time the data transfer run state was updated.
686        "destinationDatasetId": "A String", # Output only. The BigQuery target dataset id.
687        "name": "A String", # The resource name of the transfer run.
688            # Transfer run names have the form
689            # `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
690            # The name is ignored when creating a transfer run.
691        "schedule": "A String", # Output only. Describes the schedule of this transfer run if it was
692            # created as part of a regular schedule. For batch transfer runs that are
693            # scheduled manually, this is empty.
694            # NOTE: the system might choose to delay the schedule depending on the
695            # current load, so `schedule_time` doesn't always match this.
696        "scheduleTime": "A String", # Minimum time after which a transfer run can be started.
697        "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
698        "state": "A String", # Data transfer run state. Ignored for input requests.
699        "errorStatus": { # The `Status` type defines a logical error model that is suitable for # Status of the transfer run.
700            # different programming environments, including REST APIs and RPC APIs. It is
701            # used by [gRPC](https://github.com/grpc). Each `Status` message contains
702            # three pieces of data: error code, error message, and error details.
703            #
704            # You can find out more about this error model and how to work with it in the
705            # [API Design Guide](https://cloud.google.com/apis/design/errors).
706          "message": "A String", # A developer-facing error message, which should be in English. Any
707              # user-facing error message should be localized and sent in the
708              # google.rpc.Status.details field, or localized by the client.
709          "code": 42, # The status code, which should be an enum value of google.rpc.Code.
710          "details": [ # A list of messages that carry the error details.  There is a common set of
711              # message types for APIs to use.
712            {
713              "a_key": "", # Properties of the object. Contains field @type with type URL.
714            },
715          ],
716        },
717        "params": { # Output only. Data transfer specific parameters.
718          "a_key": "", # Properties of the object.
719        },
720        "startTime": "A String", # Output only. Time when transfer run was started.
721            # Parameter ignored by server for input requests.
722        "dataSourceId": "A String", # Output only. Data source id.
723        "runTime": "A String", # For batch transfer runs, specifies the date and time that
724            # data should be ingested.
725        "endTime": "A String", # Output only. Time when transfer run ended.
726            # Parameter ignored by server for input requests.
727      },
728    ],
729  }</pre>
730</div>
731
732<div class="method">
733    <code class="details" id="startManualRuns">startManualRuns(parent, body, x__xgafv=None)</code>
734  <pre>Start manual transfer runs to be executed now with schedule_time equal to
735current time. The transfer runs can be created for a time range where the
736run_time is between start_time (inclusive) and end_time (exclusive), or for
737a specific run_time.
738
739Args:
740  parent: string, Transfer configuration name in the form:
741`projects/{project_id}/transferConfigs/{config_id}`. (required)
742  body: object, The request body. (required)
743    The object takes the form of:
744
745{ # A request to start manual transfer runs.
746    "requestedTimeRange": { # A specification for a time range, this will request transfer runs with # Time range for the transfer runs that should be started.
747        # run_time between start_time (inclusive) and end_time (exclusive).
748      "endTime": "A String", # End time of the range of transfer runs. For example,
749          # `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future.
          # Creates transfer runs where run_time is in the range between start_time
          # (inclusive) and end_time (exclusive).
752      "startTime": "A String", # Start time of the range of transfer runs. For example,
753          # `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than
          # the end_time. Creates transfer runs where run_time is in the range between
          # start_time (inclusive) and end_time (exclusive).
756    },
757    "requestedRunTime": "A String", # Specific run_time for a transfer run to be started. The
758        # requested_run_time must not be in the future.
759  }
760
761  x__xgafv: string, V1 error format.
762    Allowed values
763      1 - v1 error format
764      2 - v2 error format
765
766Returns:
767  An object of the form:
768
769    { # A response to start manual transfer runs.
770    "runs": [ # The transfer runs that were created.
771      { # Represents a data transfer run.
772        "updateTime": "A String", # Output only. Last time the data transfer run state was updated.
773        "destinationDatasetId": "A String", # Output only. The BigQuery target dataset id.
774        "name": "A String", # The resource name of the transfer run.
775            # Transfer run names have the form
776            # `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
777            # The name is ignored when creating a transfer run.
778        "schedule": "A String", # Output only. Describes the schedule of this transfer run if it was
779            # created as part of a regular schedule. For batch transfer runs that are
780            # scheduled manually, this is empty.
781            # NOTE: the system might choose to delay the schedule depending on the
782            # current load, so `schedule_time` doesn't always match this.
783        "scheduleTime": "A String", # Minimum time after which a transfer run can be started.
784        "userId": "A String", # Deprecated. Unique ID of the user on whose behalf transfer is done.
785        "state": "A String", # Data transfer run state. Ignored for input requests.
786        "errorStatus": { # The `Status` type defines a logical error model that is suitable for # Status of the transfer run.
787            # different programming environments, including REST APIs and RPC APIs. It is
788            # used by [gRPC](https://github.com/grpc). Each `Status` message contains
789            # three pieces of data: error code, error message, and error details.
790            #
791            # You can find out more about this error model and how to work with it in the
792            # [API Design Guide](https://cloud.google.com/apis/design/errors).
793          "message": "A String", # A developer-facing error message, which should be in English. Any
794              # user-facing error message should be localized and sent in the
795              # google.rpc.Status.details field, or localized by the client.
796          "code": 42, # The status code, which should be an enum value of google.rpc.Code.
797          "details": [ # A list of messages that carry the error details.  There is a common set of
798              # message types for APIs to use.
799            {
800              "a_key": "", # Properties of the object. Contains field @type with type URL.
801            },
802          ],
803        },
804        "params": { # Output only. Data transfer specific parameters.
805          "a_key": "", # Properties of the object.
806        },
807        "startTime": "A String", # Output only. Time when transfer run was started.
808            # Parameter ignored by server for input requests.
809        "dataSourceId": "A String", # Output only. Data source id.
810        "runTime": "A String", # For batch transfer runs, specifies the date and time that
811            # data should be ingested.
812        "endTime": "A String", # Output only. Time when transfer run ended.
813            # Parameter ignored by server for input requests.
814      },
815    ],
816  }</pre>
817</div>
818
819</body></html>