 
 
 # [START dlp_create_stored_info_type]
-def create_stored_info_type_from_gcs_files(project, gcs_input_file_path,
-                                           gcs_output_path, stored_info_type_id=None,
-                                           display_name=None, description=None):
+def create_stored_info_type_from_gcs_files(
+        project, gcs_input_file_path,
+        gcs_output_path, stored_info_type_id=None,
+        display_name=None, description=None):
     """Creates a scheduled Data Loss Prevention API stored infoType from a set
     of GCS files.
     Args:
@@ -46,15 +47,15 @@ def create_stored_info_type_from_gcs_files(project, gcs_input_file_path,
         'output_path': {'path': gcs_output_path},
         'cloud_storage_file_set': {'url': gcs_input_file_path},
     }
-    create_stored_info_type(project, dictionary_config,
-        stored_info_type_id=stored_info_type_id,
+    create_stored_info_type(
+        project, dictionary_config, stored_info_type_id=stored_info_type_id,
         display_name=display_name, description=description)
 
-def create_stored_info_type_from_bq_table(project, bq_input_project_id,
-                                          bq_input_dataset_id, bq_input_table_id,
-                                          bq_input_table_field, gcs_output_path,
-                                          stored_info_type_id=None, display_name=None,
-                                          description=None):
+
+def create_stored_info_type_from_bq_table(
+        project, bq_input_project_id, bq_input_dataset_id,
+        bq_input_table_id, bq_input_table_field, gcs_output_path,
+        stored_info_type_id=None, display_name=None, description=None):
     """Creates a scheduled Data Loss Prevention API stored infoType from a
     column of a BigQuery.
     Args:
@@ -88,12 +89,13 @@ def create_stored_info_type_from_bq_table(project, bq_input_project_id,
         }
     }
     create_stored_info_type(project, dictionary_config,
-        stored_info_type_id=stored_info_type_id,
-        display_name=display_name, description=description)
+                            stored_info_type_id=stored_info_type_id,
+                            display_name=display_name, description=description)
+
 
 def create_stored_info_type(project, dictionary_config,
-        stored_info_type_id=None, display_name=None,
-        description=None):
+                            stored_info_type_id=None, display_name=None,
+                            description=None):
     """Creates a scheduled Data Loss Prevention API stored infoType from a
     column of a BigQuery.
     Args:
@@ -220,8 +222,9 @@ def delete_stored_info_type(project, stored_info_type_id):
         dest='action', help='Select which action to perform.')
     subparsers.required = True
 
-    parser_create = subparsers.add_parser('create',
-                                          help='Create a stored infoType.')
+    parser_create = subparsers.add_parser(
+        'create',
+        help='Create a stored infoType.')
     parser_create.add_argument(
         '--gcs_input_file_path',
         help='GCS path of the input files containing the dictionary words.')
@@ -256,14 +259,16 @@ def delete_stored_info_type(project, stored_info_type_id):
         help='The Google Cloud project id to use as a parent resource.',
         default=default_project)
 
-    parser_list = subparsers.add_parser('list',
+    parser_list = subparsers.add_parser(
+        'list',
         help='List all stored infoTypes.')
     parser_list.add_argument(
         '--project',
         help='The Google Cloud project id to use as a parent resource.',
         default=default_project)
 
-    parser_delete = subparsers.add_parser('delete',
+    parser_delete = subparsers.add_parser(
+        'delete',
         help='Delete a stored infoType.')
     parser_delete.add_argument(
         'stored_info_type_id',
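
The reformatting above only re-wraps the signatures; the call pattern is unchanged. As a minimal usage sketch, assuming the module's helpers are in scope and using placeholder project and bucket values (none of these values come from this change):

    # Hypothetical invocation of the GCS-backed helper; every argument value
    # below is a placeholder chosen for illustration.
    create_stored_info_type_from_gcs_files(
        'my-project',                             # Cloud project that owns the stored infoType
        'gs://my-bucket/dictionary-words/*.txt',  # GCS files containing the dictionary words
        'gs://my-bucket/stored-info-types',       # GCS path where the built dictionary is written
        stored_info_type_id='my_dictionary',
        display_name='My dictionary',
        description='Custom dictionary built from GCS word lists')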