# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.text as text
import mindspore.dataset.vision.c_transforms as vision

# Test COCO image directories (with and without trailing slash, to cover
# both path spellings) and the annotation files used by the tests below.
DATA_DIR = "../data/dataset/testCOCO/train/"
DATA_DIR_2 = "../data/dataset/testCOCO/train"
ANNOTATION_FILE = "../data/dataset/testCOCO/annotations/train.json"
KEYPOINT_FILE = "../data/dataset/testCOCO/annotations/key_point.json"
PANOPTIC_FILE = "../data/dataset/testCOCO/annotations/panoptic.json"
# Deliberately malformed annotation files exercised by the exception tests.
INVALID_FILE = "../data/dataset/testCOCO/annotations/invalid.json"
LACKOFIMAGE_FILE = "../data/dataset/testCOCO/annotations/lack_of_images.json"
INVALID_CATEGORY_ID_FILE = "../data/dataset/testCOCO/annotations/invalid_category_id.json"

29
def test_coco_detection():
    """Check CocoDataset "Detection" task columns against the known annotations."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection",
                             decode=True, shuffle=False, extra_metadata=True)
    # Surface the meta filename column under a plain column name.
    dataset = dataset.rename("_meta-filename", "filename")
    rows = list(dataset.create_dict_iterator(num_epochs=1, output_numpy=True))
    assert len(rows) == 6
    assert [text.to_str(row["filename"]) for row in rows] == \
        ["000000391895", "000000318219", "000000554625", "000000574769",
         "000000060623", "000000309022"]
    expected_shapes = [(2268, 4032, 3), (561, 595, 3), (607, 585, 3),
                       (642, 675, 3), (2268, 4032, 3), (2268, 4032, 3)]
    assert [row["image"].shape for row in rows] == expected_shapes
    expected_bboxes = [
        [[10., 10., 10., 10.], [70., 70., 70., 70.]],
        [[20., 20., 20., 20.], [80., 80., 80., 80.]],
        [[30., 30., 30., 30.]],
        [[40., 40., 40., 40.]],
        [[50., 50., 50., 50.]],
        [[60., 60., 60., 60.]],
    ]
    expected_categories = [[[1], [7]], [[2], [8]], [[3]], [[4]], [[5]], [[6]]]
    for row, bbox, category in zip(rows, expected_bboxes, expected_categories):
        np.testing.assert_array_equal(np.array(bbox), row["bbox"])
        np.testing.assert_array_equal(np.array(category), row["category_id"])
66
67
def test_coco_stuff():
    """Check CocoDataset "Stuff" task segmentation/iscrowd columns."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Stuff",
                             decode=True, shuffle=False, extra_metadata=True)
    dataset = dataset.rename("_meta-filename", "filename")
    rows = list(dataset.create_dict_iterator(num_epochs=1, output_numpy=True))
    assert len(rows) == 6
    assert [text.to_str(row["filename"]) for row in rows] == \
        ["000000391895", "000000318219", "000000554625", "000000574769",
         "000000060623", "000000309022"]
    expected_shapes = [(2268, 4032, 3), (561, 595, 3), (607, 585, 3),
                       (642, 675, 3), (2268, 4032, 3), (2268, 4032, 3)]
    assert [row["image"].shape for row in rows] == expected_shapes
    # Ragged segmentations are padded with -1 to a rectangular array.
    expected_segmentations = [
        [[10., 12., 13., 14., 15., 16., 17., 18., 19., 20.],
         [70., 72., 73., 74., 75., -1., -1., -1., -1., -1.]],
        [[20., 22., 23., 24., 25., 26., 27., 28., 29., 30., 31.],
         [10., 12., 13., 14., 15., 16., 17., 18., 19., 20., -1.]],
        [[40., 42., 43., 44., 45., 46., 47., 48., 49., 40., 41., 42.]],
        [[50., 52., 53., 54., 55., 56., 57., 58., 59., 60., 61., 62., 63.]],
        [[60., 62., 63., 64., 65., 66., 67., 68., 69., 70., 71., 72., 73., 74.]],
        [[60., 62., 63., 64., 65., 66., 67.], [68., 69., 70., 71., 72., 73., 74.]],
    ]
    expected_iscrowd = [[[0], [0]], [[0], [1]], [[0]], [[0]], [[0]], [[0]]]
    for row, segmentation, iscrowd in zip(rows, expected_segmentations, expected_iscrowd):
        np.testing.assert_array_equal(np.array(segmentation), row["segmentation"])
        np.testing.assert_array_equal(np.array(iscrowd), row["iscrowd"])
112
113
def test_coco_keypoint():
    """Check CocoDataset "Keypoint" task keypoints/num_keypoints columns."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=KEYPOINT_FILE, task="Keypoint",
                             decode=True, shuffle=False, extra_metadata=True)
    dataset = dataset.rename("_meta-filename", "filename")
    rows = list(dataset.create_dict_iterator(num_epochs=1, output_numpy=True))
    assert len(rows) == 2
    assert [text.to_str(row["filename"]) for row in rows] == ["000000391895", "000000318219"]
    assert rows[0]["image"].shape == (2268, 4032, 3)
    assert rows[1]["image"].shape == (561, 595, 3)
    # Flat (x, y, visibility) triplets per annotation; invisible points are zeroed.
    expected_keypoints = [
        [[368., 61., 1., 369., 52., 2., 0., 0., 0., 382., 48., 2., 0., 0., 0., 368.,
          84., 2., 435., 81., 2., 362., 125., 2., 446., 125., 2., 360., 153., 2., 0.,
          0., 0., 397., 167., 1., 439., 166., 1., 369., 193., 2., 461., 234., 2.,
          361., 246., 2., 474., 287., 2.]],
        [[244., 139., 2., 0., 0., 0., 226., 118., 2., 0., 0., 0., 154., 159., 2.,
          143., 261., 2., 135., 312., 2., 271., 423., 2., 184., 530., 2., 261., 280.,
          2., 347., 592., 2., 0., 0., 0., 123., 596., 2., 0., 0., 0., 0., 0., 0., 0.,
          0., 0., 0., 0., 0.]],
    ]
    expected_counts = [[[14]], [[10]]]
    for row, keypoints, count in zip(rows, expected_keypoints, expected_counts):
        np.testing.assert_array_equal(np.array(keypoints), row["keypoints"])
        np.testing.assert_array_equal(np.array(count), row["num_keypoints"])
144
145
def test_coco_panoptic():
    """Check CocoDataset "Panoptic" task bbox/category_id/iscrowd/area columns."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic",
                             decode=True, shuffle=False, extra_metadata=True)
    dataset = dataset.rename("_meta-filename", "filename")
    rows = list(dataset.create_dict_iterator(num_epochs=1, output_numpy=True))
    assert len(rows) == 2
    assert [text.to_str(row["filename"]) for row in rows] == ["000000391895", "000000574769"]
    expected = [
        {"shape": (2268, 4032, 3),
         "bbox": [[472, 173, 36, 48], [340, 22, 154, 301], [486, 183, 30, 35]],
         "category_id": [[1], [1], [2]],
         "iscrowd": [[0], [0], [0]],
         "area": [[705], [14062], [626]]},
        {"shape": (642, 675, 3),
         "bbox": [[103, 133, 229, 422], [243, 175, 93, 164]],
         "category_id": [[1], [3]],
         "iscrowd": [[0], [0]],
         "area": [[43102], [6079]]},
    ]
    for row, want in zip(rows, expected):
        assert row["image"].shape == want["shape"]
        for column in ("bbox", "category_id", "iscrowd", "area"):
            np.testing.assert_array_equal(np.array(want[column]), row[column])
177
178
def test_coco_meta_column():
    """With extra_metadata=True, each task yields its expected number of columns."""
    cases = (
        ("Detection", ANNOTATION_FILE, 4),
        ("Stuff", ANNOTATION_FILE, 3),
        ("Keypoint", KEYPOINT_FILE, 3),
        ("Panoptic", PANOPTIC_FILE, 5),
    )
    for task, annotation_file, width in cases:
        dataset = ds.CocoDataset(DATA_DIR, annotation_file=annotation_file, task=task,
                                 decode=True, shuffle=False, extra_metadata=True)
        for row in dataset.create_tuple_iterator():
            assert len(row) == width
199
200
def test_coco_detection_classindex():
    """get_class_indexing() for Detection maps each name to a one-element id list."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    assert dataset.get_class_indexing() == {'person': [1], 'bicycle': [2], 'car': [3], 'cat': [4],
                                            'dog': [5], 'monkey': [6], 'bag': [7], 'orange': [8]}
    row_count = sum(1 for _ in dataset.create_dict_iterator(output_numpy=True))
    assert row_count == 6
210
211
def test_coco_panootic_classindex():
    """get_class_indexing() for Panoptic maps each name to [id, isthing].

    NOTE(review): "panootic" is a typo for "panoptic", but the name is kept —
    it is called by the __main__ runner below.
    """
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic", decode=True)
    assert dataset.get_class_indexing() == {'person': [1, 1], 'bicycle': [2, 1], 'car': [3, 1]}
    row_count = sum(1 for _ in dataset.create_dict_iterator(output_numpy=True))
    assert row_count == 2
220
221
def test_coco_case_0():
    """Shuffle + padded batch of 3 turns 6 samples into 2 batches."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    dataset = dataset.shuffle(10).batch(3, pad_info={})
    batch_count = sum(1 for _ in dataset.create_dict_iterator(num_epochs=1))
    assert batch_count == 2
230
231
def test_coco_case_1():
    """A deterministic 50/50 split yields two halves of 3 samples each."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    first_half, second_half = dataset.split(sizes=[0.5, 0.5], randomize=False)
    for half in (first_half, second_half):
        row_count = sum(1 for _ in half.create_dict_iterator(num_epochs=1))
        assert row_count == 3
246
247
def test_coco_case_2():
    """Resize map + repeat(4) yields 6 * 4 = 24 rows."""
    dataset = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    dataset = dataset.map(operations=vision.Resize((224, 224)), input_columns=["image"])
    dataset = dataset.repeat(4)
    row_count = sum(1 for _ in dataset.create_dict_iterator(output_numpy=True))
    assert row_count == 24
258
259
def test_coco_case_3():
    """Same as test_coco_case_2 but with the no-trailing-slash data dir."""
    dataset = ds.CocoDataset(DATA_DIR_2, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    dataset = dataset.map(operations=vision.Resize((224, 224)), input_columns=["image"])
    dataset = dataset.repeat(4)
    row_count = sum(1 for _ in dataset.create_dict_iterator(output_numpy=True))
    assert row_count == 24
270
271
def _expect_dataset_error(err_type, message, data_dir, annotation_file, task, sampler=None):
    """Build a CocoDataset from (invalid) arguments, iterate it, and require
    that `err_type` is raised with `message` somewhere in its text.

    The bare `assert False` fires only when no exception was raised at all;
    its message says which exception was expected.
    """
    try:
        data = ds.CocoDataset(data_dir, annotation_file=annotation_file, task=task, sampler=sampler)
        for _ in data.create_dict_iterator(output_numpy=True):
            pass
        assert False, "expected {} was not raised".format(err_type.__name__)
    except err_type as e:
        assert message in str(e)


def _pyfunc_error(item):
    """Map callable that always fails, to exercise map() error reporting."""
    raise Exception("Error occur!")


def _expect_map_failure(annotation_file, task, column, decode_first=False):
    """Apply the always-failing py-func on `column` (optionally after a Decode
    on "image") and require that map() surfaces a RuntimeError mentioning the
    failed PyFunc and the corresponding data files."""
    try:
        data = ds.CocoDataset(DATA_DIR, annotation_file=annotation_file, task=task)
        if decode_first:
            data = data.map(operations=vision.Decode(), input_columns=["image"], num_parallel_workers=1)
        data = data.map(operations=_pyfunc_error, input_columns=[column], num_parallel_workers=1)
        for _ in data.create_dict_iterator(output_numpy=True):
            pass
        assert False, "expected RuntimeError was not raised"
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(e)


def test_coco_case_exception():
    """CocoDataset must raise informative errors for invalid paths, annotation
    files, tasks, samplers, and for user map functions that fail.

    Originally this was ~20 copy-pasted try/except blocks with bare
    `assert False`; the checks (and their order) are unchanged, but the
    boilerplate now lives in _expect_dataset_error / _expect_map_failure.
    """
    _expect_dataset_error(ValueError, "does not exist or is not a directory or permission denied",
                          "path_not_exist/", ANNOTATION_FILE, "Detection")
    _expect_dataset_error(ValueError, "does not exist or permission denied",
                          DATA_DIR, "./file_not_exist", "Detection")
    _expect_dataset_error(ValueError, "Invalid task type",
                          DATA_DIR, ANNOTATION_FILE, "Invalid task")
    _expect_dataset_error(RuntimeError, "required node not found in JSON",
                          DATA_DIR, LACKOFIMAGE_FILE, "Detection")
    _expect_dataset_error(RuntimeError, "category_id can't find in categories",
                          DATA_DIR, INVALID_CATEGORY_ID_FILE, "Detection")
    _expect_dataset_error(RuntimeError, "failed to open JSON file",
                          DATA_DIR, INVALID_FILE, "Detection")
    _expect_dataset_error(ValueError, "CocoDataset doesn't support PKSampler",
                          DATA_DIR, INVALID_FILE, "Detection", sampler=ds.PKSampler(3))

    # For every task, a failing map() py-func must produce the same RuntimeError
    # on the raw image, the decoded image, and each annotation column.
    for annotation_file, task, columns in (
            (ANNOTATION_FILE, "Detection", ("bbox", "category_id")),
            (ANNOTATION_FILE, "Stuff", ("segmentation", "iscrowd")),
            (KEYPOINT_FILE, "Keypoint", ("keypoints", "num_keypoints")),
            (PANOPTIC_FILE, "Panoptic", ("bbox", "category_id", "area"))):
        _expect_map_failure(annotation_file, task, "image")
        _expect_map_failure(annotation_file, task, "image", decode_first=True)
        for column in columns:
            _expect_map_failure(annotation_file, task, column)
489
490
if __name__ == '__main__':
    # Run every test in this module when executed as a script.
    # Fix: test_coco_meta_column was defined but never invoked here.
    test_coco_detection()
    test_coco_stuff()
    test_coco_keypoint()
    test_coco_panoptic()
    test_coco_meta_column()
    test_coco_detection_classindex()
    test_coco_panootic_classindex()
    test_coco_case_0()
    test_coco_case_1()
    test_coco_case_2()
    test_coco_case_3()
    test_coco_case_exception()
503