Commit 6f101ec (verified) by karolyartur
1 Parent(s): f429820

Delete loading script

Files changed (1):
  1. SynWBM.py +0 -184
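
For context, SynWBM.py was a script-based `datasets` builder: loading the dataset executed this file on the user's machine. A minimal sketch of that pre-deletion usage, assuming a recent `datasets` release where script execution must be opted into with `trust_remote_code` (older releases did not require the flag); the config names `all`, `blender`, and `sdxl` are taken from the BUILDER_CONFIGS of the deleted file shown below:

from datasets import load_dataset

# Pre-deletion, script-based loading: the custom SynWBM.py builder is fetched
# from the Hub and run locally, so remote-code execution has to be allowed.
ds = load_dataset("ABC-iRobotics/SynWBM", "blender", split="train", trust_remote_code=True)
print(ds[0].keys())  # the builder's Features define "image", "depth" and "mask"
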
SynWBM.py DELETED
@@ -1,184 +0,0 @@
- #
- # This file is part of the SynWBM distribution (https://huggingface.co/datasets/ABC-iRobotics/SynWBM).
- # Copyright (c) 2023 ABC-iRobotics.
- #
- # This program is free software: you can redistribute it and/or modify
- # it under the terms of the GNU General Public License as published by
- # the Free Software Foundation, version 3.
- #
- # This program is distributed in the hope that it will be useful, but
- # WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- # General Public License for more details.
- #
- # You should have received a copy of the GNU General Public License
- # along with this program. If not, see <http://www.gnu.org/licenses/>.
- #
- """SynWBM dataset"""
-
- import sys
- if sys.version_info < (3, 9):
-     from typing import Sequence, Generator, Tuple
- else:
-     from collections.abc import Sequence, Generator
-     Tuple = tuple
-
- from typing import Optional, IO
-
- import datasets
- import itertools
-
-
- # ---- Constants ----
-
- _CITATION = """\
- COMING SOON
- """
-
- _DESCRIPTION = """\
- A synthetic instance segmentation dataset for white button mushrooms (Agaricus bisporus).
- The dataset incorporates rendered and generated synthetic images for training mushroom segmentation models.
- """
-
- _HOMEPAGE = "https://huggingface.co/datasets/ABC-iRobotics/SynWBM"
-
- _LICENSE = "GNU General Public License v3.0"
-
- _LATEST_VERSIONS = {
-     "all": "1.0.0",
-     "blender": "1.0.0",
-     "sdxl": "1.0.0",
- }
-
- BASE_URL = "https://huggingface.co/datasets/ABC-iRobotics/SynWBM/resolve/main/"
-
-
-
- # ---- SynWBM dataset Configs ----
-
- class SynWBMDatasetConfig(datasets.BuilderConfig):
-     """BuilderConfig for SynWBM dataset."""
-
-     def __init__(self, name: str, base_urls: Sequence[str], images_txt: str, version: Optional[str] = None, **kwargs):
-         _version = _LATEST_VERSIONS[name] if version is None else version
-         super(SynWBMDatasetConfig, self).__init__(version=datasets.Version(_version), name=name, **kwargs)
-         with open(images_txt, 'r') as f:
-             image_list = f.readlines()
-         img_urls = []
-         depth_urls = []
-         mask_urls = []
-         for base_url in base_urls:
-             img_urls.extend([base_url + image.strip() for image in image_list])
-             depth_urls.extend([BASE_URL + "depths/" + image.strip() for image in image_list])
-             mask_urls.extend([BASE_URL + "masks/" + image.strip() for image in image_list])
-
-         self._imgs_urls = img_urls
-         self._depth_urls = depth_urls
-         self._masks_urls = mask_urls
-
-
-     @property
-     def features(self):
-         return datasets.Features(
-             {
-                 "image": datasets.Image(),
-                 "depth": datasets.Image(),
-                 "mask": datasets.Image(),
-             }
-         )
-
-     @property
-     def supervised_keys(self):
-         return None
-
-
-
- # ---- SynWBM dataset Loader ----
-
- class SynWBMDataset(datasets.GeneratorBasedBuilder):
-     """SynWBM dataset."""
-
-     BUILDER_CONFIG_CLASS = SynWBMDatasetConfig
-     BUILDER_CONFIGS = [
-         SynWBMDatasetConfig(
-             name = "all",
-             description = "All images",
-             base_urls = [
-                 BASE_URL + "rendered/",
-                 BASE_URL + "generated/"
-             ],
-             images_txt = "images.txt"
-         ),
-         SynWBMDatasetConfig(
-             name = "blender",
-             description = "Synthetic images rendered using Blender",
-             base_urls = [
-                 BASE_URL + "rendered/"
-             ],
-             images_txt = "images.txt"
-         ),
-         SynWBMDatasetConfig(
-             name = "sdxl",
-             description = "Synthetic images generated by Stable Diffusion XL",
-             base_urls = [
-                 BASE_URL + "generated/"
-             ],
-             images_txt = "images.txt"
-         ),
-     ]
-     DEFAULT_WRITER_BATCH_SIZE = 10
-
-     def _info(self):
-         return datasets.DatasetInfo(
-             description=_DESCRIPTION,
-             features=self.config.features,
-             supervised_keys=self.config.supervised_keys,
-             homepage=_HOMEPAGE,
-             license=_LICENSE,
-             citation=_CITATION,
-             version=self.config.version,
-         )
-
-     def _split_generators(self, dl_manager):
-         imgs_paths = dl_manager.download(self.config._imgs_urls)
-         depths_paths = dl_manager.download(self.config._depth_urls)
-         masks_paths = dl_manager.download(self.config._masks_urls)
-
-         imgs_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in imgs_paths])
-         depths_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in depths_paths])
-         masks_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in masks_paths])
-
-         return [
-             datasets.SplitGenerator(
-                 name=datasets.Split.TRAIN,
-                 gen_kwargs={
-                     "images": imgs_gen,
-                     "depths": depths_gen,
-                     "masks": masks_gen,
-                 },
-             ),
-         ]
-
-     def _generate_examples(
-         self,
-         images: Generator[Tuple[str, IO], None, None],
-         depths: Generator[Tuple[str, IO], None, None],
-         masks: Generator[Tuple[str, IO], None, None],
-     ):
-         for i, (img_info, depth_info, mask_info) in enumerate(zip(images, depths, masks)):
-             img_file_path, img_file_obj = img_info
-             depth_file_path, depth_file_obj = depth_info
-             mask_file_path, mask_file_obj = mask_info
-
-             img_bytes = img_file_obj.read()
-             depth_bytes = depth_file_obj.read()
-             mask_bytes = mask_file_obj.read()
-             img_file_obj.close()
-             depth_file_obj.close()
-             mask_file_obj.close()
-
-             yield i, {
-                 "image": {"path": img_file_path, "bytes": img_bytes},
-                 "depth": {"path": depth_file_path, "bytes": depth_bytes},
-                 "mask": {"path": mask_file_path, "bytes": mask_bytes},
-             }
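
With the script removed by this commit, the usage sketched above no longer applies. Assuming the repository's data files are (or will be) organised in a layout the `datasets` library can resolve on its own, for example an auto-converted Parquet branch or data files declared in the dataset card, loading would presumably reduce to the standard call with no remote code involved:

from datasets import load_dataset

# Post-deletion loading sketch: assumes the Hub repo now exposes data files in a
# format the library auto-detects, so no trust_remote_code flag is needed.
ds = load_dataset("ABC-iRobotics/SynWBM", split="train")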