Update README.md
Browse files
README.md
CHANGED
@@ -180,15 +180,41 @@ dataloader = DataLoader(dataset, batch_size=16, shuffle=True)
|
|
180 |
import tensorflow as tf
|
181 |
from datasets import load_dataset
|
182 |
|
183 |
-
|
|
|
184 |
dataset = load_dataset("Dataseeds/DataSeeds.AI-Sample-Dataset-DSD", split="train")
|
185 |
|
186 |
-
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
191 |
)
|
|
|
|
|
|
|
|
|
|
|
192 |
```
|
193 |
|
194 |
## Dataset Characterization
|
|
|
import tensorflow as tf
import numpy as np
from datasets import load_dataset

# Fixed spatial size for every image, and the mini-batch size.
TARGET_IMG_SIZE = (224, 224)
BATCH_SIZE = 16

dataset = load_dataset("Dataseeds/DataSeeds.AI-Sample-Dataset-DSD", split="train")


def hf_dataset_generator():
    """Yield (image, title) pairs from the Hugging Face dataset.

    The HF ``Image`` feature decodes to PIL images whose mode can vary
    (grayscale, RGBA, ...), so convert each one to a 3-channel uint8 RGB
    array here — otherwise samples would violate the ``(None, None, 3)``
    ``output_signature`` declared below.
    """
    for example in dataset:
        image = np.asarray(example['image'].convert('RGB'), dtype=np.uint8)
        yield image, example['image_title']


def preprocess(image, title):
    """Resize the image to TARGET_IMG_SIZE and restore its uint8 dtype."""
    # tf.image.resize always returns float32; round before casting back
    # to uint8 so pixel values are not silently truncated downward.
    image = tf.image.resize(image, TARGET_IMG_SIZE)
    image = tf.cast(tf.round(image), tf.uint8)
    return image, title


# The output_signature defines the data types and shapes of each element:
# a variable-size RGB uint8 image and a scalar string title.
tf_dataset = tf.data.Dataset.from_generator(
    hf_dataset_generator,
    output_signature=(
        tf.TensorSpec(shape=(None, None, 3), dtype=tf.uint8),
        tf.TensorSpec(shape=(), dtype=tf.string),
    ),
)

# Apply the preprocessing, shuffle, and batch.
tf_dataset = (
    tf_dataset.map(preprocess, num_parallel_calls=tf.data.AUTOTUNE)
    .shuffle(buffer_size=100)
    .batch(BATCH_SIZE)
    .prefetch(tf.data.AUTOTUNE)
)

print("Dataset is ready.")
for images, titles in tf_dataset.take(1):
    print("Image batch shape:", images.shape)
    print("A title from the batch:", titles.numpy()[0].decode('utf-8'))
218 |
```
|
219 |
|
220 |
## Dataset Characterization
|