|
#!/bin/bash |
|
|
|
|
|
usage() {
    # Print the help text and terminate the script with a non-zero status
    # (called both for -h and for argument errors).
    cat <<EOF
Usage: $0 -s SOURCE_DIR -t TARGET_DIR [-c CHUNK_TYPE] [-m MAX_FILES] [-h]
Uncompress chunked DeepFurniture dataset

Required arguments:
 -s SOURCE_DIR Source directory containing the chunked dataset
 -t TARGET_DIR Target directory for the uncompressed dataset

Optional arguments:
 -c CHUNK_TYPE Specific chunk type to process (scenes, furnitures, queries)
 If not specified, all chunk types will be processed
 -m MAX_FILES Maximum number of files to process per type (default: process all)
 -h Show this help message
EOF
    exit 1
}
|
|
|
|
|
# ---- Argument parsing -------------------------------------------------------
# Leading ':' puts getopts in silent mode so a missing option argument is
# reported via the ':' case instead of getopts' own stderr message.
while getopts ":s:t:c:m:h" opt; do
    case "$opt" in
        s) SOURCE_DIR="$OPTARG";;
        t) TARGET_DIR="$OPTARG";;
        c) CHUNK_TYPE="$OPTARG";;
        m) MAX_FILES="$OPTARG";;
        h) usage;;
        :)
            echo "Error: Option -$OPTARG requires an argument" >&2
            usage
            ;;
        \?) usage;;   # must be escaped: an unquoted '?' pattern matches ANY single char
    esac
done

# Both directories are mandatory.
if [ -z "$SOURCE_DIR" ] || [ -z "$TARGET_DIR" ]; then
    echo "Error: Source and target directories are required"
    usage
fi

if [ ! -d "$SOURCE_DIR" ]; then
    echo "Error: Source directory does not exist: $SOURCE_DIR"
    exit 1
fi

# MAX_FILES, when supplied, must be a plain non-negative integer.
if [ -n "$MAX_FILES" ]; then
    if ! [[ "$MAX_FILES" =~ ^[0-9]+$ ]]; then
        echo "Error: MAX_FILES must be a positive integer"
        exit 1
    fi
    echo "Will process maximum $MAX_FILES files per type"
fi

# Create the output layout up front.
mkdir -p "$TARGET_DIR"/{metadata,scenes,furnitures,queries}

# Copy metadata, skipping the per-chunk *_index.json files.
echo "Copying metadata files..."
for file in "$SOURCE_DIR"/metadata/*.json*; do
    [ -e "$file" ] || continue   # glob may not match; skip the literal pattern
    # Dot escaped: unescaped '.' would also match e.g. 'foo_indexXjson'.
    if [[ ! $file =~ _index\.json$ ]]; then
        cp -- "$file" "$TARGET_DIR/metadata/"
    fi
done
|
|
|
|
|
process_chunks() {
    # Extract the .tar.gz chunks for one dataset type ($1) from
    # $SOURCE_DIR/$1 into $TARGET_DIR/$1.
    # Globals read: SOURCE_DIR, TARGET_DIR, MAX_FILES (optional cap).
    local type=$1
    local src_dir="$SOURCE_DIR/$type"
    local target_dir="$TARGET_DIR/$type"
    local chunks=() total_chunks files_per_chunk chunks_needed
    local i chunk chunk_name f entry

    echo "Processing $type chunks..."

    if [ ! -d "$src_dir" ]; then
        echo "Warning: Directory not found: $src_dir"
        return
    fi

    # Collect chunk archives without parsing `ls` (paths with spaces survive),
    # then sort in natural-version order so chunk_2 precedes chunk_10.
    for f in "$src_dir"/*.tar.gz; do
        [ -e "$f" ] && chunks+=("$f")
    done
    if [ "${#chunks[@]}" -gt 1 ]; then
        mapfile -t chunks < <(printf '%s\n' "${chunks[@]}" | sort -V)
    fi
    total_chunks=${#chunks[@]}

    if [ "$total_chunks" -eq 0 ]; then
        echo "No chunks found in $src_dir"
        return
    fi

    # Chunks are assumed to hold files_per_chunk entries each — TODO confirm
    # against the script that packed the dataset.
    files_per_chunk=1000
    if [ -n "$MAX_FILES" ]; then
        # Ceiling division: smallest number of chunks that covers MAX_FILES.
        chunks_needed=$(( (MAX_FILES + files_per_chunk - 1) / files_per_chunk ))
        if [ "$chunks_needed" -lt "$total_chunks" ]; then
            total_chunks=$chunks_needed
            echo "Limiting to $total_chunks chunks ($MAX_FILES files) for $type"
        fi
    fi

    for ((i = 0; i < total_chunks; i++)); do
        chunk="${chunks[$i]}"
        chunk_name=$(basename "$chunk")
        printf "Extracting %s (%d/%d)..." "$chunk_name" $((i + 1)) "$total_chunks"

        if tar -xzf "$chunk" -C "$target_dir" 2>/dev/null; then
            echo " done"
        else
            echo " failed"
            echo "Warning: Failed to extract $chunk_name"
        fi

        # After the last chunk, trim extracted entries beyond the MAX_FILES cap.
        if [ -n "$MAX_FILES" ] && [ "$i" -eq "$((total_chunks - 1))" ]; then
            local expected_total=$MAX_FILES
            local current_total
            current_total=$(find "$target_dir" -mindepth 1 -maxdepth 1 | wc -l)

            if [ "$current_total" -gt "$expected_total" ]; then
                echo "Trimming excess files to meet MAX_FILES limit..."
                # Remove the lexicographically-last entries past the cap.
                # Read names line-by-line (not via xargs string substitution)
                # so entries containing spaces are deleted correctly.
                while IFS= read -r entry; do
                    rm -rf -- "$target_dir/$entry"
                done < <(ls "$target_dir" | sort | tail -n +"$((expected_total + 1))")
            fi
        fi
    done
}
|
|
|
|
|
# ---- Run extraction ---------------------------------------------------------
# With -c, process just that chunk type; otherwise walk all three.
if [ -n "$CHUNK_TYPE" ]; then
    case "$CHUNK_TYPE" in
        scenes|furnitures|queries)
            process_chunks "$CHUNK_TYPE"
            ;;
        *)
            echo "Error: Invalid chunk type: $CHUNK_TYPE"
            echo "Valid types are: scenes, furnitures, queries"
            exit 1
            ;;
    esac
else
    for type in scenes furnitures queries; do
        process_chunks "$type"
    done
fi

# ---- Validation -------------------------------------------------------------
printf '\nValidating extracted files...\n'

# Each scene directory is expected to hold image.jpg and annotation.json.
if [ -z "$CHUNK_TYPE" ] || [ "$CHUNK_TYPE" = "scenes" ]; then
    scene_missing=0
    scene_count=0
    for scene_dir in "$TARGET_DIR"/scenes/*; do
        [ -d "$scene_dir" ] || continue
        scene_count=$((scene_count + 1))
        for required in "image.jpg" "annotation.json"; do
            [ -f "$scene_dir/$required" ] && continue
            echo "Warning: Missing $required in $(basename "$scene_dir")"
            scene_missing=$((scene_missing + 1))
        done
    done
    echo "Scene validation complete. Processed $scene_count scenes. Missing files: $scene_missing"
fi

# ---- Summary ----------------------------------------------------------------
printf '\nExtraction Summary:\n'
for summary_type in scenes furnitures queries; do
    if [ -n "$CHUNK_TYPE" ] && [ "$CHUNK_TYPE" != "$summary_type" ]; then
        continue
    fi
    echo "$summary_type: $(find "$TARGET_DIR/$summary_type" -type f | wc -l) files"
done

echo "Dataset uncompression completed!"
echo "Output directory: $TARGET_DIR"