DeepFurniture / uncompress_dataset.sh
#!/bin/bash
# Usage function
usage() {
    echo "Usage: $0 -s SOURCE_DIR -t TARGET_DIR [-c CHUNK_TYPE] [-m MAX_FILES] [-h]"
    echo "Uncompress chunked DeepFurniture dataset"
    echo ""
    echo "Required arguments:"
    echo "  -s SOURCE_DIR   Source directory containing the chunked dataset"
    echo "  -t TARGET_DIR   Target directory for the uncompressed dataset"
    echo ""
    echo "Optional arguments:"
    echo "  -c CHUNK_TYPE   Specific chunk type to process (scenes, furnitures, queries)"
    echo "                  If not specified, all chunk types will be processed"
    echo "  -m MAX_FILES    Maximum number of files to process per type (default: process all)"
    echo "  -h              Show this help message"
    exit 1
}
# Process command line arguments
while getopts "s:t:c:m:h" opt; do
    case $opt in
        s) SOURCE_DIR="$OPTARG";;
        t) TARGET_DIR="$OPTARG";;
        c) CHUNK_TYPE="$OPTARG";;
        m) MAX_FILES="$OPTARG";;
        h) usage;;
        \?) usage;;
    esac
done
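# Example invocations (directory paths are illustrative):
#   bash uncompress_dataset.sh -s ./deepfurniture_chunks -t ./deepfurniture
#   bash uncompress_dataset.sh -s ./deepfurniture_chunks -t ./deepfurniture -c scenes -m 500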
# Check required arguments
if [ -z "$SOURCE_DIR" ] || [ -z "$TARGET_DIR" ]; then
echo "Error: Source and target directories are required"
usage
fi
# Validate source directory
if [ ! -d "$SOURCE_DIR" ]; then
echo "Error: Source directory does not exist: $SOURCE_DIR"
exit 1
fi
# Validate MAX_FILES if provided
if [ -n "$MAX_FILES" ]; then
if ! [[ "$MAX_FILES" =~ ^[0-9]+$ ]]; then
echo "Error: MAX_FILES must be a positive integer"
exit 1
fi
echo "Will process maximum $MAX_FILES files per type"
fi
# Create target directory structure
mkdir -p "$TARGET_DIR"/{metadata,scenes,furnitures,queries}
# Copy metadata files (excluding index files)
echo "Copying metadata files..."
for file in "$SOURCE_DIR"/metadata/*.json*; do
if [[ ! $file =~ _index.json$ ]]; then
cp "$file" "$TARGET_DIR/metadata/"
fi
done
# Function to process chunks of a specific type
process_chunks() {
    local type=$1
    local src_dir="$SOURCE_DIR/$type"
    local target_dir="$TARGET_DIR/$type"
    echo "Processing $type chunks..."

    # Check if source directory exists
    if [ ! -d "$src_dir" ]; then
        echo "Warning: Directory not found: $src_dir"
        return
    fi

    # Get the chunk list in natural (version) sort order; mapfile avoids the
    # word-splitting pitfalls of chunks=($(ls ...))
    local chunks=()
    mapfile -t chunks < <(ls -v "$src_dir"/*.tar.gz 2>/dev/null)
    local total_chunks=${#chunks[@]}
    if [ "$total_chunks" -eq 0 ]; then
        echo "No chunks found in $src_dir"
        return
    fi
    # Determine how many chunks to process based on MAX_FILES
    files_per_chunk=1000  # Default files per chunk based on dataset structure
    if [ -n "$MAX_FILES" ]; then
        chunks_needed=$(( (MAX_FILES + files_per_chunk - 1) / files_per_chunk ))
        if [ "$chunks_needed" -lt "$total_chunks" ]; then
            total_chunks=$chunks_needed
            echo "Limiting to $total_chunks chunks ($MAX_FILES files) for $type"
        fi
    fi
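    # Worked example of the ceiling division above: with MAX_FILES=2500 and
    # files_per_chunk=1000, chunks_needed = (2500 + 999) / 1000 = 3 in integer
    # arithmetic, so three chunks are extracted and the trim step below removes
    # the 500 excess files.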
    # Process chunks
    for ((i = 0; i < total_chunks; i++)); do
        chunk="${chunks[$i]}"
        chunk_name=$(basename "$chunk")
        printf "Extracting %s (%d/%d)..." "$chunk_name" "$((i + 1))" "$total_chunks"
        if tar -xzf "$chunk" -C "$target_dir" 2>/dev/null; then
            echo " done"
        else
            echo " failed"
            echo "Warning: Failed to extract $chunk_name"
        fi

        # After the last chunk, trim any excess files if MAX_FILES is set
        if [ -n "$MAX_FILES" ] && [ "$i" -eq "$((total_chunks - 1))" ]; then
            local expected_total=$MAX_FILES
            local current_total
            current_total=$(ls "$target_dir" | wc -l)
            if [ "$current_total" -gt "$expected_total" ]; then
                echo "Trimming excess files to meet MAX_FILES limit..."
                # Keep the first MAX_FILES entries (ls output is already sorted);
                # a read loop handles names with spaces more safely than xargs
                ls "$target_dir" | tail -n +"$((expected_total + 1))" | \
                    while IFS= read -r entry; do
                        rm -rf "$target_dir/$entry"
                    done
            fi
        fi
    done
}
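# Optional sanity check before a long extraction (the chunk filename below is
# illustrative, not a guaranteed name in the dataset): list a chunk's contents
# without unpacking it.
#   tar -tzf "$SOURCE_DIR/scenes/scenes_000.tar.gz" | head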
# Process chunks based on input
if [ -n "$CHUNK_TYPE" ]; then
case "$CHUNK_TYPE" in
scenes|furnitures|queries)
process_chunks "$CHUNK_TYPE"
;;
*)
echo "Error: Invalid chunk type: $CHUNK_TYPE"
echo "Valid types are: scenes, furnitures, queries"
exit 1
;;
esac
else
# Process all chunk types
for type in scenes furnitures queries; do
process_chunks "$type"
done
fi
# Basic validation
echo -e "\nValidating extracted files..."
# Check scenes
if [ -z "$CHUNK_TYPE" ] || [ "$CHUNK_TYPE" = "scenes" ]; then
missing_files=0
total_scenes=0
for scene_dir in "$TARGET_DIR"/scenes/*; do
if [ -d "$scene_dir" ]; then
total_scenes=$((total_scenes + 1))
for required in "image.jpg" "annotation.json"; do
if [ ! -f "$scene_dir/$required" ]; then
echo "Warning: Missing $required in $(basename "$scene_dir")"
missing_files=$((missing_files + 1))
fi
done
fi
done
echo "Scene validation complete. Processed $total_scenes scenes. Missing files: $missing_files"
fi
# Print final statistics
echo -e "\nExtraction Summary:"
for type in scenes furnitures queries; do
    if [ -z "$CHUNK_TYPE" ] || [ "$CHUNK_TYPE" = "$type" ]; then
        file_count=$(find "$TARGET_DIR/$type" -type f | wc -l)
        echo "$type: $file_count files"
    fi
done
echo "Dataset uncompression completed!"
echo "Output directory: $TARGET_DIR"