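"""Download the images referenced in a subjects CSV: fetch each URL with
retries, rasterize SVGs, flatten transparent logos onto white, save a JPEG
original plus a square-resized copy, and write both local paths back into
the CSV."""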
import pandas as pd
import requests
import os
import argparse
from tqdm import tqdm
from dataset_utils import resize_square
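# NOTE: resize_square is a local helper; it is assumed to scale/pad an image to a fixed-size square.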
from PIL import Image
import cairosvg
from io import BytesIO
from dotenv import load_dotenv
import time
import random
load_dotenv()

DATA_DIR = os.environ['DATA_DIR']
original_dir = os.path.join(DATA_DIR, 'original_path')
resized_dir = os.path.join(DATA_DIR, 'resized_path')

def fetch_image(url, max_retries=5):
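    """Fetch the raw bytes at `url`, retrying up to `max_retries` times.

    Rotates between a few desktop User-Agent strings, sleeps for the
    Retry-After interval on HTTP 429, and backs off exponentially on other
    request errors. Returns None if every attempt fails.
    """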
    headers = {
        'User-Agent': random.choice([
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        ])
    }
    
    for attempt in range(max_retries):
        try:
            response = requests.get(url, headers=headers, timeout=30)  # timeout keeps a dead connection from stalling the run
            if response.status_code == 429:  # Too Many Requests
                retry_after = int(response.headers.get("Retry-After", random.uniform(1, 5)))
                time.sleep(retry_after)
                continue
            response.raise_for_status()
            return response.content
        except requests.exceptions.RequestException as e:
            wait_time = 2 ** attempt + random.uniform(0, 1)  # Exponential backoff
            print(f"Error fetching {url}: {e}, retrying in {wait_time:.2f}s")
            time.sleep(wait_time)
    
    print(f"Failed to fetch {url} after {max_retries} attempts.")
    return None

def main(args):
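    """Download every image listed in the input CSV and record its local paths in the output CSV."""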
    df = pd.read_csv(args.base_df)
    df['original_path'] = ''
    df['resized_path'] = ''

    errors = 0
    count = 0
    
    for index, row in tqdm(df.iterrows(), total=len(df)):
        image_url = row['image']
        subject = row['subject'].replace(' ', '_').replace('/', '_')
        type_original_dir = os.path.join(original_dir, row['type'])
        type_resized_dir = os.path.join(resized_dir, row['type'])
        os.makedirs(type_original_dir, exist_ok=True)
        os.makedirs(type_resized_dir, exist_ok=True)
        
        image_data = fetch_image(image_url)
        if image_data:
            try:
                if image_url.lower().endswith('.svg'):
                    png_bytes = cairosvg.svg2png(bytestring=image_data)
                    image = Image.open(BytesIO(png_bytes)).convert("RGBA")
                else:
                    image = Image.open(BytesIO(image_data))

                if row['type'] == 'brands':
                    # Brand logos are often transparent; composite them onto a white background.
                    image = image.convert("RGBA")
                    white_bg = Image.new("RGB", image.size, (255, 255, 255))
                    white_bg.paste(image, mask=image.split()[3])
                    image = white_bg

                # JPEG has no alpha channel; flatten any remaining non-RGB images before saving.
                if image.mode != "RGB":
                    image = image.convert("RGB")
                
                first_letter = subject[0].lower()
                os.makedirs(os.path.join(type_original_dir, first_letter), exist_ok=True)
                original_filepath = os.path.join(type_original_dir, first_letter, f'{subject}.jpg')
                image.save(original_filepath, "JPEG", quality=95)

                im = Image.open(original_filepath)
                im = resize_square(im)  # NOTE: the parsed --width flag is not passed through here
                
                os.makedirs(os.path.join(type_resized_dir, first_letter), exist_ok=True)
                resized_filepath = os.path.join(type_resized_dir, first_letter, f'{subject}.jpg')
                # im.format may be None after resize_square, so name the format explicitly.
                im.save(resized_filepath, "JPEG")

                df.at[index, 'original_path'] = original_filepath if os.path.exists(original_filepath) else ''
                df.at[index, 'resized_path'] = resized_filepath if os.path.exists(resized_filepath) else ''
                
            except Exception as e:
                errors += 1
                print(f"Failed to process image {subject}: {e}")
                continue
        else:
            errors += 1
            continue
        count += 1

    df = df[df['original_path'] != '']
    df = df[df['resized_path'] != '']
    df.to_csv(args.target_df, index=False)
    
    print(f'Finished downloading {count} images with {errors} errors')


def get_exp_parser():
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--base_df', type=str, required=True, help='input CSV with image, subject, and type columns')
    parser.add_argument('--target_df', type=str, required=True, help='output CSV annotated with local image paths')
    parser.add_argument('--width', type=int, default=336, help='target square size for resized images (currently unused)')
    return parser


if __name__ == "__main__":
    parser = get_exp_parser()
    args = parser.parse_args()
    main(args)
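
# Example invocation (the script and CSV names here are illustrative): DATA_DIR
# must be set in the environment or a .env file, and the input CSV needs
# 'image', 'subject', and 'type' columns.
#
#   python download_images.py --base_df subjects.csv --target_df subjects_with_paths.csv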