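# Gradio app: classify a directory of uploaded images with an image-classification
# model, copy each image into a folder named after its predicted label, and return
# one zip archive per label for download.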
import shutil
from pathlib import Path

import gradio as gr
import torch
from optimum.pipelines import pipeline
from transformers import AutoModelForImageClassification

# Run on the first GPU when one is available, otherwise on the CPU.
device = 0 if torch.cuda.is_available() else "cpu"

# AutoTrain image-classification checkpoint on the Hugging Face Hub.
chk_point = "davanstrien/autotrain-ia-useful-covers-3665397856"

# Load the model so its config (label2id) provides the label names for the output folders.
model = AutoModelForImageClassification.from_pretrained(chk_point)

# Prefer the BetterTransformer-accelerated pipeline from Optimum; fall back to the
# plain transformers pipeline if the model architecture isn't supported.
try:
    pipe = pipeline(
        "image-classification",
        chk_point,
        accelerator="bettertransformer",
        device=device,
    )
except NotImplementedError:
    from transformers import pipeline

    pipe = pipeline(
        "image-classification",
        chk_point,
        device=device,
    )

def make_label_folders():
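    """Create one directory per label in the model config and return the label names."""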
    folders = model.config.label2id.keys()
    for folder in folders:
        folder = Path(folder)
        if not folder.exists():
            folder.mkdir()
    return folders


def predictions_into_folders(files):
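    """Classify uploaded files, copy each into its predicted label's folder, and zip the folders."""
    # Gradio passes file objects; their .name attribute is the temp path on disk.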
    files = [file.name for file in files]
    folders = make_label_folders()
    predictions = pipe(files)
    for file, prediction in zip(files, predictions):
        label = prediction[0]["label"]
        file_name = Path(file).name
        shutil.copy(file, f"{label}/{file_name}")
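    # Zip each label folder so the grouped images can be downloaded from the UI.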
    for folder in folders:
        shutil.make_archive(folder, "zip", ".", folder)
    return [f"{folder}.zip" for folder in folders]


demo = gr.Interface(
    predictions_into_folders,
    gr.File(file_count="directory"),  # input: upload a whole directory of images
    gr.Files(),  # output: one downloadable zip archive per label
    cache_examples=True,
)

demo.launch()