| image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268) |
|---|---|---|
| 00000010676.jpg | 10676_evjvqa | [{"from": "human", "value": "what color basket is this man holding?\n<image>"}, {"from": "gpt", "value": "red"}] |
| 00000010677.jpg | 10677_evjvqa | [{"from": "human", "value": "what color roof does this house have?\n<image>"}, {"from": "gpt", "value": "brown"}] |
| 00000010678.jpg | 10678_evjvqa | [{"from": "human", "value": "which watch does the person with the green shade wear?\n<image>"}, {"from": "gpt", "value": "on the right hand"}] |
| 00000010680.jpg | 10680_evjvqa | [{"from": "human", "value": "what color walls does this house have?\n<image>"}, {"from": "gpt", "value": "red"}] |
| 00000010683.jpg | 10683_evjvqa | [{"from": "human", "value": "which hand does the man in white pull the other up?\n<image>"}, {"from": "gpt", "value": "the left hand"}] |
| 00000010692.jpg | 10692_evjvqa | [{"from": "human", "value": "what color is the umbrella?\n<image>"}, {"from": "gpt", "value": "white"}] |
| 00000010696.jpg | 10696_evjvqa | [{"from": "human", "value": "how many cows does each team have?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000005000.jpg | 5000_evjvqa | [{"from": "human", "value": "what are the people dressing as warriors in the picture carrying?\n<image>"}, {"from": "gpt", "value": "they are carrying a sedan chair."}] |
| 00000005000.jpg | 5000_evjvqa | [{"from": "human", "value": "what color are the trousers worn by the only boy in the picture?\n<image>"}, {"from": "gpt", "value": "the boy is wearing a pair of navy blue trousers."}] |
| 00000005000.jpg | 5000_evjvqa | [{"from": "human", "value": "what is the weather like in the picture?\n<image>"}, {"from": "gpt", "value": "the weather looks cool."}] |
| 00000005000.jpg | 5000_evjvqa | [{"from": "human", "value": "what is the dominant color of the welcome arch?\n<image>"}, {"from": "gpt", "value": "the welcome arch's dominant color is red."}] |
| 00000005002.jpg | 5002_evjvqa | [{"from": "human", "value": "how many balconies does the house have?\n<image>"}, {"from": "gpt", "value": "the house has four balconies. "}] |
| 00000005002.jpg | 5002_evjvqa | [{"from": "human", "value": "what is the dominant color of the house?\n<image>"}, {"from": "gpt", "value": "white is the dominant color of the house. "}] |
| 00000005002.jpg | 5002_evjvqa | [{"from": "human", "value": "what color are the balcony railings of the house?\n<image>"}, {"from": "gpt", "value": "the balcony railings are black. "}] |
| 00000005004.jpg | 5004_evjvqa | [{"from": "human", "value": "what fruit is the woman holding?\n<image>"}, {"from": "gpt", "value": "she is holding a pineapple."}] |
| 00000005004.jpg | 5004_evjvqa | [{"from": "human", "value": "what is the expression on her face?\n<image>"}, {"from": "gpt", "value": "she is smiling. "}] |
| 00000005004.jpg | 5004_evjvqa | [{"from": "human", "value": "where is she?\n<image>"}, {"from": "gpt", "value": "she is going shopping at a supermarket."}] |
| 00000005004.jpg | 5004_evjvqa | [{"from": "human", "value": "what color is her blouse?\n<image>"}, {"from": "gpt", "value": "she is wearing a red blouse."}] |
| 00000005006.jpg | 5006_evjvqa | [{"from": "human", "value": "mainly, what kind of goods are on sale in the picture?\n<image>"}, {"from": "gpt", "value": "they are mostly fish sauce."}] |
| 00000005006.jpg | 5006_evjvqa | [{"from": "human", "value": "what color are the fish sauce plastic basins?\n<image>"}, {"from": "gpt", "value": "the fish sauce plastic basins are yellow."}] |
| 00000005010.jpg | 5010_evjvqa | [{"from": "human", "value": "what is the woman in black doing?\n<image>"}, {"from": "gpt", "value": "the woman in black is peddling goods. "}] |
| 00000005010.jpg | 5010_evjvqa | [{"from": "human", "value": "what color is the street peddler's outfit?\n<image>"}, {"from": "gpt", "value": "black is the color of the street peddler's outfit. "}] |
| 00000005013.jpg | 5013_evjvqa | [{"from": "human", "value": "where was the picture taken?\n<image>"}, {"from": "gpt", "value": "the picture was taken at a coffee shop. "}] |
| 00000005013.jpg | 5013_evjvqa | [{"from": "human", "value": "which kind of jeans does the man holding the phone wearing?\n<image>"}, {"from": "gpt", "value": "the man holding the phone is weaing a pair of short jeans. "}] |
| 00000005016.jpg | 5016_evjvqa | [{"from": "human", "value": "what color is the handle of the magnifying glass?\n<image>"}, {"from": "gpt", "value": "the handle is black."}] |
| 00000005016.jpg | 5016_evjvqa | [{"from": "human", "value": "what is the background color of the picture?\n<image>"}, {"from": "gpt", "value": "the background is white. "}] |
| 00000005017.jpg | 5017_evjvqa | [{"from": "human", "value": "what time was it when the picture was taken?\n<image>"}, {"from": "gpt", "value": "the picture was taken in the evening."}] |
| 00000005019.jpg | 5019_evjvqa | [{"from": "human", "value": "what is the boy sitting on the attic doing?\n<image>"}, {"from": "gpt", "value": "the boy is studying."}] |
| 00000005019.jpg | 5019_evjvqa | [{"from": "human", "value": "what color is the woman's t-shirt?\n<image>"}, {"from": "gpt", "value": "the woman's t-shirt is black."}] |
| 00000005019.jpg | 5019_evjvqa | [{"from": "human", "value": "what material are the stairs made of?\n<image>"}, {"from": "gpt", "value": "the stairs are made of wood. "}] |
| 00000005022.jpg | 5022_evjvqa | [{"from": "human", "value": "how many types of sticky rice are there on the flat winnowing bamboo basket?\n<image>"}, {"from": "gpt", "value": "there are two types of sticky rice displayed on the flat winnowing bamboo basket."}] |
| 00000005022.jpg | 5022_evjvqa | [{"from": "human", "value": "where are the dishes placed?\n<image>"}, {"from": "gpt", "value": "they are placed in bamboo baskets."}] |
| 00000005023.jpg | 5023_evjvqa | [{"from": "human", "value": "what color is the girl's sweater?\n<image>"}, {"from": "gpt", "value": "the sweater is blue. "}] |
| 00000005023.jpg | 5023_evjvqa | [{"from": "human", "value": "what color are the girl's pants?\n<image>"}, {"from": "gpt", "value": "the girl's pants are yellow. "}] |
| 00000005025.jpg | 5025_evjvqa | [{"from": "human", "value": "what color is the young man's watch?\n<image>"}, {"from": "gpt", "value": "the watch is green."}] |
| 00000005025.jpg | 5025_evjvqa | [{"from": "human", "value": "where was the picture taken?\n<image>"}, {"from": "gpt", "value": "it was taken in a shopping area. "}] |
| 00000005025.jpg | 5025_evjvqa | [{"from": "human", "value": "what type of pattern does the man's bag has?\n<image>"}, {"from": "gpt", "value": "the bag has a black and white checkered pattern. "}] |
| 00000005026.jpg | 5026_evjvqa | [{"from": "human", "value": "where was the picture taken?\n<image>"}, {"from": "gpt", "value": "it was taken in a shopping mall."}] |
| 00000005026.jpg | 5026_evjvqa | [{"from": "human", "value": "what color is the woman's hairgrip?\n<image>"}, {"from": "gpt", "value": "the hairgrip is black."}] |
| 00000005026.jpg | 5026_evjvqa | [{"from": "human", "value": "what color is the coat worn by the man standing next to the girl in white?\n<image>"}, {"from": "gpt", "value": "the coat is navy blue. "}] |
| 00000005028.jpg | 5028_evjvqa | [{"from": "human", "value": "what color is the car?\n<image>"}, {"from": "gpt", "value": "the car is red."}] |
| 00000005028.jpg | 5028_evjvqa | [{"from": "human", "value": "what is the color of the village entrance gate?\n<image>"}, {"from": "gpt", "value": "the village entrance gate is yellow. "}] |
| 00000005032.jpg | 5032_evjvqa | [{"from": "human", "value": "what color is the wall of the house?\n<image>"}, {"from": "gpt", "value": "the wall is yellow."}] |
| 00000005032.jpg | 5032_evjvqa | [{"from": "human", "value": "what type of plant is the one in the picture?\n<image>"}, {"from": "gpt", "value": "it is a bougainvillea plant. "}] |
| 00000005032.jpg | 5032_evjvqa | [{"from": "human", "value": "what type of vehicle is the one next to the old woman?\n<image>"}, {"from": "gpt", "value": "the vehicle next to the old woman is a bike. "}] |
| 00000005035.jpg | 5035_evjvqa | [{"from": "human", "value": "what does this store sell?\n<image>"}, {"from": "gpt", "value": "it mainly sells aquarium fish."}] |
| 00000005035.jpg | 5035_evjvqa | [{"from": "human", "value": "what color is the plastic stool next to the trash bag?\n<image>"}, {"from": "gpt", "value": "the plastic stool next to the trash bag is blue."}] |
| 00000005035.jpg | 5035_evjvqa | [{"from": "human", "value": "what is the color of the outfit worn by the woman squatting on the floor?\n<image>"}, {"from": "gpt", "value": "the woman squatting on the floor is wearing a black outfit. "}] |
| 00000005036.jpg | 5036_evjvqa | [{"from": "human", "value": "what is the sky like?\n<image>"}, {"from": "gpt", "value": "it is cloudy. "}] |
| 00000005036.jpg | 5036_evjvqa | [{"from": "human", "value": "what color is the shirt worn by the man crossing his arms?\n<image>"}, {"from": "gpt", "value": "the man is wearing a white shirt. "}] |
| 00000005036.jpg | 5036_evjvqa | [{"from": "human", "value": "how many colors are the barriers painted?\n<image>"}, {"from": "gpt", "value": "the barriers are painted with two colors. "}] |
| 00000005037.jpg | 5037_evjvqa | [{"from": "human", "value": "how many plant pots are displayed in the hall?\n<image>"}, {"from": "gpt", "value": "there are three plant pots displayed in the hall."}] |
| 00000005037.jpg | 5037_evjvqa | [{"from": "human", "value": "what kind of ceiling lights are they?\n<image>"}, {"from": "gpt", "value": "they are led ceiling lights."}] |
| 00000005039.jpg | 5039_evjvqa | [{"from": "human", "value": "how many air planes are there in the picture?\n<image>"}, {"from": "gpt", "value": "there are no air planes in the picture. "}] |
| 00000005039.jpg | 5039_evjvqa | [{"from": "human", "value": "what time of day is it in the picture?\n<image>"}, {"from": "gpt", "value": "it is daytime. "}] |
| 00000005039.jpg | 5039_evjvqa | [{"from": "human", "value": "how many container trucks running on the road?\n<image>"}, {"from": "gpt", "value": "there are no container trucks running on the road. "}] |
| 00000005041.jpg | 5041_evjvqa | [{"from": "human", "value": "what color is the dress of the woman sitting behind the man in black on the motorbike?\n<image>"}, {"from": "gpt", "value": "she is wearing a pink dress."}] |
| 00000005041.jpg | 5041_evjvqa | [{"from": "human", "value": "what color are the flags?\n<image>"}, {"from": "gpt", "value": "the flags are red. "}] |
| 00000005042.jpg | 5042_evjvqa | [{"from": "human", "value": "when was the picture taken?\n<image>"}, {"from": "gpt", "value": "the picture was taken in the evening. "}] |
| 00000005042.jpg | 5042_evjvqa | [{"from": "human", "value": "what position is the man wearing the white shirt in?\n<image>"}, {"from": "gpt", "value": "he is sitting. "}] |
| 00000005042.jpg | 5042_evjvqa | [{"from": "human", "value": "what color is the shirt of the man sitting?\n<image>"}, {"from": "gpt", "value": "it is white."}] |
| 00000005043.jpg | 5043_evjvqa | [{"from": "human", "value": "what type of dish is it?\n<image>"}, {"from": "gpt", "value": "it is pho."}] |
| 00000005043.jpg | 5043_evjvqa | [{"from": "human", "value": "what color is the mug?\n<image>"}, {"from": "gpt", "value": "the mug is purple. "}] |
| 00000005045.jpg | 5045_evjvqa | [{"from": "human", "value": "what color is the dancing lion?\n<image>"}, {"from": "gpt", "value": "the dancing lion is orange. "}] |
| 00000005047.jpg | 5047_evjvqa | [{"from": "human", "value": "what kind of vehicle is the one parked in front of the house?\n<image>"}, {"from": "gpt", "value": "the vehicle parked in front of the house is a motorbike. "}] |
| 00000005047.jpg | 5047_evjvqa | [{"from": "human", "value": "what time of day is it in the picture?\n<image>"}, {"from": "gpt", "value": "it is evening. "}] |
| 00000005047.jpg | 5047_evjvqa | [{"from": "human", "value": "what is the dominant color of the house?\n<image>"}, {"from": "gpt", "value": "the dominant color of the house is white. "}] |
| 00000005048.jpg | 5048_evjvqa | [{"from": "human", "value": "what color is the mask of the man standing?\n<image>"}, {"from": "gpt", "value": "it is white. "}] |
| 00000005048.jpg | 5048_evjvqa | [{"from": "human", "value": "what is the facial expression of the woman in the yellow top?\n<image>"}, {"from": "gpt", "value": "the woman in the yellow top is laughing. "}] |
| 00000005048.jpg | 5048_evjvqa | [{"from": "human", "value": "what color is the helmet on the side of the woman in the blue t-shirt?\n<image>"}, {"from": "gpt", "value": "it is pink. "}] |
| 00000005048.jpg | 5048_evjvqa | [{"from": "human", "value": "what is the position of the girl wearing the mask in the back of the picture?\n<image>"}, {"from": "gpt", "value": "she is sitting. "}] |
| 00000005049.jpg | 5049_evjvqa | [{"from": "human", "value": "what time of day is it in the picture?\n<image>"}, {"from": "gpt", "value": "it is evening. "}] |
| 00000005049.jpg | 5049_evjvqa | [{"from": "human", "value": "what color is the t-shirt of the woman sitting with the phone in her hand?\n<image>"}, {"from": "gpt", "value": "her t-shirt is white. "}] |
| 00000005049.jpg | 5049_evjvqa | [{"from": "human", "value": "what color are the lights in the fountain?\n<image>"}, {"from": "gpt", "value": "the lights are white. "}] |
| 00000005052.jpg | 5052_evjvqa | [{"from": "human", "value": "where was the picture taken?\n<image>"}, {"from": "gpt", "value": "the picture was taken in a coffee shop."}] |
| 00000005052.jpg | 5052_evjvqa | [{"from": "human", "value": "what color is the shirt of the man sitting at the table?\n<image>"}, {"from": "gpt", "value": "it is grey. "}] |
| 00000005052.jpg | 5052_evjvqa | [{"from": "human", "value": "how many wall-mounted fans are there in the shop?\n<image>"}, {"from": "gpt", "value": "there are three wall-mounted fans. "}] |
| 00000005052.jpg | 5052_evjvqa | [{"from": "human", "value": "what color are most of the chair cushion in the shop?\n<image>"}, {"from": "gpt", "value": "they are mostly brown. "}] |
| 00000005054.jpg | 5054_evjvqa | [{"from": "human", "value": "where was the picture taken according to the background?\n<image>"}, {"from": "gpt", "value": "the picture was taken inside an exhibition. "}] |
| 00000005054.jpg | 5054_evjvqa | [{"from": "human", "value": "what type of outfit are most of the women wearing?\n<image>"}, {"from": "gpt", "value": "they are mostly wearing ao dai. "}] |
| 00000005054.jpg | 5054_evjvqa | [{"from": "human", "value": "what color is the bag of the woman in the pink ao dai?\n<image>"}, {"from": "gpt", "value": "the bag is red. "}] |
| 00000005055.jpg | 5055_evjvqa | [{"from": "human", "value": "what color is the taxi?\n<image>"}, {"from": "gpt", "value": "the taxi is white."}] |
| 00000005055.jpg | 5055_evjvqa | [{"from": "human", "value": "what is the woman pushing on the road?\n<image>"}, {"from": "gpt", "value": "she is pushing a bicycle."}] |
| 00000005057.jpg | 5057_evjvqa | [{"from": "human", "value": "what time of day is it in the picture?\n<image>"}, {"from": "gpt", "value": "it is morning. "}] |
| 00000005057.jpg | 5057_evjvqa | [{"from": "human", "value": "what color are the roof tiles?\n<image>"}, {"from": "gpt", "value": "the roof tiles are red. "}] |
| 00000005057.jpg | 5057_evjvqa | [{"from": "human", "value": "what is the dominant color of the structure?\n<image>"}, {"from": "gpt", "value": "it is yellow. "}] |
| 00000005059.jpg | 5059_evjvqa | [{"from": "human", "value": "what animal is the man riding?\n<image>"}, {"from": "gpt", "value": "he is riding a horse."}] |
| 00000005059.jpg | 5059_evjvqa | [{"from": "human", "value": "how many people are mounted on a horse?\n<image>"}, {"from": "gpt", "value": "there is one person mounted on a horse. "}] |
| 00000005059.jpg | 5059_evjvqa | [{"from": "human", "value": "what color is the umbrella held by the girl walking the horse?\n<image>"}, {"from": "gpt", "value": "the umbrella is red. "}] |
| 00000005059.jpg | 5059_evjvqa | [{"from": "human", "value": "how many umbrellas are there in the picture?\n<image>"}, {"from": "gpt", "value": "there are four umbrellas in the picture."}] |
| 00000005059.jpg | 5059_evjvqa | [{"from": "human", "value": "what color are the pants of the man riding the horse?\n<image>"}, {"from": "gpt", "value": "the pants are black. "}] |
| 00000005059.jpg | 5059_evjvqa | [{"from": "human", "value": "what is the man mounted on the horse holding in his right hand?\n<image>"}, {"from": "gpt", "value": "he is holding a bottle of water in his right hand. "}] |
| 00000005061.jpg | 5061_evjvqa | [{"from": "human", "value": "what color is the ship?\n<image>"}, {"from": "gpt", "value": "the ship is white."}] |
| 00000005061.jpg | 5061_evjvqa | [{"from": "human", "value": "what color is the background of the flag with the golden star in the middle?\n<image>"}, {"from": "gpt", "value": "it is red. "}] |
| 00000005061.jpg | 5061_evjvqa | [{"from": "human", "value": "what time of day is it in the picture?\n<image>"}, {"from": "gpt", "value": "it is morning. "}] |
| 00000005063.jpg | 5063_evjvqa | [{"from": "human", "value": "what color is the onion basket?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
| 00000005063.jpg | 5063_evjvqa | [{"from": "human", "value": "what is the dog sleeping on?\n<image>"}, {"from": "gpt", "value": "a handbag. "}] |
| 00000005063.jpg | 5063_evjvqa | [{"from": "human", "value": "what is the dog doing?\n<image>"}, {"from": "gpt", "value": "the dog is sleeping. "}] |
| 00000005067.jpg | 5067_evjvqa | [{"from": "human", "value": "what color is the female driver's helmet?\n<image>"}, {"from": "gpt", "value": "the helmet is beige brown. "}] |
| 00000005067.jpg | 5067_evjvqa | [{"from": "human", "value": "what color is the shipper's motorbike?\n<image>"}, {"from": "gpt", "value": "it is black. "}] |
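Each `conversations` cell above is a JSON-encoded list of chat turns in the LLaVA-style `{"from": ..., "value": ...}` layout, where the literal `<image>` token marks the point at which the image is injected into the prompt. Below is a minimal parsing sketch, not an official loader for this dataset: the `row` literal is copied from the first table row, and everything else is plain standard-library Python.

```python
import json

# One row copied verbatim from the preview table above; note that
# `conversations` is stored as a JSON string, not a parsed list.
row = {
    "image": "00000010676.jpg",
    "id": "10676_evjvqa",
    "conversations": (
        '[{"from": "human", "value": "what color basket is this man holding?'
        '\\n<image>"}, {"from": "gpt", "value": "red"}]'
    ),
}

# Decode the turn list.
turns = json.loads(row["conversations"])

# Pair alternating human/gpt turns into (question, answer), stripping the
# <image> placeholder that marks where the image embedding is inserted.
for human, gpt in zip(turns[::2], turns[1::2]):
    assert human["from"] == "human" and gpt["from"] == "gpt"
    question = human["value"].replace("<image>", "").strip()
    answer = gpt["value"].strip()
    print(f"{row['id']}: Q={question!r} A={answer!r}")
```

Depending on how the data is obtained (a raw JSONL export versus, say, `datasets.load_dataset`), the `conversations` field may arrive as a string or as an already-decoded list, so checking `isinstance(value, str)` before calling `json.loads` is the safer pattern in practice.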