Columns: image (string, length 15); id (string, length 8–12); conversations (string, length 79–268). Each record below spans three lines, in that column order.
00000001658.jpg
1658_evjvqa
[{"from": "human", "value": "what is the woman in the black and white striped shirt going to buy?\n<image>"}, {"from": "gpt", "value": "buy fresh fish"}]
00000001658.jpg
1658_evjvqa
[{"from": "human", "value": "where is the woman in the black and white striped shirt standing?\n<image>"}, {"from": "gpt", "value": "standing at the fresh fish stall"}]
00000001658.jpg
1658_evjvqa
[{"from": "human", "value": "what products does the stall where the woman in the black-and-white striped t-shirt is standing sell?\n<image>"}, {"from": "gpt", "value": "sell fresh fish"}]
00000001660.jpg
1660_evjvqa
[{"from": "human", "value": "where is the lady sitting?\n<image>"}, {"from": "gpt", "value": "on the balcony of a coffee shop"}]
00000001660.jpg
1660_evjvqa
[{"from": "human", "value": "how many people does the woman go out to have coffee with?\n<image>"}, {"from": "gpt", "value": "the woman walks alone"}]
00000001660.jpg
1660_evjvqa
[{"from": "human", "value": "where is the woman sitting to sip coffee?\n<image>"}, {"from": "gpt", "value": "on a balcony"}]
00000001662.jpg
1662_evjvqa
[{"from": "human", "value": "what products are in the baskets placed outside the supermarket?\n<image>"}, {"from": "gpt", "value": "fruit and vegetable products"}]
00000001662.jpg
1662_evjvqa
[{"from": "human", "value": "what is the woman in blue doing?\n<image>"}, {"from": "gpt", "value": "selecting vegetables in a basket"}]
00000001663.jpg
1663_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "walking in an alley"}]
00000001663.jpg
1663_evjvqa
[{"from": "human", "value": "where is the man walking?\n<image>"}, {"from": "gpt", "value": "in an alley, along the painted wall"}]
00000001670.jpg
1670_evjvqa
[{"from": "human", "value": "where are the two men eating?\n<image>"}, {"from": "gpt", "value": "on a canoe"}]
00000001670.jpg
1670_evjvqa
[{"from": "human", "value": "what are the two men doing on the canoe?\n<image>"}, {"from": "gpt", "value": "sitting to eat"}]
00000001670.jpg
1670_evjvqa
[{"from": "human", "value": "how many people are on the canoe?\n<image>"}, {"from": "gpt", "value": "three people"}]
00000001670.jpg
1670_evjvqa
[{"from": "human", "value": "how many people does the man in the red shirt eat with?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001670.jpg
1670_evjvqa
[{"from": "human", "value": "how many people is the man in the blue shirt eating with?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001671.jpg
1671_evjvqa
[{"from": "human", "value": "where is the girl standing?\n<image>"}, {"from": "gpt", "value": "on the canoe"}]
00000001671.jpg
1671_evjvqa
[{"from": "human", "value": "what products does the girl's canoe sell?\n<image>"}, {"from": "gpt", "value": "fruits"}]
00000001671.jpg
1671_evjvqa
[{"from": "human", "value": "where is the girl selling the fruits?\n<image>"}, {"from": "gpt", "value": "on the canoe"}]
00000001679.jpg
1679_evjvqa
[{"from": "human", "value": "where is the pink jacket woman's family going?\n<image>"}, {"from": "gpt", "value": "going in the supermarket"}]
00000001679.jpg
1679_evjvqa
[{"from": "human", "value": "what is the pink jacket woman's family doing?\n<image>"}, {"from": "gpt", "value": "standing at the checkout to pay for the bill"}]
00000001679.jpg
1679_evjvqa
[{"from": "human", "value": "how many people are there in the family of the black jacket man?\n<image>"}, {"from": "gpt", "value": "five people"}]
00000001681.jpg
1681_evjvqa
[{"from": "human", "value": "how many cars are going in the opposite direction of the orange one?\n<image>"}, {"from": "gpt", "value": "there is a car going in the opposite direction"}]
00000001681.jpg
1681_evjvqa
[{"from": "human", "value": "how many cars are going in the same direction as the orange car?\n<image>"}, {"from": "gpt", "value": "there is a car going in the same direction as the orange car"}]
00000001681.jpg
1681_evjvqa
[{"from": "human", "value": "how many cars are going in the other lane of the orange one?\n<image>"}, {"from": "gpt", "value": "there is a car"}]
00000001681.jpg
1681_evjvqa
[{"from": "human", "value": "how many cars are in the same lane as the orange car?\n<image>"}, {"from": "gpt", "value": "there is car"}]
00000001687.jpg
1687_evjvqa
[{"from": "human", "value": "how many people are in the group of young people travelling to the sea?\n<image>"}, {"from": "gpt", "value": "four people"}]
00000001687.jpg
1687_evjvqa
[{"from": "human", "value": "how many girls are there carrying non la hats?\n<image>"}, {"from": "gpt", "value": "two friends"}]
00000001687.jpg
1687_evjvqa
[{"from": "human", "value": "where does the group of four girls travel?\n<image>"}, {"from": "gpt", "value": "sea"}]
00000001687.jpg
1687_evjvqa
[{"from": "human", "value": "where is the group of four girls standing?\n<image>"}, {"from": "gpt", "value": "on the beach"}]
00000001688.jpg
1688_evjvqa
[{"from": "human", "value": "how many christmas trees are there behind the woman standing?\n<image>"}, {"from": "gpt", "value": "there are two christmas trees"}]
00000001689.jpg
1689_evjvqa
[{"from": "human", "value": "what is the man in blue doing?\n<image>"}, {"from": "gpt", "value": "eating a food sample"}]
00000001689.jpg
1689_evjvqa
[{"from": "human", "value": "what is the woman wearing the black apron doing?\n<image>"}, {"from": "gpt", "value": "holding a tray of food samples"}]
00000001689.jpg
1689_evjvqa
[{"from": "human", "value": "what is the woman opposite the man in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "feeding the man with food samples"}]
00000001692.jpg
1692_evjvqa
[{"from": "human", "value": "what color is the young man walking along the street wearing?\n<image>"}, {"from": "gpt", "value": "black"}]
00000005900.jpg
5900_evjvqa
[{"from": "human", "value": "where is the man in white shirt standing compared to the woman in red shirt?\n<image>"}, {"from": "gpt", "value": "he is standing to the left of the woman in red shirt"}]
00000005900.jpg
5900_evjvqa
[{"from": "human", "value": "how many people are standing on the left side of the man in white shirt?\n<image>"}, {"from": "gpt", "value": "there are three people standing on the left side of the man in white shirt"}]
00000005900.jpg
5900_evjvqa
[{"from": "human", "value": "what are people holding in their hands?\n<image>"}, {"from": "gpt", "value": "they are holding the mobile phones"}]
00000005901.jpg
5901_evjvqa
[{"from": "human", "value": "where is the man holding the microphone standing compared to the people sitting?\n<image>"}, {"from": "gpt", "value": "he is standing opposite the people who are sitting"}]
00000005901.jpg
5901_evjvqa
[{"from": "human", "value": "what color is the shirt that the man holding the microphone is wearing?\n<image>"}, {"from": "gpt", "value": "a white shirt"}]
00000005901.jpg
5901_evjvqa
[{"from": "human", "value": "what color is the chair that everyone is sitting on?\n<image>"}, {"from": "gpt", "value": "the chair is blue"}]
00000005904.jpg
5904_evjvqa
[{"from": "human", "value": "what color are the flowers decorated near the dishes?\n<image>"}, {"from": "gpt", "value": "the flowers are yellow"}]
00000005906.jpg
5906_evjvqa
[{"from": "human", "value": "what color are the walls of the houses?\n<image>"}, {"from": "gpt", "value": "the walls are yellow"}]
00000005906.jpg
5906_evjvqa
[{"from": "human", "value": "what color is the shirt that the man driving the white vehicle is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing a blue shirt"}]
00000005906.jpg
5906_evjvqa
[{"from": "human", "value": "what is the man in blue shirt wearing on his head?\n<image>"}, {"from": "gpt", "value": "he is wearing a helmet"}]
00000005908.jpg
5908_evjvqa
[{"from": "human", "value": "what color is the shelf where the bell is placed?\n<image>"}, {"from": "gpt", "value": "the shelf is yellow"}]
00000005908.jpg
5908_evjvqa
[{"from": "human", "value": "what is on the yellow shelf next to the wall?\n<image>"}, {"from": "gpt", "value": "on the shelf is a bell"}]
00000005909.jpg
5909_evjvqa
[{"from": "human", "value": "what is the woman in yellow hat doing?\n<image>"}, {"from": "gpt", "value": "sweeping the trash"}]
00000005909.jpg
5909_evjvqa
[{"from": "human", "value": "how many people are holding brooms?\n<image>"}, {"from": "gpt", "value": "there are two people holding brooms"}]
00000005909.jpg
5909_evjvqa
[{"from": "human", "value": "where is the other broom holder standing compared to the woman in yellow hat?\n<image>"}, {"from": "gpt", "value": "behind the woman in yellow hat"}]
00000005910.jpg
5910_evjvqa
[{"from": "human", "value": "how many banana stalks are there in the picture?\n<image>"}, {"from": "gpt", "value": "there are four banana stalks in the picture"}]
00000005910.jpg
5910_evjvqa
[{"from": "human", "value": "what are on the foam trays?\n<image>"}, {"from": "gpt", "value": "on the foam trays are banana stalks"}]
00000005912.jpg
5912_evjvqa
[{"from": "human", "value": "how many people are holding the certificate of merit?\n<image>"}, {"from": "gpt", "value": "there are two people holding the certificate of merit"}]
00000005912.jpg
5912_evjvqa
[{"from": "human", "value": "what color is the outfit that the man on the right side is wearing?\n<image>"}, {"from": "gpt", "value": "the man on the right side is wearing blue outfit"}]
00000005912.jpg
5912_evjvqa
[{"from": "human", "value": "what is on the hands of two men?\n<image>"}, {"from": "gpt", "value": "on the hands of two men is a certificate of merit"}]
00000005917.jpg
5917_evjvqa
[{"from": "human", "value": "what is on the head of the man in the yellow shirt?\n<image>"}, {"from": "gpt", "value": "there is a pair of glasses on the head of the man in the yellow shirt"}]
00000005917.jpg
5917_evjvqa
[{"from": "human", "value": "how many women are there in front of the man in yellow shirt?\n<image>"}, {"from": "gpt", "value": "there are two women in front of the man"}]
00000005917.jpg
5917_evjvqa
[{"from": "human", "value": "what is the woman in red shirt holding in her hand?\n<image>"}, {"from": "gpt", "value": "she is holding a cloth in her hand"}]
00000005918.jpg
5918_evjvqa
[{"from": "human", "value": "how many santa claus are there on the cake?\n<image>"}, {"from": "gpt", "value": "there are two santa claus on the cake"}]
00000005918.jpg
5918_evjvqa
[{"from": "human", "value": "how many pine trees are there on the cake?\n<image>"}, {"from": "gpt", "value": "there are two pine trees on the cake"}]
00000005918.jpg
5918_evjvqa
[{"from": "human", "value": "what color are the bells?\n<image>"}, {"from": "gpt", "value": "the bells are iridescent"}]
00000005919.jpg
5919_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "he is towing a vehicle"}]
00000005919.jpg
5919_evjvqa
[{"from": "human", "value": "which side of the man is the girl standing on?\n<image>"}, {"from": "gpt", "value": "the girl is standing on the left side of the man"}]
00000005919.jpg
5919_evjvqa
[{"from": "human", "value": "what is the left hand of the girl holding?\n<image>"}, {"from": "gpt", "value": "the left hand is holding a bouquet"}]
00000005922.jpg
5922_evjvqa
[{"from": "human", "value": "what color is the coat that the man standing at the vegetable department wears?\n<image>"}, {"from": "gpt", "value": "a brown coat"}]
00000005922.jpg
5922_evjvqa
[{"from": "human", "value": "where is the man in brown coat standing?\n<image>"}, {"from": "gpt", "value": "he is standing next to the vegetable department"}]
00000005922.jpg
5922_evjvqa
[{"from": "human", "value": "what color is the face mask that the man standing at the vegetable department wears?\n<image>"}, {"from": "gpt", "value": "a green face mask"}]
00000005926.jpg
5926_evjvqa
[{"from": "human", "value": "how many carts are being pushed in front of the man holding the loudspeaker?\n<image>"}, {"from": "gpt", "value": "there are two carts being pushed"}]
00000005927.jpg
5927_evjvqa
[{"from": "human", "value": "what is the guard of the store doing?\n<image>"}, {"from": "gpt", "value": "towing a vehicle"}]
00000005927.jpg
5927_evjvqa
[{"from": "human", "value": "what is the color of the vehicle that the guard is towing?\n<image>"}, {"from": "gpt", "value": "the vehicle is white"}]
00000005929.jpg
5929_evjvqa
[{"from": "human", "value": "what color are the bowls displayed on the table?\n<image>"}, {"from": "gpt", "value": "the bowls are black"}]
00000005929.jpg
5929_evjvqa
[{"from": "human", "value": "what color is the vehicle that the man in red shirt is leaning on?\n<image>"}, {"from": "gpt", "value": "the vehicle is yellow"}]
00000005929.jpg
5929_evjvqa
[{"from": "human", "value": "how many rows of tables and chairs are displayed in front of the restaurant?\n<image>"}, {"from": "gpt", "value": "there are three rows of tables and chairs displayed in front of the restaurant"}]
00000005929.jpg
5929_evjvqa
[{"from": "human", "value": "what are set on the dining tables of the restaurant?\n<image>"}, {"from": "gpt", "value": "the dining tables of the restaurant have bottles of water, cups, chopsticks and menus"}]
00000005930.jpg
5930_evjvqa
[{"from": "human", "value": "how many light bulbs are mounted on the nameplate of the mill?\n<image>"}, {"from": "gpt", "value": "there are four light bulbs mounted on the nameplate"}]
00000005935.jpg
5935_evjvqa
[{"from": "human", "value": "what is behind the counter selling dragon fruit breads?\n<image>"}, {"from": "gpt", "value": "behind is a dragon fruit counter"}]
00000005936.jpg
5936_evjvqa
[{"from": "human", "value": "how many guards are standing in front of the restaurant?\n<image>"}, {"from": "gpt", "value": "there are two guards standing in front of the restaurant"}]
00000005938.jpg
5938_evjvqa
[{"from": "human", "value": "what is the militiaman doing?\n<image>"}, {"from": "gpt", "value": "putting a bulletin board"}]
00000005940.jpg
5940_evjvqa
[{"from": "human", "value": "how many people are riding motorbikes in the market?\n<image>"}, {"from": "gpt", "value": "there are two people riding motorbikes in the market"}]
00000005940.jpg
5940_evjvqa
[{"from": "human", "value": "what color is the vehicle of the person in blue coat?\n<image>"}, {"from": "gpt", "value": "the vehicle of the person in blue coat is red and black"}]
00000005943.jpg
5943_evjvqa
[{"from": "human", "value": "how many women in red shirts are standing in front of the fruit stall?\n<image>"}, {"from": "gpt", "value": "there are two people standing in front of the fruit stall"}]
00000005943.jpg
5943_evjvqa
[{"from": "human", "value": "where is the woman in white shirt standing compared to the two people in red shirts?\n<image>"}, {"from": "gpt", "value": "she is standing to the left of the two people in red shirts"}]
00000005943.jpg
5943_evjvqa
[{"from": "human", "value": "what color is the chair next to the woman in red shirt?\n<image>"}, {"from": "gpt", "value": "the chair is blue"}]
00000005944.jpg
5944_evjvqa
[{"from": "human", "value": "how many people are driving motorbikes?\n<image>"}, {"from": "gpt", "value": "there is one person driving motorbike"}]
00000005944.jpg
5944_evjvqa
[{"from": "human", "value": "what color are the shirts that the two people going on the right side of the building are wearing?\n<image>"}, {"from": "gpt", "value": "the two people are wearing white shirts"}]
00000005944.jpg
5944_evjvqa
[{"from": "human", "value": "how many people are walking in front of the building?\n<image>"}, {"from": "gpt", "value": "there are two people walking in front of the building"}]
00000005949.jpg
5949_evjvqa
[{"from": "human", "value": "what color are the flowers on the plants?\n<image>"}, {"from": "gpt", "value": "the flowers are yellow"}]
00000005949.jpg
5949_evjvqa
[{"from": "human", "value": "how many plants are grown in each pot?\n<image>"}, {"from": "gpt", "value": "one plant is grown in each pot"}]
00000005949.jpg
5949_evjvqa
[{"from": "human", "value": "how are the walls of the pots decorated?\n<image>"}, {"from": "gpt", "value": "the walls of the pots are painted or printed with chinese characters"}]
00000005950.jpg
5950_evjvqa
[{"from": "human", "value": "which side of the person in helmet is the man sitting?\n<image>"}, {"from": "gpt", "value": "on the right side of the person helmet"}]
00000005950.jpg
5950_evjvqa
[{"from": "human", "value": "what color is the helmet that the person standing wears?\n<image>"}, {"from": "gpt", "value": "the person standing wears a black helmet"}]
00000005950.jpg
5950_evjvqa
[{"from": "human", "value": "what stall are the two person wearing black coats in?\n<image>"}, {"from": "gpt", "value": "in the decorations stall"}]
00000005951.jpg
5951_evjvqa
[{"from": "human", "value": "what color is the chair behind the woman in blue shirt?\n<image>"}, {"from": "gpt", "value": "the chair is blue"}]
00000005951.jpg
5951_evjvqa
[{"from": "human", "value": "where is the woman in yellow shirt standing compared to the woman in red coat?\n<image>"}, {"from": "gpt", "value": "she is standing behind the woman in red coat"}]
00000005951.jpg
5951_evjvqa
[{"from": "human", "value": "what is the woman in red coat sitting in front of the woman in yellow shirt doing?\n<image>"}, {"from": "gpt", "value": "selecting goods"}]
00000005954.jpg
5954_evjvqa
[{"from": "human", "value": "what color is the name of the restaurant on the sign?\n<image>"}, {"from": "gpt", "value": "the name of the restaurant is yellow"}]
00000005955.jpg
5955_evjvqa
[{"from": "human", "value": "what is the girl sitting wearing on her ear?\n<image>"}, {"from": "gpt", "value": "she is wearing a wireless headphone"}]
00000005955.jpg
5955_evjvqa
[{"from": "human", "value": "what is the man sitting next to the girl sitting doing?\n<image>"}, {"from": "gpt", "value": "typing the computer"}]
00000005955.jpg
5955_evjvqa
[{"from": "human", "value": "what is the girl sitting holding in her hand?\n<image>"}, {"from": "gpt", "value": "she is holding a mobile phone"}]
00000005959.jpg
5959_evjvqa
[{"from": "human", "value": "what color are the images of rivers?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000005960.jpg
5960_evjvqa
[{"from": "human", "value": "what is the pattern on the shirt of the man?\n<image>"}, {"from": "gpt", "value": "there are flowers printed on the shirt of the man"}]