Columns: image (string, 15 chars), id (string, 8-12 chars), conversations (string, 79-268 chars)
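Each record below spans three lines: the image filename, the sample id, and a JSON-encoded list of conversation turns, where the human turn carries the question plus an <image> placeholder and the gpt turn carries the answer. A minimal parsing sketch in Python, assuming the rows are saved to a plain-text file containing only the data triplets (the file name records.txt and the Record dataclass are illustrative, not part of the dataset):

    import json
    from dataclasses import dataclass

    @dataclass
    class Record:
        image: str           # e.g. "00000010430.jpg"
        sample_id: str       # e.g. "10430_evjvqa"
        conversations: list  # [{"from": "human"/"gpt", "value": ...}, ...]

    def load_records(path: str) -> list[Record]:
        # Records repeat as triplets of lines: filename, id, JSON turns.
        with open(path, encoding="utf-8") as f:
            lines = [ln.strip() for ln in f if ln.strip()]
        records = []
        for image, sample_id, turns_json in zip(lines[0::3], lines[1::3], lines[2::3]):
            turns = json.loads(turns_json)
            # The question turn always embeds the <image> placeholder.
            assert turns[0]["from"] == "human" and "<image>" in turns[0]["value"]
            records.append(Record(image, sample_id, turns))
        return records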
00000010430.jpg
10430_evjvqa
[{"from": "human", "value": "how many soldiers are in the street?\n<image>"}, {"from": "gpt", "value": "there are five soldiers in the street"}]
00000010433.jpg
10433_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a roundabout"}]
00000010434.jpg
10434_evjvqa
[{"from": "human", "value": "which position is the girl standing at?\n<image>"}, {"from": "gpt", "value": "the girl is standing on the left of the gate"}]
00000010435.jpg
10435_evjvqa
[{"from": "human", "value": "how many people are wearing ethnic minority clothes?\n<image>"}, {"from": "gpt", "value": "there are two people wearing ethnic minority clothes"}]
00000010436.jpg
10436_evjvqa
[{"from": "human", "value": "how many buses are there?\n<image>"}, {"from": "gpt", "value": "there is one bus"}]
00000010437.jpg
10437_evjvqa
[{"from": "human", "value": "how many cars are under the roof?\n<image>"}, {"from": "gpt", "value": "there are three cars under the roof"}]
00000010442.jpg
10442_evjvqa
[{"from": "human", "value": "how many bikes are placed on the left of the store?\n<image>"}, {"from": "gpt", "value": "there are four bikes"}]
00000010443.jpg
10443_evjvqa
[{"from": "human", "value": "by what means of transport is everyone going?\n<image>"}, {"from": "gpt", "value": "everyone is going by boat"}]
00000010446.jpg
10446_evjvqa
[{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing south vietnamese pajama with red shirt and black pants"}]
00000010447.jpg
10447_evjvqa
[{"from": "human", "value": "where is everyone eating?\n<image>"}, {"from": "gpt", "value": "everyone is eating at an eatery"}]
00000010448.jpg
10448_evjvqa
[{"from": "human", "value": "how many bikes are parked by the street?\n<image>"}, {"from": "gpt", "value": "there are two bikes parked by the street"}]
00000010451.jpg
10451_evjvqa
[{"from": "human", "value": "what is the road decorated with?\n<image>"}, {"from": "gpt", "value": "the road is decorated with umbrellas"}]
00000010452.jpg
10452_evjvqa
[{"from": "human", "value": "where is the man in white shirt standing?\n<image>"}, {"from": "gpt", "value": "the man in white shirt is standing on the far right"}]
00000010453.jpg
10453_evjvqa
[{"from": "human", "value": "what color is the highest flag?\n<image>"}, {"from": "gpt", "value": "the flag is red"}]
00000010456.jpg
10456_evjvqa
[{"from": "human", "value": "what kind is the traffic sign at the corner of the street?\n<image>"}, {"from": "gpt", "value": "the traffic sign at the corner of the street is a prohibition sign"}]
00000010457.jpg
10457_evjvqa
[{"from": "human", "value": "what color hair does the girl have?\n<image>"}, {"from": "gpt", "value": "the girl has yellow hair"}]
00000010458.jpg
10458_evjvqa
[{"from": "human", "value": "how many pictures are on the upper row?\n<image>"}, {"from": "gpt", "value": "there are four pictures on the upper row"}]
00000010459.jpg
10459_evjvqa
[{"from": "human", "value": "what is the status of the shopping cart?\n<image>"}, {"from": "gpt", "value": "the shopping cart is fully loaded"}]
00000010460.jpg
10460_evjvqa
[{"from": "human", "value": "what color shirt is the girl in the supermarket wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing blue shirt"}]
00000010461.jpg
10461_evjvqa
[{"from": "human", "value": "how many people are wearing helmets?\n<image>"}, {"from": "gpt", "value": "there is one person wearing helmet"}]
00000010462.jpg
10462_evjvqa
[{"from": "human", "value": "how many bikes are in front of the grocery store?\n<image>"}, {"from": "gpt", "value": "there is one bike in front of the grocery store"}]
00000010463.jpg
10463_evjvqa
[{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing white ao dai"}]
00000010464.jpg
10464_evjvqa
[{"from": "human", "value": "how many people are wearing pith helmets?\n<image>"}, {"from": "gpt", "value": "there are five people wearing pith helmets"}]
00000010466.jpg
10466_evjvqa
[{"from": "human", "value": "how many red chairs are in front of the cloth store?\n<image>"}, {"from": "gpt", "value": "there are two red chairs in front of the cloth store"}]
00000010467.jpg
10467_evjvqa
[{"from": "human", "value": "what color uniforms are the vegetable vendors wearing?\n<image>"}, {"from": "gpt", "value": "the vendors are wearing red uniforms"}]
00000010468.jpg
10468_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000010470.jpg
10470_evjvqa
[{"from": "human", "value": "where is the tower located?\n<image>"}, {"from": "gpt", "value": "the tower is located in the middle of the lake"}]
00000010472.jpg
10472_evjvqa
[{"from": "human", "value": "how many people are raising their hands?\n<image>"}, {"from": "gpt", "value": "there are two people raising their hands"}]
00000010473.jpg
10473_evjvqa
[{"from": "human", "value": "what is on the tray?\n<image>"}, {"from": "gpt", "value": "sand worm omelette"}]
00000010476.jpg
10476_evjvqa
[{"from": "human", "value": "what color shirt is the little girl wearing?\n<image>"}, {"from": "gpt", "value": "the little girl is wearing white shirt"}]
00000010479.jpg
10479_evjvqa
[{"from": "human", "value": "how many models are representing for the supermarket commercials?\n<image>"}, {"from": "gpt", "value": "there are two models representing for the supermarket"}]
00000010480.jpg
10480_evjvqa
[{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a handbag"}]
00000010485.jpg
10485_evjvqa
[{"from": "human", "value": "how many floors does the house have?\n<image>"}, {"from": "gpt", "value": "the house has two floors"}]
00000010486.jpg
10486_evjvqa
[{"from": "human", "value": "what color is the statue?\n<image>"}, {"from": "gpt", "value": "the statue is yellow"}]
00000010488.jpg
10488_evjvqa
[{"from": "human", "value": "how many meat balls are in the bowl?\n<image>"}, {"from": "gpt", "value": "there are three meat balls in the bowl"}]
00000010489.jpg
10489_evjvqa
[{"from": "human", "value": "how many people are walking in the street?\n<image>"}, {"from": "gpt", "value": "there are two people walking in the street"}]
00000010490.jpg
10490_evjvqa
[{"from": "human", "value": "who is holding the umbrella?\n<image>"}, {"from": "gpt", "value": "the boy is holding the umbrella"}]
00000010491.jpg
10491_evjvqa
[{"from": "human", "value": "how many people are wearing blue shirts?\n<image>"}, {"from": "gpt", "value": "there is one person wearing blue shirt"}]
00000010492.jpg
10492_evjvqa
[{"from": "human", "value": "how many christmas trees are there?\n<image>"}, {"from": "gpt", "value": "there is one christmas tree"}]
00000010493.jpg
10493_evjvqa
[{"from": "human", "value": "what is the girl holding on her hands?\n<image>"}, {"from": "gpt", "value": "the girl is holding a lantern on her hands"}]
00000010494.jpg
10494_evjvqa
[{"from": "human", "value": "where is the bicycle placed?\n<image>"}, {"from": "gpt", "value": "the bicycle is placed in front of the house"}]
00000003604.jpg
3604_evjvqa
[{"from": "human", "value": "how many women are there next to the advertisement?\n<image>"}, {"from": "gpt", "value": "there are four women around the advertisement"}]
00000003604.jpg
3604_evjvqa
[{"from": "human", "value": "where are those four women standing at?\n<image>"}, {"from": "gpt", "value": "in a phone accessories store"}]
00000003604.jpg
3604_evjvqa
[{"from": "human", "value": "what color are the shirts of those four women?\n<image>"}, {"from": "gpt", "value": "they are wearing black shirts"}]
00000003606.jpg
3606_evjvqa
[{"from": "human", "value": "where is the old man in white shirt standing at?\n<image>"}, {"from": "gpt", "value": "at a vegetable aisle in the market"}]
00000003606.jpg
3606_evjvqa
[{"from": "human", "value": "what is the old man in white shirt doing?\n<image>"}, {"from": "gpt", "value": "buying vegetables"}]
00000003606.jpg
3606_evjvqa
[{"from": "human", "value": "how many people are riding motorbike in the supermarket?\n<image>"}, {"from": "gpt", "value": "there is one woman riding motorbike in the supermarket"}]
00000003606.jpg
3606_evjvqa
[{"from": "human", "value": "what is sold at the aisle where the old man in white shirt standing at?\n<image>"}, {"from": "gpt", "value": "that aisle sells vegetables"}]
00000003606.jpg
3606_evjvqa
[{"from": "human", "value": "what is the aisle behind the old man in white shirt selling?\n<image>"}, {"from": "gpt", "value": "they sells different kinds of vegetables"}]
00000003606.jpg
3606_evjvqa
[{"from": "human", "value": "where is the woman riding at?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]
00000003613.jpg
3613_evjvqa
[{"from": "human", "value": "what is this man standing for?\n<image>"}, {"from": "gpt", "value": "looking at the fruits"}]
00000003613.jpg
3613_evjvqa
[{"from": "human", "value": "what is this man choosing to buy?\n<image>"}, {"from": "gpt", "value": "choosing fruits"}]
00000003613.jpg
3613_evjvqa
[{"from": "human", "value": "what product is sold at this aisle?\n<image>"}, {"from": "gpt", "value": "different kinds of fruits"}]
00000003613.jpg
3613_evjvqa
[{"from": "human", "value": "where is this man buying fruit at?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]
00000003617.jpg
3617_evjvqa
[{"from": "human", "value": "where is this woman standing at?\n<image>"}, {"from": "gpt", "value": "in front of a statue in the museum"}]
00000003617.jpg
3617_evjvqa
[{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "observing the statue in the museum"}]
00000003617.jpg
3617_evjvqa
[{"from": "human", "value": "where is this woman visiting at?\n<image>"}, {"from": "gpt", "value": "in a museum"}]
00000003617.jpg
3617_evjvqa
[{"from": "human", "value": "where are those statue placing in ?\n<image>"}, {"from": "gpt", "value": "in the museum"}]
00000003617.jpg
3617_evjvqa
[{"from": "human", "value": "what is the woman watching?\n<image>"}, {"from": "gpt", "value": "she is watching a statue"}]
00000003620.jpg
3620_evjvqa
[{"from": "human", "value": "what is this woman standing for?\n<image>"}, {"from": "gpt", "value": "selling food"}]
00000003620.jpg
3620_evjvqa
[{"from": "human", "value": "what is sold at this place?\n<image>"}, {"from": "gpt", "value": "different kinds of snails"}]
00000003620.jpg
3620_evjvqa
[{"from": "human", "value": "what is this woman selling?\n<image>"}, {"from": "gpt", "value": "food made from snails"}]
00000003623.jpg
3623_evjvqa
[{"from": "human", "value": "where is this man laying?\n<image>"}, {"from": "gpt", "value": "on a hammock near the river"}]
00000003623.jpg
3623_evjvqa
[{"from": "human", "value": "what is this man doing?\n<image>"}, {"from": "gpt", "value": "laying on a hammock next to the river"}]
00000003623.jpg
3623_evjvqa
[{"from": "human", "value": "where did this man set up his hammock?\n<image>"}, {"from": "gpt", "value": "near a river"}]
00000003629.jpg
3629_evjvqa
[{"from": "human", "value": "what is the woman in black ao dai standing for?\n<image>"}, {"from": "gpt", "value": "guiding the tourist standing nearby"}]
00000003629.jpg
3629_evjvqa
[{"from": "human", "value": "how many people are guiding for the group of tourists?\n<image>"}, {"from": "gpt", "value": "there is one woman in black ao dai guiding for the tourists"}]
00000003632.jpg
3632_evjvqa
[{"from": "human", "value": "where is the woman wearing conical hat standing at?\n<image>"}, {"from": "gpt", "value": "standing in a garden of fruits"}]
00000003632.jpg
3632_evjvqa
[{"from": "human", "value": "where is the woman in black shirt standing?\n<image>"}, {"from": "gpt", "value": "in a garden of fruits"}]
00000003638.jpg
3638_evjvqa
[{"from": "human", "value": "how many cars are there on the sidewalk?\n<image>"}, {"from": "gpt", "value": "there are two cars parking on the sidewalk"}]
00000003640.jpg
3640_evjvqa
[{"from": "human", "value": "how many motorbiek are there in front of the store?\n<image>"}, {"from": "gpt", "value": "there is one motorbike parking in front of the store"}]
00000003644.jpg
3644_evjvqa
[{"from": "human", "value": "what are those two women choosing?\n<image>"}, {"from": "gpt", "value": "they are choosing some fruits"}]
00000003644.jpg
3644_evjvqa
[{"from": "human", "value": "what is the woman wearing green t-shirt tending to buy?\n<image>"}, {"from": "gpt", "value": "tending to buy some fruits"}]
00000003644.jpg
3644_evjvqa
[{"from": "human", "value": "what is the girl wearing red t-shirt doing?\n<image>"}, {"from": "gpt", "value": "cutting the fruit"}]
00000003644.jpg
3644_evjvqa
[{"from": "human", "value": "how many people are standing to choose fruit?\n<image>"}, {"from": "gpt", "value": "there are two women choosing fruits"}]
00000003644.jpg
3644_evjvqa
[{"from": "human", "value": "what products are sold at this aisle?\n<image>"}, {"from": "gpt", "value": "selling different kinds of fruits"}]
00000003645.jpg
3645_evjvqa
[{"from": "human", "value": "how many gods are there in front of this temple?\n<image>"}, {"from": "gpt", "value": "there are two gods in front of the temple"}]
00000003649.jpg
3649_evjvqa
[{"from": "human", "value": "what are those man doing?\n<image>"}, {"from": "gpt", "value": "observing the vegetables on the aisle"}]
00000003649.jpg
3649_evjvqa
[{"from": "human", "value": "where is the man wearing black suit watching at?\n<image>"}, {"from": "gpt", "value": "watching to the vegetables aisle"}]
00000003649.jpg
3649_evjvqa
[{"from": "human", "value": "what is selling at the aisle where the man in black suit is watching at? \n<image>"}, {"from": "gpt", "value": "selling fresh vegetables"}]
00000003651.jpg
3651_evjvqa
[{"from": "human", "value": "what is the woman in pink t-shirt doing?\n<image>"}, {"from": "gpt", "value": "paying for the cashier"}]
00000003651.jpg
3651_evjvqa
[{"from": "human", "value": "where is the woman in pink t-shirt buying at?\n<image>"}, {"from": "gpt", "value": "in a grocery store"}]
00000003651.jpg
3651_evjvqa
[{"from": "human", "value": "how many people are staniding in the grocery store?\n<image>"}, {"from": "gpt", "value": "there are two women standing in the grocery store"}]
00000003651.jpg
3651_evjvqa
[{"from": "human", "value": "how many people are standing at the checkout?\n<image>"}, {"from": "gpt", "value": "there are two woman standing at the checkout"}]
00000003651.jpg
3651_evjvqa
[{"from": "human", "value": "who is buying at the grocery store?\n<image>"}, {"from": "gpt", "value": "the woman in pink t-shirt is buying at the grocery store"}]
00000003651.jpg
3651_evjvqa
[{"from": "human", "value": "who is standing at the checkout?\n<image>"}, {"from": "gpt", "value": "the woman in grey shirt is standing at the checkout"}]
00000003652.jpg
3652_evjvqa
[{"from": "human", "value": "what vietnamese dish is presenting in this bowl?\n<image>"}, {"from": "gpt", "value": "pho"}]
00000003654.jpg
3654_evjvqa
[{"from": "human", "value": "what is sold in this store?\n<image>"}, {"from": "gpt", "value": "this store sells women's clothes"}]
00000003656.jpg
3656_evjvqa
[{"from": "human", "value": "what vehicles are those people moving on the water by?\n<image>"}, {"from": "gpt", "value": "they are moving on the river by boat"}]
00000003656.jpg
3656_evjvqa
[{"from": "human", "value": "what are those people using their boats for?\n<image>"}, {"from": "gpt", "value": "they are using boats to transport the goods on the river"}]
00000003660.jpg
3660_evjvqa
[{"from": "human", "value": "what are those people performing?\n<image>"}, {"from": "gpt", "value": "they are performing a dancing show"}]
00000003660.jpg
3660_evjvqa
[{"from": "human", "value": "what are those people doing?\n<image>"}, {"from": "gpt", "value": "they are dancing"}]
00000003660.jpg
3660_evjvqa
[{"from": "human", "value": "what properties are those people using in their performance?\n<image>"}, {"from": "gpt", "value": "they are using the umbrellas for the performance"}]
00000003662.jpg
3662_evjvqa
[{"from": "human", "value": "what products are sold at this store?\n<image>"}, {"from": "gpt", "value": "this store sells different kinds of clothes"}]
00000003663.jpg
3663_evjvqa
[{"from": "human", "value": "where is the woman in pink jacket parking her motorbike?\n<image>"}, {"from": "gpt", "value": "on the sidewalk, behind the car"}]
00000003664.jpg
3664_evjvqa
[{"from": "human", "value": "where is this man sitting at?\n<image>"}, {"from": "gpt", "value": "he is sitting on the sidewalk of a store"}]
00000003664.jpg
3664_evjvqa
[{"from": "human", "value": "what is this man doing?\n<image>"}, {"from": "gpt", "value": "he is using his phone"}]
00000003664.jpg
3664_evjvqa
[{"from": "human", "value": "where is the man using his phone?\n<image>"}, {"from": "gpt", "value": "at a drinking store"}]
00000003666.jpg
3666_evjvqa
[{"from": "human", "value": "where are those products selling at?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000003666.jpg
3666_evjvqa
[{"from": "human", "value": "where are those vegetables presented at?\n<image>"}, {"from": "gpt", "value": "on the aisles in the supermarket"}]