Columns: image (string, 15 chars), id (string, 8-12 chars), conversations (string, 79-268 chars). A minimal parsing sketch for this record format follows the sample rows below.
00000002423.jpg
2423_evjvqa
[{"from": "human", "value": "what does the boat with the woman in the conical hat that is reaching for the boat next to it carry?\n<image>"}, {"from": "gpt", "value": "coconuts"}]
00000002425.jpg
2425_evjvqa
[{"from": "human", "value": "how many people are coming out from the historical site gate?\n<image>"}, {"from": "gpt", "value": "three"}]
00000002425.jpg
2425_evjvqa
[{"from": "human", "value": "whom does the man in blue shirt come out from the gate of the historical site with?\n<image>"}, {"from": "gpt", "value": "with a woman in white shirt and a woman in black shirt"}]
00000002425.jpg
2425_evjvqa
[{"from": "human", "value": "where are the three people coming out from?\n<image>"}, {"from": "gpt", "value": "from a historical site"}]
00000002429.jpg
2429_evjvqa
[{"from": "human", "value": "what topic are the exhibits displayed in the room related to?\n<image>"}, {"from": "gpt", "value": "the exhibits about the sea"}]
00000002429.jpg
2429_evjvqa
[{"from": "human", "value": "what exhibits does the room display?\n<image>"}, {"from": "gpt", "value": "a fish skeleton and a boat"}]
00000002429.jpg
2429_evjvqa
[{"from": "human", "value": "how many exhibits are displayed in the room?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002432.jpg
2432_evjvqa
[{"from": "human", "value": "where is this woman standing?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]
00000002432.jpg
2432_evjvqa
[{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "using the mobile phone"}]
00000002432.jpg
2432_evjvqa
[{"from": "human", "value": "what is the woman holding in her hand?\n<image>"}, {"from": "gpt", "value": "a milk bottle"}]
00000002437.jpg
2437_evjvqa
[{"from": "human", "value": "what item are sold in the area where the women are standing at?\n<image>"}, {"from": "gpt", "value": "fabric"}]
00000002437.jpg
2437_evjvqa
[{"from": "human", "value": "whom does the woman in white shirt go to buy fabric with?\n<image>"}, {"from": "gpt", "value": "the woman goes to buy alone"}]
00000002437.jpg
2437_evjvqa
[{"from": "human", "value": "whom is the woman in white shirt talking to?\n<image>"}, {"from": "gpt", "value": "a person in orange shirt"}]
00000002442.jpg
2442_evjvqa
[{"from": "human", "value": "whom is the young man in blue shirt walking with?\n<image>"}, {"from": "gpt", "value": "with a woman in black shirt"}]
00000002442.jpg
2442_evjvqa
[{"from": "human", "value": "how many people does the young man in blue shirt walk with?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002442.jpg
2442_evjvqa
[{"from": "human", "value": "what does young man in blue shirt walking hold in his hand?\n<image>"}, {"from": "gpt", "value": "a red bag"}]
00000002444.jpg
2444_evjvqa
[{"from": "human", "value": "what is the woman in the white cloth hat doing?\n<image>"}, {"from": "gpt", "value": "choosing pineapples"}]
00000002444.jpg
2444_evjvqa
[{"from": "human", "value": "where is the woman in white cloth hat standing?\n<image>"}, {"from": "gpt", "value": "at the pineapple counter"}]
00000002444.jpg
2444_evjvqa
[{"from": "human", "value": "how many people are choosing pineapples?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002444.jpg
2444_evjvqa
[{"from": "human", "value": "how many people does the woman stand choosing pineapples with?\n<image>"}, {"from": "gpt", "value": "the woman chooeses alone"}]
00000002444.jpg
2444_evjvqa
[{"from": "human", "value": "what products is the woman choosing?\n<image>"}, {"from": "gpt", "value": "fruits"}]
00000002445.jpg
2445_evjvqa
[{"from": "human", "value": "where is this house built?\n<image>"}, {"from": "gpt", "value": "beside a river"}]
00000002446.jpg
2446_evjvqa
[{"from": "human", "value": "where are the two women standing?\n<image>"}, {"from": "gpt", "value": "in front of a stall selling vegetables and fruits"}]
00000002446.jpg
2446_evjvqa
[{"from": "human", "value": "how many people are standing in front of the stall selling fruits and vegetables?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002446.jpg
2446_evjvqa
[{"from": "human", "value": "how many women are standing in front of the vegetable stall?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002446.jpg
2446_evjvqa
[{"from": "human", "value": "how many people in conical hats are standing in front of the vegetable stall?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002446.jpg
2446_evjvqa
[{"from": "human", "value": "what does the woman without a hat stand in front of the vegetable stall for?\n<image>"}, {"from": "gpt", "value": "buying vegetables"}]
00000002448.jpg
2448_evjvqa
[{"from": "human", "value": "what stalls are at the entrance to the market?\n<image>"}, {"from": "gpt", "value": "the clothing stall"}]
00000002448.jpg
2448_evjvqa
[{"from": "human", "value": "how many people are standing in front of the market?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002451.jpg
2451_evjvqa
[{"from": "human", "value": "what are the women doing?\n<image>"}, {"from": "gpt", "value": "collecting garbage on the roadside"}]
00000002451.jpg
2451_evjvqa
[{"from": "human", "value": "how many bicycles are leaning on the wall?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002451.jpg
2451_evjvqa
[{"from": "human", "value": "how many people are working on the roadside?\n<image>"}, {"from": "gpt", "value": "five"}]
00000002451.jpg
2451_evjvqa
[{"from": "human", "value": "what are these five women doing?\n<image>"}, {"from": "gpt", "value": "collecting garbage on the roadside"}]
00000002453.jpg
2453_evjvqa
[{"from": "human", "value": "what products does the stall on the left mainly sell?\n<image>"}, {"from": "gpt", "value": "children toys"}]
00000002453.jpg
2453_evjvqa
[{"from": "human", "value": "how many people are sitting in front of the children's toy stall at the beginning of the street?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002453.jpg
2453_evjvqa
[{"from": "human", "value": "what color is the shirt that the owner of the children's toy stall at the beginning of the street wears?\n<image>"}, {"from": "gpt", "value": "red"}]
00000002453.jpg
2453_evjvqa
[{"from": "human", "value": "whom does the woman in red shirt sitting in front of the children's toy stall located at the beginning of the street sit with?\n<image>"}, {"from": "gpt", "value": "a little girl"}]
00000002456.jpg
2456_evjvqa
[{"from": "human", "value": "how many lights are placed at the clothing stall?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002456.jpg
2456_evjvqa
[{"from": "human", "value": "how many people are selecting clothes at the clothing stall of the woman in black shirt?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002456.jpg
2456_evjvqa
[{"from": "human", "value": "what stall is the woman wearing glasses and a black shirt standing at?\n<image>"}, {"from": "gpt", "value": "the clothing stall"}]
00000002459.jpg
2459_evjvqa
[{"from": "human", "value": "what is the girl holding in hand?\n<image>"}, {"from": "gpt", "value": "a bunch of flowers"}]
00000002459.jpg
2459_evjvqa
[{"from": "human", "value": "what color is the umbrella that the girl uses?\n<image>"}, {"from": "gpt", "value": "the white umbrella"}]
00000002459.jpg
2459_evjvqa
[{"from": "human", "value": "what does the girl wear?\n<image>"}, {"from": "gpt", "value": "the girl wears ao dai"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "what color is the shirt of the woman selling vegetables?\n<image>"}, {"from": "gpt", "value": "red"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "how many bicycles are being parked near the vegetable stall?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "how many people sell fresh food?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "what does the woman sitting next to the woman in blue shirt sell?\n<image>"}, {"from": "gpt", "value": "fresh food"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "whom does the woman sitting next to the woman in blue shirt sell with?\n<image>"}, {"from": "gpt", "value": "a child"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "what color is the shirt that the woman sitting next to the woman holding a child wears?\n<image>"}, {"from": "gpt", "value": "polka dot blue"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "what does the woman in the blue polka dot shirt sell?\n<image>"}, {"from": "gpt", "value": "fresh food"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "how many people prepare stalls for sale?\n<image>"}, {"from": "gpt", "value": "three"}]
00000002460.jpg
2460_evjvqa
[{"from": "human", "value": "what does the woman in red sell?\n<image>"}, {"from": "gpt", "value": "vegetables"}]
00000002462.jpg
2462_evjvqa
[{"from": "human", "value": "what is this group of people doing?\n<image>"}, {"from": "gpt", "value": "lining up"}]
00000002462.jpg
2462_evjvqa
[{"from": "human", "value": "where is this group of people lining up?\n<image>"}, {"from": "gpt", "value": "at a chicken shop"}]
00000002462.jpg
2462_evjvqa
[{"from": "human", "value": "what do this group of people line up for?\n<image>"}, {"from": "gpt", "value": "buying chicken"}]
00000002463.jpg
2463_evjvqa
[{"from": "human", "value": "what is the man with a face mask opposite the cashier doing?\n<image>"}, {"from": "gpt", "value": "charging"}]
00000002465.jpg
2465_evjvqa
[{"from": "human", "value": "where are the four women traveling?\n<image>"}, {"from": "gpt", "value": "sea"}]
00000002465.jpg
2465_evjvqa
[{"from": "human", "value": "where are the four women standing?\n<image>"}, {"from": "gpt", "value": "on the bridge"}]
00000002465.jpg
2465_evjvqa
[{"from": "human", "value": "how many people does the woman in yellow shirt go to the sea with?\n<image>"}, {"from": "gpt", "value": "three"}]
00000002465.jpg
2465_evjvqa
[{"from": "human", "value": "how many people does the woman holding the red handbag travel to the sea with?\n<image>"}, {"from": "gpt", "value": "three"}]
00000002468.jpg
2468_evjvqa
[{"from": "human", "value": "where is the man in blue shirt standing?\n<image>"}, {"from": "gpt", "value": "at a book counter"}]
00000002468.jpg
2468_evjvqa
[{"from": "human", "value": "how many people does the man in blue shirt go to view books with?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002468.jpg
2468_evjvqa
[{"from": "human", "value": "whom does the man in blue shirt go to view books with?\n<image>"}, {"from": "gpt", "value": "a man in white shirt"}]
00000002468.jpg
2468_evjvqa
[{"from": "human", "value": "how many people does the woman in white shirt stand at the book counter with?\n<image>"}, {"from": "gpt", "value": "three"}]
00000002468.jpg
2468_evjvqa
[{"from": "human", "value": "where is the woman in white shirt standing?\n<image>"}, {"from": "gpt", "value": "at a book counter"}]
00000002470.jpg
2470_evjvqa
[{"from": "human", "value": "who does the outermost picture show?\n<image>"}, {"from": "gpt", "value": "a woman"}]
00000002470.jpg
2470_evjvqa
[{"from": "human", "value": "how many people does the outermost picture show?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002470.jpg
2470_evjvqa
[{"from": "human", "value": "who does the photo next to the photograph of a woman show?\n<image>"}, {"from": "gpt", "value": "a man in an orange shirt, a woman and a boy in a green shirt"}]
00000002470.jpg
2470_evjvqa
[{"from": "human", "value": "how many people does the photo next to the photograph of a woman show?\n<image>"}, {"from": "gpt", "value": "three"}]
00000002473.jpg
2473_evjvqa
[{"from": "human", "value": "how many people are sitting on the canoe?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002473.jpg
2473_evjvqa
[{"from": "human", "value": "whom does the woman in red ao ba ba sit on the canoe with?\n<image>"}, {"from": "gpt", "value": "the man in white ao ba ba"}]
00000002473.jpg
2473_evjvqa
[{"from": "human", "value": "where is the man white ao ba ba sitting?\n<image>"}, {"from": "gpt", "value": "on a canoe"}]
00000002473.jpg
2473_evjvqa
[{"from": "human", "value": "whom does the man in white ao ba ba sit with?\n<image>"}, {"from": "gpt", "value": "a woman in red ao ba ba"}]
00000002478.jpg
2478_evjvqa
[{"from": "human", "value": "where is the man lying on the mattress?\n<image>"}, {"from": "gpt", "value": "on the beach"}]
00000002478.jpg
2478_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "lying on a mattress by the sea"}]
00000002478.jpg
2478_evjvqa
[{"from": "human", "value": "whom does the man lie on the beach with?\n<image>"}, {"from": "gpt", "value": "the man lies alone"}]
00000002480.jpg
2480_evjvqa
[{"from": "human", "value": "whom does the woman wearing a brown bag walk down the walking street with?\n<image>"}, {"from": "gpt", "value": "a woman in white coat"}]
00000002480.jpg
2480_evjvqa
[{"from": "human", "value": "whom does the woman in orange coat walk down the walking street with?\n<image>"}, {"from": "gpt", "value": "the woman walks alone"}]
00000002482.jpg
2482_evjvqa
[{"from": "human", "value": "how many people are rowing the canoes on the water?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002482.jpg
2482_evjvqa
[{"from": "human", "value": "what is the woman in red shirt and conical hat doing?\n<image>"}, {"from": "gpt", "value": "rowing the canoe"}]
00000002482.jpg
2482_evjvqa
[{"from": "human", "value": "what is the woman in conical hat doing?\n<image>"}, {"from": "gpt", "value": "rowing the canoe"}]
00000002482.jpg
2482_evjvqa
[{"from": "human", "value": "what is the woman in red shirt doing?\n<image>"}, {"from": "gpt", "value": "rowing the canoe"}]
00000002482.jpg
2482_evjvqa
[{"from": "human", "value": "how many people does the woman in conical hat rowing the canoe carry?\n<image>"}, {"from": "gpt", "value": "the woman row the canoe alone"}]
00000002482.jpg
2482_evjvqa
[{"from": "human", "value": "how many canoes are on the water?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002485.jpg
2485_evjvqa
[{"from": "human", "value": "what does the big green boat carry?\n<image>"}, {"from": "gpt", "value": "flowers"}]
00000002485.jpg
2485_evjvqa
[{"from": "human", "value": "how many people are standing on the big green coat?\n<image>"}, {"from": "gpt", "value": "four"}]
00000002485.jpg
2485_evjvqa
[{"from": "human", "value": "how many women stand on the big green boat?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002485.jpg
2485_evjvqa
[{"from": "human", "value": "what color is the coat of the woman standing on the big green boat?\n<image>"}, {"from": "gpt", "value": "pink"}]
00000002485.jpg
2485_evjvqa
[{"from": "human", "value": "how many people wear conical hats and stand on the big green boat?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002488.jpg
2488_evjvqa
[{"from": "human", "value": "whom does the woman in green coat go to the supermarket with?\n<image>"}, {"from": "gpt", "value": "the woman in white coat and a kid sitting in the trollet"}]
00000002488.jpg
2488_evjvqa
[{"from": "human", "value": "how many children does the woman in green coat take to the supermarket?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002488.jpg
2488_evjvqa
[{"from": "human", "value": "where is the kid sitting?\n<image>"}, {"from": "gpt", "value": "in the trolley"}]
00000002491.jpg
2491_evjvqa
[{"from": "human", "value": "what instruments are the girls playing?\n<image>"}, {"from": "gpt", "value": "they are playing drums"}]
00000002491.jpg
2491_evjvqa
[{"from": "human", "value": "what instrument is used mainly in this performance?\n<image>"}, {"from": "gpt", "value": "this performance mainly use drum"}]
00000002491.jpg
2491_evjvqa
[{"from": "human", "value": "what program is going on?\n<image>"}, {"from": "gpt", "value": "the grand opening program"}]
00000002492.jpg
2492_evjvqa
[{"from": "human", "value": "where is the man riding the motorbike?\n<image>"}, {"from": "gpt", "value": "on the riverside"}]
00000002492.jpg
2492_evjvqa
[{"from": "human", "value": "in which direction is the man driving?\n<image>"}, {"from": "gpt", "value": "towards the citadel gate"}]
00000002492.jpg
2492_evjvqa
[{"from": "human", "value": "how many people are riding motorbikes on the road?\n<image>"}, {"from": "gpt", "value": "one"}]
00000002492.jpg
2492_evjvqa
[{"from": "human", "value": "where is the man going?\n<image>"}, {"from": "gpt", "value": "towards the fort gate"}]
00000002497.jpg
2497_evjvqa
[{"from": "human", "value": "what ship is this?\n<image>"}, {"from": "gpt", "value": "this is a cargo ship"}]