Columns: image (string, 15 chars), id (string, 8-12 chars), conversations (string, 79-268 chars; a JSON list of alternating human/gpt turns). One record per line:
{"image": "00000001960.jpg", "id": "1960_evjvqa", "conversations": [{"from": "human", "value": "what does the red circle sign with a white horizontal line in the middle mean?\n<image>"}, {"from": "gpt", "value": "forbidden to go in the opposite direction"}]}
{"image": "00000001960.jpg", "id": "1960_evjvqa", "conversations": [{"from": "human", "value": "how many lanes are there on each side?\n<image>"}, {"from": "gpt", "value": "two lanes"}]}
{"image": "00000001960.jpg", "id": "1960_evjvqa", "conversations": [{"from": "human", "value": "what vehicles are on the street?\n<image>"}, {"from": "gpt", "value": "no vehicles"}]}
{"image": "00000001963.jpg", "id": "1963_evjvqa", "conversations": [{"from": "human", "value": "how many motorbikes are moving on the road?\n<image>"}, {"from": "gpt", "value": "one motorbike"}]}
{"image": "00000001963.jpg", "id": "1963_evjvqa", "conversations": [{"from": "human", "value": "how many people are walking?\n<image>"}, {"from": "gpt", "value": "one person"}]}
{"image": "00000001964.jpg", "id": "1964_evjvqa", "conversations": [{"from": "human", "value": "what does this area in the supermarket sell?\n<image>"}, {"from": "gpt", "value": "fruit"}]}
{"image": "00000001964.jpg", "id": "1964_evjvqa", "conversations": [{"from": "human", "value": "where are these people shopping?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]}
{"image": "00000001965.jpg", "id": "1965_evjvqa", "conversations": [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "khen dancing"}]}
{"image": "00000001965.jpg", "id": "1965_evjvqa", "conversations": [{"from": "human", "value": "what is the man holding in his hand?\n<image>"}, {"from": "gpt", "value": "the khen"}]}
{"image": "00000001965.jpg", "id": "1965_evjvqa", "conversations": [{"from": "human", "value": "what is the woman on the left holding?\n<image>"}, {"from": "gpt", "value": "the orange umbrella"}]}
{"image": "00000001967.jpg", "id": "1967_evjvqa", "conversations": [{"from": "human", "value": "what is the man in the black shirt doing?\n<image>"}, {"from": "gpt", "value": "introducing the product for the two people wearing white shirts"}]}
{"image": "00000001967.jpg", "id": "1967_evjvqa", "conversations": [{"from": "human", "value": "what color mask is the man in the white shirt standing between the other two men?\n<image>"}, {"from": "gpt", "value": "gray"}]}
{"image": "00000001970.jpg", "id": "1970_evjvqa", "conversations": [{"from": "human", "value": "what color is the traffic light showing?\n<image>"}, {"from": "gpt", "value": "green"}]}
{"image": "00000001971.jpg", "id": "1971_evjvqa", "conversations": [{"from": "human", "value": "what color is the woman on the left wearing?\n<image>"}, {"from": "gpt", "value": "yellow shirt and black skirt"}]}
{"image": "00000001971.jpg", "id": "1971_evjvqa", "conversations": [{"from": "human", "value": "what product is sold in this store?\n<image>"}, {"from": "gpt", "value": "garlic"}]}
{"image": "00000001977.jpg", "id": "1977_evjvqa", "conversations": [{"from": "human", "value": "what are the people in yellow shirts doing?\n<image>"}, {"from": "gpt", "value": "drumming"}]}
{"image": "00000001988.jpg", "id": "1988_evjvqa", "conversations": [{"from": "human", "value": "what is the girl wearing outside?\n<image>"}, {"from": "gpt", "value": "vest"}]}
{"image": "00000001991.jpg", "id": "1991_evjvqa", "conversations": [{"from": "human", "value": "what are the two outer left and right?\n<image>"}, {"from": "gpt", "value": "lions"}]}
{"image": "00000001991.jpg", "id": "1991_evjvqa", "conversations": [{"from": "human", "value": "who is standing next to the left side of the red lion?\n<image>"}, {"from": "gpt", "value": "mammonish"}]}
{"image": "00000001991.jpg", "id": "1991_evjvqa", "conversations": [{"from": "human", "value": "what does the mammonish hold in his hand?\n<image>"}, {"from": "gpt", "value": "gold bar"}]}
{"image": "00000001993.jpg", "id": "1993_evjvqa", "conversations": [{"from": "human", "value": "what is the woman wearing the red shirt doing?\n<image>"}, {"from": "gpt", "value": "driving a white motorbike from the inside to the gate"}]}
{"image": "00000001993.jpg", "id": "1993_evjvqa", "conversations": [{"from": "human", "value": "what vehicle is there outside the gate?\n<image>"}, {"from": "gpt", "value": "a motorbike"}]}
{"image": "00000001999.jpg", "id": "1999_evjvqa", "conversations": [{"from": "human", "value": "what does this stall sell?\n<image>"}, {"from": "gpt", "value": "assorted dried fish and sausages"}]}
{"image": "00000001999.jpg", "id": "1999_evjvqa", "conversations": [{"from": "human", "value": "what color shirt is the man wearing the blue helmet wearing?\n<image>"}, {"from": "gpt", "value": "sky blue, white, blue and orange horizontal striped shirt"}]}
{"image": "00000001999.jpg", "id": "1999_evjvqa", "conversations": [{"from": "human", "value": "what does the stall the man in the helmet standing in front of sell?\n<image>"}, {"from": "gpt", "value": "assorted dried fish"}]}
{"image": "00000002602.jpg", "id": "2602_evjvqa", "conversations": [{"from": "human", "value": "where are these lanterns hung?\n<image>"}, {"from": "gpt", "value": "above a street"}]}
{"image": "00000002602.jpg", "id": "2602_evjvqa", "conversations": [{"from": "human", "value": "what is the above side of this street decorated with?\n<image>"}, {"from": "gpt", "value": "lanterns"}]}
{"image": "00000002606.jpg", "id": "2606_evjvqa", "conversations": [{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "at a cashier counter in a supermarket"}]}
{"image": "00000002607.jpg", "id": "2607_evjvqa", "conversations": [{"from": "human", "value": "how many cyclos are coming out from the citadel gate?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002607.jpg", "id": "2607_evjvqa", "conversations": [{"from": "human", "value": "which direction is the cyclo going?\n<image>"}, {"from": "gpt", "value": "the direction out of the citadel gate"}]}
{"image": "00000002607.jpg", "id": "2607_evjvqa", "conversations": [{"from": "human", "value": "how many people are opposite the cyclo?\n<image>"}, {"from": "gpt", "value": "nobody"}]}
{"image": "00000002607.jpg", "id": "2607_evjvqa", "conversations": [{"from": "human", "value": "how many bicycles are there on the side of the road?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002607.jpg", "id": "2607_evjvqa", "conversations": [{"from": "human", "value": "how many cyclos are in front of the bicycle?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002607.jpg", "id": "2607_evjvqa", "conversations": [{"from": "human", "value": "where is the bicycle?\n<image>"}, {"from": "gpt", "value": "on the side of the road"}]}
{"image": "00000002607.jpg", "id": "2607_evjvqa", "conversations": [{"from": "human", "value": "where is the cyclo moving?\n<image>"}, {"from": "gpt", "value": "towards the bicycle"}]}
{"image": "00000002611.jpg", "id": "2611_evjvqa", "conversations": [{"from": "human", "value": "where is the woman in pink shirt standing?\n<image>"}, {"from": "gpt", "value": "at a department in a supermarket"}]}
{"image": "00000002611.jpg", "id": "2611_evjvqa", "conversations": [{"from": "human", "value": "what is the woman in pink shirt doing?\n<image>"}, {"from": "gpt", "value": "selecting goods"}]}
{"image": "00000002611.jpg", "id": "2611_evjvqa", "conversations": [{"from": "human", "value": "what is the woman in white dress doing?\n<image>"}, {"from": "gpt", "value": "selecting goods"}]}
{"image": "00000002611.jpg", "id": "2611_evjvqa", "conversations": [{"from": "human", "value": "where is the woman in white dress standing?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]}
{"image": "00000002611.jpg", "id": "2611_evjvqa", "conversations": [{"from": "human", "value": "how many people are walking between the woman in pink shirt and the woman in white dress?\n<image>"}, {"from": "gpt", "value": "two"}]}
{"image": "00000002614.jpg", "id": "2614_evjvqa", "conversations": [{"from": "human", "value": "where is the young man standing?\n<image>"}, {"from": "gpt", "value": "outside a castle"}]}
{"image": "00000002614.jpg", "id": "2614_evjvqa", "conversations": [{"from": "human", "value": "where is the young man visiting?\n<image>"}, {"from": "gpt", "value": "at a castle"}]}
{"image": "00000002614.jpg", "id": "2614_evjvqa", "conversations": [{"from": "human", "value": "whom does the young man visit with?\n<image>"}, {"from": "gpt", "value": "the young man visits alone"}]}
{"image": "00000002614.jpg", "id": "2614_evjvqa", "conversations": [{"from": "human", "value": "how many people are standing in front of the castle?\n<image>"}, {"from": "gpt", "value": "nobody is standing in front of the castle"}]}
{"image": "00000002614.jpg", "id": "2614_evjvqa", "conversations": [{"from": "human", "value": "how many conical hats does the young man bring?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002616.jpg", "id": "2616_evjvqa", "conversations": [{"from": "human", "value": "where are these lanterns hung?\n<image>"}, {"from": "gpt", "value": "above a street"}]}
{"image": "00000002616.jpg", "id": "2616_evjvqa", "conversations": [{"from": "human", "value": "where are the stalls displayed?\n<image>"}, {"from": "gpt", "value": "on both sides of the road"}]}
{"image": "00000002616.jpg", "id": "2616_evjvqa", "conversations": [{"from": "human", "value": "what do the two sides of the road display?\n<image>"}, {"from": "gpt", "value": "the stalls selling goods"}]}
{"image": "00000002617.jpg", "id": "2617_evjvqa", "conversations": [{"from": "human", "value": "how many people are selling?\n<image>"}, {"from": "gpt", "value": "four"}]}
{"image": "00000002617.jpg", "id": "2617_evjvqa", "conversations": [{"from": "human", "value": "how many employees selling wear brown shirts?\n<image>"}, {"from": "gpt", "value": "three"}]}
{"image": "00000002617.jpg", "id": "2617_evjvqa", "conversations": [{"from": "human", "value": "what are the three girls in brown shirts doing?\n<image>"}, {"from": "gpt", "value": "selling"}]}
{"image": "00000002617.jpg", "id": "2617_evjvqa", "conversations": [{"from": "human", "value": "what do the three girls in brown shirts sell?\n<image>"}, {"from": "gpt", "value": "fast food"}]}
{"image": "00000002617.jpg", "id": "2617_evjvqa", "conversations": [{"from": "human", "value": "where are the three girls in brown shirts standing?\n<image>"}, {"from": "gpt", "value": "at a fast food stall"}]}
{"image": "00000002621.jpg", "id": "2621_evjvqa", "conversations": [{"from": "human", "value": "where does this alley lead?\n<image>"}, {"from": "gpt", "value": "to a river"}]}
{"image": "00000002621.jpg", "id": "2621_evjvqa", "conversations": [{"from": "human", "value": "where are these houses built?\n<image>"}, {"from": "gpt", "value": "by a river"}]}
{"image": "00000002623.jpg", "id": "2623_evjvqa", "conversations": [{"from": "human", "value": "where is the young man in black shirt going?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]}
{"image": "00000002623.jpg", "id": "2623_evjvqa", "conversations": [{"from": "human", "value": "what is the young man in black shirt doing?\n<image>"}, {"from": "gpt", "value": "going to the supermarket to buy things"}]}
{"image": "00000002623.jpg", "id": "2623_evjvqa", "conversations": [{"from": "human", "value": "whom does the young man stand near the cart with?\n<image>"}, {"from": "gpt", "value": "the young man stands alone"}]}
{"image": "00000002623.jpg", "id": "2623_evjvqa", "conversations": [{"from": "human", "value": "what products does the department right behind the young man sell?\n<image>"}, {"from": "gpt", "value": "fish sauce"}]}
{"image": "00000002623.jpg", "id": "2623_evjvqa", "conversations": [{"from": "human", "value": "what does the department to the right of the department selling fish sauce sell?\n<image>"}, {"from": "gpt", "value": "cooking oil"}]}
{"image": "00000002625.jpg", "id": "2625_evjvqa", "conversations": [{"from": "human", "value": "what products does this shop sell?\n<image>"}, {"from": "gpt", "value": "christmas decorations"}]}
{"image": "00000002625.jpg", "id": "2625_evjvqa", "conversations": [{"from": "human", "value": "how many people are standing inside the shop selling christmas decorations?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002625.jpg", "id": "2625_evjvqa", "conversations": [{"from": "human", "value": "how many motorbikes are being parked in the shop selling christmas decorations?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002632.jpg", "id": "2632_evjvqa", "conversations": [{"from": "human", "value": "how many people are standing at the promotion area?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002632.jpg", "id": "2632_evjvqa", "conversations": [{"from": "human", "value": "how many people are passing by the promotion area?\n<image>"}, {"from": "gpt", "value": "one"}]}
{"image": "00000002636.jpg", "id": "2636_evjvqa", "conversations": [{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "in a market"}]}
{"image": "00000002637.jpg", "id": "2637_evjvqa", "conversations": [{"from": "human", "value": "whom is the girl in green shirt walking with?\n<image>"}, {"from": "gpt", "value": "the girl in black and white striped shirt"}]}
{"image": "00000002637.jpg", "id": "2637_evjvqa", "conversations": [{"from": "human", "value": "how many vehicles are being parked at the curb where the girl in green shirt is walking?\n<image>"}, {"from": "gpt", "value": "three"}]}
{"image": "00000002637.jpg", "id": "2637_evjvqa", "conversations": [{"from": "human", "value": "how many motorbikes are parked at curb opposite the curb the girl in green shirt is walking?\n<image>"}, {"from": "gpt", "value": "two"}]}
{"image": "00000002645.jpg", "id": "2645_evjvqa", "conversations": [{"from": "human", "value": "what exhibits does this area display?\n<image>"}, {"from": "gpt", "value": "a tank"}]}
{"image": "00000002645.jpg", "id": "2645_evjvqa", "conversations": [{"from": "human", "value": "where is the tank placed?\n<image>"}, {"from": "gpt", "value": "in a display area"}]}
{"image": "00000002647.jpg", "id": "2647_evjvqa", "conversations": [{"from": "human", "value": "what items are displayed inside the building?\n<image>"}, {"from": "gpt", "value": "motorbikes"}]}
{"image": "00000002650.jpg", "id": "2650_evjvqa", "conversations": [{"from": "human", "value": "which products are the two women buying?\n<image>"}, {"from": "gpt", "value": "vegetables"}]}
{"image": "00000002650.jpg", "id": "2650_evjvqa", "conversations": [{"from": "human", "value": "what is the woman in white shirt doing?\n<image>"}, {"from": "gpt", "value": "selecting vegetables"}]}
{"image": "00000002650.jpg", "id": "2650_evjvqa", "conversations": [{"from": "human", "value": "what is the woman in pink shirt doing?\n<image>"}, {"from": "gpt", "value": "selecting vegetables"}]}
{"image": "00000002650.jpg", "id": "2650_evjvqa", "conversations": [{"from": "human", "value": "where is the woman in white shirt standing?\n<image>"}, {"from": "gpt", "value": "at the vegetable stall"}]}
{"image": "00000002650.jpg", "id": "2650_evjvqa", "conversations": [{"from": "human", "value": "where is the woman in pink shirt standing?\n<image>"}, {"from": "gpt", "value": "at the vegetable stall"}]}
{"image": "00000002650.jpg", "id": "2650_evjvqa", "conversations": [{"from": "human", "value": "how many people are standing at the vegetable stall?\n<image>"}, {"from": "gpt", "value": "two"}]}
{"image": "00000002653.jpg", "id": "2653_evjvqa", "conversations": [{"from": "human", "value": "what is the woman in pink shirt doing?\n<image>"}, {"from": "gpt", "value": "pressing the phone"}]}
{"image": "00000002653.jpg", "id": "2653_evjvqa", "conversations": [{"from": "human", "value": "what products does the woman pressing the phone sell? \n<image>"}, {"from": "gpt", "value": "hats"}]}
{"image": "00000002653.jpg", "id": "2653_evjvqa", "conversations": [{"from": "human", "value": "what does the stall next to the stall of the woman in pink shirt sell?\n<image>"}, {"from": "gpt", "value": "footwear"}]}
{"image": "00000002655.jpg", "id": "2655_evjvqa", "conversations": [{"from": "human", "value": "what is the girl doing?\n<image>"}, {"from": "gpt", "value": "selecting bracelets"}]}
{"image": "00000002655.jpg", "id": "2655_evjvqa", "conversations": [{"from": "human", "value": "where is the girl standing?\n<image>"}, {"from": "gpt", "value": "at a bracelet stall"}]}
{"image": "00000002655.jpg", "id": "2655_evjvqa", "conversations": [{"from": "human", "value": "how many people are standing at the bracelet stall?\n<image>"}, {"from": "gpt", "value": "two girls are standing at the bracelet stall"}]}
{"image": "00000002656.jpg", "id": "2656_evjvqa", "conversations": [{"from": "human", "value": "how many people are sitting in the yard?\n<image>"}, {"from": "gpt", "value": "three"}]}
{"image": "00000002656.jpg", "id": "2656_evjvqa", "conversations": [{"from": "human", "value": "whom does the man sit with?\n<image>"}, {"from": "gpt", "value": "a boy in orange shirt and a woman"}]}
{"image": "00000002656.jpg", "id": "2656_evjvqa", "conversations": [{"from": "human", "value": "what color is the house where the man sits mainly painted?\n<image>"}, {"from": "gpt", "value": "yellow"}]}
{"image": "00000002656.jpg", "id": "2656_evjvqa", "conversations": [{"from": "human", "value": "whom does the boy in orange shirt sit with?\n<image>"}, {"from": "gpt", "value": "a woman and a man in red shirt"}]}
{"image": "00000002658.jpg", "id": "2658_evjvqa", "conversations": [{"from": "human", "value": "where is the woman and the man holding baskets in their hands walking?\n<image>"}, {"from": "gpt", "value": "in a corridor selling vegetables"}]}
{"image": "00000002658.jpg", "id": "2658_evjvqa", "conversations": [{"from": "human", "value": "what products are displayed on both sides of the corridor where the woman holding a basket are walking?\n<image>"}, {"from": "gpt", "value": "vegetables"}]}
{"image": "00000002658.jpg", "id": "2658_evjvqa", "conversations": [{"from": "human", "value": "whom is the woman holding a basket in hand walking with?\n<image>"}, {"from": "gpt", "value": "with a man holding basket in hand"}]}
{"image": "00000002658.jpg", "id": "2658_evjvqa", "conversations": [{"from": "human", "value": "how many people are on the corridor?\n<image>"}, {"from": "gpt", "value": "three"}]}
{"image": "00000002658.jpg", "id": "2658_evjvqa", "conversations": [{"from": "human", "value": "what do two people holding baskets want to buy?\n<image>"}, {"from": "gpt", "value": "vegetables"}]}
{"image": "00000002660.jpg", "id": "2660_evjvqa", "conversations": [{"from": "human", "value": "where are the lanterns hung?\n<image>"}, {"from": "gpt", "value": "in a cafe"}]}
{"image": "00000002663.jpg", "id": "2663_evjvqa", "conversations": [{"from": "human", "value": "how many people are walking on the corridor?\n<image>"}, {"from": "gpt", "value": "two"}]}
{"image": "00000002663.jpg", "id": "2663_evjvqa", "conversations": [{"from": "human", "value": "whom is the woman in black shirt going with?\n<image>"}, {"from": "gpt", "value": "a woman in yellow shirt"}]}
{"image": "00000002663.jpg", "id": "2663_evjvqa", "conversations": [{"from": "human", "value": "whom is the woman in yellow shirt going with?\n<image>"}, {"from": "gpt", "value": "a woman in black shirt"}]}
{"image": "00000002672.jpg", "id": "2672_evjvqa", "conversations": [{"from": "human", "value": "what is the group of students doing?\n<image>"}, {"from": "gpt", "value": "playing tug"}]}
{"image": "00000002672.jpg", "id": "2672_evjvqa", "conversations": [{"from": "human", "value": "what game is the group of students playing?\n<image>"}, {"from": "gpt", "value": "playing tug"}]}
{"image": "00000002672.jpg", "id": "2672_evjvqa", "conversations": [{"from": "human", "value": "what game are the students participating?\n<image>"}, {"from": "gpt", "value": "playing tug"}]}