Columns: image (string, length 15), id (string, length 8-12), conversations (string, length 79-268)
00000000574.jpg
574_evjvqa
[{"from": "human", "value": "what is the woman in red shirt wearing on her collar?\n<image>"}, {"from": "gpt", "value": "glasses"}]
00000000574.jpg
574_evjvqa
[{"from": "human", "value": "how many children are viewing the relics?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000575.jpg
575_evjvqa
[{"from": "human", "value": "how do most people in the market travel?\n<image>"}, {"from": "gpt", "value": "on foot"}]
00000000575.jpg
575_evjvqa
[{"from": "human", "value": "what color lanterns are the night markets decorated with?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000580.jpg
580_evjvqa
[{"from": "human", "value": "who is this young man standing with?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000580.jpg
580_evjvqa
[{"from": "human", "value": "what is in this young man's hand?\n<image>"}, {"from": "gpt", "value": "a bag of bubble tea cup"}]
00000000580.jpg
580_evjvqa
[{"from": "human", "value": "what is this person wearing on his shoulder?\n<image>"}, {"from": "gpt", "value": "bag"}]
00000000581.jpg
581_evjvqa
[{"from": "human", "value": "how many helmets are there on the motorbike?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000584.jpg
584_evjvqa
[{"from": "human", "value": "how many customers choose to sit outside the cafe?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000584.jpg
584_evjvqa
[{"from": "human", "value": "who are standing at the entrance of the cafe?\n<image>"}, {"from": "gpt", "value": "two staff"}]
00000000584.jpg
584_evjvqa
[{"from": "human", "value": "where are the customers of this cafe currently sitting?\n<image>"}, {"from": "gpt", "value": "inside the cafe"}]
00000000585.jpg
585_evjvqa
[{"from": "human", "value": "what are these two people standing there for?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}]
00000000585.jpg
585_evjvqa
[{"from": "human", "value": "what is the young man standing next to?\n<image>"}, {"from": "gpt", "value": "bicycle"}]
00000000585.jpg
585_evjvqa
[{"from": "human", "value": "who is the young man standing next to?\n<image>"}, {"from": "gpt", "value": "the girl wearing ao dai"}]
00000000585.jpg
585_evjvqa
[{"from": "human", "value": "what is on the handlebar of the bicycle?\n<image>"}, {"from": "gpt", "value": "bouquet"}]
00000000593.jpg
593_evjvqa
[{"from": "human", "value": "what is this couple doing in the middle of the road?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}]
00000000593.jpg
593_evjvqa
[{"from": "human", "value": "where are these two people being photographed?\n<image>"}, {"from": "gpt", "value": "in the middle of the road"}]
00000000593.jpg
593_evjvqa
[{"from": "human", "value": "how is the person facing the young man dressed?\n<image>"}, {"from": "gpt", "value": "red ao dai"}]
00000000593.jpg
593_evjvqa
[{"from": "human", "value": "what is the girl in the red ao dai holding in her hand?\n<image>"}, {"from": "gpt", "value": "flower basket"}]
00000000595.jpg
595_evjvqa
[{"from": "human", "value": "what is the child sitting on?\n<image>"}, {"from": "gpt", "value": "supermarket trolley"}]
00000000595.jpg
595_evjvqa
[{"from": "human", "value": "what is the man pushing the baby with?\n<image>"}, {"from": "gpt", "value": "supermarket trolley"}]
00000000595.jpg
595_evjvqa
[{"from": "human", "value": "how is the person pushing the baby in the supermarket dressed?\n<image>"}, {"from": "gpt", "value": "white t-shirt and blue shorts"}]
00000000599.jpg
599_evjvqa
[{"from": "human", "value": "what is this girl doing while holding the phone in her hand?\n<image>"}, {"from": "gpt", "value": "going downstairs"}]
00000000599.jpg
599_evjvqa
[{"from": "human", "value": "what is this girl wearing by her waist?\n<image>"}, {"from": "gpt", "value": "bag"}]
00000000599.jpg
599_evjvqa
[{"from": "human", "value": "what is this woman holding in her hand?\n<image>"}, {"from": "gpt", "value": "mobile phone"}]
00000010300.jpg
10300_evjvqa
[{"from": "human", "value": "what color is the wall of the house?\n<image>"}, {"from": "gpt", "value": "the wall of the house is yellow"}]
00000010301.jpg
10301_evjvqa
[{"from": "human", "value": "what is the shelf displaying?\n<image>"}, {"from": "gpt", "value": "the shelf is displaying fruits"}]
00000010302.jpg
10302_evjvqa
[{"from": "human", "value": "what animal is the gate decorated with?\n<image>"}, {"from": "gpt", "value": "the gate is decorated with a peacock"}]
00000010303.jpg
10303_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "everyone is standing in a gallery"}]
00000010307.jpg
10307_evjvqa
[{"from": "human", "value": "what stall are the group of women standing?\n<image>"}, {"from": "gpt", "value": "the group of women are standing in the beverage stall"}]
00000010308.jpg
10308_evjvqa
[{"from": "human", "value": "how many motorbikes are near the market gate?\n<image>"}, {"from": "gpt", "value": "there are three motorbikes near the market gate"}]
00000010309.jpg
10309_evjvqa
[{"from": "human", "value": "which position is the boat having a woman sitting on a hammock?\n<image>"}, {"from": "gpt", "value": "the boat is at the leftmost position"}]
00000010311.jpg
10311_evjvqa
[{"from": "human", "value": "what is the standing girl leaning against?\n<image>"}, {"from": "gpt", "value": "the standing girl is leaning against the handrail"}]
00000010312.jpg
10312_evjvqa
[{"from": "human", "value": "how many girls in white shirt are on the left of the man in gray coat?\n<image>"}, {"from": "gpt", "value": "on the left are two girls"}]
00000010313.jpg
10313_evjvqa
[{"from": "human", "value": "what store is everyone shopping at?\n<image>"}, {"from": "gpt", "value": "everyone is shopping at the clothing and jewelry store"}]
00000010315.jpg
10315_evjvqa
[{"from": "human", "value": "what clothes is the little girl wearing?\n<image>"}, {"from": "gpt", "value": "the little girl is wearing yellow skirt"}]
00000010316.jpg
10316_evjvqa
[{"from": "human", "value": "what color is the motorbike that the man is riding?\n<image>"}, {"from": "gpt", "value": "the motorbike is white"}]
00000010317.jpg
10317_evjvqa
[{"from": "human", "value": "where is everyone heading toward?\n<image>"}, {"from": "gpt", "value": "everyone is heading toward the monument"}]
00000010318.jpg
10318_evjvqa
[{"from": "human", "value": "what is the store selling?\n<image>"}, {"from": "gpt", "value": "the store is selling christmas decorations"}]
00000010320.jpg
10320_evjvqa
[{"from": "human", "value": "how many people are carrying poles?\n<image>"}, {"from": "gpt", "value": "there are two people carrying poles"}]
00000010322.jpg
10322_evjvqa
[{"from": "human", "value": "where is the little girl in red shirt sitting?\n<image>"}, {"from": "gpt", "value": "the little girl is sitting on a bike"}]
00000010324.jpg
10324_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this area is a clothing stall"}]
00000010325.jpg
10325_evjvqa
[{"from": "human", "value": "what scenery is the painting portraying?\n<image>"}, {"from": "gpt", "value": "the painting is portraying the scenery of an old town"}]
00000010326.jpg
10326_evjvqa
[{"from": "human", "value": "what is the woman lifting?\n<image>"}, {"from": "gpt", "value": "the woman is lifting a package on her head"}]
00000010328.jpg
10328_evjvqa
[{"from": "human", "value": "what stall is the woman in purple jacket standing at?\n<image>"}, {"from": "gpt", "value": "the woman in purple jacket is standing at the vegetable stall"}]
00000010331.jpg
10331_evjvqa
[{"from": "human", "value": "how many bowls of fish sauce are there?\n<image>"}, {"from": "gpt", "value": "there are two bowls of fish sauce"}]
00000010333.jpg
10333_evjvqa
[{"from": "human", "value": "where is the woman sitting?\n<image>"}, {"from": "gpt", "value": "the woman is sitting at the fruit stall"}]
00000010335.jpg
10335_evjvqa
[{"from": "human", "value": "how many chairs are around the table?\n<image>"}, {"from": "gpt", "value": "there are four chairs around the table"}]
00000010336.jpg
10336_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "the girl is sitting on a canoe"}]
00000010337.jpg
10337_evjvqa
[{"from": "human", "value": "what color shirt is the walking little girl wearing?\n<image>"}, {"from": "gpt", "value": "the little girl is wearing red shirt"}]
00000010339.jpg
10339_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "everyone is at a temple"}]
00000010342.jpg
10342_evjvqa
[{"from": "human", "value": "how many cars are parked in the street?\n<image>"}, {"from": "gpt", "value": "there is one car parked in the street"}]
00000010343.jpg
10343_evjvqa
[{"from": "human", "value": "where is the boy in gray shirt standing compared to the girl in blue coat?\n<image>"}, {"from": "gpt", "value": "the boy in gray shirt is standing in front of the girl"}]
00000010344.jpg
10344_evjvqa
[{"from": "human", "value": "how many pots are in the museum?\n<image>"}, {"from": "gpt", "value": "there is one pot"}]
00000010345.jpg
10345_evjvqa
[{"from": "human", "value": "how many cars are running in the street?\n<image>"}, {"from": "gpt", "value": "there are two cars running in the street"}]
00000010347.jpg
10347_evjvqa
[{"from": "human", "value": "how many people are holding incense sticks?\n<image>"}, {"from": "gpt", "value": "there are three people holding incense sticks"}]
00000010348.jpg
10348_evjvqa
[{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "everyone is standing in a supermarket"}]
00000010349.jpg
10349_evjvqa
[{"from": "human", "value": "which position is the police officer standing at?\n<image>"}, {"from": "gpt", "value": "standing at the leftmost position"}]
00000010350.jpg
10350_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "sitting on a bench"}]
00000010351.jpg
10351_evjvqa
[{"from": "human", "value": "what color shirt is the female cashier wearing?\n<image>"}, {"from": "gpt", "value": "the female cashier is wearing red shirt"}]
00000010352.jpg
10352_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "the man is pushing an electric bike"}]
00000010354.jpg
10354_evjvqa
[{"from": "human", "value": "what color shirt is the person standing next to the price board wearing?\n<image>"}, {"from": "gpt", "value": "the person standing next to the price board is wearing black shirt"}]
00000010355.jpg
10355_evjvqa
[{"from": "human", "value": "how many people are speaking?\n<image>"}, {"from": "gpt", "value": "there is one person speaking"}]
00000010356.jpg
10356_evjvqa
[{"from": "human", "value": "what color are the flowers being sold?\n<image>"}, {"from": "gpt", "value": "the flowers are yellow"}]
00000010360.jpg
10360_evjvqa
[{"from": "human", "value": "how many women are sitting in the stall?\n<image>"}, {"from": "gpt", "value": "there are two women sitting in the stall"}]
00000010364.jpg
10364_evjvqa
[{"from": "human", "value": "what color shirt is the boy wearing?\n<image>"}, {"from": "gpt", "value": "the boy is wearing blue shirt"}]
00000010367.jpg
10367_evjvqa
[{"from": "human", "value": "what color shirt is the young man wearing?\n<image>"}, {"from": "gpt", "value": "the young man is wearing yellow shirt"}]
00000010369.jpg
10369_evjvqa
[{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "everyone is standing on a paddy field"}]
00000010371.jpg
10371_evjvqa
[{"from": "human", "value": "how many soldiers is the statue portraying? \n<image>"}, {"from": "gpt", "value": "the statue is portraying one soldier"}]
00000010373.jpg
10373_evjvqa
[{"from": "human", "value": "what color shirt is the supermarket employee wearing?\n<image>"}, {"from": "gpt", "value": "the employee is wearing blue shirt"}]
00000010374.jpg
10374_evjvqa
[{"from": "human", "value": "what color clothes is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing black coat and black dress"}]
00000010375.jpg
10375_evjvqa
[{"from": "human", "value": "where is the woman in red ao dai standing?\n<image>"}, {"from": "gpt", "value": "the woman is standing in front of a decoration store"}]
00000010377.jpg
10377_evjvqa
[{"from": "human", "value": "how many little girls are beside santa claus?\n<image>"}, {"from": "gpt", "value": "there are two little girls beside santa claus"}]
00000010378.jpg
10378_evjvqa
[{"from": "human", "value": "what is the medical staff member on the left doing?\n<image>"}, {"from": "gpt", "value": "the medical staff member is taking temperature of a person"}]
00000010379.jpg
10379_evjvqa
[{"from": "human", "value": "what is the woman in blue shirt doing?\n<image>"}, {"from": "gpt", "value": "the woman is inspecting a product"}]
00000010381.jpg
10381_evjvqa
[{"from": "human", "value": "what color uniforms are the kids standing in the front row wearing?\n<image>"}, {"from": "gpt", "value": "the kids are wearing red uniforms"}]
00000010384.jpg
10384_evjvqa
[{"from": "human", "value": "what color is the background of the signboard?\n<image>"}, {"from": "gpt", "value": "the signboard is red"}]
00000010385.jpg
10385_evjvqa
[{"from": "human", "value": "how many motorbikes are parked on the right sidewalk?\n<image>"}, {"from": "gpt", "value": "there is one motorbike"}]
00000010387.jpg
10387_evjvqa
[{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000010390.jpg
10390_evjvqa
[{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000010391.jpg
10391_evjvqa
[{"from": "human", "value": "how many animal statues are near the christmas tree?\n<image>"}, {"from": "gpt", "value": "there is one statue"}]
00000010393.jpg
10393_evjvqa
[{"from": "human", "value": "where is the pagoda located?\n<image>"}, {"from": "gpt", "value": "the pagoda is located on a hill"}]
00000010395.jpg
10395_evjvqa
[{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "everyone is standing in a market"}]
00000010398.jpg
10398_evjvqa
[{"from": "human", "value": "where is this scene?\n<image>"}, {"from": "gpt", "value": "this is the scene of a market"}]
00000010399.jpg
10399_evjvqa
[{"from": "human", "value": "what supermarket is this?\n<image>"}, {"from": "gpt", "value": "this is an electronics supermarket"}]
00000010401.jpg
10401_evjvqa
[{"from": "human", "value": "where is the chilli bowl placed?\n<image>"}, {"from": "gpt", "value": "the chilli bowl is placed on the vegetable plate"}]
00000010405.jpg
10405_evjvqa
[{"from": "human", "value": "where are the houses built?\n<image>"}, {"from": "gpt", "value": "the houses are built next to the railway"}]
00000010406.jpg
10406_evjvqa
[{"from": "human", "value": "how many men are wearing blue shirts?\n<image>"}, {"from": "gpt", "value": "there are two men wearing blue shirts"}]
00000010407.jpg
10407_evjvqa
[{"from": "human", "value": "what is the woman in yellow shirt doing?\n<image>"}, {"from": "gpt", "value": "the woman in yellow shirt is selling stuff to the customer"}]
00000010410.jpg
10410_evjvqa
[{"from": "human", "value": "how many people are selecting fruits?\n<image>"}, {"from": "gpt", "value": "there are four people selecting fruits"}]
00000010411.jpg
10411_evjvqa
[{"from": "human", "value": "which side is the young man standing on compared to the girl?\n<image>"}, {"from": "gpt", "value": "the young man is standing on the right of the girl"}]
00000010413.jpg
10413_evjvqa
[{"from": "human", "value": "how many men are sitting?\n<image>"}, {"from": "gpt", "value": "there are two men sitting"}]
00000010414.jpg
10414_evjvqa
[{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000010417.jpg
10417_evjvqa
[{"from": "human", "value": "how many vehicles are running in the street?\n<image>"}, {"from": "gpt", "value": "there is one vehicle running in the street"}]
00000010420.jpg
10420_evjvqa
[{"from": "human", "value": "how many white cups are there?\n<image>"}, {"from": "gpt", "value": "there are three cups"}]
00000010421.jpg
10421_evjvqa
[{"from": "human", "value": "who is carrying the girl?\n<image>"}, {"from": "gpt", "value": "the boy is carrying the girl"}]
00000010422.jpg
10422_evjvqa
[{"from": "human", "value": "how many cars are in the left street?\n<image>"}, {"from": "gpt", "value": "there is one car in the left street"}]
00000010425.jpg
10425_evjvqa
[{"from": "human", "value": "what is the girl holding on her hand?\n<image>"}, {"from": "gpt", "value": "the girl is holding seasoning packs"}]
00000010426.jpg
10426_evjvqa
[{"from": "human", "value": "which position is the young man standing at?\n<image>"}, {"from": "gpt", "value": "the young man is standing at the rightmost position"}]
00000010429.jpg
10429_evjvqa
[{"from": "human", "value": "how many people are wearing black shirt?\n<image>"}, {"from": "gpt", "value": "there are two people wearing black shirt"}]