Columns: image (string, length 15), id (string, length 8-12), conversations (string, length 79-268). A minimal parsing sketch follows the records below.
00000002255.jpg
2255_evjvqa
[{"from": "human", "value": "what are the man selling eggs doing?\n<image>"}, {"from": "gpt", "value": "giving the eggs to the guests"}]
00000002255.jpg
2255_evjvqa
[{"from": "human", "value": "what is the man in yellow t-shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing eggs"}]
00000002255.jpg
2255_evjvqa
[{"from": "human", "value": "what aisle is the man wearing blue t-shirt and helmet standing at?\n<image>"}, {"from": "gpt", "value": "at the egg aisle"}]
00000002255.jpg
2255_evjvqa
[{"from": "human", "value": "how many motorbikes are there behind the egg aisle?\n<image>"}, {"from": "gpt", "value": "one motorbike"}]
00000002263.jpg
2263_evjvqa
[{"from": "human", "value": "how many boats are moving on the water?\n<image>"}, {"from": "gpt", "value": "one boat"}]
00000002272.jpg
2272_evjvqa
[{"from": "human", "value": "how many people are sitting in the shop on the left sidewalk?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000002272.jpg
2272_evjvqa
[{"from": "human", "value": "where is the woman wearing conical hat standing at?\n<image>"}, {"from": "gpt", "value": "at the store on the left sidewalk"}]
00000002272.jpg
2272_evjvqa
[{"from": "human", "value": "how many cycloes are moving on the road?\n<image>"}, {"from": "gpt", "value": "one cyclo"}]
00000002274.jpg
2274_evjvqa
[{"from": "human", "value": "where is the man sitting at?\n<image>"}, {"from": "gpt", "value": "at the tet decoration shop"}]
00000002274.jpg
2274_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "making the tet decorations"}]
00000002274.jpg
2274_evjvqa
[{"from": "human", "value": "what color is the shirt of the man selling tet decorations?\n<image>"}, {"from": "gpt", "value": "red"}]
00000002274.jpg
2274_evjvqa
[{"from": "human", "value": "how many people are sitting with the man?\n<image>"}, {"from": "gpt", "value": "the man is sitting alone"}]
00000002279.jpg
2279_evjvqa
[{"from": "human", "value": "what products are there in the glass?\n<image>"}, {"from": "gpt", "value": "phones"}]
00000002279.jpg
2279_evjvqa
[{"from": "human", "value": "what products are exhibited behind the glass?\n<image>"}, {"from": "gpt", "value": "mobile phones"}]
00000002279.jpg
2279_evjvqa
[{"from": "human", "value": "what products are sold at this area?\n<image>"}, {"from": "gpt", "value": "mobile phones"}]
00000002282.jpg
2282_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "a mangrove"}]
00000002283.jpg
2283_evjvqa
[{"from": "human", "value": "what art work is exhibited on the road?\n<image>"}, {"from": "gpt", "value": "pictures"}]
00000002283.jpg
2283_evjvqa
[{"from": "human", "value": "what is the man in blue shirt holding on his hand?\n<image>"}, {"from": "gpt", "value": "a camera"}]
00000002283.jpg
2283_evjvqa
[{"from": "human", "value": "what is the job of the man in blue shirt?\n<image>"}, {"from": "gpt", "value": "the photographer"}]
00000002285.jpg
2285_evjvqa
[{"from": "human", "value": "what products does this market sell? \n<image>"}, {"from": "gpt", "value": "clothes"}]
00000002286.jpg
2286_evjvqa
[{"from": "human", "value": "what is this woman about to buy?\n<image>"}, {"from": "gpt", "value": "handwash liquid"}]
00000002292.jpg
2292_evjvqa
[{"from": "human", "value": "where is the girl in black t-shirt sitting at?\n<image>"}, {"from": "gpt", "value": "on a railway"}]
00000002292.jpg
2292_evjvqa
[{"from": "human", "value": "how many motorbikes are there close to the railway?\n<image>"}, {"from": "gpt", "value": "one motorbike"}]
00000002292.jpg
2292_evjvqa
[{"from": "human", "value": "how many girls are sitting on the railway?\n<image>"}, {"from": "gpt", "value": "three girls"}]
00000002292.jpg
2292_evjvqa
[{"from": "human", "value": "how many people are sitting with the girl in orange shirt?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000002294.jpg
2294_evjvqa
[{"from": "human", "value": "what above the people's heads is this avenue decorated with?\n<image>"}, {"from": "gpt", "value": "lanterns"}]
00000002295.jpg
2295_evjvqa
[{"from": "human", "value": "what is this room exhibiting?\n<image>"}, {"from": "gpt", "value": "ao dai"}]
00000002295.jpg
2295_evjvqa
[{"from": "human", "value": "what is exhibited behind the glasses?\n<image>"}, {"from": "gpt", "value": "ao dai"}]
00000002295.jpg
2295_evjvqa
[{"from": "human", "value": "how many are there visiting in the room?\n<image>"}, {"from": "gpt", "value": "nobody is visiting"}]
00000002297.jpg
2297_evjvqa
[{"from": "human", "value": "what is this store selling ?\n<image>"}, {"from": "gpt", "value": "cars "}]
00000002298.jpg
2298_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "at the checkout of a grocery store"}]
00000002298.jpg
2298_evjvqa
[{"from": "human", "value": "how many people are paying for the bills?\n<image>"}, {"from": "gpt", "value": "nobody is paying for the bills"}]
00000002298.jpg
2298_evjvqa
[{"from": "human", "value": "how many people are there in this grocery store?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000002298.jpg
2298_evjvqa
[{"from": "human", "value": "how many people are sitting with the girl?\n<image>"}, {"from": "gpt", "value": "the girl is sitting alone"}]
00000001601.jpg
1601_evjvqa
[{"from": "human", "value": "how many cars are parked at the front door?\n<image>"}, {"from": "gpt", "value": "one"}]
00000001601.jpg
1601_evjvqa
[{"from": "human", "value": "what color is the car parked at the front door?\n<image>"}, {"from": "gpt", "value": "black"}]
00000001601.jpg
1601_evjvqa
[{"from": "human", "value": "what color is the motorbike parked in front of the house door?\n<image>"}, {"from": "gpt", "value": "orange"}]
00000001602.jpg
1602_evjvqa
[{"from": "human", "value": "where was this scene recorded?\n<image>"}, {"from": "gpt", "value": "on a beach"}]
00000001603.jpg
1603_evjvqa
[{"from": "human", "value": "how many men are going to see flowers?\n<image>"}, {"from": "gpt", "value": "two men"}]
00000001603.jpg
1603_evjvqa
[{"from": "human", "value": "what are the two men doing?\n<image>"}, {"from": "gpt", "value": "viewing flowers"}]
00000001603.jpg
1603_evjvqa
[{"from": "human", "value": "where are two men standing?\n<image>"}, {"from": "gpt", "value": "standing in a flower garden"}]
00000001603.jpg
1603_evjvqa
[{"from": "human", "value": "what do two men intend to buy?\n<image>"}, {"from": "gpt", "value": "flowers"}]
00000001603.jpg
1603_evjvqa
[{"from": "human", "value": "what color of the helmet is the man in the red striped shirt wearing?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000001612.jpg
1612_evjvqa
[{"from": "human", "value": "with how many people do the man in the green shirt walk on the street?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001612.jpg
1612_evjvqa
[{"from": "human", "value": "which kind of stand is the man in the blue shirt going nearby?\n<image>"}, {"from": "gpt", "value": "toy stand"}]
00000001612.jpg
1612_evjvqa
[{"from": "human", "value": "what color of the shirt is the woman walking with the man in the blue shirt wearing?\n<image>"}, {"from": "gpt", "value": "black"}]
00000001612.jpg
1612_evjvqa
[{"from": "human", "value": "what is the little girl standing near the toy stand doing?\n<image>"}, {"from": "gpt", "value": "reading"}]
00000001612.jpg
1612_evjvqa
[{"from": "human", "value": "with whom is the reading girl strolling along the street?\n<image>"}, {"from": "gpt", "value": "with the man wearing a red shirt"}]
00000001613.jpg
1613_evjvqa
[{"from": "human", "value": "how many girls are having a meal?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000001613.jpg
1613_evjvqa
[{"from": "human", "value": "what are two girls doing?\n<image>"}, {"from": "gpt", "value": "eating"}]
00000001613.jpg
1613_evjvqa
[{"from": "human", "value": "how many people does the girl in the white t-shirt eat with?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001613.jpg
1613_evjvqa
[{"from": "human", "value": "how many people does the girl wearing a green shirt eat with?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001616.jpg
1616_evjvqa
[{"from": "human", "value": "how many people are moving by bicycles in the painting?\n<image>"}, {"from": "gpt", "value": "there are two people traveling by bicycle"}]
00000001616.jpg
1616_evjvqa
[{"from": "human", "value": "how many bicycles are in the painting moving on the roadway?\n<image>"}, {"from": "gpt", "value": "the painting depicts a bicycle which is moving on the roadway"}]
00000001619.jpg
1619_evjvqa
[{"from": "human", "value": "what is the woman wearing the yellow t-shirt doing?\n<image>"}, {"from": "gpt", "value": "buying toys"}]
00000001619.jpg
1619_evjvqa
[{"from": "human", "value": "where is the girl wearing a yellow t-shirt standing?\n<image>"}, {"from": "gpt", "value": "toy shop"}]
00000001619.jpg
1619_evjvqa
[{"from": "human", "value": "what is the girl wearing a yellow t-shirt about to buy?\n<image>"}, {"from": "gpt", "value": "a toy bar"}]
00000001619.jpg
1619_evjvqa
[{"from": "human", "value": "where is the girl in pink standing?\n<image>"}, {"from": "gpt", "value": "at a toy shop"}]
00000001619.jpg
1619_evjvqa
[{"from": "human", "value": "how many people are standing at the toy store?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000001627.jpg
1627_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "carrying a tray of food for guests"}]
00000001627.jpg
1627_evjvqa
[{"from": "human", "value": "what is the man holding in his hand?\n<image>"}, {"from": "gpt", "value": "a tray of food"}]
00000001627.jpg
1627_evjvqa
[{"from": "human", "value": "where is the man going?\n<image>"}, {"from": "gpt", "value": "in the middle of the market"}]
00000001627.jpg
1627_evjvqa
[{"from": "human", "value": "where is the woman in the striped shirt standing?\n<image>"}, {"from": "gpt", "value": "a counter that sells kinds of baskets"}]
00000001627.jpg
1627_evjvqa
[{"from": "human", "value": "what stall is the woman in the striped shirt standing at?\n<image>"}, {"from": "gpt", "value": "a stall that sells kinds of baskets"}]
00000001628.jpg
1628_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "taking a photo"}]
00000001628.jpg
1628_evjvqa
[{"from": "human", "value": "what scene is the man photographing?\n<image>"}, {"from": "gpt", "value": "the house"}]
00000001628.jpg
1628_evjvqa
[{"from": "human", "value": "what tools does the man use to take pictures?\n<image>"}, {"from": "gpt", "value": "a phone"}]
00000001630.jpg
1630_evjvqa
[{"from": "human", "value": "how many cars are parked outside the house?\n<image>"}, {"from": "gpt", "value": "there is a car parked"}]
00000001642.jpg
1642_evjvqa
[{"from": "human", "value": "where is this bridge built?\n<image>"}, {"from": "gpt", "value": "on the sea"}]
00000001642.jpg
1642_evjvqa
[{"from": "human", "value": "what is this bridge across?\n<image>"}, {"from": "gpt", "value": "across the sea"}]
00000001643.jpg
1643_evjvqa
[{"from": "human", "value": "what is the man on the left holding in his hand?\n<image>"}, {"from": "gpt", "value": "a loudspeaker"}]
00000001643.jpg
1643_evjvqa
[{"from": "human", "value": "what is the man holding a loudspeaker doing?\n<image>"}, {"from": "gpt", "value": "announcing"}]
00000001643.jpg
1643_evjvqa
[{"from": "human", "value": "what color uniform is the man on the right wearing?\n<image>"}, {"from": "gpt", "value": "the uniform is blue"}]
00000001645.jpg
1645_evjvqa
[{"from": "human", "value": "at which stall is the peach tree decorated?\n<image>"}, {"from": "gpt", "value": "the cake stand"}]
00000001645.jpg
1645_evjvqa
[{"from": "human", "value": "how many people are there who came to buy cake?\n<image>"}, {"from": "gpt", "value": "no one came to buy cake"}]
00000001645.jpg
1645_evjvqa
[{"from": "human", "value": "what tree is the cake stand decorated with?\n<image>"}, {"from": "gpt", "value": "a peach tree"}]
00000001647.jpg
1647_evjvqa
[{"from": "human", "value": "what products does this store sell?\n<image>"}, {"from": "gpt", "value": "christmas ornaments"}]
00000001649.jpg
1649_evjvqa
[{"from": "human", "value": "where does the man in black lead the woman?\n<image>"}, {"from": "gpt", "value": "to buy clothes"}]
00000001649.jpg
1649_evjvqa
[{"from": "human", "value": "what is the woman standing near the black suit man doing?\n<image>"}, {"from": "gpt", "value": "choosing clothes"}]
00000001649.jpg
1649_evjvqa
[{"from": "human", "value": "who is the woman standing to choose clothes to buy with?\n<image>"}, {"from": "gpt", "value": "with a man wearing a black jacket"}]
00000001649.jpg
1649_evjvqa
[{"from": "human", "value": "how many people is the woman standing to choose clothes to buy with?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001649.jpg
1649_evjvqa
[{"from": "human", "value": "with how many people does the man go shopping for clothes with?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001650.jpg
1650_evjvqa
[{"from": "human", "value": "what color does the seller of sticky rice wear?\n<image>"}, {"from": "gpt", "value": "pink"}]
00000001651.jpg
1651_evjvqa
[{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "coming to buy milk"}]
00000001651.jpg
1651_evjvqa
[{"from": "human", "value": "what does the woman intend to buy?\n<image>"}, {"from": "gpt", "value": "a bottle of milk"}]
00000001651.jpg
1651_evjvqa
[{"from": "human", "value": "what product is the woman looking at?\n<image>"}, {"from": "gpt", "value": "milk"}]
00000001651.jpg
1651_evjvqa
[{"from": "human", "value": "what is the woman holding in her hand?\n<image>"}, {"from": "gpt", "value": "a bottle of milk"}]
00000001651.jpg
1651_evjvqa
[{"from": "human", "value": "how many people are coming to buy milk?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001651.jpg
1651_evjvqa
[{"from": "human", "value": "what color of the shirt does the woman choosing milk wear?\n<image>"}, {"from": "gpt", "value": "pink"}]
00000001651.jpg
1651_evjvqa
[{"from": "human", "value": "with whom is the woman who is choosing milk going shopping for milk?\n<image>"}, {"from": "gpt", "value": "the woman is walking alone"}]
00000001653.jpg
1653_evjvqa
[{"from": "human", "value": "how many working employees are in the store?\n<image>"}, {"from": "gpt", "value": "three employees"}]
00000001653.jpg
1653_evjvqa
[{"from": "human", "value": "how many working male employees are sitting?\n<image>"}, {"from": "gpt", "value": "an employee"}]
00000001653.jpg
1653_evjvqa
[{"from": "human", "value": "how many female employees is the male employee working with?\n<image>"}, {"from": "gpt", "value": "two female employees"}]
00000001655.jpg
1655_evjvqa
[{"from": "human", "value": "where is the young man standing?\n<image>"}, {"from": "gpt", "value": "on the railway"}]
00000001655.jpg
1655_evjvqa
[{"from": "human", "value": "what color of the shirt does the young man standing in the middle of the railway wear?\n<image>"}, {"from": "gpt", "value": "white"}]
00000001655.jpg
1655_evjvqa
[{"from": "human", "value": "whom is the young man with?\n<image>"}, {"from": "gpt", "value": "alone"}]
00000001657.jpg
1657_evjvqa
[{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "on a boat"}]
00000001657.jpg
1657_evjvqa
[{"from": "human", "value": "where is this group of people going?\n<image>"}, {"from": "gpt", "value": "traveling on a river"}]
00000001658.jpg
1658_evjvqa
[{"from": "human", "value": "what is the woman in blue doing?\n<image>"}, {"from": "gpt", "value": "giving fish to a customer"}]
00000001658.jpg
1658_evjvqa
[{"from": "human", "value": "what is the woman in the black and white striped shirt doing?\n<image>"}, {"from": "gpt", "value": "receiving fish from the woman in the blue shirt"}]