image: string, length 15
id: string, length 8-12
conversations: string, length 79-268
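A minimal sketch of how one of these rows might be consumed, assuming each record carries the three fields above and that conversations is a JSON-encoded string exactly as it appears in the rows below (the record literal here is copied from the first row purely for illustration):

```python
import json

# One record shaped like the rows below; `conversations` stays as the
# JSON-encoded string it appears as in the data.
record = {
    "image": "00000002112.jpg",
    "id": "2112_evjvqa",
    "conversations": '[{"from": "human", "value": "what is this store selling?\\n<image>"},'
                     ' {"from": "gpt", "value": "motorcycles"}]',
}

# Decode the turns and pair each human question with the gpt answer that
# follows it; the <image> token marks where the image is injected.
turns = json.loads(record["conversations"])
for question, answer in zip(turns[::2], turns[1::2]):
    assert question["from"] == "human" and answer["from"] == "gpt"
    print(question["value"].replace("<image>", "").strip(), "->", answer["value"])
```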
00000002112.jpg
2112_evjvqa
[{"from": "human", "value": "what is this store selling?\n<image>"}, {"from": "gpt", "value": "motorcycle"}]
00000002112.jpg
2112_evjvqa
[{"from": "human", "value": "how many motorbikes are there on the sidewalk?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes on the sidewalk"}]
00000002114.jpg
2114_evjvqa
[{"from": "human", "value": "which side is the black motorbike moving compared to the car?\n<image>"}, {"from": "gpt", "value": "the motorbike is moving in front of the car"}]
00000002114.jpg
2114_evjvqa
[{"from": "human", "value": "where is the red motorbike?\n<image>"}, {"from": "gpt", "value": "next to the left sidewalk"}]
00000002115.jpg
2115_evjvqa
[{"from": "human", "value": "what is the girl in black t-shirt carrying on her shoulder?\n<image>"}, {"from": "gpt", "value": "she is carrying a black bag"}]
00000002115.jpg
2115_evjvqa
[{"from": "human", "value": "who is the walking next to the person in white dress?\n<image>"}, {"from": "gpt", "value": "the person walking next to the girl in white dress is the girl in pink dress with blue striped"}]
00000002115.jpg
2115_evjvqa
[{"from": "human", "value": "who is sitting on the sidewalk?\n<image>"}, {"from": "gpt", "value": "the man who is sitting on the sidewalk is the man wearing green t-shirt"}]
00000002119.jpg
2119_evjvqa
[{"from": "human", "value": "where is the human statue located in this room?\n<image>"}, {"from": "gpt", "value": "one is on the altar, one is on the floor"}]
00000002119.jpg
2119_evjvqa
[{"from": "human", "value": "how many chairs are ther in the room?\n<image>"}, {"from": "gpt", "value": "there are four chairs in this room"}]
00000002119.jpg
2119_evjvqa
[{"from": "human", "value": "where are the books in this room located?\n<image>"}, {"from": "gpt", "value": "on the bookshelf"}]
00000002122.jpg
2122_evjvqa
[{"from": "human", "value": "what is the woman wearing red cap doing?\n<image>"}, {"from": "gpt", "value": "go shopping in the supermarket"}]
00000002122.jpg
2122_evjvqa
[{"from": "human", "value": "what is the woman wearing red cap holding?\n<image>"}, {"from": "gpt", "value": "the woman wearing red cap is holding a packet of vegestable"}]
00000002122.jpg
2122_evjvqa
[{"from": "human", "value": "what is selling on the aisle next to the woman?\n<image>"}, {"from": "gpt", "value": "vegestable"}]
00000002125.jpg
2125_evjvqa
[{"from": "human", "value": "which side of the white-painted house is the construction located?\n<image>"}, {"from": "gpt", "value": "on the right side "}]
00000002125.jpg
2125_evjvqa
[{"from": "human", "value": "how many houses are there between the white-painted house and the black-painted house?\n<image>"}, {"from": "gpt", "value": "they are next to each other"}]
00000002125.jpg
2125_evjvqa
[{"from": "human", "value": "how many buildings are there between the construction and the black-painted house?\n<image>"}, {"from": "gpt", "value": "one building"}]
00000002130.jpg
2130_evjvqa
[{"from": "human", "value": "what vehicles are mainly used on the road in front of the building?\n<image>"}, {"from": "gpt", "value": "motorbike are mainly used"}]
00000002131.jpg
2131_evjvqa
[{"from": "human", "value": "how many people are there in frony of the hotpot store?\n<image>"}, {"from": "gpt", "value": "there is one person in front of the hotpot store"}]
00000002131.jpg
2131_evjvqa
[{"from": "human", "value": "where are the motorbike parked?\n<image>"}, {"from": "gpt", "value": "the motorbiked is parked in front of the clothing store which is next to the hotpot store"}]
00000002136.jpg
2136_evjvqa
[{"from": "human", "value": "where is the motorbike?\n<image>"}, {"from": "gpt", "value": "in front of the door"}]
00000002136.jpg
2136_evjvqa
[{"from": "human", "value": "where is the dog?\n<image>"}, {"from": "gpt", "value": "the dog is on the main road leading to the door, next to the motorbike"}]
00000002136.jpg
2136_evjvqa
[{"from": "human", "value": "which side of the door is opened?\n<image>"}, {"from": "gpt", "value": "the right side"}]
00000002137.jpg
2137_evjvqa
[{"from": "human", "value": "what color is the basket containing cabbages?\n<image>"}, {"from": "gpt", "value": "the basket containing cabbages' color is green"}]
00000002138.jpg
2138_evjvqa
[{"from": "human", "value": "how many motorbikes are there having people sitting on?\n<image>"}, {"from": "gpt", "value": "there is one motorbike "}]
00000002139.jpg
2139_evjvqa
[{"from": "human", "value": "what is the girl with the crown wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing white ao dai"}]
00000002139.jpg
2139_evjvqa
[{"from": "human", "value": "what is the girl in white ao dai wearing on her head?\n<image>"}, {"from": "gpt", "value": "she is wearing the crown"}]
00000002139.jpg
2139_evjvqa
[{"from": "human", "value": "what are some people holding the phone for?\n<image>"}, {"from": "gpt", "value": "to take a picture of the miss in white ao dai"}]
00000002139.jpg
2139_evjvqa
[{"from": "human", "value": "what are the people gathering for?\n<image>"}, {"from": "gpt", "value": "they are gathering to welcome the miss"}]
00000002145.jpg
2145_evjvqa
[{"from": "human", "value": "what is this aisle selling?\n<image>"}, {"from": "gpt", "value": "different kinds of sandals"}]
00000002145.jpg
2145_evjvqa
[{"from": "human", "value": "who is carrying the bag?\n<image>"}, {"from": "gpt", "value": "the man, the woman in blue shirt and the woman next to the woman in blue shirt"}]
00000002145.jpg
2145_evjvqa
[{"from": "human", "value": "who is wearing mask along those people?\n<image>"}, {"from": "gpt", "value": "the woman in white shirt with black striped"}]
00000002149.jpg
2149_evjvqa
[{"from": "human", "value": "what are they about to buy?\n<image>"}, {"from": "gpt", "value": "they are about to buy milk"}]
00000002149.jpg
2149_evjvqa
[{"from": "human", "value": "what is the boy in white t-shirt pointing at?\n<image>"}, {"from": "gpt", "value": "he is pointing to the milk on the shelf"}]
00000002149.jpg
2149_evjvqa
[{"from": "human", "value": "what are the woman and the kid doing?\n<image>"}, {"from": "gpt", "value": "they are choosing milk"}]
00000002151.jpg
2151_evjvqa
[{"from": "human", "value": "where is the woman carrying a lot of bags riding?\n<image>"}, {"from": "gpt", "value": "she is riding on the left of this road"}]
00000002151.jpg
2151_evjvqa
[{"from": "human", "value": "what vehicle is the woman wearing pink pants riding on?\n<image>"}, {"from": "gpt", "value": "the woman in pink pants are riding electrical bike"}]
00000002151.jpg
2151_evjvqa
[{"from": "human", "value": "what are the bike users moving on this road wearing on their head?\n<image>"}, {"from": "gpt", "value": "they wear conical hats"}]
00000002152.jpg
2152_evjvqa
[{"from": "human", "value": "what are selling at this corner of the market?\n<image>"}, {"from": "gpt", "value": "different types of fish"}]
00000002152.jpg
2152_evjvqa
[{"from": "human", "value": "is there anyone buying fish?\n<image>"}, {"from": "gpt", "value": "nobody is buying fish"}]
00000002152.jpg
2152_evjvqa
[{"from": "human", "value": "what are mainly sold along this way?\n<image>"}, {"from": "gpt", "value": "different kinds of fish"}]
00000002153.jpg
2153_evjvqa
[{"from": "human", "value": "what is blocking the left side road?\n<image>"}, {"from": "gpt", "value": "a barrier"}]
00000002153.jpg
2153_evjvqa
[{"from": "human", "value": "where are all the motorbikes in this avenue parked?\n<image>"}, {"from": "gpt", "value": "all motorbikes in this avenue are parked on the sidewalk"}]
00000002153.jpg
2153_evjvqa
[{"from": "human", "value": "are there any cars moving on this road?\n<image>"}, {"from": "gpt", "value": "there is no car at all"}]
00000002153.jpg
2153_evjvqa
[{"from": "human", "value": "where is the barrier?\n<image>"}, {"from": "gpt", "value": "the barrier is on the left side of the road"}]
00000002154.jpg
2154_evjvqa
[{"from": "human", "value": "what is the man standing for?\n<image>"}, {"from": "gpt", "value": "to state in the meeting"}]
00000002154.jpg
2154_evjvqa
[{"from": "human", "value": "what is surrounded by the tables?\n<image>"}, {"from": "gpt", "value": "pots of decorative plants"}]
00000002154.jpg
2154_evjvqa
[{"from": "human", "value": "how many people are standing in the meeting?\n<image>"}, {"from": "gpt", "value": "there is one man standing in the meeting"}]
00000002165.jpg
2165_evjvqa
[{"from": "human", "value": "what is the girl standing on the stairs holding on her hand?\n<image>"}, {"from": "gpt", "value": "the girl standing on the stairs is holding a black purse"}]
00000002165.jpg
2165_evjvqa
[{"from": "human", "value": "how many people are standing with the girl?\n<image>"}, {"from": "gpt", "value": "she is standing alone"}]
00000002170.jpg
2170_evjvqa
[{"from": "human", "value": "what is the man sitting holding the board for?\n<image>"}, {"from": "gpt", "value": "to watch the menu and order the food"}]
00000002170.jpg
2170_evjvqa
[{"from": "human", "value": "what is the left most girl is holding on her hand?\n<image>"}, {"from": "gpt", "value": "she is holding a smartphone"}]
00000002170.jpg
2170_evjvqa
[{"from": "human", "value": "what is the man wearing mask holding on his hand?\n<image>"}, {"from": "gpt", "value": "the man wearing mask is holding the menu"}]
00000002172.jpg
2172_evjvqa
[{"from": "human", "value": "who is walking behind the girl wearing jacket?\n<image>"}, {"from": "gpt", "value": "the man wears black t-shirt and blue jeans"}]
00000002172.jpg
2172_evjvqa
[{"from": "human", "value": "what is sold in the store that the girl wearing jacket is standing at?\n<image>"}, {"from": "gpt", "value": "this store sells clothes"}]
00000002175.jpg
2175_evjvqa
[{"from": "human", "value": "what is the girl wearing red ao dai is sitting on?\n<image>"}, {"from": "gpt", "value": "the girl wearing red ao dai is sitting on a red plastic chair"}]
00000002175.jpg
2175_evjvqa
[{"from": "human", "value": "where is the man in white shirt sitting compared to the girl in red ao dai?\n<image>"}, {"from": "gpt", "value": "the man in white shirt is sitting on the left of the girl in red ao dai"}]
00000002175.jpg
2175_evjvqa
[{"from": "human", "value": "where is the old man in white shirt sitting compared to the girl in red ao dai?\n<image>"}, {"from": "gpt", "value": "the old man in white shirt is sitting opposite to the girl in red ao dai"}]
00000002175.jpg
2175_evjvqa
[{"from": "human", "value": "what is the man in white shirt sitting under?\n<image>"}, {"from": "gpt", "value": "under a green umbrella"}]
00000002176.jpg
2176_evjvqa
[{"from": "human", "value": "what are these two girls raising their hands for?\n<image>"}, {"from": "gpt", "value": "these two girls are raising their hands for photographing"}]
00000002176.jpg
2176_evjvqa
[{"from": "human", "value": "what are sold at the store where the two girls are photographing at?\n<image>"}, {"from": "gpt", "value": "the two girls are photographing in front of a store selling caps"}]
00000002176.jpg
2176_evjvqa
[{"from": "human", "value": "what are the two girls carry with on their shoulder?\n<image>"}, {"from": "gpt", "value": "white handbag"}]
00000002176.jpg
2176_evjvqa
[{"from": "human", "value": "what is the sold at the store where the man in black shirt is sitting in front of?\n<image>"}, {"from": "gpt", "value": "sells caps anfd many different things"}]
00000002178.jpg
2178_evjvqa
[{"from": "human", "value": "how many people are walking on the street?\n<image>"}, {"from": "gpt", "value": "there are five people walking on the street"}]
00000002178.jpg
2178_evjvqa
[{"from": "human", "value": "where are the women in those people?\n<image>"}, {"from": "gpt", "value": "the women are moving ahead in those walking people"}]
00000002181.jpg
2181_evjvqa
[{"from": "human", "value": "what is this man tending to do in front of the house?\n<image>"}, {"from": "gpt", "value": "the man is cleaning the doors "}]
00000002181.jpg
2181_evjvqa
[{"from": "human", "value": "what is the man wearing white t-shirt holding ?\n<image>"}, {"from": "gpt", "value": "the water spray"}]
00000002181.jpg
2181_evjvqa
[{"from": "human", "value": "who is holding the water spray to wash the door?\n<image>"}, {"from": "gpt", "value": "the man who is wearing white shirt and shorts"}]
00000002183.jpg
2183_evjvqa
[{"from": "human", "value": "what is the girl in white dress holding on her hand?\n<image>"}, {"from": "gpt", "value": "she is holding a book"}]
00000002183.jpg
2183_evjvqa
[{"from": "human", "value": "what is the girl holding the book wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing white dress and high heels"}]
00000002183.jpg
2183_evjvqa
[{"from": "human", "value": "what is hanging above the girl's head?\n<image>"}, {"from": "gpt", "value": "there are pots of decorative plants above the girl's head"}]
00000002184.jpg
2184_evjvqa
[{"from": "human", "value": "what is next to the pole of road sign?\n<image>"}, {"from": "gpt", "value": "a small store and a bike"}]
00000002184.jpg
2184_evjvqa
[{"from": "human", "value": "where is the bike?\n<image>"}, {"from": "gpt", "value": "next to the pole of road sign"}]
00000002184.jpg
2184_evjvqa
[{"from": "human", "value": "who are sitting at the small store?\n<image>"}, {"from": "gpt", "value": "a man and a woman are sitting at the small store"}]
00000002196.jpg
2196_evjvqa
[{"from": "human", "value": "what aisle is next to the tomato aisle?\n<image>"}, {"from": "gpt", "value": "the cucumber aisle is next to the tomato aisle"}]
00000002196.jpg
2196_evjvqa
[{"from": "human", "value": "what aisle is next to the cucumber aisle?\n<image>"}, {"from": "gpt", "value": "the tomato aisle is next to the cucumber aisle"}]
00000002196.jpg
2196_evjvqa
[{"from": "human", "value": "what is the girl in pink t-shirt choosing?\n<image>"}, {"from": "gpt", "value": "tomatoes"}]
00000002196.jpg
2196_evjvqa
[{"from": "human", "value": "what is the girl in pink t-shirt doing?\n<image>"}, {"from": "gpt", "value": "the girl in pink t-shirt is choosing tomatoes"}]
00000002199.jpg
2199_evjvqa
[{"from": "human", "value": "what color are the lights hanging above?\n<image>"}, {"from": "gpt", "value": "the color of the lights hanging above is blue"}]
00000002199.jpg
2199_evjvqa
[{"from": "human", "value": "what color are the lights used to decorate the gate from the above?\n<image>"}, {"from": "gpt", "value": "the blue lights are used to deacorate the gates"}]
00000002199.jpg
2199_evjvqa
[{"from": "human", "value": "where is the kid in orange t-shirt?\n<image>"}, {"from": "gpt", "value": "the kid in orange t-shirt is sitting between the two adults"}]
00000010700.jpg
10700_evjvqa
[{"from": "human", "value": "what color bag is the woman in the sunglasses carrying?\n<image>"}, {"from": "gpt", "value": "white"}]
00000010701.jpg
10701_evjvqa
[{"from": "human", "value": "what are the people in blue protective gear doing?\n<image>"}, {"from": "gpt", "value": "quick test for covid"}]
00000010703.jpg
10703_evjvqa
[{"from": "human", "value": "where are these two women standing?\n<image>"}, {"from": "gpt", "value": "on the beach"}]
00000010704.jpg
10704_evjvqa
[{"from": "human", "value": "what are these people doing?\n<image>"}, {"from": "gpt", "value": "they are listening to a woman in pink give a presentation"}]
00000010708.jpg
10708_evjvqa
[{"from": "human", "value": "how many people are sitting outside the drink shop?\n<image>"}, {"from": "gpt", "value": "four"}]
00000010712.jpg
10712_evjvqa
[{"from": "human", "value": "where are these people going?\n<image>"}, {"from": "gpt", "value": "the pagoda"}]
00000010714.jpg
10714_evjvqa
[{"from": "human", "value": "what is the woman in the pink coat doing?\n<image>"}, {"from": "gpt", "value": "waiting to pay"}]
00000010715.jpg
10715_evjvqa
[{"from": "human", "value": "where are these guys standing?\n<image>"}, {"from": "gpt", "value": "in the historical monument"}]
00000010716.jpg
10716_evjvqa
[{"from": "human", "value": "where is this woman standing?\n<image>"}, {"from": "gpt", "value": "in front of the drink shop"}]
00000010717.jpg
10717_evjvqa
[{"from": "human", "value": "what is this group of students doing?\n<image>"}, {"from": "gpt", "value": "they are singing the national anthem"}]
00000010718.jpg
10718_evjvqa
[{"from": "human", "value": "what program is currently playing on the tv?\n<image>"}, {"from": "gpt", "value": "the music show"}]
00000010719.jpg
10719_evjvqa
[{"from": "human", "value": "where are the students standing?\n<image>"}, {"from": "gpt", "value": "in front of the ancient temple"}]
00000010721.jpg
10721_evjvqa
[{"from": "human", "value": "what are these people standing for?\n<image>"}, {"from": "gpt", "value": "they queue to buy things"}]
00000010722.jpg
10722_evjvqa
[{"from": "human", "value": "what are these?\n<image>"}, {"from": "gpt", "value": "these are ancient objects on display"}]
00000010723.jpg
10723_evjvqa
[{"from": "human", "value": "what does this stall sell?\n<image>"}, {"from": "gpt", "value": "vegetable"}]
00000010724.jpg
10724_evjvqa
[{"from": "human", "value": "how many women are standing on the sidewalk?\n<image>"}, {"from": "gpt", "value": "three"}]
00000010726.jpg
10726_evjvqa
[{"from": "human", "value": "who is the woman in the pink shirt going to the supermarket with?\n<image>"}, {"from": "gpt", "value": "the woman is wearing the green "}]
00000010727.jpg
10727_evjvqa
[{"from": "human", "value": "where is the woman in red standing?\n<image>"}, {"from": "gpt", "value": "staying in a food stall in a market"}]
00000010731.jpg
10731_evjvqa
[{"from": "human", "value": "what color shirt does the shopkeeper wear?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000010735.jpg
10735_evjvqa
[{"from": "human", "value": "what is the woman carrying a white bag doing?\n<image>"}, {"from": "gpt", "value": "picking postcards"}]