| image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268) |
|---|---|---|
| 00000004964.jpg | 4964_evjvqa | [{"from": "human", "value": "how many flower baskets are in front of the flower vendor by the street?\n<image>"}, {"from": "gpt", "value": "there are two flower baskets in front of the flower vendor"}] |
| 00000004967.jpg | 4967_evjvqa | [{"from": "human", "value": "what color is the car running in the street?\n<image>"}, {"from": "gpt", "value": "the car is white"}] |
| 00000004967.jpg | 4967_evjvqa | [{"from": "human", "value": "how many cars are running in the street?\n<image>"}, {"from": "gpt", "value": "there is one car running in the street"}] |
| 00000004967.jpg | 4967_evjvqa | [{"from": "human", "value": "how many people are standing on the sidewalk?\n<image>"}, {"from": "gpt", "value": "there are three people standing on the sidewalk"}] |
| 00000004969.jpg | 4969_evjvqa | [{"from": "human", "value": "what does the statue portray?\n<image>"}, {"from": "gpt", "value": "the statue portrays the fighting soldiers"}] |
| 00000004969.jpg | 4969_evjvqa | [{"from": "human", "value": "how many soldiers are portrayed? \n<image>"}, {"from": "gpt", "value": "there are three soldiers portrayed"}] |
| 00000004969.jpg | 4969_evjvqa | [{"from": "human", "value": "where is the statue placed?\n<image>"}, {"from": "gpt", "value": "the statue is placed at a martyrs monument"}] |
| 00000004970.jpg | 4970_evjvqa | [{"from": "human", "value": "how many windows with decorative pattern does the temple have?\n<image>"}, {"from": "gpt", "value": "the temple has two windows with decorative pattern"}] |
| 00000004971.jpg | 4971_evjvqa | [{"from": "human", "value": "how many vehicles are running in the street?\n<image>"}, {"from": "gpt", "value": "there are two vehicles running in the street"}] |
| 00000004971.jpg | 4971_evjvqa | [{"from": "human", "value": "how many vehicles are parked on the roadside?\n<image>"}, {"from": "gpt", "value": "there is one vehicle parked on the roadside"}] |
| 00000004971.jpg | 4971_evjvqa | [{"from": "human", "value": "what is the building at the end of the road?\n<image>"}, {"from": "gpt", "value": "the building at the end of the road is the independence palace"}] |
| 00000004972.jpg | 4972_evjvqa | [{"from": "human", "value": "what color is the bowl icon?\n<image>"}, {"from": "gpt", "value": "the bowl icon is orange"}] |
| 00000004974.jpg | 4974_evjvqa | [{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}] |
| 00000004976.jpg | 4976_evjvqa | [{"from": "human", "value": "what is the woman in red shirt holding in her hand?\n<image>"}, {"from": "gpt", "value": "holding a plastic bag in her hand"}] |
| 00000004976.jpg | 4976_evjvqa | [{"from": "human", "value": "where is the woman in red shirt standing?\n<image>"}, {"from": "gpt", "value": "standing in a store"}] |
| 00000004976.jpg | 4976_evjvqa | [{"from": "human", "value": "what does the store sell?\n<image>"}, {"from": "gpt", "value": "the store sells votive papers"}] |
| 00000004977.jpg | 4977_evjvqa | [{"from": "human", "value": "what is the fruit stall staff doing?\n<image>"}, {"from": "gpt", "value": "introducing products to the customer"}] |
| 00000004977.jpg | 4977_evjvqa | [{"from": "human", "value": "what stall is the woman being advised by the staff standing in front of?\n<image>"}, {"from": "gpt", "value": "standing in front of the grape stall"}] |
| 00000004977.jpg | 4977_evjvqa | [{"from": "human", "value": "how many customers is grape stall having?\n<image>"}, {"from": "gpt", "value": "the grape stall is having one customer"}] |
| 00000004981.jpg | 4981_evjvqa | [{"from": "human", "value": "which hand is the woman holding the pen with?\n<image>"}, {"from": "gpt", "value": "holding the pen with right hand"}] |
| 00000004981.jpg | 4981_evjvqa | [{"from": "human", "value": "which country does the flag on the right of the woman belong to?\n<image>"}, {"from": "gpt", "value": "the flag belongs to vietnam"}] |
| 00000004981.jpg | 4981_evjvqa | [{"from": "human", "value": "on which hand is the watch worn by the woman?\n<image>"}, {"from": "gpt", "value": "the watch is worn on left hand"}] |
| 00000004982.jpg | 4982_evjvqa | [{"from": "human", "value": "what color is the car?\n<image>"}, {"from": "gpt", "value": "the car is orange"}] |
| 00000004982.jpg | 4982_evjvqa | [{"from": "human", "value": "what kind of car is this?\n<image>"}, {"from": "gpt", "value": "this is a supercar"}] |
| 00000004982.jpg | 4982_evjvqa | [{"from": "human", "value": "how many cars are running in the street?\n<image>"}, {"from": "gpt", "value": "there is one car running in the street"}] |
| 00000004983.jpg | 4983_evjvqa | [{"from": "human", "value": "what are the two women doing?\n<image>"}, {"from": "gpt", "value": "harvesting coffee"}] |
| 00000004983.jpg | 4983_evjvqa | [{"from": "human", "value": "how many people are harvesting coffee?\n<image>"}, {"from": "gpt", "value": "there are two people harvesting coffee"}] |
| 00000004983.jpg | 4983_evjvqa | [{"from": "human", "value": "what fruit are the two women harvesting?\n<image>"}, {"from": "gpt", "value": "harvesting coffee fruit"}] |
| 00000004985.jpg | 4985_evjvqa | [{"from": "human", "value": "how many soldiers are sitting on the iron frame?\n<image>"}, {"from": "gpt", "value": "there is one soldier sitting on the iron frame"}] |
| 00000004985.jpg | 4985_evjvqa | [{"from": "human", "value": "what color is the car below the soldier?\n<image>"}, {"from": "gpt", "value": "the car is black"}] |
| 00000004986.jpg | 4986_evjvqa | [{"from": "human", "value": "how many women are wearing ao dai?\n<image>"}, {"from": "gpt", "value": "there are two women wearing ao dai"}] |
| 00000004986.jpg | 4986_evjvqa | [{"from": "human", "value": "what color ao dais are the two women wearing?\n<image>"}, {"from": "gpt", "value": "the ao dais are green"}] |
| 00000004986.jpg | 4986_evjvqa | [{"from": "human", "value": "what color jacket is the woman pushing the shopping cart wearing?\n<image>"}, {"from": "gpt", "value": "wearing black jacket"}] |
| 00000004988.jpg | 4988_evjvqa | [{"from": "human", "value": "how many women are carrying pole?\n<image>"}, {"from": "gpt", "value": "there are two women carrying pole"}] |
| 00000004988.jpg | 4988_evjvqa | [{"from": "human", "value": "what is the woman in yellow coat holding in her hand?\n<image>"}, {"from": "gpt", "value": "holding a cellphone in her hand"}] |
| 00000004988.jpg | 4988_evjvqa | [{"from": "human", "value": "what is in the baskets that the two women carrying?\n<image>"}, {"from": "gpt", "value": "in the baskets are flower bunches"}] |
| 00000004989.jpg | 4989_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}] |
| 00000004989.jpg | 4989_evjvqa | [{"from": "human", "value": "how is the market?\n<image>"}, {"from": "gpt", "value": "the market is very crowded"}] |
| 00000004989.jpg | 4989_evjvqa | [{"from": "human", "value": "what vehicle is the woman in yellow skirt riding?\n<image>"}, {"from": "gpt", "value": "riding a scooter"}] |
| 00000004991.jpg | 4991_evjvqa | [{"from": "human", "value": "how many beams of blue light shining at the sky?\n<image>"}, {"from": "gpt", "value": "there are three beams of light shining at the sky"}] |
| 00000004991.jpg | 4991_evjvqa | [{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}] |
| 00000004991.jpg | 4991_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is an amusement park on the shore"}] |
| 00000004992.jpg | 4992_evjvqa | [{"from": "human", "value": "what color is the teapot?\n<image>"}, {"from": "gpt", "value": "the teapot is black"}] |
| 00000004992.jpg | 4992_evjvqa | [{"from": "human", "value": "how many big moon cake boxes are there?\n<image>"}, {"from": "gpt", "value": "there are two big moon cake boxes"}] |
| 00000004992.jpg | 4992_evjvqa | [{"from": "human", "value": "what is on the dish?\n<image>"}, {"from": "gpt", "value": "on the dish are moon cakes"}] |
| 00000004993.jpg | 4993_evjvqa | [{"from": "human", "value": "how many people are walking underwater?\n<image>"}, {"from": "gpt", "value": "there are five people walking underwater"}] |
| 00000004993.jpg | 4993_evjvqa | [{"from": "human", "value": "what is everyone doing?\n<image>"}, {"from": "gpt", "value": "diving underwater"}] |
| 00000004993.jpg | 4993_evjvqa | [{"from": "human", "value": "what is the leader holding in the hand?\n<image>"}, {"from": "gpt", "value": "holding an underwater living creature"}] |
| 00000004995.jpg | 4995_evjvqa | [{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}] |
| 00000004996.jpg | 4996_evjvqa | [{"from": "human", "value": "what is the girl holding with two hands?\n<image>"}, {"from": "gpt", "value": "holding the hat"}] |
| 00000004996.jpg | 4996_evjvqa | [{"from": "human", "value": "where is the girl standing?\n<image>"}, {"from": "gpt", "value": "standing on the balcony of a building"}] |
| 00000004996.jpg | 4996_evjvqa | [{"from": "human", "value": "what color shirt is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing red shirt with pattern"}] |
| 00000004999.jpg | 4999_evjvqa | [{"from": "human", "value": "how many people are sitting in the house?\n<image>"}, {"from": "gpt", "value": "there are two people sitting in the house"}] |
| 00000004999.jpg | 4999_evjvqa | [{"from": "human", "value": "what color shirt is the man sitting in the house wearing?\n<image>"}, {"from": "gpt", "value": "wearing red shirt"}] |
| 00000002401.jpg | 2401_evjvqa | [{"from": "human", "value": "what stall are the two women in white shirts standing at?\n<image>"}, {"from": "gpt", "value": "the stall selling decorative items"}] |
| 00000002401.jpg | 2401_evjvqa | [{"from": "human", "value": "what is the woman in white shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing decorative items"}] |
| 00000002401.jpg | 2401_evjvqa | [{"from": "human", "value": "who does the woman in white shirt go to the market with?\n<image>"}, {"from": "gpt", "value": "with a woman in white shirt"}] |
| 00000002401.jpg | 2401_evjvqa | [{"from": "human", "value": "how many people are standing at the decorative items stall?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000002401.jpg | 2401_evjvqa | [{"from": "human", "value": "what color are the shirts of the two women watching at the decorative items stall?\n<image>"}, {"from": "gpt", "value": "white"}] |
| 00000002402.jpg | 2402_evjvqa | [{"from": "human", "value": "what tree is the fruit department decorated with?\n<image>"}, {"from": "gpt", "value": "an apricot blossom tree"}] |
| 00000002402.jpg | 2402_evjvqa | [{"from": "human", "value": "what department is the cake department located near?\n<image>"}, {"from": "gpt", "value": "the fruit department"}] |
| 00000002402.jpg | 2402_evjvqa | [{"from": "human", "value": "what department is next to the fruit department?\n<image>"}, {"from": "gpt", "value": "the cake department"}] |
| 00000002402.jpg | 2402_evjvqa | [{"from": "human", "value": "how many people are coming to pick fruits?\n<image>"}, {"from": "gpt", "value": "nobody is coming"}] |
| 00000002402.jpg | 2402_evjvqa | [{"from": "human", "value": "how many customers are going in the supermarket?\n<image>"}, {"from": "gpt", "value": "nobody are in the supermarket"}] |
| 00000002403.jpg | 2403_evjvqa | [{"from": "human", "value": "how many motorbikes are turning the lights on?\n<image>"}, {"from": "gpt", "value": "five"}] |
| 00000002403.jpg | 2403_evjvqa | [{"from": "human", "value": "how many motorcycles have no lights on?\n<image>"}, {"from": "gpt", "value": "one"}] |
| 00000002403.jpg | 2403_evjvqa | [{"from": "human", "value": "who does the man in the white shirt carry in the back?\n<image>"}, {"from": "gpt", "value": "a woman in white shirt"}] |
| 00000002404.jpg | 2404_evjvqa | [{"from": "human", "value": "where are the students lining up?\n<image>"}, {"from": "gpt", "value": "on the schoolyard"}] |
| 00000002404.jpg | 2404_evjvqa | [{"from": "human", "value": "how many students are holding the flags?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000002404.jpg | 2404_evjvqa | [{"from": "human", "value": "what position in the line is the boy holding the flag standing?\n<image>"}, {"from": "gpt", "value": "the first position"}] |
| 00000002404.jpg | 2404_evjvqa | [{"from": "human", "value": "what position in the line is the girl holding the flag standing?\n<image>"}, {"from": "gpt", "value": "the first position"}] |
| 00000002404.jpg | 2404_evjvqa | [{"from": "human", "value": "what position in the line are the two students holding the flags standing?\n<image>"}, {"from": "gpt", "value": "the first position"}] |
| 00000002406.jpg | 2406_evjvqa | [{"from": "human", "value": "whose hand is the boy standing near the lake holding?\n<image>"}, {"from": "gpt", "value": "the hand of the woman in white dress"}] |
| 00000002406.jpg | 2406_evjvqa | [{"from": "human", "value": "what color is the bridge at the lakeside painted mainly?\n<image>"}, {"from": "gpt", "value": "red"}] |
| 00000002407.jpg | 2407_evjvqa | [{"from": "human", "value": "where are these people visiting?\n<image>"}, {"from": "gpt", "value": "in an antique"}] |
| 00000002413.jpg | 2413_evjvqa | [{"from": "human", "value": "how many trolleys are going in the hallway?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000002413.jpg | 2413_evjvqa | [{"from": "human", "value": "what does the woman in blue shirt hold in her hand?\n<image>"}, {"from": "gpt", "value": "a wallet"}] |
| 00000002413.jpg | 2413_evjvqa | [{"from": "human", "value": "what is the woman in blue shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing food"}] |
| 00000002413.jpg | 2413_evjvqa | [{"from": "human", "value": "whom does the woman in blue shirt go to the supermarket with?\n<image>"}, {"from": "gpt", "value": "the woman goes alone"}] |
| 00000002413.jpg | 2413_evjvqa | [{"from": "human", "value": "how many people are watching at the department that the woman in the blue shirt is standing at?\n<image>"}, {"from": "gpt", "value": "one"}] |
| 00000002414.jpg | 2414_evjvqa | [{"from": "human", "value": "whom is the man going with?\n<image>"}, {"from": "gpt", "value": "with a woman in blue dress"}] |
| 00000002414.jpg | 2414_evjvqa | [{"from": "human", "value": "whom is the woman in blue outfit going with?\n<image>"}, {"from": "gpt", "value": "with a man in blue shirt"}] |
| 00000002417.jpg | 2417_evjvqa | [{"from": "human", "value": "how many people are standing in front of the house?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000002417.jpg | 2417_evjvqa | [{"from": "human", "value": "how many people are standing on the street?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000002417.jpg | 2417_evjvqa | [{"from": "human", "value": "how many chairs are brought in front of the house?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000002418.jpg | 2418_evjvqa | [{"from": "human", "value": "whom does the girl stand with?\n<image>"}, {"from": "gpt", "value": "the girl stands alone"}] |
| 00000002418.jpg | 2418_evjvqa | [{"from": "human", "value": "how many people does the girl stand with?\n<image>"}, {"from": "gpt", "value": "the girl stands alone"}] |
| 00000002418.jpg | 2418_evjvqa | [{"from": "human", "value": "what color is the shirt that the girl standing wears?\n<image>"}, {"from": "gpt", "value": "pink"}] |
| 00000002418.jpg | 2418_evjvqa | [{"from": "human", "value": "how many people stand far behind the girl?\n<image>"}, {"from": "gpt", "value": "one"}] |
| 00000002419.jpg | 2419_evjvqa | [{"from": "human", "value": "where is this couple standing?\n<image>"}, {"from": "gpt", "value": "in the middle of the street"}] |
| 00000002421.jpg | 2421_evjvqa | [{"from": "human", "value": "what color is the glasses that girl in black outfit wears?\n<image>"}, {"from": "gpt", "value": "black"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "where is the woman sitting on the chair?\n<image>"}, {"from": "gpt", "value": "in a grocery store"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "how many customers come to buy at grocery store?\n<image>"}, {"from": "gpt", "value": "three"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "where is the man in white shirt going to?\n<image>"}, {"from": "gpt", "value": "to the grocery store"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "whom does the woman wearing a conical hat and yellow shirt stand with at the grocery store?\n<image>"}, {"from": "gpt", "value": "the woman in blue shirt"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "whom does the man in white shirt go to the grocery store with?\n<image>"}, {"from": "gpt", "value": "the man goes alone"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "whom is the man in blue shirt walking on the street with?\n<image>"}, {"from": "gpt", "value": "with the woman in black shirt"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "whom is the woman in black outfit walking on the street with?\n<image>"}, {"from": "gpt", "value": "with the man in blue shirt"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "how many women come to buy at the grocery store?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000002422.jpg | 2422_evjvqa | [{"from": "human", "value": "how many motorbikes come out from the market?\n<image>"}, {"from": "gpt", "value": "two"}] |
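Each `conversations` entry follows a LLaVA-style turn format: a JSON list of alternating `human`/`gpt` messages, where the `<image>` token marks the point at which the image is injected into the prompt. Below is a minimal sketch of turning one row into plain (question, answer) pairs. It assumes the column arrives as a JSON string, as rendered in the preview above; the `to_qa_pairs` helper is illustrative, not part of the dataset.

```python
import json

# One row copied verbatim from the first line of the preview table.
row = {
    "image": "00000004964.jpg",
    "id": "4964_evjvqa",
    "conversations": '[{"from": "human", "value": "how many flower baskets are in front of the flower vendor by the street?\\n<image>"}, {"from": "gpt", "value": "there are two flower baskets in front of the flower vendor"}]',
}

def to_qa_pairs(row):
    """Yield (question, answer) tuples from one row's conversation turns."""
    turns = json.loads(row["conversations"])
    # Turns alternate human -> gpt; drop the "<image>" placeholder that
    # marks where the image is spliced into the model prompt.
    for ask, reply in zip(turns[::2], turns[1::2]):
        question = ask["value"].replace("<image>", "").strip()
        yield question, reply["value"]

for q, a in to_qa_pairs(row):
    print(f"{row['image']} | Q: {q} | A: {a}")
```

If the column is already materialized as a list of dicts (for example, when loaded through the `datasets` library from Parquet), the `json.loads` step can be skipped and the turns iterated directly.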