| image (string, 15 chars) | id (string, 8–12 chars) | conversations (string, 79–268 chars) |
|---|---|---|
| 00000003666.jpg | 3666_evjvqa | [{"from": "human", "value": "what products are sold on those aisles?\n<image>"}, {"from": "gpt", "value": "those aisles sell different kinds of vegetables"}] |
| 00000003667.jpg | 3667_evjvqa | [{"from": "human", "value": "where are this group traveling at?\n<image>"}, {"from": "gpt", "value": "they are traveling in a countryside"}] |
| 00000003667.jpg | 3667_evjvqa | [{"from": "human", "value": "how many people are riding the boat?\n<image>"}, {"from": "gpt", "value": "there is one woman wearing conical hat riding the boat"}] |
| 00000003667.jpg | 3667_evjvqa | [{"from": "human", "value": "what are those two peopl sitting on the boat doing?\n<image>"}, {"from": "gpt", "value": "one is preparing for the other one to taking picture"}] |
| 00000003667.jpg | 3667_evjvqa | [{"from": "human", "value": "where are their boat coming to?\n<image>"}, {"from": "gpt", "value": "they are moving under the bridge"}] |
| 00000003667.jpg | 3667_evjvqa | [{"from": "human", "value": "what kind of hat are the people sitting on the boat wearing?\n<image>"}, {"from": "gpt", "value": "they are wearing conical hat"}] |
| 00000003670.jpg | 3670_evjvqa | [{"from": "human", "value": "where are those red plastic chair?\n<image>"}, {"from": "gpt", "value": "in front of the grocery store"}] |
| 00000003670.jpg | 3670_evjvqa | [{"from": "human", "value": "what are this house's windows using for?\n<image>"}, {"from": "gpt", "value": "they are used to hang the products"}] |
| 00000003670.jpg | 3670_evjvqa | [{"from": "human", "value": "where are the products put in the store?\n<image>"}, {"from": "gpt", "value": "right next to the windows"}] |
| 00000003671.jpg | 3671_evjvqa | [{"from": "human", "value": "where are those three men standing at?\n<image>"}, {"from": "gpt", "value": "in the house which exhibits the historical artifacts"}] |
| 00000003671.jpg | 3671_evjvqa | [{"from": "human", "value": "where are those three men visiting at?\n<image>"}, {"from": "gpt", "value": "in a house which exhibits the historical artifacts"}] |
| 00000003671.jpg | 3671_evjvqa | [{"from": "human", "value": "how many people are visiting the room which exhibits the historical artifacts?\n<image>"}, {"from": "gpt", "value": "there are three men visiting the room which exhibits the historical artifacts"}] |
| 00000003676.jpg | 3676_evjvqa | [{"from": "human", "value": "what is the man riding grab doing?\n<image>"}, {"from": "gpt", "value": "he is buying pho"}] |
| 00000003676.jpg | 3676_evjvqa | [{"from": "human", "value": "what is the woman wearing the helmet doing?\n<image>"}, {"from": "gpt", "value": "she is lining up to buy pho"}] |
| 00000003677.jpg | 3677_evjvqa | [{"from": "human", "value": "how many people are sitting in the grocery store?\n<image>"}, {"from": "gpt", "value": "there is one person sitting in the grocery store"}] |
| 00000003679.jpg | 3679_evjvqa | [{"from": "human", "value": "who is the man wearing white t-shirt standing with?\n<image>"}, {"from": "gpt", "value": "with a girl wearing white dress"}] |
| 00000003679.jpg | 3679_evjvqa | [{"from": "human", "value": "who is the girl in white dress playing with?\n<image>"}, {"from": "gpt", "value": "with a man in white t-shirt"}] |
| 00000003679.jpg | 3679_evjvqa | [{"from": "human", "value": "where is the man and the girl standing at?\n<image>"}, {"from": "gpt", "value": "next to the fence near a river"}] |
| 00000003681.jpg | 3681_evjvqa | [{"from": "human", "value": "where are these two women standing at?\n<image>"}, {"from": "gpt", "value": "at a corridor in a supermarket "}] |
| 00000003681.jpg | 3681_evjvqa | [{"from": "human", "value": "what are these two women doing?\n<image>"}, {"from": "gpt", "value": "buying things in the supermarket"}] |
| 00000003687.jpg | 3687_evjvqa | [{"from": "human", "value": "what vehicles are this couple moving on?\n<image>"}, {"from": "gpt", "value": "motorbike"}] |
| 00000003687.jpg | 3687_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "the man is towing the motorbike "}] |
| 00000003687.jpg | 3687_evjvqa | [{"from": "human", "value": "who is the woman walking with on the avenue?\n<image>"}, {"from": "gpt", "value": "with the man towing the motorbike"}] |
| 00000003698.jpg | 3698_evjvqa | [{"from": "human", "value": "what is the man wearing helmet doing?\n<image>"}, {"from": "gpt", "value": "he is loading the goods on the motorbike"}] |
| 00000003698.jpg | 3698_evjvqa | [{"from": "human", "value": "what is the woman wearing mask doing?\n<image>"}, {"from": "gpt", "value": "she is moving through the fence"}] |
| 00000004907.jpg | 4907_evjvqa | [{"from": "human", "value": "how many teachers are going with the children?\n<image>"}, {"from": "gpt", "value": "there are three teachers going with the children"}] |
| 00000004908.jpg | 4908_evjvqa | [{"from": "human", "value": "what color shirt is the man with eyeglasses wearing?\n<image>"}, {"from": "gpt", "value": "wearing red shirt"}] |
| 00000004908.jpg | 4908_evjvqa | [{"from": "human", "value": "what color shirt is the flower shop staff wearing?\n<image>"}, {"from": "gpt", "value": "wearing white shirt"}] |
| 00000004908.jpg | 4908_evjvqa | [{"from": "human", "value": "which side of the man in red shirt is the little girl standing?\n<image>"}, {"from": "gpt", "value": "the little girl is standing on the right of the man in red shirt"}] |
| 00000004911.jpg | 4911_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "taking the wallet"}] |
| 00000004911.jpg | 4911_evjvqa | [{"from": "human", "value": "what store is the man standing?\n<image>"}, {"from": "gpt", "value": "standing in a building materials store"}] |
| 00000004911.jpg | 4911_evjvqa | [{"from": "human", "value": "where is the building materials store owner standing compared to the man?\n<image>"}, {"from": "gpt", "value": "standing in front of the man"}] |
| 00000004912.jpg | 4912_evjvqa | [{"from": "human", "value": "how is the weather in this area?\n<image>"}, {"from": "gpt", "value": "it is raining in this area"}] |
| 00000004912.jpg | 4912_evjvqa | [{"from": "human", "value": "what type of bike is the bike parked in front of the food stand?\n<image>"}, {"from": "gpt", "value": "the bike is an electric bicycle"}] |
| 00000004912.jpg | 4912_evjvqa | [{"from": "human", "value": "how many people are buying at the food stand?\n<image>"}, {"from": "gpt", "value": "there is one person buying food"}] |
| 00000004913.jpg | 4913_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a supermarket"}] |
| 00000004913.jpg | 4913_evjvqa | [{"from": "human", "value": "what color coat is the girl passing through two men wearing?\n<image>"}, {"from": "gpt", "value": "wearing a red coat"}] |
| 00000004916.jpg | 4916_evjvqa | [{"from": "human", "value": "what store are the two women standing in?\n<image>"}, {"from": "gpt", "value": "standing in a souvenir store"}] |
| 00000004916.jpg | 4916_evjvqa | [{"from": "human", "value": "what is the woman with crossbody bag holding on her hand?\n<image>"}, {"from": "gpt", "value": "holding a stilt house model on her hand"}] |
| 00000004916.jpg | 4916_evjvqa | [{"from": "human", "value": "how many bags is the woman in white striped shirt holding?\n<image>"}, {"from": "gpt", "value": "holding two bags"}] |
| 00000004919.jpg | 4919_evjvqa | [{"from": "human", "value": "how many people are parasailing?\n<image>"}, {"from": "gpt", "value": "there are two people parasailing"}] |
| 00000004919.jpg | 4919_evjvqa | [{"from": "human", "value": "where are the two people parasailing?\n<image>"}, {"from": "gpt", "value": "parasailing at sea"}] |
| 00000004919.jpg | 4919_evjvqa | [{"from": "human", "value": "how many parachutes are behind the two people?\n<image>"}, {"from": "gpt", "value": "there is one parachute behind the two people"}] |
| 00000004920.jpg | 4920_evjvqa | [{"from": "human", "value": "what color shirt is the man standing on the roof of the boat wearing?\n<image>"}, {"from": "gpt", "value": "the man wearing white shirt"}] |
| 00000004920.jpg | 4920_evjvqa | [{"from": "human", "value": "where is the boat located?\n<image>"}, {"from": "gpt", "value": "the boat is docking on a river in the jungle"}] |
| 00000004920.jpg | 4920_evjvqa | [{"from": "human", "value": "where is the man in white shirt standing at?\n<image>"}, {"from": "gpt", "value": "standing on the roof of the boat"}] |
| 00000004921.jpg | 4921_evjvqa | [{"from": "human", "value": "what color coat is the woman with white sun hat wearing?\n<image>"}, {"from": "gpt", "value": "wearing a red coat"}] |
| 00000004922.jpg | 4922_evjvqa | [{"from": "human", "value": "what is the girl in white shirt doing?\n<image>"}, {"from": "gpt", "value": "taking goods on the stall"}] |
| 00000004922.jpg | 4922_evjvqa | [{"from": "human", "value": "what color shirt is the boy standing next to the girl in white shirt wearing?\n<image>"}, {"from": "gpt", "value": "wearing red shirt"}] |
| 00000004922.jpg | 4922_evjvqa | [{"from": "human", "value": "what color is the staff uniform of the supermarket?\n<image>"}, {"from": "gpt", "value": "the uniform is red"}] |
| 00000004923.jpg | 4923_evjvqa | [{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "standing in front of a supermarket"}] |
| 00000004923.jpg | 4923_evjvqa | [{"from": "human", "value": "what color are the baskets?\n<image>"}, {"from": "gpt", "value": "the baskets are black"}] |
| 00000004923.jpg | 4923_evjvqa | [{"from": "human", "value": "what is everyone doing?\n<image>"}, {"from": "gpt", "value": "lining up to go shopping in the supermarket"}] |
| 00000004926.jpg | 4926_evjvqa | [{"from": "human", "value": "what color shirt is the girl wearing?\n<image>"}, {"from": "gpt", "value": "wearing white shirt"}] |
| 00000004926.jpg | 4926_evjvqa | [{"from": "human", "value": "what color lipstick is the girl wearing?\n<image>"}, {"from": "gpt", "value": "wearing red lipstick"}] |
| 00000004926.jpg | 4926_evjvqa | [{"from": "human", "value": "where is the girl?\n<image>"}, {"from": "gpt", "value": "sitting in a basket boat on a river"}] |
| 00000004928.jpg | 4928_evjvqa | [{"from": "human", "value": "how many gas pumps does the gas station have?\n<image>"}, {"from": "gpt", "value": "the gas station has five gas pumps"}] |
| 00000004932.jpg | 4932_evjvqa | [{"from": "human", "value": "how many kids are standing on the field?\n<image>"}, {"from": "gpt", "value": "there are two kids standing on the field"}] |
| 00000004932.jpg | 4932_evjvqa | [{"from": "human", "value": "what suit are the kids wearing?\n<image>"}, {"from": "gpt", "value": "wearing cow coat"}] |
| 00000004932.jpg | 4932_evjvqa | [{"from": "human", "value": "where are the kids standing?\n<image>"}, {"from": "gpt", "value": "standing on a field"}] |
| 00000004933.jpg | 4933_evjvqa | [{"from": "human", "value": "what is everyone doing?\n<image>"}, {"from": "gpt", "value": "shopping"}] |
| 00000004933.jpg | 4933_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a supermarket"}] |
| 00000004933.jpg | 4933_evjvqa | [{"from": "human", "value": "what stalls is everyone standing at?\n<image>"}, {"from": "gpt", "value": "standing at vegetable stalls"}] |
| 00000004936.jpg | 4936_evjvqa | [{"from": "human", "value": "what color shirt is the tour guide wearing?\n<image>"}, {"from": "gpt", "value": "wearing white shirt"}] |
| 00000004936.jpg | 4936_evjvqa | [{"from": "human", "value": "what area is everyone standing at?\n<image>"}, {"from": "gpt", "value": "standing at a museum"}] |
| 00000004938.jpg | 4938_evjvqa | [{"from": "human", "value": "where are the paintings hanged?\n<image>"}, {"from": "gpt", "value": "hanged in a museum"}] |
| 00000004938.jpg | 4938_evjvqa | [{"from": "human", "value": "what are displayed on the wall?\n<image>"}, {"from": "gpt", "value": "the paintings are displayed on the wall"}] |
| 00000004938.jpg | 4938_evjvqa | [{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this is a gallery in a museum"}] |
| 00000004942.jpg | 4942_evjvqa | [{"from": "human", "value": "what place is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}] |
| 00000004942.jpg | 4942_evjvqa | [{"from": "human", "value": "how is the market?\n<image>"}, {"from": "gpt", "value": "the market is very crowded"}] |
| 00000004942.jpg | 4942_evjvqa | [{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is morning"}] |
| 00000004943.jpg | 4943_evjvqa | [{"from": "human", "value": "how many people are crossing the road?\n<image>"}, {"from": "gpt", "value": "there is one person crossing the road"}] |
| 00000004943.jpg | 4943_evjvqa | [{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is morning"}] |
| 00000004943.jpg | 4943_evjvqa | [{"from": "human", "value": "what place is this?\n<image>"}, {"from": "gpt", "value": "this is a street"}] |
| 00000004945.jpg | 4945_evjvqa | [{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "standing in a museum"}] |
| 00000004945.jpg | 4945_evjvqa | [{"from": "human", "value": "how many men are standing in the museum?\n<image>"}, {"from": "gpt", "value": "there are five men standing in the museum"}] |
| 00000004945.jpg | 4945_evjvqa | [{"from": "human", "value": "where is the guide standing compared to the men?\n<image>"}, {"from": "gpt", "value": "standing opposite the men"}] |
| 00000004946.jpg | 4946_evjvqa | [{"from": "human", "value": "what color ao dai is the woman wearing?\n<image>"}, {"from": "gpt", "value": "wearing a yellow ao dai"}] |
| 00000004946.jpg | 4946_evjvqa | [{"from": "human", "value": "how many men are standing opposite the woman?\n<image>"}, {"from": "gpt", "value": "there are two men standing opposite the woman"}] |
| 00000004946.jpg | 4946_evjvqa | [{"from": "human", "value": "what color face mask is the woman wearing?\n<image>"}, {"from": "gpt", "value": "wearing black face mask"}] |
| 00000004948.jpg | 4948_evjvqa | [{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "in a restaurant"}] |
| 00000004948.jpg | 4948_evjvqa | [{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}] |
| 00000004948.jpg | 4948_evjvqa | [{"from": "human", "value": "how is the restaurant?\n<image>"}, {"from": "gpt", "value": "the restaurant is crowded"}] |
| 00000004950.jpg | 4950_evjvqa | [{"from": "human", "value": "what color shirt is the boy wearing?\n<image>"}, {"from": "gpt", "value": "wearing black shirt"}] |
| 00000004950.jpg | 4950_evjvqa | [{"from": "human", "value": "what is on the right of the boy?\n<image>"}, {"from": "gpt", "value": "on the right of the boy is a christmas tree model"}] |
| 00000004954.jpg | 4954_evjvqa | [{"from": "human", "value": "what stall are the two men standing at?\n<image>"}, {"from": "gpt", "value": "standing at milk stall"}] |
| 00000004954.jpg | 4954_evjvqa | [{"from": "human", "value": "what are the two men doing?\n<image>"}, {"from": "gpt", "value": "checking the milk boxes"}] |
| 00000004954.jpg | 4954_evjvqa | [{"from": "human", "value": "how many men are standing at the milk stall?\n<image>"}, {"from": "gpt", "value": "there are two men standing at the milk stall"}] |
| 00000004956.jpg | 4956_evjvqa | [{"from": "human", "value": "what color is the pickup truck running in the street?\n<image>"}, {"from": "gpt", "value": "the pickup truck running in the street is gray"}] |
| 00000004956.jpg | 4956_evjvqa | [{"from": "human", "value": "how many black cars are parked on the roadside?\n<image>"}, {"from": "gpt", "value": "there is one black car parked on the roadside"}] |
| 00000004956.jpg | 4956_evjvqa | [{"from": "human", "value": "how many cars are behind the pickup truck?\n<image>"}, {"from": "gpt", "value": "there is one car behind the pickup truck"}] |
| 00000004958.jpg | 4958_evjvqa | [{"from": "human", "value": "what color is the staff uniform of the supermarket?\n<image>"}, {"from": "gpt", "value": "the uniform is blue"}] |
| 00000004958.jpg | 4958_evjvqa | [{"from": "human", "value": "what is the staff standing at the cabbage stall holding?\n<image>"}, {"from": "gpt", "value": "holding a cabbage"}] |
| 00000004958.jpg | 4958_evjvqa | [{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "standing at a vegetale stall of the supermarket"}] |
| 00000004959.jpg | 4959_evjvqa | [{"from": "human", "value": "how many ships are moving on the river?\n<image>"}, {"from": "gpt", "value": "there is one ship moving on the river"}] |
| 00000004959.jpg | 4959_evjvqa | [{"from": "human", "value": "how many towers are on the shore?\n<image>"}, {"from": "gpt", "value": "there are three towers on the shore"}] |
| 00000004959.jpg | 4959_evjvqa | [{"from": "human", "value": "what type is the ship on the river?\n<image>"}, {"from": "gpt", "value": "the ship is a cruise"}] |
| 00000004961.jpg | 4961_evjvqa | [{"from": "human", "value": "how many people are installing the sign for the store?\n<image>"}, {"from": "gpt", "value": "there is one person installing the sign"}] |
| 00000004964.jpg | 4964_evjvqa | [{"from": "human", "value": "how many people riding cyclo in the street?\n<image>"}, {"from": "gpt", "value": "there is one person riding cyclo in the street"}] |
| 00000004964.jpg | 4964_evjvqa | [{"from": "human", "value": "how many people are carrying pole in the street?\n<image>"}, {"from": "gpt", "value": "there are three people carrying pole in the street"}] |
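Each `conversations` cell above is a JSON-encoded list of `{"from", "value"}` turns, with the human turn carrying an `<image>` placeholder that marks where the picture is injected into the prompt. Below is a minimal parsing sketch under that assumption; the sample row is copied from the table, but the `iter_turns` helper is purely illustrative and not part of any dataset API.

```python
import json

# Sample row copied verbatim from the table above; in practice rows
# would come from whatever loader serves this dataset.
row = {
    "image": "00000003666.jpg",
    "id": "3666_evjvqa",
    "conversations": (
        '[{"from": "human", "value": "what products are sold on those aisles?'
        '\\n<image>"}, '
        '{"from": "gpt", "value": "those aisles sell different kinds of vegetables"}]'
    ),
}

def iter_turns(row):
    """Yield (speaker, text) pairs from one row, stripping the <image> tag."""
    for turn in json.loads(row["conversations"]):
        yield turn["from"], turn["value"].replace("\n<image>", "").strip()

for speaker, text in iter_turns(row):
    print(f"{speaker}: {text}")
# human: what products are sold on those aisles?
# gpt: those aisles sell different kinds of vegetables
```

Keeping the `<image>` placeholder intact (rather than stripping it, as this sketch does) is what a multimodal training pipeline would typically want, so the text and the pixel features can be aligned at that position.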