image (string, 15 chars) | id (string, 8–12 chars) | conversations (string, 79–268 chars)
---|---|---
00000003284.jpg | 3284_evjvqa | [{"from": "human", "value": "who is the lady in the brown shirt standing with?\n<image>"}, {"from": "gpt", "value": "with three women beside her"}]
00000003284.jpg | 3284_evjvqa | [{"from": "human", "value": "how many people are standing in front of the lantern shop?\n<image>"}, {"from": "gpt", "value": "there are four women standing in front of the lantern shop"}]
00000003286.jpg | 3286_evjvqa | [{"from": "human", "value": "what is the woman in the orange shirt doing?\n<image>"}, {"from": "gpt", "value": "restructure milk shelves"}]
00000003286.jpg | 3286_evjvqa | [{"from": "human", "value": "what is the woman in blue doing?\n<image>"}, {"from": "gpt", "value": "standing and preparing goods"}]
00000003286.jpg | 3286_evjvqa | [{"from": "human", "value": "where are the two boys in yellow shirts standing?\n<image>"}, {"from": "gpt", "value": "at a milk shelf in a supermarket"}]
00000003286.jpg | 3286_evjvqa | [{"from": "human", "value": "at which stall are the two boys in yellow shirts standing?\n<image>"}, {"from": "gpt", "value": "standing at the dairy products shelves"}]
00000003286.jpg | 3286_evjvqa | [{"from": "human", "value": "what is the boy in the yellow shirt planning to buy?\n<image>"}, {"from": "gpt", "value": "buy milk"}]
00000003291.jpg | 3291_evjvqa | [{"from": "human", "value": "where are these people going sightseeing?\n<image>"}, {"from": "gpt", "value": "at a historic site"}]
00000003292.jpg | 3292_evjvqa | [{"from": "human", "value": "what is the woman in the yellow shirt doing?\n<image>"}, {"from": "gpt", "value": "selecting goods on the shelves"}]
00000003294.jpg | 3294_evjvqa | [{"from": "human", "value": "where was this pagoda built?\n<image>"}, {"from": "gpt", "value": "on the mountain"}]
00000006000.jpg | 6000_evjvqa | [{"from": "human", "value": "what items does this area sell?\n<image>"}, {"from": "gpt", "value": "this area sells tet decorations"}]
00000006000.jpg | 6000_evjvqa | [{"from": "human", "value": "what color are the shirts that the two tourists walking on the street are wearing?\n<image>"}, {"from": "gpt", "value": "the two tourists are wearing white shirts"}]
00000006000.jpg | 6000_evjvqa | [{"from": "human", "value": "what is the main color of tet decorations?\n<image>"}, {"from": "gpt", "value": "the main color of tet decorations is red"}]
00000006003.jpg | 6003_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000006003.jpg | 6003_evjvqa | [{"from": "human", "value": "what is the scene of visitors to the market?\n<image>"}, {"from": "gpt", "value": "the visitors crowded the market"}]
00000006005.jpg | 6005_evjvqa | [{"from": "human", "value": "how many people are going on the stairs?\n<image>"}, {"from": "gpt", "value": "there is one person going on the stairs"}]
00000006005.jpg | 6005_evjvqa | [{"from": "human", "value": "what color is the shirt that the person going on the stairs is wearing?\n<image>"}, {"from": "gpt", "value": "the person going on the stairs is wearing a black ao dai"}]
00000006005.jpg | 6005_evjvqa | [{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this area is the stairway leading up to a pagoda"}]
00000006006.jpg | 6006_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a border marker between countries"}]
00000006011.jpg | 6011_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "the man is drinking"}]
00000006011.jpg | 6011_evjvqa | [{"from": "human", "value": "what color is the shirt of the man?\n<image>"}, {"from": "gpt", "value": "the shirt of the man is white"}]
00000006011.jpg | 6011_evjvqa | [{"from": "human", "value": "which hand is the man holding the cup in?\n<image>"}, {"from": "gpt", "value": "the man is holding the cup in right hand"}]
00000006012.jpg | 6012_evjvqa | [{"from": "human", "value": "what is the girl doing?\n<image>"}, {"from": "gpt", "value": "the girl is hugging the man"}]
00000006012.jpg | 6012_evjvqa | [{"from": "human", "value": "what color is the shirt that the man is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing a blue shirt"}]
00000006012.jpg | 6012_evjvqa | [{"from": "human", "value": "where are the man and the girl standing?\n<image>"}, {"from": "gpt", "value": "the man and the girl are standing at an ancient town"}]
00000006014.jpg | 6014_evjvqa | [{"from": "human", "value": "what means of transportation are moving in rows on the street?\n<image>"}, {"from": "gpt", "value": "the cyclos are moving in rows on the street"}]
00000006014.jpg | 6014_evjvqa | [{"from": "human", "value": "what time of day is the scene?\n<image>"}, {"from": "gpt", "value": "the scene is late afternoon"}]
00000006014.jpg | 6014_evjvqa | [{"from": "human", "value": "what color are the houses in the town painted?\n<image>"}, {"from": "gpt", "value": "the houses in the town are painted yellow"}]
00000006018.jpg | 6018_evjvqa | [{"from": "human", "value": "which hand is the man in black coat reaching forward?\n<image>"}, {"from": "gpt", "value": "the man in black coat is reaching right hand forward"}]
00000006018.jpg | 6018_evjvqa | [{"from": "human", "value": "what department is the woman in black shirt standing at?\n<image>"}, {"from": "gpt", "value": "the woman in black shirt is standing at the vegetable department"}]
00000006020.jpg | 6020_evjvqa | [{"from": "human", "value": "what color is the uniform of the lion dance team?\n<image>"}, {"from": "gpt", "value": "the uniform of the lion dance team is yellow"}]
00000006020.jpg | 6020_evjvqa | [{"from": "human", "value": "what color is the hair of the unicorn?\n<image>"}, {"from": "gpt", "value": "the hair of the unicorn is white"}]
00000006020.jpg | 6020_evjvqa | [{"from": "human", "value": "what is the unicorn of the lion dance team doing?\n<image>"}, {"from": "gpt", "value": "the unicorn of the lion dance team is performing a dance between the poles"}]
00000006021.jpg | 6021_evjvqa | [{"from": "human", "value": "what is the weather like in this area?\n<image>"}, {"from": "gpt", "value": "it is raining in this area"}]
00000006021.jpg | 6021_evjvqa | [{"from": "human", "value": "how many people are standing under the umbrella?\n<image>"}, {"from": "gpt", "value": "there are two people standing under the umbrella"}]
00000006022.jpg | 6022_evjvqa | [{"from": "human", "value": "how many pine trees are decorated in front of the building?\n<image>"}, {"from": "gpt", "value": "there are two pine trees decorated in front of the building"}]
00000006022.jpg | 6022_evjvqa | [{"from": "human", "value": "what occasion are the pine trees displayed for?\n<image>"}, {"from": "gpt", "value": "the pine trees are displayed for christmas"}]
00000006025.jpg | 6025_evjvqa | [{"from": "human", "value": "what is the supermarket employee doing?\n<image>"}, {"from": "gpt", "value": "the supermarket employee is checking the items"}]
00000006025.jpg | 6025_evjvqa | [{"from": "human", "value": "what color is the uniform of the supermarket staff?\n<image>"}, {"from": "gpt", "value": "the uniform of the supermarket staff is blue"}]
00000006025.jpg | 6025_evjvqa | [{"from": "human", "value": "which hand is the supermarket employee holding the basket with?\n<image>"}, {"from": "gpt", "value": "the supermarket employee is holding the basket with the left hand"}]
00000006026.jpg | 6026_evjvqa | [{"from": "human", "value": "which hand of the girl is the man holding?\n<image>"}, {"from": "gpt", "value": "the man is holding the left hand of the girl"}]
00000006026.jpg | 6026_evjvqa | [{"from": "human", "value": "which hand is the girl holding the bodice of the ao dai with?\n<image>"}, {"from": "gpt", "value": "the girl is holding the bodice of the ao dai with right hand"}]
00000006026.jpg | 6026_evjvqa | [{"from": "human", "value": "where are the man and the girl standing?\n<image>"}, {"from": "gpt", "value": "the man and the girl are standing on the stairs"}]
00000006028.jpg | 6028_evjvqa | [{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this area is a river in a forest"}]
00000006028.jpg | 6028_evjvqa | [{"from": "human", "value": "how many canoes are on the river?\n<image>"}, {"from": "gpt", "value": "there are three canoes on the river"}]
00000006028.jpg | 6028_evjvqa | [{"from": "human", "value": "what color is the shirt that the woman rowing the middle canoe is wearing?\n<image>"}, {"from": "gpt", "value": "the woman rowing the middle canoe is wearing a yellow shirt"}]
00000006031.jpg | 6031_evjvqa | [{"from": "human", "value": "how many people are driving motorbikes in front of the market?\n<image>"}, {"from": "gpt", "value": "there is one person driving motorbike in front of the market"}]
00000006031.jpg | 6031_evjvqa | [{"from": "human", "value": "what color is the shirt that the person driving motorbike in front of the market is wearing?\n<image>"}, {"from": "gpt", "value": "the person driving motorbike in front of the market is wearing a white shirt"}]
00000006033.jpg | 6033_evjvqa | [{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this area is a market of ethnic minorities"}]
00000006033.jpg | 6033_evjvqa | [{"from": "human", "value": "where is the man carrying a camera standing?\n<image>"}, {"from": "gpt", "value": "the man carrying a camera is standing to the left of the woman in red shirt"}]
00000006033.jpg | 6033_evjvqa | [{"from": "human", "value": "which side of the woman holding her wallet is the man in blue shirt sitting on?\n<image>"}, {"from": "gpt", "value": "the man in blue shirt is sitting on the right side of the woman holding her wallet"}]
00000006035.jpg | 6035_evjvqa | [{"from": "human", "value": "what color is the shirt that the girl is wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a black shirt"}]
00000006036.jpg | 6036_evjvqa | [{"from": "human", "value": "what color is the car moving on the road?\n<image>"}, {"from": "gpt", "value": "the car is white"}]
00000006036.jpg | 6036_evjvqa | [{"from": "human", "value": "what color is the coat that the man using the phone is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing an orange and gray coat"}]
00000006036.jpg | 6036_evjvqa | [{"from": "human", "value": "how many people are sitting on the right side of the man in orange coat?\n<image>"}, {"from": "gpt", "value": "there are three people sitting on the right side of the man in orange coat"}]
00000006037.jpg | 6037_evjvqa | [{"from": "human", "value": "what kind of plant are grown in pots?\n<image>"}, {"from": "gpt", "value": "kumquats are grown in pots"}]
00000006037.jpg | 6037_evjvqa | [{"from": "human", "value": "what color is the coat that the man is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing a black coat"}]
00000006037.jpg | 6037_evjvqa | [{"from": "human", "value": "what color is the hat that the man is wearing?\n<image>"}, {"from": "gpt", "value": "the hat that the man is wearing is black"}]
00000006039.jpg | 6039_evjvqa | [{"from": "human", "value": "how many motorbikes are propped up in front of the pharmacy?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes propped up in front of the pharmacy"}]
00000006041.jpg | 6041_evjvqa | [{"from": "human", "value": "how many motorbikes are propped up in front of the tea shop?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes propped up in front of the tea shop"}]
00000006043.jpg | 6043_evjvqa | [{"from": "human", "value": "what color is the shirt that the girl is wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing yellow shirt"}]
00000006043.jpg | 6043_evjvqa | [{"from": "human", "value": "how many people are going behind the girl in yellow shirt?\n<image>"}, {"from": "gpt", "value": "there are two people going behind the girl in yellow shirt"}]
00000006047.jpg | 6047_evjvqa | [{"from": "human", "value": "what means of transportation is the group of children standing next to?\n<image>"}, {"from": "gpt", "value": "the group of children is standing next to a helicopter"}]
00000006047.jpg | 6047_evjvqa | [{"from": "human", "value": "how many children stand on the roof of the helicopter?\n<image>"}, {"from": "gpt", "value": "there are three children standing on the roof of the helicopter"}]
00000006047.jpg | 6047_evjvqa | [{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this area is a military equipment display area"}]
00000006049.jpg | 6049_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "the man is drawing a light bulb"}]
00000006049.jpg | 6049_evjvqa | [{"from": "human", "value": "which hand is the man using to hold the pen?\n<image>"}, {"from": "gpt", "value": "the man is using right hand to hold the pen"}]
00000006049.jpg | 6049_evjvqa | [{"from": "human", "value": "what color is the ink of the pen that the man is holding?\n<image>"}, {"from": "gpt", "value": "the ink of the pen that the man is holding is black"}]
00000006054.jpg | 6054_evjvqa | [{"from": "human", "value": "how many people are standing next to the wedding dress display stand?\n<image>"}, {"from": "gpt", "value": "there is one person standing next to the wedding dress display stand"}]
00000006054.jpg | 6054_evjvqa | [{"from": "human", "value": "what does the stand that the woman is standing next to display?\n<image>"}, {"from": "gpt", "value": "the woman is standing next to the stand displaying wedding dresses"}]
00000006054.jpg | 6054_evjvqa | [{"from": "human", "value": "what shop is the woman standing in?\n<image>"}, {"from": "gpt", "value": "the woman is standing in the bridal shop"}]
00000006056.jpg | 6056_evjvqa | [{"from": "human", "value": "what outfit is the woman wearing?\n<image>"}, {"from": "gpt", "value": "the woman is wearing ao dai"}]
00000006056.jpg | 6056_evjvqa | [{"from": "human", "value": "what color is the ao dai that the woman is wearing?\n<image>"}, {"from": "gpt", "value": "the ao dai is blue"}]
00000006056.jpg | 6056_evjvqa | [{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "the woman is holding out her hand to pamper the flower"}]
00000006062.jpg | 6062_evjvqa | [{"from": "human", "value": "where is the girl standing?\n<image>"}, {"from": "gpt", "value": "the girl is standing in a supermarket"}]
00000006062.jpg | 6062_evjvqa | [{"from": "human", "value": "what color is the shirt that the man on the right side of the girl is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing a black shirt"}]
00000006062.jpg | 6062_evjvqa | [{"from": "human", "value": "which side of the man in black shirt is the girl standing on?\n<image>"}, {"from": "gpt", "value": "the girl is standing on the left side of the man in black shirt"}]
00000006064.jpg | 6064_evjvqa | [{"from": "human", "value": "what does the shop that everyone is standing at sell?\n<image>"}, {"from": "gpt", "value": "everyone is standing at a shop selling christmas decorations"}]
00000006064.jpg | 6064_evjvqa | [{"from": "human", "value": "what occasion do people buy goods?\n<image>"}, {"from": "gpt", "value": "people buy goods on christmas"}]
00000006064.jpg | 6064_evjvqa | [{"from": "human", "value": "how many people in red shirts are shopping?\n<image>"}, {"from": "gpt", "value": "there is one person in red shirt shopping"}]
00000006066.jpg | 6066_evjvqa | [{"from": "human", "value": "how many people are standing near the vegetable counter?\n<image>"}, {"from": "gpt", "value": "there are two people standing near the vegetable counter"}]
00000006067.jpg | 6067_evjvqa | [{"from": "human", "value": "what time of day is the scene?\n<image>"}, {"from": "gpt", "value": "the scene is noon"}]
00000006071.jpg | 6071_evjvqa | [{"from": "human", "value": "how many bridges cross the river?\n<image>"}, {"from": "gpt", "value": "two bridges cross the river"}]
00000006071.jpg | 6071_evjvqa | [{"from": "human", "value": "what color is the flag hanging above the blockhouse?\n<image>"}, {"from": "gpt", "value": "the flag hanging above the blockhouse is red"}]
00000006071.jpg | 6071_evjvqa | [{"from": "human", "value": "what kind of house is the house next to the blockhouse?\n<image>"}, {"from": "gpt", "value": "the house next to the blockhouse is a stilt house"}]
00000006072.jpg | 6072_evjvqa | [{"from": "human", "value": "what is the man wearing suit doing?\n<image>"}, {"from": "gpt", "value": "the man is using a laptop"}]
00000006072.jpg | 6072_evjvqa | [{"from": "human", "value": "what is on the left side of the man wearing suit?\n<image>"}, {"from": "gpt", "value": "on the left side of the man wearing suit is a camera"}]
00000006072.jpg | 6072_evjvqa | [{"from": "human", "value": "what color is the coat that the man walking next to the camera is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing a blue coat"}]
00000006082.jpg | 6082_evjvqa | [{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this area is a market"}]
00000006082.jpg | 6082_evjvqa | [{"from": "human", "value": "what stall is on the right side of the woman wearing black shirt?\n<image>"}, {"from": "gpt", "value": "on the right side of the woman wearing black shirt is the pineapple and mango stall"}]
00000006082.jpg | 6082_evjvqa | [{"from": "human", "value": "which hand is the woman in black shirt holding the plastic bag in?\n<image>"}, {"from": "gpt", "value": "the woman in black shirt is holding the plastic bag in right hand"}]
00000006085.jpg | 6085_evjvqa | [{"from": "human", "value": "what color is the shirt that the shoe stall owner is wearing?\n<image>"}, {"from": "gpt", "value": "the shoe stall owner is wearing a black shirt"}]
00000006085.jpg | 6085_evjvqa | [{"from": "human", "value": "what color is the shirt that the girl standing in front of the shoe stall is wearing?\n<image>"}, {"from": "gpt", "value": "the girl standing in front of the shoe stall is wearing a white shirt"}]
00000006085.jpg | 6085_evjvqa | [{"from": "human", "value": "what stall is to the left of the girl in white shirt?\n<image>"}, {"from": "gpt", "value": "to the left of the girl in white shirt is the clothing stall"}]
00000006087.jpg | 6087_evjvqa | [{"from": "human", "value": "how many people are standing in front of the house?\n<image>"}, {"from": "gpt", "value": "there are two people standing in front of the house"}]
00000006087.jpg | 6087_evjvqa | [{"from": "human", "value": "where is the car parked?\n<image>"}, {"from": "gpt", "value": "the car is parked before the house"}]
00000006087.jpg | 6087_evjvqa | [{"from": "human", "value": "how many cars are parked before the house?\n<image>"}, {"from": "gpt", "value": "there is one car parked before the house"}]
00000006088.jpg | 6088_evjvqa | [{"from": "human", "value": "how many people are standing in front of the children's toy store next to the children's clothing store?\n<image>"}, {"from": "gpt", "value": "there are two people standing in front of the children's toy store"}]
00000006089.jpg | 6089_evjvqa | [{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this area is a harbour"}]
00000006089.jpg | 6089_evjvqa | [{"from": "human", "value": "how many cranes are there in the harbour?\n<image>"}, {"from": "gpt", "value": "there are two cranes in the harbour"}]
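
Each `conversations` cell above is a JSON-encoded list of turns, where a `"human"` turn carries the question plus an `<image>` placeholder token and the following `"gpt"` turn carries the answer. Below is a minimal sketch of decoding one row using only the Python standard library; the sample row is copied from the table, and nothing beyond `json` is assumed:

```python
import json

# One row copied from the table above. Note that the `conversations`
# column is stored as a JSON string, not as an already-parsed list.
row = {
    "image": "00000006011.jpg",
    "id": "6011_evjvqa",
    "conversations": (
        '[{"from": "human", "value": "what is the man doing?\\n<image>"}, '
        '{"from": "gpt", "value": "the man is drinking"}]'
    ),
}

# Decode the JSON string into a list of {"from": ..., "value": ...} turns.
turns = json.loads(row["conversations"])

for turn in turns:
    # "human" turns hold the question and the "<image>" placeholder;
    # "gpt" turns hold the answer.
    print(f'{turn["from"]}: {turn["value"]}')
```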