Columns: image (string, 15 chars), id (string, 8–12 chars), conversations (string, 79–268 chars)
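Each record below spans three lines: the image filename, a record id, and a conversations field holding a JSON array of alternating human/gpt turns, where the "<image>" token marks where the picture is spliced into the prompt. A minimal parsing sketch in Python (the Turn and VqaRecord types and the parse_record helper are illustrative names, not part of the dataset):

```python
import json
from dataclasses import dataclass

@dataclass
class Turn:
    speaker: str  # "human" (question) or "gpt" (answer)
    text: str     # turn text; "<image>" marks where the image is spliced in

@dataclass
class VqaRecord:
    image: str        # image filename, e.g. "00000004812.jpg"
    record_id: str    # dataset id, e.g. "4812_evjvqa"
    turns: list[Turn]

def parse_record(image: str, record_id: str, conversations_json: str) -> VqaRecord:
    # The conversations column is a JSON array of {"from": ..., "value": ...} turns.
    turns = [Turn(t["from"], t["value"]) for t in json.loads(conversations_json)]
    return VqaRecord(image, record_id, turns)

# Example using the first row below (4812_evjvqa):
rec = parse_record(
    "00000004812.jpg",
    "4812_evjvqa",
    '[{"from": "human", "value": "how many canoes are moving on the river?\\n<image>"}, '
    '{"from": "gpt", "value": "there is one canoe moving on the river"}]',
)
print(rec.record_id, "->", rec.turns[-1].text)
# prints: 4812_evjvqa -> there is one canoe moving on the river
```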
00000004812.jpg
4812_evjvqa
[{"from": "human", "value": "how many canoes are moving on the river?\n<image>"}, {"from": "gpt", "value": "there is one canoe moving on the river"}]
00000004813.jpg
4813_evjvqa
[{"from": "human", "value": "how many man-shaped statues are there?\n<image>"}, {"from": "gpt", "value": "there are two man-shaped statues"}]
00000004813.jpg
4813_evjvqa
[{"from": "human", "value": "how many people are holding umbrella?\n<image>"}, {"from": "gpt", "value": "there are two people holding umbrella"}]
00000004813.jpg
4813_evjvqa
[{"from": "human", "value": "how many colors does the bridge leading to the statues have?\n<image>"}, {"from": "gpt", "value": "the brigde has two colors"}]
00000004815.jpg
4815_evjvqa
[{"from": "human", "value": "what is everyone doing?\n<image>"}, {"from": "gpt", "value": "shopping in a supermarket"}]
00000004815.jpg
4815_evjvqa
[{"from": "human", "value": "what side of the little boy is the man standing?\n<image>"}, {"from": "gpt", "value": "standing behind the little boy"}]
00000004815.jpg
4815_evjvqa
[{"from": "human", "value": "who is the woman walking beside?\n<image>"}, {"from": "gpt", "value": "walking beside a little girl"}]
00000004816.jpg
4816_evjvqa
[{"from": "human", "value": "what is the boy doing?\n<image>"}, {"from": "gpt", "value": "eating a dish"}]
00000004816.jpg
4816_evjvqa
[{"from": "human", "value": "which hand does this boy used to hold the chopsticks?\n<image>"}, {"from": "gpt", "value": "holding chopsticks by left hand"}]
00000004820.jpg
4820_evjvqa
[{"from": "human", "value": "what color jacket is the woman wearing?\n<image>"}, {"from": "gpt", "value": "wearing a black jacket"}]
00000004820.jpg
4820_evjvqa
[{"from": "human", "value": "how many packs of noodle are the woman holding?\n<image>"}, {"from": "gpt", "value": "holding two packs of noodle"}]
00000004820.jpg
4820_evjvqa
[{"from": "human", "value": "what stall is the woman standing at?\n<image>"}, {"from": "gpt", "value": "standing at dry food stall"}]
00000004821.jpg
4821_evjvqa
[{"from": "human", "value": "what is the lady wearing scarf holding?\n<image>"}, {"from": "gpt", "value": "holding a bunch of vegetables"}]
00000004821.jpg
4821_evjvqa
[{"from": "human", "value": "what color jacket is the girl in a scarf wearing?\n<image>"}, {"from": "gpt", "value": "wearing black jacket"}]
00000004821.jpg
4821_evjvqa
[{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "standing in a supermarket"}]
00000004822.jpg
4822_evjvqa
[{"from": "human", "value": "what place is this?\n<image>"}, {"from": "gpt", "value": "this is a monument"}]
00000004822.jpg
4822_evjvqa
[{"from": "human", "value": "what action are the figures in the monument doing?\n<image>"}, {"from": "gpt", "value": "pulling a cannon"}]
00000004822.jpg
4822_evjvqa
[{"from": "human", "value": "how many man-shaped statues are in front of the cannon?\n<image>"}, {"from": "gpt", "value": "there are two man-shaped statues"}]
00000004827.jpg
4827_evjvqa
[{"from": "human", "value": "where are the two women?\n<image>"}, {"from": "gpt", "value": "at a floating market"}]
00000004827.jpg
4827_evjvqa
[{"from": "human", "value": "how many women are sitting on a boat?\n<image>"}, {"from": "gpt", "value": "there is one woman sitting on a boat"}]
00000004827.jpg
4827_evjvqa
[{"from": "human", "value": "what color shirt is the woman holding a bunch of fruit wearing?\n<image>"}, {"from": "gpt", "value": "wearing a red shirt"}]
00000004830.jpg
4830_evjvqa
[{"from": "human", "value": "where is this place?\n<image>"}, {"from": "gpt", "value": "this is a morning market"}]
00000004830.jpg
4830_evjvqa
[{"from": "human", "value": "how is the market?\n<image>"}, {"from": "gpt", "value": "the market is crowded"}]
00000004830.jpg
4830_evjvqa
[{"from": "human", "value": "what color helmet is the person holding red bucket wearing?\n<image>"}, {"from": "gpt", "value": "wearing a yellow helmet"}]
00000004831.jpg
4831_evjvqa
[{"from": "human", "value": "what color shirt is the man wearing?\n<image>"}, {"from": "gpt", "value": "wearing a white shirt"}]
00000004834.jpg
4834_evjvqa
[{"from": "human", "value": "how many cars are in front of the market?\n<image>"}, {"from": "gpt", "value": "there is one car in front of the market"}]
00000004834.jpg
4834_evjvqa
[{"from": "human", "value": "which side of the car is the truck parking?\n<image>"}, {"from": "gpt", "value": "the truck is parking in front of the car"}]
00000004835.jpg
4835_evjvqa
[{"from": "human", "value": "what is on the left of the man walking alongside his bicycle?\n<image>"}, {"from": "gpt", "value": "on the left is the buoys"}]
00000004835.jpg
4835_evjvqa
[{"from": "human", "value": "what color is the car parked on the left of the street?\n<image>"}, {"from": "gpt", "value": "white"}]
00000004837.jpg
4837_evjvqa
[{"from": "human", "value": "how many bicycles are parked by the street?\n<image>"}, {"from": "gpt", "value": "there is one bicycle is parked by the street"}]
00000004837.jpg
4837_evjvqa
[{"from": "human", "value": "how many motorcycles are on the left of the girl?\n<image>"}, {"from": "gpt", "value": "there are two motorcycles on the left of the girl"}]
00000004837.jpg
4837_evjvqa
[{"from": "human", "value": "which store is the girl standing in the front?\n<image>"}, {"from": "gpt", "value": "the girl is standing in front of a souvenir store"}]
00000004838.jpg
4838_evjvqa
[{"from": "human", "value": "what is around everyone?\n<image>"}, {"from": "gpt", "value": "around are christmas decoration items"}]
00000004838.jpg
4838_evjvqa
[{"from": "human", "value": "what color coat is the girl with backpack wearing?\n<image>"}, {"from": "gpt", "value": "wearing gray coat"}]
00000004838.jpg
4838_evjvqa
[{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "standing in a decoration store"}]
00000004842.jpg
4842_evjvqa
[{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "standing in an art museum"}]
00000004842.jpg
4842_evjvqa
[{"from": "human", "value": "what are displayed on the wall?\n<image>"}, {"from": "gpt", "value": "on the wall are artworks displayed"}]
00000004842.jpg
4842_evjvqa
[{"from": "human", "value": "how many people are enjoying the pictures?\n<image>"}, {"from": "gpt", "value": "there are three people enjoying the pictures"}]
00000004843.jpg
4843_evjvqa
[{"from": "human", "value": "where is everyone standing?\n<image>"}, {"from": "gpt", "value": "standing in a museum"}]
00000004843.jpg
4843_evjvqa
[{"from": "human", "value": "what is displayed in front of the group of people?\n<image>"}, {"from": "gpt", "value": "in front of the group of people are ancient pots"}]
00000004843.jpg
4843_evjvqa
[{"from": "human", "value": "what is the woman in yellow dress doing?\n<image>"}, {"from": "gpt", "value": "presenting the artifacts to everyone"}]
00000004844.jpg
4844_evjvqa
[{"from": "human", "value": "what is drawn in the picture?\n<image>"}, {"from": "gpt", "value": "a street is drawn in the picture"}]
00000004844.jpg
4844_evjvqa
[{"from": "human", "value": "how many people are riding cyclo in the picture?\n<image>"}, {"from": "gpt", "value": "there is one person riding cyclo in the picture"}]
00000004844.jpg
4844_evjvqa
[{"from": "human", "value": "where is the person carrying pole going?\n<image>"}, {"from": "gpt", "value": "going in the street"}]
00000004845.jpg
4845_evjvqa
[{"from": "human", "value": "what color shirt is the boy picking up food wearing?\n<image>"}, {"from": "gpt", "value": "wearing a purple shirt"}]
00000004845.jpg
4845_evjvqa
[{"from": "human", "value": "what color shirt is the korean boy wearing?\n<image>"}, {"from": "gpt", "value": "wearing white shirt"}]
00000004848.jpg
4848_evjvqa
[{"from": "human", "value": "where is the man wearing white hat sitting?\n<image>"}, {"from": "gpt", "value": "sitting on a cyclo"}]
00000004848.jpg
4848_evjvqa
[{"from": "human", "value": "how many traffic signs are on the roadside?\n<image>"}, {"from": "gpt", "value": "there are four traffic signs on the roadside"}]
00000004848.jpg
4848_evjvqa
[{"from": "human", "value": "what main color are the houses in the town?\n<image>"}, {"from": "gpt", "value": "the houses are yellow"}]
00000004849.jpg
4849_evjvqa
[{"from": "human", "value": "which hand is the girl using to hold the hat?\n<image>"}, {"from": "gpt", "value": "using the right hand to hold the hat"}]
00000004849.jpg
4849_evjvqa
[{"from": "human", "value": "what color is the shirt of the girl?\n<image>"}, {"from": "gpt", "value": "the shirt is yellow"}]
00000004849.jpg
4849_evjvqa
[{"from": "human", "value": "where is the girl?\n<image>"}, {"from": "gpt", "value": "at a floating market on a river"}]
00000004850.jpg
4850_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is a museum"}]
00000004850.jpg
4850_evjvqa
[{"from": "human", "value": "what is around the uncle ho statue?\n<image>"}, {"from": "gpt", "value": "the statues of people surrounding uncle ho"}]
00000004850.jpg
4850_evjvqa
[{"from": "human", "value": "where are the statues located?\n<image>"}, {"from": "gpt", "value": "located in the hall of the museum"}]
00000004851.jpg
4851_evjvqa
[{"from": "human", "value": "what product is on this stall?\n<image>"}, {"from": "gpt", "value": "this stall has some types of fruit"}]
00000004851.jpg
4851_evjvqa
[{"from": "human", "value": "what color is the lantern?\n<image>"}, {"from": "gpt", "value": "the lantern is red"}]
00000004851.jpg
4851_evjvqa
[{"from": "human", "value": "how many people are standing at the meat stall?\n<image>"}, {"from": "gpt", "value": "three people are standing at the meat stall"}]
00000004852.jpg
4852_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is a temple"}]
00000004852.jpg
4852_evjvqa
[{"from": "human", "value": "how many people are standing in the temple?\n<image>"}, {"from": "gpt", "value": "there is one person standing in the temple"}]
00000004852.jpg
4852_evjvqa
[{"from": "human", "value": "what is at the center of the temple?\n<image>"}, {"from": "gpt", "value": "placed at the center is an altar"}]
00000004853.jpg
4853_evjvqa
[{"from": "human", "value": "how many people are standing in front of the eating house?\n<image>"}, {"from": "gpt", "value": "there is one person standing in front of the eating house"}]
00000004854.jpg
4854_evjvqa
[{"from": "human", "value": "what animal is moving in the yard?\n<image>"}, {"from": "gpt", "value": "the chickens are moving in the yard"}]
00000004854.jpg
4854_evjvqa
[{"from": "human", "value": "where is the woman standing?\n<image>"}, {"from": "gpt", "value": "standing beside the entry of the house"}]
00000004855.jpg
4855_evjvqa
[{"from": "human", "value": "what is the boy in white shirt holding in his hand?\n<image>"}, {"from": "gpt", "value": "holding a camera in his hand"}]
00000004855.jpg
4855_evjvqa
[{"from": "human", "value": "where is the girl in white shirt sitting compared to the boy holding camera?\n<image>"}, {"from": "gpt", "value": "the girl is sitting face to face with the boy"}]
00000004855.jpg
4855_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a chinese food court"}]
00000004857.jpg
4857_evjvqa
[{"from": "human", "value": "how many women are standing in front of the vegetable stall?\n<image>"}, {"from": "gpt", "value": "there are two women standing in front of the vegetable stall"}]
00000004857.jpg
4857_evjvqa
[{"from": "human", "value": "what stall are the two women standing at?\n<image>"}, {"from": "gpt", "value": "standing at the vegetable stall"}]
00000004859.jpg
4859_evjvqa
[{"from": "human", "value": "which hand is this man using to hold the mic?\n<image>"}, {"from": "gpt", "value": "using left hand to hold the mic"}]
00000004859.jpg
4859_evjvqa
[{"from": "human", "value": "what suit is this man wearing?\n<image>"}, {"from": "gpt", "value": "wearing a military uniform"}]
00000004861.jpg
4861_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000004861.jpg
4861_evjvqa
[{"from": "human", "value": "how is the market?\n<image>"}, {"from": "gpt", "value": "the market is very crowded"}]
00000004862.jpg
4862_evjvqa
[{"from": "human", "value": "how many unicorns are on the stage?\n<image>"}, {"from": "gpt", "value": "there are four unicorns on the stage"}]
00000004862.jpg
4862_evjvqa
[{"from": "human", "value": "how many men are cutting the open ceremony ribbon?\n<image>"}, {"from": "gpt", "value": "two men are cutting the open ceremony ribbon"}]
00000004862.jpg
4862_evjvqa
[{"from": "human", "value": "what color ao dai are the girls holding the ribbons wearing?\n<image>"}, {"from": "gpt", "value": "wearing red ao dai"}]
00000004863.jpg
4863_evjvqa
[{"from": "human", "value": "how many cars are running out of the supermarket?\n<image>"}, {"from": "gpt", "value": "there is one car running out of the supermarket"}]
00000004865.jpg
4865_evjvqa
[{"from": "human", "value": "what store is next to the bakery?\n<image>"}, {"from": "gpt", "value": "next to it is a non la store"}]
00000004865.jpg
4865_evjvqa
[{"from": "human", "value": "how many people are walking in front of the bakery?\n<image>"}, {"from": "gpt", "value": "there are two people walking in front of the bakery"}]
00000004867.jpg
4867_evjvqa
[{"from": "human", "value": "how many flags are in the crowd?\n<image>"}, {"from": "gpt", "value": "there is one flag in the crowd"}]
00000004867.jpg
4867_evjvqa
[{"from": "human", "value": "what icon is on the red shirts?\n<image>"}, {"from": "gpt", "value": "a five-pointed star icon is on the red shirts"}]
00000004867.jpg
4867_evjvqa
[{"from": "human", "value": "by whom is the flag held?\n<image>"}, {"from": "gpt", "value": "the flag is held by a woman standing in the back row"}]
00000004868.jpg
4868_evjvqa
[{"from": "human", "value": "which side of the cycler in gray shirt is the cycler in red shirt cycling?\n<image>"}, {"from": "gpt", "value": "cycling behind the cycler in gray shirt"}]
00000004868.jpg
4868_evjvqa
[{"from": "human", "value": "how many people are cycling?\n<image>"}, {"from": "gpt", "value": "there are two people cycling"}]
00000004868.jpg
4868_evjvqa
[{"from": "human", "value": "what color shirt is the cycler behind the cycler in gray shirt wearing?\n<image>"}, {"from": "gpt", "value": "wearing red shirt"}]
00000004871.jpg
4871_evjvqa
[{"from": "human", "value": "what time of the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000004871.jpg
4871_evjvqa
[{"from": "human", "value": "how many bikes are parked in front of the café?\n<image>"}, {"from": "gpt", "value": "there is one bike parked in front of the café"}]
00000004872.jpg
4872_evjvqa
[{"from": "human", "value": "what color shirt is the man leaning against the pillar wearing?\n<image>"}, {"from": "gpt", "value": "wearing blue shirt"}]
00000004872.jpg
4872_evjvqa
[{"from": "human", "value": "what is behind the crowd?\n<image>"}, {"from": "gpt", "value": "behind is a house"}]
00000004872.jpg
4872_evjvqa
[{"from": "human", "value": "what is the house in the back made of?\n<image>"}, {"from": "gpt", "value": "the house in the back is made of leaf and wood"}]
00000004874.jpg
4874_evjvqa
[{"from": "human", "value": "what store is this?\n<image>"}, {"from": "gpt", "value": "this is a clothing store"}]
00000004874.jpg
4874_evjvqa
[{"from": "human", "value": "how many poles of shirts are on promotion?\n<image>"}, {"from": "gpt", "value": "there are two poles of shirts on promotion"}]
00000004876.jpg
4876_evjvqa
[{"from": "human", "value": "what have just happened in this area?\n<image>"}, {"from": "gpt", "value": "this area has just had an accident caused by a coach"}]
00000004876.jpg
4876_evjvqa
[{"from": "human", "value": "how many police officers are standing in the accident area?\n<image>"}, {"from": "gpt", "value": "there is one police officer standing in the accident area"}]
00000004877.jpg
4877_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000004877.jpg
4877_evjvqa
[{"from": "human", "value": "what color is the car?\n<image>"}, {"from": "gpt", "value": "the car is white"}]
00000004877.jpg
4877_evjvqa
[{"from": "human", "value": "how many people are cycling on the left of the car?\n<image>"}, {"from": "gpt", "value": "there is one person cycling on the left of the car"}]
00000004879.jpg
4879_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is a beach"}]
00000004879.jpg
4879_evjvqa
[{"from": "human", "value": "where is the water park located?\n<image>"}, {"from": "gpt", "value": "the water park is located on the shore of the beach"}]
00000004879.jpg
4879_evjvqa
[{"from": "human", "value": "how is the beach?\n<image>"}, {"from": "gpt", "value": "the beach is very crowded"}]