Columns: image (string, 15 characters), id (string, 8–12 characters), conversations (string, 79–268 characters)
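Each record below pairs an image file name with a record id and a JSON-encoded list of human/gpt conversation turns. Below is a minimal sketch of how such records could be parsed, assuming the rows are stored as JSON Lines with field names matching the columns above; the file name `evjvqa_train.jsonl` and the helper `load_records` are hypothetical, not part of the dataset.

```python
import json

def load_records(path):
    """Sketch: parse JSON Lines rows into the three columns listed above."""
    records = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            row = json.loads(line)
            convs = row["conversations"]
            # the conversations column may itself be a JSON-encoded string;
            # decode it into a list of {"from": ..., "value": ...} turns if so
            if isinstance(convs, str):
                convs = json.loads(convs)
            records.append({
                "image": row["image"],  # e.g. "00000004702.jpg"
                "id": row["id"],        # e.g. "4702_evjvqa"
                "conversations": convs,
            })
    return records

if __name__ == "__main__":
    # hypothetical file name; print the first record's question text
    sample = load_records("evjvqa_train.jsonl")
    print(sample[0]["image"], sample[0]["id"])
    print(sample[0]["conversations"][0]["value"])
```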
00000006396.jpg
6396_evjvqa
[{"from": "human", "value": "what color are the flip-flops that the man feeding fish is wearing?\n<image>"}, {"from": "gpt", "value": "the man feeding fish is wearing a pair of black flip-flops."}]
00000004702.jpg
4702_evjvqa
[{"from": "human", "value": "how many pillows are placed on the sofa?\n<image>"}, {"from": "gpt", "value": "there are three pillows placed on the sofa"}]
00000004702.jpg
4702_evjvqa
[{"from": "human", "value": "where is the stack of newspapers placed?\n<image>"}, {"from": "gpt", "value": "a stack of newspapers is placed under the table"}]
00000004702.jpg
4702_evjvqa
[{"from": "human", "value": "how many glasses are on the table?\n<image>"}, {"from": "gpt", "value": "there is one glass on the table"}]
00000004704.jpg
4704_evjvqa
[{"from": "human", "value": "what area are people in?\n<image>"}, {"from": "gpt", "value": "people are in a market"}]
00000004704.jpg
4704_evjvqa
[{"from": "human", "value": "how many girls are there standing to the left of the guy?\n<image>"}, {"from": "gpt", "value": "there are two girls standing to the left of the guy"}]
00000004704.jpg
4704_evjvqa
[{"from": "human", "value": "what color shirt is the girl behind the guy wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a yellow t-shirt"}]
00000004706.jpg
4706_evjvqa
[{"from": "human", "value": "what is above the potted plant?\n<image>"}, {"from": "gpt", "value": "above the potted plant is a mirror"}]
00000004706.jpg
4706_evjvqa
[{"from": "human", "value": "where is the buddha face statue placed?\n<image>"}, {"from": "gpt", "value": "placed on a shelf"}]
00000004706.jpg
4706_evjvqa
[{"from": "human", "value": "where is the big potted plant placed?\n<image>"}, {"from": "gpt", "value": "the big potted plant is placed in front of the door"}]
00000004708.jpg
4708_evjvqa
[{"from": "human", "value": "which hand does the girl put on her chin?\n<image>"}, {"from": "gpt", "value": "the girl is putting her left hand on her chin"}]
00000004708.jpg
4708_evjvqa
[{"from": "human", "value": "what is the girl holding in her left hand?\n<image>"}, {"from": "gpt", "value": "the girl is holding a coat"}]
00000004708.jpg
4708_evjvqa
[{"from": "human", "value": "how many pine trees are there behind the girl?\n<image>"}, {"from": "gpt", "value": "there are three pine trees behind the girl"}]
00000004711.jpg
4711_evjvqa
[{"from": "human", "value": "how many people are standing in front of the notice board?\n<image>"}, {"from": "gpt", "value": "there are four people standing in front of the notice board"}]
00000004713.jpg
4713_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000004713.jpg
4713_evjvqa
[{"from": "human", "value": "what is the butcher doing?\n<image>"}, {"from": "gpt", "value": "slicing meat for a customer"}]
00000004713.jpg
4713_evjvqa
[{"from": "human", "value": "how many people are buying meat at the meat stall?\n<image>"}, {"from": "gpt", "value": "there is a person buying meat"}]
00000004714.jpg
4714_evjvqa
[{"from": "human", "value": "what color shirt is the man by the road wearing?\n<image>"}, {"from": "gpt", "value": "wearing a blue shirt"}]
00000004714.jpg
4714_evjvqa
[{"from": "human", "value": "what is the woman pulling on the street?\n<image>"}, {"from": "gpt", "value": "pulling a rickshaw"}]
00000004714.jpg
4714_evjvqa
[{"from": "human", "value": "what is in front of the man in blue?\n<image>"}, {"from": "gpt", "value": "in front of him is a garbage truck"}]
00000004722.jpg
4722_evjvqa
[{"from": "human", "value": "what is the woman in blue doing?\n<image>"}, {"from": "gpt", "value": "selling to customers"}]
00000004722.jpg
4722_evjvqa
[{"from": "human", "value": "who is the woman in blue selling to?\n<image>"}, {"from": "gpt", "value": "selling to tourists"}]
00000004722.jpg
4722_evjvqa
[{"from": "human", "value": "what is the woman in blue selling?\n<image>"}, {"from": "gpt", "value": "selling cassava tubers"}]
00000004723.jpg
4723_evjvqa
[{"from": "human", "value": "what area are people in?\n<image>"}, {"from": "gpt", "value": "people are in a supermarket"}]
00000004725.jpg
4725_evjvqa
[{"from": "human", "value": "how many people are sitting in front of the café?\n<image>"}, {"from": "gpt", "value": "there are two people sitting in front of the café"}]
00000004725.jpg
4725_evjvqa
[{"from": "human", "value": "how many televisions are there in the café?\n<image>"}, {"from": "gpt", "value": "there is a tv in the café"}]
00000004725.jpg
4725_evjvqa
[{"from": "human", "value": "where is the coffee shop clock located?\n<image>"}, {"from": "gpt", "value": "the clock is hanging on the door inside the café"}]
00000004727.jpg
4727_evjvqa
[{"from": "human", "value": "what outfit is the guy wearing?\n<image>"}, {"from": "gpt", "value": "wearing an ao dai"}]
00000004727.jpg
4727_evjvqa
[{"from": "human", "value": "where is the young man standing?\n<image>"}, {"from": "gpt", "value": "standing in front of hue citadel"}]
00000004727.jpg
4727_evjvqa
[{"from": "human", "value": "which hand does the guy put in front of his chest?\n<image>"}, {"from": "gpt", "value": "putting his left hand in front of his chest"}]
00000004728.jpg
4728_evjvqa
[{"from": "human", "value": "how many customers is the store serving?\n<image>"}, {"from": "gpt", "value": "the store is serving three customers"}]
00000004728.jpg
4728_evjvqa
[{"from": "human", "value": "with which hand does the stall owner give the item to the customer?\n<image>"}, {"from": "gpt", "value": "giving it with his right hand"}]
00000004728.jpg
4728_evjvqa
[{"from": "human", "value": "who is receiving the item from the stall owner?\n<image>"}, {"from": "gpt", "value": "the man in the middle is receiving the item"}]
00000004733.jpg
4733_evjvqa
[{"from": "human", "value": "where are the kids standing?\n<image>"}, {"from": "gpt", "value": "standing in a field"}]
00000004733.jpg
4733_evjvqa
[{"from": "human", "value": "how many cows are eating grass?\n<image>"}, {"from": "gpt", "value": "there is a cow eating grass"}]
00000004733.jpg
4733_evjvqa
[{"from": "human", "value": "what are the children doing in the field?\n<image>"}, {"from": "gpt", "value": "the children are playing in the field"}]
00000004735.jpg
4735_evjvqa
[{"from": "human", "value": "how many fruits are there on the tree that are not wrapped in plastic bags?\n<image>"}, {"from": "gpt", "value": "there is one fruit that is not in a plastic bag"}]
00000004736.jpg
4736_evjvqa
[{"from": "human", "value": "what color is the company uniform?\n<image>"}, {"from": "gpt", "value": "the company uniform is white"}]
00000004736.jpg
4736_evjvqa
[{"from": "human", "value": "what are people in the company doing?\n<image>"}, {"from": "gpt", "value": "people are handling the company's products"}]
00000004736.jpg
4736_evjvqa
[{"from": "human", "value": "how many people are behind the group standing next to the table?\n<image>"}, {"from": "gpt", "value": "there are three people behind the group"}]
00000004738.jpg
4738_evjvqa
[{"from": "human", "value": "what color is the shirt of the customer standing at the vegetable stall?\n<image>"}, {"from": "gpt", "value": "wearing a white shirt"}]
00000004739.jpg
4739_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "everyone is in a diner"}]
00000004739.jpg
4739_evjvqa
[{"from": "human", "value": "who is holding the baby in orange?\n<image>"}, {"from": "gpt", "value": "the baby is being held by a man"}]
00000004739.jpg
4739_evjvqa
[{"from": "human", "value": "how many menus does the store have next to the order counter?\n<image>"}, {"from": "gpt", "value": "the store has three menus next to the ordering counter"}]
00000004744.jpg
4744_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]
00000004744.jpg
4744_evjvqa
[{"from": "human", "value": "how many women are standing by the stand?\n<image>"}, {"from": "gpt", "value": "there is one woman standing by the stand"}]
00000004744.jpg
4744_evjvqa
[{"from": "human", "value": "what color is the shirt of the man holding the goods?\n<image>"}, {"from": "gpt", "value": "wearing a blue shirt"}]
00000004746.jpg
4746_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a roundabout"}]
00000004746.jpg
4746_evjvqa
[{"from": "human", "value": "what is in the middle of the roundabout?\n<image>"}, {"from": "gpt", "value": "there is a small pool in the middle of the roundabout"}]
00000004746.jpg
4746_evjvqa
[{"from": "human", "value": "what color is the bus parked by the roadside?\n<image>"}, {"from": "gpt", "value": "the bus is blue"}]
00000004748.jpg
4748_evjvqa
[{"from": "human", "value": "what color is the staff uniform of the restaurant?\n<image>"}, {"from": "gpt", "value": "the uniform is blue"}]
00000004748.jpg
4748_evjvqa
[{"from": "human", "value": "what is the staff doing at the restaurant?\n<image>"}, {"from": "gpt", "value": "cleaning the glass windows"}]
00000004748.jpg
4748_evjvqa
[{"from": "human", "value": "how many light bulbs are there inside the shop?\n<image>"}, {"from": "gpt", "value": "there are three light bulbs hanging"}]
00000004754.jpg
4754_evjvqa
[{"from": "human", "value": "what is this zone?\n<image>"}, {"from": "gpt", "value": "this is a blockhouse"}]
00000004754.jpg
4754_evjvqa
[{"from": "human", "value": "how many flags are in this zone?\n<image>"}, {"from": "gpt", "value": "there is a flag in this zone"}]
00000004754.jpg
4754_evjvqa
[{"from": "human", "value": "what color is the flag?\n<image>"}, {"from": "gpt", "value": "the flag is red"}]
00000004755.jpg
4755_evjvqa
[{"from": "human", "value": "how many cars are driving in front of the shop?\n<image>"}, {"from": "gpt", "value": "there is a car driving in front of the shop"}]
00000004756.jpg
4756_evjvqa
[{"from": "human", "value": "how many pitches are there in front of the citadel gate?\n<image>"}, {"from": "gpt", "value": "there are two pitches in front of the citadel gate"}]
00000004756.jpg
4756_evjvqa
[{"from": "human", "value": "how many people are entering the citadel?\n<image>"}, {"from": "gpt", "value": "there is a person entering the citadel"}]
00000004759.jpg
4759_evjvqa
[{"from": "human", "value": "what is this store selling?\n<image>"}, {"from": "gpt", "value": "this is a souvenir shop"}]
00000004759.jpg
4759_evjvqa
[{"from": "human", "value": "what is under the two lanterns?\n<image>"}, {"from": "gpt", "value": "underneath them are woven baskets and bowls"}]
00000004759.jpg
4759_evjvqa
[{"from": "human", "value": "where are the paintings placed?\n<image>"}, {"from": "gpt", "value": "the paintings are hung on the wall"}]
00000004760.jpg
4760_evjvqa
[{"from": "human", "value": "what is the supermarket employee doing?\n<image>"}, {"from": "gpt", "value": "the employee is compiling an inventory of the goods"}]
00000004760.jpg
4760_evjvqa
[{"from": "human", "value": "what product is in front of the staff member?\n<image>"}, {"from": "gpt", "value": "in front of the staff member are instant noodles"}]
00000004760.jpg
4760_evjvqa
[{"from": "human", "value": "what color is the supermarket uniform?\n<image>"}, {"from": "gpt", "value": "the supermarket uniform is red"}]
00000004761.jpg
4761_evjvqa
[{"from": "human", "value": "what is the girl holding in her hand?\n<image>"}, {"from": "gpt", "value": "the girl is holding a mobile phone in her hand"}]
00000004761.jpg
4761_evjvqa
[{"from": "human", "value": "what color is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a blue t-shirt"}]
00000004761.jpg
4761_evjvqa
[{"from": "human", "value": "which side of the girl is the boy standing on?\n<image>"}, {"from": "gpt", "value": "standing on the left-hand side of the girl"}]
00000004766.jpg
4766_evjvqa
[{"from": "human", "value": "how many umbrellas are there in front of the shop?\n<image>"}, {"from": "gpt", "value": "there are two umbrellas in front of the shop"}]
00000004766.jpg
4766_evjvqa
[{"from": "human", "value": "where is the customer sitting in front of the door?\n<image>"}, {"from": "gpt", "value": "sitting under an umbrella on the balcony"}]
00000004769.jpg
4769_evjvqa
[{"from": "human", "value": "where is the vending machine located?\n<image>"}, {"from": "gpt", "value": "the vending machine is located in front of the shop"}]
00000004769.jpg
4769_evjvqa
[{"from": "human", "value": "where are the cartons located?\n<image>"}, {"from": "gpt", "value": "the cartons are placed in front of the shop"}]
00000004771.jpg
4771_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a crowded neighborhood"}]
00000004771.jpg
4771_evjvqa
[{"from": "human", "value": "what is the color of the lantern hanging in front of the house?\n<image>"}, {"from": "gpt", "value": "the lantern is red"}]
00000004771.jpg
4771_evjvqa
[{"from": "human", "value": "on which side does the girl walking on the street carry her bag?\n<image>"}, {"from": "gpt", "value": "the girl is carrying the bag on the right"}]
00000004772.jpg
4772_evjvqa
[{"from": "human", "value": "how many chairs are placed next to the table?\n<image>"}, {"from": "gpt", "value": "there are three chairs placed next to the table"}]
00000004774.jpg
4774_evjvqa
[{"from": "human", "value": "what color is the sheet on the boat?\n<image>"}, {"from": "gpt", "value": "the sheet on the boat is gold"}]
00000004774.jpg
4774_evjvqa
[{"from": "human", "value": "where is the canoe?\n<image>"}, {"from": "gpt", "value": "on the river in a forest"}]
00000004774.jpg
4774_evjvqa
[{"from": "human", "value": "how many canoes are there on the river?\n<image>"}, {"from": "gpt", "value": "there is a canoe on the river"}]
00000004776.jpg
4776_evjvqa
[{"from": "human", "value": "what color is the coat of the person wearing the conical hat?\n<image>"}, {"from": "gpt", "value": "wearing a red coat"}]
00000004776.jpg
4776_evjvqa
[{"from": "human", "value": "how many people are sitting on the coracle?\n<image>"}, {"from": "gpt", "value": "there are two people sitting on a coracle"}]
00000004776.jpg
4776_evjvqa
[{"from": "human", "value": "what area are the two people in?\n<image>"}, {"from": "gpt", "value": "they are on a river in a forest"}]
00000004777.jpg
4777_evjvqa
[{"from": "human", "value": "what color t-shirt is the guy wearing next to the wallet seller?\n<image>"}, {"from": "gpt", "value": "wearing a brown t-shirt"}]
00000004778.jpg
4778_evjvqa
[{"from": "human", "value": "where is the girl shopping?\n<image>"}, {"from": "gpt", "value": "the girl is shopping at the supermarket"}]
00000004778.jpg
4778_evjvqa
[{"from": "human", "value": "what is the girl holding in her hand?\n<image>"}, {"from": "gpt", "value": "holding a product in hand"}]
00000004778.jpg
4778_evjvqa
[{"from": "human", "value": "what color shirt is the guy wearing?\n<image>"}, {"from": "gpt", "value": "wearing a white t-shirt"}]
00000004779.jpg
4779_evjvqa
[{"from": "human", "value": "what booth are people standing at?\n<image>"}, {"from": "gpt", "value": "standing at a confectionery booth"}]
00000004779.jpg
4779_evjvqa
[{"from": "human", "value": "who is holding the cake box?\n<image>"}, {"from": "gpt", "value": "the woman in the middle is holding a box of cakes"}]
00000004779.jpg
4779_evjvqa
[{"from": "human", "value": "how many people are standing to the left of the woman?\n<image>"}, {"from": "gpt", "value": "there is a person standing to the left of the woman"}]
00000004780.jpg
4780_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "in a woman's shop"}]
00000004780.jpg
4780_evjvqa
[{"from": "human", "value": "what is the woman in the apron doing?\n<image>"}, {"from": "gpt", "value": "selling to customers"}]
00000004780.jpg
4780_evjvqa
[{"from": "human", "value": "what color is the basket held by the woman in the shop?\n<image>"}, {"from": "gpt", "value": "the basket is blue"}]
00000004781.jpg
4781_evjvqa
[{"from": "human", "value": "what is in front of people?\n<image>"}, {"from": "gpt", "value": "in front of everyone is a canoe"}]
00000004781.jpg
4781_evjvqa
[{"from": "human", "value": "how many people are wearing police uniforms?\n<image>"}, {"from": "gpt", "value": "two people are wearing police uniforms"}]
00000004781.jpg
4781_evjvqa
[{"from": "human", "value": "where are people standing?\n<image>"}, {"from": "gpt", "value": "standing in a history museum"}]
00000004782.jpg
4782_evjvqa
[{"from": "human", "value": "where are people standing?\n<image>"}, {"from": "gpt", "value": "standing in the florist area"}]
00000004782.jpg
4782_evjvqa
[{"from": "human", "value": "what color are the flowers?\n<image>"}, {"from": "gpt", "value": "the flowers are yellow"}]
00000004782.jpg
4782_evjvqa
[{"from": "human", "value": "how many people are looking at flowers?\n<image>"}, {"from": "gpt", "value": "there are three people looking at flowers"}]
00000004783.jpg
4783_evjvqa
[{"from": "human", "value": "how many people are wheeling bicycles?\n<image>"}, {"from": "gpt", "value": "there is a person wheeling a bicycle"}]
00000004783.jpg
4783_evjvqa
[{"from": "human", "value": "where are the motorbikes parked?\n<image>"}, {"from": "gpt", "value": "parked on the side of the road"}]