Columns: image (string, length 15), id (string, length 8–12), conversations (JSON string, length 79–268)
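Each record below spans three lines, in column order: the image filename, the sample id, and the conversations field, a JSON-encoded list of chat turns in the common `<image>`-placeholder format (a human turn carrying the question plus the `<image>` token, followed by a gpt turn carrying the answer). A minimal parsing sketch follows; the `Turn` and `Record` dataclasses and the `parse_record` helper are illustrative names I am assuming, not part of the dataset itself:

```python
import json
from dataclasses import dataclass

@dataclass
class Turn:
    speaker: str  # "human" or "gpt"
    text: str     # question (ending in the "<image>" placeholder) or answer

@dataclass
class Record:
    image: str      # e.g. "00000002672.jpg"
    sample_id: str  # e.g. "2672_evjvqa"
    turns: list     # parsed Turn objects

def parse_record(image: str, sample_id: str, conversations: str) -> Record:
    # Decode the JSON "conversations" column into Turn objects;
    # raises json.JSONDecodeError on a malformed row.
    turns = [Turn(t["from"], t["value"]) for t in json.loads(conversations)]
    return Record(image, sample_id, turns)

# First record from the listing below:
rec = parse_record(
    "00000002672.jpg",
    "2672_evjvqa",
    '[{"from": "human", "value": "how many female teachers stand to guide '
    'the students to play tug?\\n<image>"}, '
    '{"from": "gpt", "value": "three"}]',
)
print(rec.turns[0].text)  # the question, a newline, then the <image> token
print(rec.turns[1].text)  # -> three
```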
00000002672.jpg
2672_evjvqa
[{"from": "human", "value": "how many female teachers stand to guide the students to play tug?\n<image>"}, {"from": "gpt", "value": "three"}]
00000002672.jpg
2672_evjvqa
[{"from": "human", "value": "how many teachers do the students play tug under the guidance of?\n<image>"}, {"from": "gpt", "value": "three female teachers"}]
00000002675.jpg
2675_evjvqa
[{"from": "human", "value": "where is the woman in pink shirt standing?\n<image>"}, {"from": "gpt", "value": "in a street exhibiting photos"}]
00000002675.jpg
2675_evjvqa
[{"from": "human", "value": "what is the woman in pink shirt doing?\n<image>"}, {"from": "gpt", "value": "viewing the photos on display"}]
00000002675.jpg
2675_evjvqa
[{"from": "human", "value": "what subject are the photos displayed at the place where the woman in pink shirt is looking at?\n<image>"}, {"from": "gpt", "value": "medical, health"}]
00000002675.jpg
2675_evjvqa
[{"from": "human", "value": "whom do the four photos at the place where the woman is standing watching mainly mention?\n<image>"}, {"from": "gpt", "value": "patients and doctors"}]
00000002675.jpg
2675_evjvqa
[{"from": "human", "value": "where are the pictures that the woman in the pink shirt is looking at mainly taken?\n<image>"}, {"from": "gpt", "value": "in the hospitals"}]
00000002676.jpg
2676_evjvqa
[{"from": "human", "value": "what are on the two sides of this road?\n<image>"}, {"from": "gpt", "value": "displayed rows of tables and chairs"}]
00000002676.jpg
2676_evjvqa
[{"from": "human", "value": "where are the tables and chairs displayed?\n<image>"}, {"from": "gpt", "value": "on both sides of the road"}]
00000002676.jpg
2676_evjvqa
[{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "in a sidewalk diner"}]
00000002681.jpg
2681_evjvqa
[{"from": "human", "value": "where are the two young people standing?\n<image>"}, {"from": "gpt", "value": "on a pavement, in front of a yellow painted house"}]
00000002681.jpg
2681_evjvqa
[{"from": "human", "value": "whom does the man in white shirt stand with?\n<image>"}, {"from": "gpt", "value": "with a woman in blue dress"}]
00000002681.jpg
2681_evjvqa
[{"from": "human", "value": "whom does the woman in blue dress stand with?\n<image>"}, {"from": "gpt", "value": "with a man in white shirt"}]
00000002686.jpg
2686_evjvqa
[{"from": "human", "value": "where is this woman standing?\n<image>"}, {"from": "gpt", "value": "at a fruit department"}]
00000002686.jpg
2686_evjvqa
[{"from": "human", "value": "what products does this department sell?\n<image>"}, {"from": "gpt", "value": "fruits"}]
00000002686.jpg
2686_evjvqa
[{"from": "human", "value": "what is the woman holding in hands?\n<image>"}, {"from": "gpt", "value": "a box of fruits"}]
00000002686.jpg
2686_evjvqa
[{"from": "human", "value": "what products is the woman intending to buy?\n<image>"}, {"from": "gpt", "value": "fruits"}]
00000002689.jpg
2689_evjvqa
[{"from": "human", "value": "who is selling in the stall that sells needles and thread?\n<image>"}, {"from": "gpt", "value": "a man in black shirt"}]
00000002691.jpg
2691_evjvqa
[{"from": "human", "value": "what means of transportation does the woman in a conical hat go by?\n<image>"}, {"from": "gpt", "value": "a bicycle"}]
00000002697.jpg
2697_evjvqa
[{"from": "human", "value": "how many vehicles are moving on the road?\n<image>"}, {"from": "gpt", "value": "one vehicle is moving on the road"}]
00000002699.jpg
2699_evjvqa
[{"from": "human", "value": "where is this row of street houses built?\n<image>"}, {"from": "gpt", "value": "beside a river"}]
00000000508.jpg
508_evjvqa
[{"from": "human", "value": "how many cars are passing the night market?\n<image>"}, {"from": "gpt", "value": "zero"}]
00000000508.jpg
508_evjvqa
[{"from": "human", "value": "what means of transport are people traveling in the market?\n<image>"}, {"from": "gpt", "value": "motorbike"}]
00000000508.jpg
508_evjvqa
[{"from": "human", "value": "how do visitors to the market travel?\n<image>"}, {"from": "gpt", "value": "on foot and by motorbike"}]
00000000511.jpg
511_evjvqa
[{"from": "human", "value": "what is the man in the green uniform doing?\n<image>"}, {"from": "gpt", "value": "stretch the tape to separate the path"}]
00000000511.jpg
511_evjvqa
[{"from": "human", "value": "what is the person wearing the police uniform stretching the tape for?\n<image>"}, {"from": "gpt", "value": "blocking the path"}]
00000000511.jpg
511_evjvqa
[{"from": "human", "value": "who is stretching the tape blocking the path?\n<image>"}, {"from": "gpt", "value": "the person wearing the police uniform"}]
00000000513.jpg
513_evjvqa
[{"from": "human", "value": "what types of vehicles are parked near the steps?\n<image>"}, {"from": "gpt", "value": "bikes and motobikes"}]
00000000515.jpg
515_evjvqa
[{"from": "human", "value": "what status is this cafe in?\n<image>"}, {"from": "gpt", "value": "closure"}]
00000000515.jpg
515_evjvqa
[{"from": "human", "value": "what paint colors are used for the exterior of this cafe?\n<image>"}, {"from": "gpt", "value": "white and yellow"}]
00000000516.jpg
516_evjvqa
[{"from": "human", "value": "what is the woman in red doing while standing shopping?\n<image>"}, {"from": "gpt", "value": "answering the phone"}]
00000000516.jpg
516_evjvqa
[{"from": "human", "value": "what color dress is the person on the phone wearing?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000516.jpg
516_evjvqa
[{"from": "human", "value": "what are the people here moving in the supermarket for?\n<image>"}, {"from": "gpt", "value": "shopping"}]
00000000517.jpg
517_evjvqa
[{"from": "human", "value": "what are all these young people gathered on the steps for?\n<image>"}, {"from": "gpt", "value": "group photo"}]
00000000517.jpg
517_evjvqa
[{"from": "human", "value": "how many statues are placed at the temple gate?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000518.jpg
518_evjvqa
[{"from": "human", "value": "what kind of fruit is this woman intending to choose?\n<image>"}, {"from": "gpt", "value": "apple"}]
00000000518.jpg
518_evjvqa
[{"from": "human", "value": "how is this woman dressed?\n<image>"}, {"from": "gpt", "value": "black shirt and black pants"}]
00000000518.jpg
518_evjvqa
[{"from": "human", "value": "what kind of fruit is mainly displayed in the counter the woman is standing in?\n<image>"}, {"from": "gpt", "value": "apple"}]
00000000518.jpg
518_evjvqa
[{"from": "human", "value": "what kind of fruit is the woman looking at?\n<image>"}, {"from": "gpt", "value": "apple"}]
00000000519.jpg
519_evjvqa
[{"from": "human", "value": "what kind of items does this store specialize in?\n<image>"}, {"from": "gpt", "value": "shoes and bags"}]
00000000519.jpg
519_evjvqa
[{"from": "human", "value": "what is the girl in the white shirt holding in her hand?\n<image>"}, {"from": "gpt", "value": "blue plastic bag"}]
00000000519.jpg
519_evjvqa
[{"from": "human", "value": "how is the person standing next to the girl in white dressed?\n<image>"}, {"from": "gpt", "value": "red jacket and black pants"}]
00000000519.jpg
519_evjvqa
[{"from": "human", "value": "what is the man in the striped shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing shoes"}]
00000000521.jpg
521_evjvqa
[{"from": "human", "value": "what kind of vehicles are parked in front of the supermarket?\n<image>"}, {"from": "gpt", "value": "there is no vehicle"}]
00000000521.jpg
521_evjvqa
[{"from": "human", "value": "how many people are standing in the sunshine outside the supermarket?\n<image>"}, {"from": "gpt", "value": "zero"}]
00000000521.jpg
521_evjvqa
[{"from": "human", "value": "what colors are the trash cans outside the supermarket?\n<image>"}, {"from": "gpt", "value": "yellow and green"}]
00000000523.jpg
523_evjvqa
[{"from": "human", "value": "what products do the shelves in this row display?\n<image>"}, {"from": "gpt", "value": "fresh vegetables"}]
00000000523.jpg
523_evjvqa
[{"from": "human", "value": "what is the girl in the striped shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing vegetables to buy"}]
00000000523.jpg
523_evjvqa
[{"from": "human", "value": "what is the girl behind the supermarket trolley holding in her hand?\n<image>"}, {"from": "gpt", "value": "pack of fresh vegetables"}]
00000000525.jpg
525_evjvqa
[{"from": "human", "value": "what red thing are both these two adults wearing?\n<image>"}, {"from": "gpt", "value": "helmet"}]
00000000525.jpg
525_evjvqa
[{"from": "human", "value": "who is in between the two people wearing red helmets?\n<image>"}, {"from": "gpt", "value": "a baby"}]
00000000525.jpg
525_evjvqa
[{"from": "human", "value": "who is the woman with glasses holding in her arm?\n<image>"}, {"from": "gpt", "value": "a baby"}]
00000000525.jpg
525_evjvqa
[{"from": "human", "value": "what is the woman wearing glasses and red helmet doing in the motorbike?\n<image>"}, {"from": "gpt", "value": "holding her baby"}]
00000000527.jpg
527_evjvqa
[{"from": "human", "value": "how many motorbikes are parked in front of the store?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000527.jpg
527_evjvqa
[{"from": "human", "value": "who is walking in front of the store?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000531.jpg
531_evjvqa
[{"from": "human", "value": "how many tiers do these shelves have in all?\n<image>"}, {"from": "gpt", "value": "six"}]
00000000531.jpg
531_evjvqa
[{"from": "human", "value": "who is standing to choose biscuits?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000531.jpg
531_evjvqa
[{"from": "human", "value": "what type of items is displayed on these shelves?\n<image>"}, {"from": "gpt", "value": "biscuit"}]
00000000533.jpg
533_evjvqa
[{"from": "human", "value": "what is this man carrying on the back?\n<image>"}, {"from": "gpt", "value": "bags containing ornamental fish"}]
00000000533.jpg
533_evjvqa
[{"from": "human", "value": "what kind of vehicle does this man use to carry fish for sale?\n<image>"}, {"from": "gpt", "value": "motorbike"}]
00000000533.jpg
533_evjvqa
[{"from": "human", "value": "what is on this man's head?\n<image>"}, {"from": "gpt", "value": "helmet"}]
00000000539.jpg
539_evjvqa
[{"from": "human", "value": "what is in front of the door?\n<image>"}, {"from": "gpt", "value": "a scooter and a pair of shoes"}]
00000000539.jpg
539_evjvqa
[{"from": "human", "value": "is anyone standing behind the door?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000540.jpg
540_evjvqa
[{"from": "human", "value": "what is this person wearing on the shoulder?\n<image>"}, {"from": "gpt", "value": "bag"}]
00000000540.jpg
540_evjvqa
[{"from": "human", "value": "what is this person doing?\n<image>"}, {"from": "gpt", "value": "going shopping in the supermarket"}]
00000000540.jpg
540_evjvqa
[{"from": "human", "value": "what is this person pushing along the aisle in the supermarket?\n<image>"}, {"from": "gpt", "value": "supermarket trolley"}]
00000000541.jpg
541_evjvqa
[{"from": "human", "value": "how many windows does this cafe have?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000541.jpg
541_evjvqa
[{"from": "human", "value": "what color shirt is the man sitting alone outside the cafe wearing?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000000542.jpg
542_evjvqa
[{"from": "human", "value": "how many people in black uniforms are standing in front of the store?\n<image>"}, {"from": "gpt", "value": "three"}]
00000000542.jpg
542_evjvqa
[{"from": "human", "value": "how many people in green uniforms are in front of the store?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000542.jpg
542_evjvqa
[{"from": "human", "value": "what kind of vehicles are parked in front of the store?\n<image>"}, {"from": "gpt", "value": "motorbikes and cars"}]
00000000543.jpg
543_evjvqa
[{"from": "human", "value": "how many people are choosing washing powder on these shelves to buy?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000543.jpg
543_evjvqa
[{"from": "human", "value": "what color tile is the floor of this supermarket?\n<image>"}, {"from": "gpt", "value": "white"}]
00000000549.jpg
549_evjvqa
[{"from": "human", "value": "what is the woman in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "pushing the supermarket trolley"}]
00000000549.jpg
549_evjvqa
[{"from": "human", "value": "what kind of items is displayed on the shelf to the right of the woman in blue?\n<image>"}, {"from": "gpt", "value": "cooking oil"}]
00000000549.jpg
549_evjvqa
[{"from": "human", "value": "what kind of items is displayed on the shelf to the left of the woman pushing the supermarket trolley?\n<image>"}, {"from": "gpt", "value": "monosodium glutamate"}]
00000000549.jpg
549_evjvqa
[{"from": "human", "value": "what is the woman in the blue shirt pushing in the supermarket?\n<image>"}, {"from": "gpt", "value": "supermarket trolley"}]
00000000550.jpg
550_evjvqa
[{"from": "human", "value": "how many people wear masks at this fair?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000550.jpg
550_evjvqa
[{"from": "human", "value": "what color dress is the woman holding a red object wearing?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000000550.jpg
550_evjvqa
[{"from": "human", "value": "what is the old man behind the woman in blue doing?\n<image>"}, {"from": "gpt", "value": "viewing the display items"}]
00000000550.jpg
550_evjvqa
[{"from": "human", "value": "where are the glasses of the man standing behind the woman in blue?\n<image>"}, {"from": "gpt", "value": "on his hands"}]
00000000551.jpg
551_evjvqa
[{"from": "human", "value": "how many people are standing by the yellow wall?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000551.jpg
551_evjvqa
[{"from": "human", "value": "what is the young man holding on his hand?\n<image>"}, {"from": "gpt", "value": "the girl's hand"}]
00000000551.jpg
551_evjvqa
[{"from": "human", "value": "how are these two dressed?\n<image>"}, {"from": "gpt", "value": "the man in white shirt, white shoes and gray pants, the woman in white dress and black shoes"}]
00000000551.jpg
551_evjvqa
[{"from": "human", "value": "what is this couple standing with such posture for?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}]
00000000559.jpg
559_evjvqa
[{"from": "human", "value": "how is the woman standing in front of the door dressed?\n<image>"}, {"from": "gpt", "value": "yellow dress and brown hat"}]
00000000559.jpg
559_evjvqa
[{"from": "human", "value": "who is the woman standing with in front of the house?\n<image>"}, {"from": "gpt", "value": "alone"}]
00000000559.jpg
559_evjvqa
[{"from": "human", "value": "what color are the walls of this house painted?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000000562.jpg
562_evjvqa
[{"from": "human", "value": "what vehicle are these two women using on the road?\n<image>"}, {"from": "gpt", "value": "bike"}]
00000000562.jpg
562_evjvqa
[{"from": "human", "value": "what are the two women doing on the road?\n<image>"}, {"from": "gpt", "value": "riding bicycle"}]
00000000562.jpg
562_evjvqa
[{"from": "human", "value": "which side of the road is the motorbike parked?\n<image>"}, {"from": "gpt", "value": "right"}]
00000000571.jpg
571_evjvqa
[{"from": "human", "value": "who is walking on the beach?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000571.jpg
571_evjvqa
[{"from": "human", "value": "where are the leaf huts being placed along?\n<image>"}, {"from": "gpt", "value": "sand"}]
00000000571.jpg
571_evjvqa
[{"from": "human", "value": "what things are placed on the sand near the sea?\n<image>"}, {"from": "gpt", "value": "leaf huts"}]
00000000571.jpg
571_evjvqa
[{"from": "human", "value": "what is placed under the leaf huts?\n<image>"}, {"from": "gpt", "value": "beach beds"}]
00000000573.jpg
573_evjvqa
[{"from": "human", "value": "how many motorbikes are parked in front of the taller floor house?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000573.jpg
573_evjvqa
[{"from": "human", "value": "who is eating at the restaurant next door?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000573.jpg
573_evjvqa
[{"from": "human", "value": "how many floors except the ground floor does the tallest house on this street corner have?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000574.jpg
574_evjvqa
[{"from": "human", "value": "what are these people standing there for?\n<image>"}, {"from": "gpt", "value": "viewing the relics"}]
00000000574.jpg
574_evjvqa
[{"from": "human", "value": "where are these people visiting?\n<image>"}, {"from": "gpt", "value": "in the museum"}]