image (string, 15 chars) | id (string, 8–12 chars) | conversations (string, 79–268 chars)
---|---|---
00000006171.jpg | 6171_evjvqa | [{"from": "human", "value": "how many girls are walking together on the street?\n<image>"}, {"from": "gpt", "value": "there are four girls walking together on the street."}]
00000006171.jpg | 6171_evjvqa | [{"from": "human", "value": "what color is the rightmost girl's top?\n<image>"}, {"from": "gpt", "value": "the rightmost girl is wearing a red top. "}]
00000006172.jpg | 6172_evjvqa | [{"from": "human", "value": "how many men are driving on the road?\n<image>"}, {"from": "gpt", "value": "there is one man driving on the road."}]
00000006173.jpg | 6173_evjvqa | [{"from": "human", "value": "what color are their uniform?\n<image>"}, {"from": "gpt", "value": "their uniform is white. "}]
00000006173.jpg | 6173_evjvqa | [{"from": "human", "value": "where is the man in blue jacket?\n<image>"}, {"from": "gpt", "value": "he is standing at the rightmost position. "}]
00000006173.jpg | 6173_evjvqa | [{"from": "human", "value": "how many men in black jacket are there?\n<image>"}, {"from": "gpt", "value": "there is one man in black jacket."}]
00000006174.jpg | 6174_evjvqa | [{"from": "human", "value": "where are they?z\n<image>"}, {"from": "gpt", "value": "they are in a shopping area."}]
00000006174.jpg | 6174_evjvqa | [{"from": "human", "value": "what color is the shirt worn by the man sitting on the red stool?\n<image>"}, {"from": "gpt", "value": "it is navy blue. "}]
00000006174.jpg | 6174_evjvqa | [{"from": "human", "value": "where is the man in navy blue shirt sitting?\n<image>"}, {"from": "gpt", "value": "he is sitting on the red stool."}]
00000006179.jpg | 6179_evjvqa | [{"from": "human", "value": "what are the elder woman and the young girl doing?\n<image>"}, {"from": "gpt", "value": "they are picking vegetables."}]
00000006179.jpg | 6179_evjvqa | [{"from": "human", "value": "what color is the young girl' shirt?\n<image>"}, {"from": "gpt", "value": "her shirt is white. "}]
00000006179.jpg | 6179_evjvqa | [{"from": "human", "value": "what color is the elder woman's jacket?\n<image>"}, {"from": "gpt", "value": "the elder woman is wearing a green jacket."}]
00000006182.jpg | 6182_evjvqa | [{"from": "human", "value": "what color are the children's scarves?\n<image>"}, {"from": "gpt", "value": "they are red."}]
00000006182.jpg | 6182_evjvqa | [{"from": "human", "value": "what color is the ao dai worn by the teacher in the middle?\n<image>"}, {"from": "gpt", "value": "it is red. "}]
00000006182.jpg | 6182_evjvqa | [{"from": "human", "value": "what color are the students' school uniforms?\n<image>"}, {"from": "gpt", "value": "they are white."}]
00000006188.jpg | 6188_evjvqa | [{"from": "human", "value": "where are they?\n<image>"}, {"from": "gpt", "value": "they are at the book street."}]
00000006197.jpg | 6197_evjvqa | [{"from": "human", "value": "what color is the tie worn by the man standing front and center?\n<image>"}, {"from": "gpt", "value": "it is blue."}]
00000006197.jpg | 6197_evjvqa | [{"from": "human", "value": "what color are their uniforms?\n<image>"}, {"from": "gpt", "value": "they are black."}]
00000006197.jpg | 6197_evjvqa | [{"from": "human", "value": "what color are the women's skirts?\n<image>"}, {"from": "gpt", "value": "they are black. "}]
00000002502.jpg | 2502_evjvqa | [{"from": "human", "value": "what are people wearing militia uniforms doing?\n<image>"}, {"from": "gpt", "value": "checking vehicle registration"}]
00000002507.jpg | 2507_evjvqa | [{"from": "human", "value": "what pose is the man in the white shirt in?\n<image>"}, {"from": "gpt", "value": "standing pose"}]
00000002507.jpg | 2507_evjvqa | [{"from": "human", "value": "what color is the shirt that the woman sitting at the cafe wears?\n<image>"}, {"from": "gpt", "value": "the woman sitting at the cafe wears yellow shirt"}]
00000002507.jpg | 2507_evjvqa | [{"from": "human", "value": "where is the trash can of this cafe located?\n<image>"}, {"from": "gpt", "value": "close to the left partition in front of the entrance to the cafe"}]
00000002509.jpg | 2509_evjvqa | [{"from": "human", "value": "what is the woman in conical hat doing?\n<image>"}, {"from": "gpt", "value": "the woman in conical hat is carrying street goods to sell"}]
00000002509.jpg | 2509_evjvqa | [{"from": "human", "value": "what is the woman carrying street goods wearing on her head?\n<image>"}, {"from": "gpt", "value": "the woman carrying street goods is wearing a conical hat"}]
00000002509.jpg | 2509_evjvqa | [{"from": "human", "value": "who is wearing a conical hat on this street?\n<image>"}, {"from": "gpt", "value": "the woman carrying street goods"}]
00000002512.jpg | 2512_evjvqa | [{"from": "human", "value": "how many pink coaches are there?\n<image>"}, {"from": "gpt", "value": "there are four pink coaches in total"}]
00000002512.jpg | 2512_evjvqa | [{"from": "human", "value": "what does the man walking dress?\n<image>"}, {"from": "gpt", "value": "he wears pink shirt and black trousers"}]
00000002514.jpg | 2514_evjvqa | [{"from": "human", "value": "what color is the coat that this girl wears?\n<image>"}, {"from": "gpt", "value": "the coat that this girl wears is pink and violet"}]
00000002514.jpg | 2514_evjvqa | [{"from": "human", "value": "what is this girl going to do at this supermarket?\n<image>"}, {"from": "gpt", "value": "this girl is going to take a package of meat down from the shelf"}]
00000002514.jpg | 2514_evjvqa | [{"from": "human", "value": "what are on this girl's face?\n<image>"}, {"from": "gpt", "value": "glasses and a face mask"}]
00000002515.jpg | 2515_evjvqa | [{"from": "human", "value": "what color is the hair band that this girl uses?\n<image>"}, {"from": "gpt", "value": "the hair band that this girl uses is white"}]
00000002515.jpg | 2515_evjvqa | [{"from": "human", "value": "what does this girl hold in hand?\n<image>"}, {"from": "gpt", "value": "a camera"}]
00000002515.jpg | 2515_evjvqa | [{"from": "human", "value": "where can the camera be seen?\n<image>"}, {"from": "gpt", "value": "the camera is held by the girl in front of her belly"}]
00000002517.jpg | 2517_evjvqa | [{"from": "human", "value": "what does this woman lean on?\n<image>"}, {"from": "gpt", "value": "this woman leans on a supermarket cart"}]
00000002517.jpg | 2517_evjvqa | [{"from": "human", "value": "who stands next to the supermarket cart?\n<image>"}, {"from": "gpt", "value": "next to the supermarket cart is a woman in black shirt"}]
00000002517.jpg | 2517_evjvqa | [{"from": "human", "value": "how many floors does the rice cooker display shelf have?\n<image>"}, {"from": "gpt", "value": "four"}]
00000002518.jpg | 2518_evjvqa | [{"from": "human", "value": "whose hand is this young man holding?\n<image>"}, {"from": "gpt", "value": "this young man is holding the hand of the girl in white"}]
00000002518.jpg | 2518_evjvqa | [{"from": "human", "value": "what does the left hand of this man hold?\n<image>"}, {"from": "gpt", "value": "the man holds an umbrella on his left hand"}]
00000002518.jpg | 2518_evjvqa | [{"from": "human", "value": "what does this young man hold an umbrella on his left hand for?\n<image>"}, {"from": "gpt", "value": "shading his girlfriend"}]
00000002519.jpg | 2519_evjvqa | [{"from": "human", "value": "what are the traffic policemen doing?\n<image>"}, {"from": "gpt", "value": "control people passing on the road"}]
00000002519.jpg | 2519_evjvqa | [{"from": "human", "value": "how many traffic policeman wear helmets in total?\n<image>"}, {"from": "gpt", "value": "there is one traffic policeman wearing helmet"}]
00000002519.jpg | 2519_evjvqa | [{"from": "human", "value": "where can the handheld transceiver be seen?\n<image>"}, {"from": "gpt", "value": "on the belt of the rightmost policeman"}]
00000002526.jpg | 2526_evjvqa | [{"from": "human", "value": "what color is the building on the corner of the street painted?\n<image>"}, {"from": "gpt", "value": "the building on the corner of the street is painted yellow"}]
00000002530.jpg | 2530_evjvqa | [{"from": "human", "value": "what shirts does the young man use to go out with his girlfriend?\n<image>"}, {"from": "gpt", "value": "a white t-shirt and a yellow shirt"}]
00000002530.jpg | 2530_evjvqa | [{"from": "human", "value": "what color is the dress that the girl is wearing?\n<image>"}, {"from": "gpt", "value": "the dress that the girl is wearing is yellow"}]
00000002530.jpg | 2530_evjvqa | [{"from": "human", "value": "what is this couple doing together?\n<image>"}, {"from": "gpt", "value": "they are holding hands"}]
00000002533.jpg | 2533_evjvqa | [{"from": "human", "value": "where is the baby sitting?\n<image>"}, {"from": "gpt", "value": "the baby is sitting on the supermarket cart"}]
00000002533.jpg | 2533_evjvqa | [{"from": "human", "value": "what item is used to put the baby sitting in it?\n<image>"}, {"from": "gpt", "value": "the cart"}]
00000002533.jpg | 2533_evjvqa | [{"from": "human", "value": "what are on both sides of the aisle?\n<image>"}, {"from": "gpt", "value": "both sides of the aisle are shelves"}]
00000002535.jpg | 2535_evjvqa | [{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "on the sand"}]
00000002535.jpg | 2535_evjvqa | [{"from": "human", "value": "what color are the umbrellas placed on the beach?\n<image>"}, {"from": "gpt", "value": "the umbrellas placed on the beach are blue"}]
00000002535.jpg | 2535_evjvqa | [{"from": "human", "value": "what is the color of the tables present on the sand?\n<image>"}, {"from": "gpt", "value": "these tables are blue"}]
00000002536.jpg | 2536_evjvqa | [{"from": "human", "value": "what does the woman standing next to the bookshelf wear?\n<image>"}, {"from": "gpt", "value": "she wears an ao dai"}]
00000002536.jpg | 2536_evjvqa | [{"from": "human", "value": "how does the man opening the book look like with a tie?\n<image>"}, {"from": "gpt", "value": "he does not wear a tie"}]
00000002536.jpg | 2536_evjvqa | [{"from": "human", "value": "what is the expression of the man in blue shirt and tie?\n<image>"}, {"from": "gpt", "value": "he is smiling"}]
00000002538.jpg | 2538_evjvqa | [{"from": "human", "value": "what color is the shirt of the girl wearing a handbag?\n<image>"}, {"from": "gpt", "value": "she wears a red shirt"}]
00000002538.jpg | 2538_evjvqa | [{"from": "human", "value": "which scooter is hung two helmets on the handlebars?\n<image>"}, {"from": "gpt", "value": "the scooter to the left of the girl"}]
00000002541.jpg | 2541_evjvqa | [{"from": "human", "value": "what is the old lady selling?\n<image>"}, {"from": "gpt", "value": "tet ornamental plants"}]
00000002541.jpg | 2541_evjvqa | [{"from": "human", "value": "what is the old woman sitting by the aisle for?\n<image>"}, {"from": "gpt", "value": "she is selling tet ornamental plants"}]
00000002541.jpg | 2541_evjvqa | [{"from": "human", "value": "who is the little baby led by?\n<image>"}, {"from": "gpt", "value": "the little baby is led by the woman in the white shirt and the man in the blue shirt"}]
00000002543.jpg | 2543_evjvqa | [{"from": "human", "value": "what is the woman in the white shirt present at this clothing store for?\n<image>"}, {"from": "gpt", "value": "selecting clothes"}]
00000002543.jpg | 2543_evjvqa | [{"from": "human", "value": "what is the purpose of the woman wearing a red handbag coming to the clothing store?\n<image>"}, {"from": "gpt", "value": "she comes to buy clothes"}]
00000002546.jpg | 2546_evjvqa | [{"from": "human", "value": "what are in the hands of the person wearing a black helmet?\n<image>"}, {"from": "gpt", "value": "there are two peach branches in the hands of the person wearing a black helmet"}]
00000002546.jpg | 2546_evjvqa | [{"from": "human", "value": "what hat does the person holding two peach branches wear?\n<image>"}, {"from": "gpt", "value": "the person holding two peach branches wears a black helmet"}]
00000002546.jpg | 2546_evjvqa | [{"from": "human", "value": "who stands next to the person holding two peach branches?\n<image>"}, {"from": "gpt", "value": "next to the person holding two peach branches is a woman in a pink fur coat"}]
00000002550.jpg | 2550_evjvqa | [{"from": "human", "value": "who is not wearing a mask in this room?\n<image>"}, {"from": "gpt", "value": "the old lady wearing glasses"}]
00000002550.jpg | 2550_evjvqa | [{"from": "human", "value": "who is the person wearing a blue face mask?\n<image>"}, {"from": "gpt", "value": "the young man in blue shirt"}]
00000002550.jpg | 2550_evjvqa | [{"from": "human", "value": "what does the man talking to the old lady wear?\n<image>"}, {"from": "gpt", "value": "he wears a white long-sleeved shirt and black trousers"}]
00000002553.jpg | 2553_evjvqa | [{"from": "human", "value": "what is the girl holding in hand?\n<image>"}, {"from": "gpt", "value": "she is holding a bouquet"}]
00000002553.jpg | 2553_evjvqa | [{"from": "human", "value": "what is posted on the bulletin board?\n<image>"}, {"from": "gpt", "value": "the newspapers"}]
00000002553.jpg | 2553_evjvqa | [{"from": "human", "value": "who does the young man in white stand next to?\n<image>"}, {"from": "gpt", "value": "he is standing next to the girl holding the bouquet"}]
00000002554.jpg | 2554_evjvqa | [{"from": "human", "value": "what is the man in red shirt selling to the customer in striped shirt?\n<image>"}, {"from": "gpt", "value": "a watermelon"}]
00000002554.jpg | 2554_evjvqa | [{"from": "human", "value": "what kind of fruit is the man in the black and white striped shirt buying at this store?\n<image>"}, {"from": "gpt", "value": "he buys watermelons"}]
00000002554.jpg | 2554_evjvqa | [{"from": "human", "value": "how many watermelons does the man in the striped shirt hold in his hand?\n<image>"}, {"from": "gpt", "value": "he holds two watermelons"}]
00000002556.jpg | 2556_evjvqa | [{"from": "human", "value": "what background color does the sign in front of this house use?\n<image>"}, {"from": "gpt", "value": "the sign has a background color of white and green"}]
00000002556.jpg | 2556_evjvqa | [{"from": "human", "value": "where can the green trash can be seen?\n<image>"}, {"from": "gpt", "value": "the green trash can is to the left of the doors"}]
00000002563.jpg | 2563_evjvqa | [{"from": "human", "value": "what are the children sitting side by side in the museum for?\n<image>"}, {"from": "gpt", "value": "for taking a group photo"}]
00000002563.jpg | 2563_evjvqa | [{"from": "human", "value": "where are the children sitting?\n<image>"}, {"from": "gpt", "value": "the children are sitting in front of a tank in the museum"}]
00000002563.jpg | 2563_evjvqa | [{"from": "human", "value": "where is the tank located?\n<image>"}, {"from": "gpt", "value": "on the display stand in the middle of the museum room"}]
00000002570.jpg | 2570_evjvqa | [{"from": "human", "value": "how many types of canvas with different colors are used to line on the lawn?\n<image>"}, {"from": "gpt", "value": "there are two types of canvas with blue and orange colors"}]
00000002570.jpg | 2570_evjvqa | [{"from": "human", "value": "how many people are standing in total?\n<image>"}, {"from": "gpt", "value": "three people are standing in total"}]
00000002570.jpg | 2570_evjvqa | [{"from": "human", "value": "what color are the shirts that most of the young people sitting on the orange tarp wear?\n<image>"}, {"from": "gpt", "value": "blue shirts"}]
00000002577.jpg | 2577_evjvqa | [{"from": "human", "value": "how many women are standing?\n<image>"}, {"from": "gpt", "value": "one woman is standing"}]
00000002577.jpg | 2577_evjvqa | [{"from": "human", "value": "what shirt do all the men wear?\n<image>"}, {"from": "gpt", "value": "all the men wear white shirts"}]
00000002577.jpg | 2577_evjvqa | [{"from": "human", "value": "what color is the tablecloth used on the stage?\n<image>"}, {"from": "gpt", "value": "the tablecloth is white"}]
00000002579.jpg | 2579_evjvqa | [{"from": "human", "value": "what does the girl with a piece of paper wear on her back?\n<image>"}, {"from": "gpt", "value": "she wears a backpack"}]
00000002579.jpg | 2579_evjvqa | [{"from": "human", "value": "how does the girl with a paper in her hand dress?\n<image>"}, {"from": "gpt", "value": "she wears a blue shirt and blue jeans"}]
00000002579.jpg | 2579_evjvqa | [{"from": "human", "value": "what color is this supermarket shopping basket?\n<image>"}, {"from": "gpt", "value": "the shopping basket is red"}]
00000002580.jpg | 2580_evjvqa | [{"from": "human", "value": "what is on this woman's knee?\n<image>"}, {"from": "gpt", "value": "there is a helmet on the girl's knee"}]
00000002580.jpg | 2580_evjvqa | [{"from": "human", "value": "what does this girl wear on her back?\n<image>"}, {"from": "gpt", "value": "she wears a red backpack"}]
00000002580.jpg | 2580_evjvqa | [{"from": "human", "value": "where is this girl sitting?\n<image>"}, {"from": "gpt", "value": "she is sitting on the cliff"}]
00000002581.jpg | 2581_evjvqa | [{"from": "human", "value": "where the car is parked?\n<image>"}, {"from": "gpt", "value": "the car is parked right on the left of the electric pole"}]
00000002581.jpg | 2581_evjvqa | [{"from": "human", "value": "where is the motorcycle parked?\n<image>"}, {"from": "gpt", "value": "the motorcycle is parked in front of the car"}]
00000002581.jpg | 2581_evjvqa | [{"from": "human", "value": "where can the electric pole be seen in this alley?\n<image>"}, {"from": "gpt", "value": "the electric pole is on the right side of the car"}]
00000002582.jpg | 2582_evjvqa | [{"from": "human", "value": "what does this man hold in hands?\n<image>"}, {"from": "gpt", "value": "he holds an oar"}]
00000002582.jpg | 2582_evjvqa | [{"from": "human", "value": "what does this man hold an oar for?\n<image>"}, {"from": "gpt", "value": "he holds an oar for rowing"}]
00000002582.jpg | 2582_evjvqa | [{"from": "human", "value": "what means of transportation is the man in blue shirt controlling?\n<image>"}, {"from": "gpt", "value": "he is rowing a boat"}]
00000002590.jpg | 2590_evjvqa | [{"from": "human", "value": "what does the building on the right have on display on the floors?\n<image>"}, {"from": "gpt", "value": "motorbikes"}]
00000002591.jpg | 2591_evjvqa | [{"from": "human", "value": "what are these two people doing to each other?\n<image>"}, {"from": "gpt", "value": "one person gives a food bag to the other person to receive"}]
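
Each `conversations` cell is a JSON string in the LLaVA-style turn format: a list of alternating `human` and `gpt` messages, where the `<image>` token marks where the referenced image is injected into the prompt. A minimal sketch of how one might parse a row is below; the `row` dict is copied verbatim from the first table entry, and the pairing logic assumes turns strictly alternate human/gpt, which holds for every row shown here.

```python
import json

# One row from the table above, copied verbatim. "conversations" is a JSON
# string, not a parsed list, so it must be decoded before use.
row = {
    "image": "00000006171.jpg",
    "id": "6171_evjvqa",
    "conversations": '[{"from": "human", "value": "how many girls are walking together on the street?\\n<image>"}, {"from": "gpt", "value": "there are four girls walking together on the street."}]',
}

turns = json.loads(row["conversations"])

# Pair each human question with the gpt answer that follows it, and strip the
# <image> placeholder from the question text.
for question, answer in zip(turns[::2], turns[1::2]):
    q = question["value"].replace("<image>", "").strip()
    a = answer["value"].strip()
    print(f"{row['image']} | Q: {q} | A: {a}")
```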