| image (string, 15 chars) | id (string, 8–12 chars) | conversations (string, 79–268 chars) |
|---|---|---|
| 00000000955.jpg | 955_evjvqa | [{"from": "human", "value": "what is the distance between the two women wearing blue and gray face masks?\n<image>"}, {"from": "gpt", "value": "very close"}] |
| 00000000957.jpg | 957_evjvqa | [{"from": "human", "value": "what is the type of the two machines on the left of the entrance to the supermarket?\n<image>"}, {"from": "gpt", "value": "full body massage chair"}] |
| 00000000961.jpg | 961_evjvqa | [{"from": "human", "value": "what is the young man with tied hair in black shirt holding in his hand?\n<image>"}, {"from": "gpt", "value": "a glass of ice water"}] |
| 00000000964.jpg | 964_evjvqa | [{"from": "human", "value": "what is the girl in gray coat pushing in the supermarket?\n<image>"}, {"from": "gpt", "value": "supermarket trolley"}] |
| 00000000964.jpg | 964_evjvqa | [{"from": "human", "value": "what is the girl walking behind the girl in gray coat doing?\n<image>"}, {"from": "gpt", "value": "going shopping in the supermarket"}] |
| 00000000964.jpg | 964_evjvqa | [{"from": "human", "value": "what is the man in striped shirt to the left of the girl in gray coat doing?\n<image>"}, {"from": "gpt", "value": "going shopping in the supermarket"}] |
| 00000000966.jpg | 966_evjvqa | [{"from": "human", "value": "what is the woman in white coat doing in front of everyone?\n<image>"}, {"from": "gpt", "value": "burning incense for ho chi minh"}] |
| 00000000966.jpg | 966_evjvqa | [{"from": "human", "value": "where is the children lining up?\n<image>"}, {"from": "gpt", "value": "before ho chi minh altar"}] |
| 00000000966.jpg | 966_evjvqa | [{"from": "human", "value": "what is the woman holding micro doing?\n<image>"}, {"from": "gpt", "value": "lecturing to the students"}] |
| 00000000970.jpg | 970_evjvqa | [{"from": "human", "value": "what is the woman in white shirt doing?\n<image>"}, {"from": "gpt", "value": "introducing the product to the man"}] |
| 00000000970.jpg | 970_evjvqa | [{"from": "human", "value": "what is this man looking at?\n<image>"}, {"from": "gpt", "value": "the product introduced by the woman"}] |
| 00000000970.jpg | 970_evjvqa | [{"from": "human", "value": "on which floor of this shelf are toothbrushes displayed?\n<image>"}, {"from": "gpt", "value": "top floor"}] |
| 00000000971.jpg | 971_evjvqa | [{"from": "human", "value": "what do these two women wear?\n<image>"}, {"from": "gpt", "value": "the woman on the left wear yellow shirt and blue jeans, the one on the right wear red coat and black trousers"}] |
| 00000000979.jpg | 979_evjvqa | [{"from": "human", "value": "what is the girl wearing ao dai leaning on?\n<image>"}, {"from": "gpt", "value": "a motorbike"}] |
| 00000000979.jpg | 979_evjvqa | [{"from": "human", "value": "what does the girl standing under the flag wear?\n<image>"}, {"from": "gpt", "value": "patterned ao dai, black pants and white shoes"}] |
| 00000000979.jpg | 979_evjvqa | [{"from": "human", "value": "what is right in front of the house number 30?\n<image>"}, {"from": "gpt", "value": "a girl wearing ao dai and a motorbike"}] |
| 00000000981.jpg | 981_evjvqa | [{"from": "human", "value": "what is the woman in blue coat holding?\n<image>"}, {"from": "gpt", "value": "paddle"}] |
| 00000000981.jpg | 981_evjvqa | [{"from": "human", "value": "what are some people standing on the boats for?\n<image>"}, {"from": "gpt", "value": "rowing"}] |
| 00000000995.jpg | 995_evjvqa | [{"from": "human", "value": "what does the stall on the right sell?\n<image>"}, {"from": "gpt", "value": "kinds of fruits"}] |
| 00000000995.jpg | 995_evjvqa | [{"from": "human", "value": "what do people wear to ride on the rain?\n<image>"}, {"from": "gpt", "value": "raincoat"}] |
| 00000000995.jpg | 995_evjvqa | [{"from": "human", "value": "what item must motorcyclists use to avoid getting wet when passing by the stalls? \n<image>"}, {"from": "gpt", "value": "raincoat"}] |
| 00000000996.jpg | 996_evjvqa | [{"from": "human", "value": "what is the man selling balloons sitting on?\n<image>"}, {"from": "gpt", "value": "a chair"}] |
| 00000000996.jpg | 996_evjvqa | [{"from": "human", "value": "what is the man in white shirt next to the bicycle doing?\n<image>"}, {"from": "gpt", "value": "sitting on the ground and hawking"}] |
| 00000000998.jpg | 998_evjvqa | [{"from": "human", "value": "what means of transport do people going from left to right use?\n<image>"}, {"from": "gpt", "value": "motorbikes"}] |
| 00000000998.jpg | 998_evjvqa | [{"from": "human", "value": "how many floors does the tallest building have ?\n<image>"}, {"from": "gpt", "value": "seven"}] |
| 00000000998.jpg | 998_evjvqa | [{"from": "human", "value": "what floors does the tallest building on this street have?\n<image>"}, {"from": "gpt", "value": "one ground floor and six upper floors"}] |
| 00000000999.jpg | 999_evjvqa | [{"from": "human", "value": "what are these people lining up for?\n<image>"}, {"from": "gpt", "value": "group photographing"}] |
| 00000000999.jpg | 999_evjvqa | [{"from": "human", "value": "how many people are wearing blue shirts?\n<image>"}, {"from": "gpt", "value": "one person"}] |
| 00000000999.jpg | 999_evjvqa | [{"from": "human", "value": "how many other people are wearing the shirts that have the same color with the girl in blue shirt?\n<image>"}, {"from": "gpt", "value": "nobody"}] |
| 00000000301.jpg | 301_evjvqa | [{"from": "human", "value": "what are the four girls standing in the middle of the road doing?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}] |
| 00000000301.jpg | 301_evjvqa | [{"from": "human", "value": "how many girls do not wear a white shirt?\n<image>"}, {"from": "gpt", "value": "one"}] |
| 00000000301.jpg | 301_evjvqa | [{"from": "human", "value": "how many people do not wear glasses?\n<image>"}, {"from": "gpt", "value": "zero"}] |
| 00000000301.jpg | 301_evjvqa | [{"from": "human", "value": "where are the girls posing for a photo?\n<image>"}, {"from": "gpt", "value": "in the middle of the road"}] |
| 00000000301.jpg | 301_evjvqa | [{"from": "human", "value": "how many people wear white shoes in all?\n<image>"}, {"from": "gpt", "value": "four"}] |
| 00000000307.jpg | 307_evjvqa | [{"from": "human", "value": "what is flying in the sky?\n<image>"}, {"from": "gpt", "value": "bird"}] |
| 00000000307.jpg | 307_evjvqa | [{"from": "human", "value": "what color is the diamond plaza building glazed with?\n<image>"}, {"from": "gpt", "value": "turquoise"}] |
| 00000000310.jpg | 310_evjvqa | [{"from": "human", "value": "who is the young boy looking at?\n<image>"}, {"from": "gpt", "value": "the girl sitting in the back"}] |
| 00000000310.jpg | 310_evjvqa | [{"from": "human", "value": "what vehicle are the couple sitting on?\n<image>"}, {"from": "gpt", "value": "bicycle"}] |
| 00000000310.jpg | 310_evjvqa | [{"from": "human", "value": "what products does the store that two people stopping their bicycles in front of sell?\n<image>"}, {"from": "gpt", "value": "clothes"}] |
| 00000000310.jpg | 310_evjvqa | [{"from": "human", "value": "what color is the flower balcony in front of the shop?\n<image>"}, {"from": "gpt", "value": "pink"}] |
| 00000000311.jpg | 311_evjvqa | [{"from": "human", "value": "how many people are sitting on the chair?\n<image>"}, {"from": "gpt", "value": "one"}] |
| 00000000311.jpg | 311_evjvqa | [{"from": "human", "value": "what is the quantity of chairs in front of the store?\n<image>"}, {"from": "gpt", "value": "three"}] |
| 00000000311.jpg | 311_evjvqa | [{"from": "human", "value": "how many men are standing in front of the restaurant?\n<image>"}, {"from": "gpt", "value": "zero"}] |
| 00000000311.jpg | 311_evjvqa | [{"from": "human", "value": "what is the woman with the head scarf holding in her hand?\n<image>"}, {"from": "gpt", "value": "camera"}] |
| 00000000314.jpg | 314_evjvqa | [{"from": "human", "value": "what color is the shirt the man wearing?\n<image>"}, {"from": "gpt", "value": "white"}] |
| 00000000314.jpg | 314_evjvqa | [{"from": "human", "value": "what product display counter is this man standing at?\n<image>"}, {"from": "gpt", "value": "meat"}] |
| 00000000314.jpg | 314_evjvqa | [{"from": "human", "value": "what is this man holding on his hands?\n<image>"}, {"from": "gpt", "value": "mobile phone and meat tray"}] |
| 00000000314.jpg | 314_evjvqa | [{"from": "human", "value": "what is the purpose of the man holding the phone in front of the meat tray?\n<image>"}, {"from": "gpt", "value": "take a photo of the product"}] |
| 00000000316.jpg | 316_evjvqa | [{"from": "human", "value": "what are two colors children's clothes?\n<image>"}, {"from": "gpt", "value": "white and yellow"}] |
| 00000000316.jpg | 316_evjvqa | [{"from": "human", "value": "how does the woman carrying a bag dress?\n<image>"}, {"from": "gpt", "value": "black pants and red shirt"}] |
| 00000000316.jpg | 316_evjvqa | [{"from": "human", "value": "what are the children standing behind each other for?\n<image>"}, {"from": "gpt", "value": "line up"}] |
| 00000000316.jpg | 316_evjvqa | [{"from": "human", "value": "how many women wear red shirt?\n<image>"}, {"from": "gpt", "value": "three"}] |
| 00000000316.jpg | 316_evjvqa | [{"from": "human", "value": "how many cars are parked on the far?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000000317.jpg | 317_evjvqa | [{"from": "human", "value": "what does the sky look like there?\n<image>"}, {"from": "gpt", "value": "blue and cloudy sky"}] |
| 00000000317.jpg | 317_evjvqa | [{"from": "human", "value": "what is the girl walking along?\n<image>"}, {"from": "gpt", "value": "fence wall"}] |
| 00000000317.jpg | 317_evjvqa | [{"from": "human", "value": "which side is the gate currently located compared with the girl?\n<image>"}, {"from": "gpt", "value": "behind"}] |
| 00000000317.jpg | 317_evjvqa | [{"from": "human", "value": "what does the girl bring with her besides clothes?\n<image>"}, {"from": "gpt", "value": "sunglasses, bag and water cup"}] |
| 00000000317.jpg | 317_evjvqa | [{"from": "human", "value": "what material is the wall erected mainly?\n<image>"}, {"from": "gpt", "value": "stone"}] |
| 00000000320.jpg | 320_evjvqa | [{"from": "human", "value": "what color are the walls of this house painted?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
| 00000000320.jpg | 320_evjvqa | [{"from": "human", "value": "how many doors does the fence of the building have?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000000320.jpg | 320_evjvqa | [{"from": "human", "value": "how many people are walking outside the fence?\n<image>"}, {"from": "gpt", "value": "zero"}] |
| 00000000320.jpg | 320_evjvqa | [{"from": "human", "value": "what types of floors does this building have?\n<image>"}, {"from": "gpt", "value": "ground floor, first floor and attic floor"}] |
| 00000000321.jpg | 321_evjvqa | [{"from": "human", "value": "what color is the goat logo of the restaurant?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
| 00000000325.jpg | 325_evjvqa | [{"from": "human", "value": "what does the sky look like there?\n<image>"}, {"from": "gpt", "value": "blue and cloudy sky"}] |
| 00000000325.jpg | 325_evjvqa | [{"from": "human", "value": "what is the weather like at sea there?\n<image>"}, {"from": "gpt", "value": "sunny and unclouded"}] |
| 00000000326.jpg | 326_evjvqa | [{"from": "human", "value": "where is the altar being placed?\n<image>"}, {"from": "gpt", "value": "on the brick yard"}] |
| 00000000326.jpg | 326_evjvqa | [{"from": "human", "value": "how many rows of trees are next to the brick yard?\n<image>"}, {"from": "gpt", "value": "two"}] |
| 00000000326.jpg | 326_evjvqa | [{"from": "human", "value": "what color is the table on which there are two water bottles painted?\n<image>"}, {"from": "gpt", "value": "red"}] |
| 00000000327.jpg | 327_evjvqa | [{"from": "human", "value": "what posture are all the people present there in?\n<image>"}, {"from": "gpt", "value": "sitting"}] |
| 00000000327.jpg | 327_evjvqa | [{"from": "human", "value": "which arm of the woman in black shirt is the stack of plastic chairs located near?\n<image>"}, {"from": "gpt", "value": "left arm"}] |
| 00000000327.jpg | 327_evjvqa | [{"from": "human", "value": "what kind of items do these booths mainly sell?\n<image>"}, {"from": "gpt", "value": "footwear"}] |
| 00000000327.jpg | 327_evjvqa | [{"from": "human", "value": "how many customers are standing watching the items?\n<image>"}, {"from": "gpt", "value": "zero"}] |
| 00000000327.jpg | 327_evjvqa | [{"from": "human", "value": "in which direction are the stripes on the shirts of people wearing striped shirts distributed?\n<image>"}, {"from": "gpt", "value": "horizontal"}] |
| 00000000329.jpg | 329_evjvqa | [{"from": "human", "value": "how many trucks are crossing the intersection?\n<image>"}, {"from": "gpt", "value": "zero"}] |
| 00000000329.jpg | 329_evjvqa | [{"from": "human", "value": "how many people are sitting on the motorbike?\n<image>"}, {"from": "gpt", "value": "zero"}] |
| 00000000329.jpg | 329_evjvqa | [{"from": "human", "value": "what is the traffic like at this intersection?\n<image>"}, {"from": "gpt", "value": "deserted"}] |
| 00000000330.jpg | 330_evjvqa | [{"from": "human", "value": "how is the girl standing next to the milk counter dressed?\n<image>"}, {"from": "gpt", "value": "white shirt, glasses, black shoes and dark blue jeans"}] |
| 00000000330.jpg | 330_evjvqa | [{"from": "human", "value": "what kind of products are on the shelf next to the girl?\n<image>"}, {"from": "gpt", "value": "yogurt"}] |
| 00000000330.jpg | 330_evjvqa | [{"from": "human", "value": "what is the girl wearing on her face?\n<image>"}, {"from": "gpt", "value": "glasses"}] |
| 00000000332.jpg | 332_evjvqa | [{"from": "human", "value": "what status are all the shops here?\n<image>"}, {"from": "gpt", "value": "closure"}] |
| 00000000332.jpg | 332_evjvqa | [{"from": "human", "value": "what animal is standing near the road side?\n<image>"}, {"from": "gpt", "value": "dog"}] |
| 00000000332.jpg | 332_evjvqa | [{"from": "human", "value": "what animal is standing in front of closed stores?\n<image>"}, {"from": "gpt", "value": "dog"}] |
| 00000000332.jpg | 332_evjvqa | [{"from": "human", "value": "what pattern do the tarps on the eaves of the shops have?\n<image>"}, {"from": "gpt", "value": "white and blue stripes"}] |
| 00000000332.jpg | 332_evjvqa | [{"from": "human", "value": "who is walking on the sidewalk?\n<image>"}, {"from": "gpt", "value": "no one"}] |
| 00000000334.jpg | 334_evjvqa | [{"from": "human", "value": "what does the walking man wear?\n<image>"}, {"from": "gpt", "value": "yellow shorts and green t-shirt"}] |
| 00000000334.jpg | 334_evjvqa | [{"from": "human", "value": "what kind of store is phu thinh?\n<image>"}, {"from": "gpt", "value": "grocery"}] |
| 00000000335.jpg | 335_evjvqa | [{"from": "human", "value": "what are these people doing next to the vegetable counter?\n<image>"}, {"from": "gpt", "value": "choosing vegetables"}] |
| 00000000335.jpg | 335_evjvqa | [{"from": "human", "value": "what color is the mask of the person between two people wearing blue masks?\n<image>"}, {"from": "gpt", "value": "red"}] |
| 00000000335.jpg | 335_evjvqa | [{"from": "human", "value": "what the counter right next to the bell pepper counter is?\n<image>"}, {"from": "gpt", "value": "carrot"}] |
| 00000000335.jpg | 335_evjvqa | [{"from": "human", "value": "what kind of agricultural products is the woman in the blue shirt putting in the bag?\n<image>"}, {"from": "gpt", "value": "carrot"}] |
| 00000000335.jpg | 335_evjvqa | [{"from": "human", "value": "which side does the woman wearing the purple jacket stand compared to the woman picking carrots?\n<image>"}, {"from": "gpt", "value": "right"}] |
| 00000000336.jpg | 336_evjvqa | [{"from": "human", "value": "what is the man in the blue shirt doing in the park?\n<image>"}, {"from": "gpt", "value": "walking"}] |
| 00000000336.jpg | 336_evjvqa | [{"from": "human", "value": "who is the woman in the purple shirt sitting with?\n<image>"}, {"from": "gpt", "value": "no one"}] |
| 00000000336.jpg | 336_evjvqa | [{"from": "human", "value": "where is the girl in the pink bag going to?\n<image>"}, {"from": "gpt", "value": "where two people are sitting"}] |
| 00000000336.jpg | 336_evjvqa | [{"from": "human", "value": "what is the girl in the pink bag intending to?\n<image>"}, {"from": "gpt", "value": "go to where two people are sitting on the bench"}] |
| 00000000338.jpg | 338_evjvqa | [{"from": "human", "value": "what do they display on this counter?\n<image>"}, {"from": "gpt", "value": "tet gift baskets"}] |
| 00000000338.jpg | 338_evjvqa | [{"from": "human", "value": "who is watching these gift baskets?\n<image>"}, {"from": "gpt", "value": "no one"}] |
| 00000000340.jpg | 340_evjvqa | [{"from": "human", "value": "how many motorbikes are parked in front of the shops?\n<image>"}, {"from": "gpt", "value": "three"}] |
| 00000000340.jpg | 340_evjvqa | [{"from": "human", "value": "how many people are going into these shops?\n<image>"}, {"from": "gpt", "value": "zero"}] |
| 00000000340.jpg | 340_evjvqa | [{"from": "human", "value": "what status are these shops in?\n<image>"}, {"from": "gpt", "value": "closure"}] |
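
Each `conversations` cell follows a LLaVA-style multi-turn schema: a list of turns where `from` is either `human` or `gpt`, `value` holds the text, and the `<image>` token marks where the picture named in the `image` column is injected. The sketch below is an illustration rather than official dataset tooling; it assumes a row is available as a Python dict (for example after loading the underlying JSON file) and that `conversations` arrives as a JSON-encoded string, as shown in the preview.

```python
import json

# One row, copied from the first line of the preview above.
row = {
    "image": "00000000955.jpg",
    "id": "955_evjvqa",
    "conversations": (
        '[{"from": "human", "value": "what is the distance between the two women '
        'wearing blue and gray face masks?\\n<image>"}, '
        '{"from": "gpt", "value": "very close"}]'
    ),
}

# The conversations cell may arrive as a JSON string (as shown in the preview)
# or already decoded into a list, depending on how the file is loaded.
turns = row["conversations"]
if isinstance(turns, str):
    turns = json.loads(turns)

# Turns alternate human question / gpt answer; pair them up.
for question, answer in zip(turns[0::2], turns[1::2]):
    assert question["from"] == "human" and answer["from"] == "gpt"
    prompt = question["value"].replace("<image>", "").strip()
    print(f"Q ({row['image']}): {prompt}")
    print(f"A: {answer['value']}")
```

If the split is instead loaded with a dataset library that already decodes the field into a list of dicts, the same pairing logic applies and the `json.loads` step is simply skipped.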