image (string, 15 chars) | id (string, 8–12 chars) | conversations (string, 79–268 chars) |
---|---|---|
00000000298.jpg | 298_evjvqa | [{"from": "human", "value": "what structures do people build on the water?\n<image>"}, {"from": "gpt", "value": "the houses"}] |
00000000299.jpg | 299_evjvqa | [{"from": "human", "value": "how many pillars are there in front of the tower?\n<image>"}, {"from": "gpt", "value": "there are two pillars in front of the tower"}] |
00000010500.jpg | 10500_evjvqa | [{"from": "human", "value": "where is the green scale?\n<image>"}, {"from": "gpt", "value": "in front of the woman hunching over her back"}] |
00000010501.jpg | 10501_evjvqa | [{"from": "human", "value": "what is hanging on the wall in the room?\n<image>"}, {"from": "gpt", "value": "the drawings"}] |
00000010503.jpg | 10503_evjvqa | [{"from": "human", "value": "what flags do the households here hang on the side of the road?\n<image>"}, {"from": "gpt", "value": "the red flag with yellow star"}] |
00000010504.jpg | 10504_evjvqa | [{"from": "human", "value": "what is the woman holding?\n<image>"}, {"from": "gpt", "value": "she is holding the dipping sauce bottle"}] |
00000010509.jpg | 10509_evjvqa | [{"from": "human", "value": "what is the man in white helmet intending to buy?\n<image>"}, {"from": "gpt", "value": "the peach blossoms"}] |
00000010510.jpg | 10510_evjvqa | [{"from": "human", "value": "what are these officers doing?\n<image>"}, {"from": "gpt", "value": "they are burning incense"}] |
00000010511.jpg | 10511_evjvqa | [{"from": "human", "value": "what color are the letters on the market's nameplate?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000010516.jpg | 10516_evjvqa | [{"from": "human", "value": "what posture is the girl in the black shirt and white skirt in?\n<image>"}, {"from": "gpt", "value": "she is sitting"}] |
00000010519.jpg | 10519_evjvqa | [{"from": "human", "value": "what does this woman pour the broth into to bring to the guests?\n<image>"}, {"from": "gpt", "value": "the white plastic bowl"}] |
00000010521.jpg | 10521_evjvqa | [{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "she is speaking in front of the press"}] |
00000010522.jpg | 10522_evjvqa | [{"from": "human", "value": "who is this girl standing with?\n<image>"}, {"from": "gpt", "value": "she is standing alone"}] |
00000010523.jpg | 10523_evjvqa | [{"from": "human", "value": "who is this man sitting with?\n<image>"}, {"from": "gpt", "value": "he is sitting alone"}] |
00000010527.jpg | 10527_evjvqa | [{"from": "human", "value": "what does the nameplate of this wedding restaurant look like?\n<image>"}, {"from": "gpt", "value": "it has red and white text on a green background"}] |
00000010528.jpg | 10528_evjvqa | [{"from": "human", "value": "what color bag is the woman in the blue shirt carrying?\n<image>"}, {"from": "gpt", "value": "the red handbag"}] |
00000010529.jpg | 10529_evjvqa | [{"from": "human", "value": "what is cylindrical and gray next to the shop's table?\n<image>"}, {"from": "gpt", "value": "the gas cylinder"}] |
00000010532.jpg | 10532_evjvqa | [{"from": "human", "value": "what is the person in the pink dress doing?\n<image>"}, {"from": "gpt", "value": "singing"}] |
00000010533.jpg | 10533_evjvqa | [{"from": "human", "value": "what is the bike carrying?\n<image>"}, {"from": "gpt", "value": "the flower pots"}] |
00000010536.jpg | 10536_evjvqa | [{"from": "human", "value": "what is the woman using to write on the paper?\n<image>"}, {"from": "gpt", "value": "the pen"}] |
00000010537.jpg | 10537_evjvqa | [{"from": "human", "value": "how many floors does the building on the corner have?\n<image>"}, {"from": "gpt", "value": "just one"}] |
00000010538.jpg | 10538_evjvqa | [{"from": "human", "value": "where are the two women walking?\n<image>"}, {"from": "gpt", "value": "on the steps"}] |
00000010539.jpg | 10539_evjvqa | [{"from": "human", "value": "what kind of agricultural products is the woman wrapped in the yellow scarf putting in the bag?\n<image>"}, {"from": "gpt", "value": "tomatoes"}] |
00000010540.jpg | 10540_evjvqa | [{"from": "human", "value": "what color overcoat is the woman in the red helmet wearing?\n<image>"}, {"from": "gpt", "value": "pink"}] |
00000010542.jpg | 10542_evjvqa | [{"from": "human", "value": "who is the man in purple with in the market?\n<image>"}, {"from": "gpt", "value": "the woman wearing the orange jacket"}] |
00000010543.jpg | 10543_evjvqa | [{"from": "human", "value": "what is the woman in the red shirt kneeling down for?\n<image>"}, {"from": "gpt", "value": "to take a photo of the boy in the white t-shirt"}] |
00000010546.jpg | 10546_evjvqa | [{"from": "human", "value": "what is the woman in red holding?\n<image>"}, {"from": "gpt", "value": "the biscuit box"}] |
00000010547.jpg | 10547_evjvqa | [{"from": "human", "value": "what is the red cabinet selling?\n<image>"}, {"from": "gpt", "value": "kinds of soft drinks"}] |
00000010548.jpg | 10548_evjvqa | [{"from": "human", "value": "what is the man in blue doing?\n<image>"}, {"from": "gpt", "value": "he is playing the announcement"}] |
00000010549.jpg | 10549_evjvqa | [{"from": "human", "value": "who is with the woman in the red dress on the right?\n<image>"}, {"from": "gpt", "value": "the man wearing the gray t-shirt"}] |
00000010550.jpg | 10550_evjvqa | [{"from": "human", "value": "what are the shirts hanging from the poles by?\n<image>"}, {"from": "gpt", "value": "the hooks"}] |
00000010555.jpg | 10555_evjvqa | [{"from": "human", "value": "what does the sky in this area look like?\n<image>"}, {"from": "gpt", "value": "it has blue sky and white clouds"}] |
00000010557.jpg | 10557_evjvqa | [{"from": "human", "value": "what flower is put in this linh fish hot pot?\n<image>"}, {"from": "gpt", "value": "the common sesban"}] |
00000010560.jpg | 10560_evjvqa | [{"from": "human", "value": "what activities are these visitors engaged in?\n<image>"}, {"from": "gpt", "value": "they are visiting the museum"}] |
00000010561.jpg | 10561_evjvqa | [{"from": "human", "value": "what device is the man standing next to?\n<image>"}, {"from": "gpt", "value": "the man is standing next to the microphone"}] |
00000010563.jpg | 10563_evjvqa | [{"from": "human", "value": "where is the woman in the pink shirt putting her hand?\n<image>"}, {"from": "gpt", "value": "in a supermarket trolley"}] |
00000010564.jpg | 10564_evjvqa | [{"from": "human", "value": "which staff member is directly serving the woman in the yellow dress?\n<image>"}, {"from": "gpt", "value": "the staff member wearing the blue t-shirt"}] |
00000010565.jpg | 10565_evjvqa | [{"from": "human", "value": "who is the young man in the sweater standing next to?\n<image>"}, {"from": "gpt", "value": "the girl"}] |
00000010570.jpg | 10570_evjvqa | [{"from": "human", "value": "what color are the balloons on the shelves?\n<image>"}, {"from": "gpt", "value": "red and yellow"}] |
00000010571.jpg | 10571_evjvqa | [{"from": "human", "value": "what is the woman next to the woman in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "she is counting money"}] |
00000010572.jpg | 10572_evjvqa | [{"from": "human", "value": "who is riding a motorbike through a pile of garbage?\n<image>"}, {"from": "gpt", "value": "the man wearing the white shirt"}] |
00000010573.jpg | 10573_evjvqa | [{"from": "human", "value": "what is the weather like in this area?\n<image>"}, {"from": "gpt", "value": "it is sunny"}] |
00000010575.jpg | 10575_evjvqa | [{"from": "human", "value": "what is hanging on ropes strung overhead?\n<image>"}, {"from": "gpt", "value": "the lanterns"}] |
00000010578.jpg | 10578_evjvqa | [{"from": "human", "value": "what is the man in the black hat doing?\n<image>"}, {"from": "gpt", "value": "he is packing food"}] |
00000010580.jpg | 10580_evjvqa | [{"from": "human", "value": "on which side of the road is the car parked?\n<image>"}, {"from": "gpt", "value": "the right side"}] |
00000010581.jpg | 10581_evjvqa | [{"from": "human", "value": "who is standing on the bike?\n<image>"}, {"from": "gpt", "value": "the woman in black"}] |
00000010582.jpg | 10582_evjvqa | [{"from": "human", "value": "what is the woman in pink watching?\n<image>"}, {"from": "gpt", "value": "she is looking at the artifacts displayed in the glass case"}] |
00000010583.jpg | 10583_evjvqa | [{"from": "human", "value": "how many tiers does this shelf have?\n<image>"}, {"from": "gpt", "value": "five"}] |
00000010587.jpg | 10587_evjvqa | [{"from": "human", "value": "what kind of agricultural products is the woman in the pink coat standing next to?\n<image>"}, {"from": "gpt", "value": "the cabbage"}] |
00000010588.jpg | 10588_evjvqa | [{"from": "human", "value": "what is the woman watching?\n<image>"}, {"from": "gpt", "value": "this woman is looking at a milk carton in her hand"}] |
00000010589.jpg | 10589_evjvqa | [{"from": "human", "value": "what is placed next to the wall?\n<image>"}, {"from": "gpt", "value": "the shelves"}] |
00000010595.jpg | 10595_evjvqa | [{"from": "human", "value": "who is the man driving the cyclo carrying?\n<image>"}, {"from": "gpt", "value": "he is carrying the woman in a striped shirt"}] |
00000010596.jpg | 10596_evjvqa | [{"from": "human", "value": "what themed items does this stall sell?\n<image>"}, {"from": "gpt", "value": "christmas"}] |
00000010599.jpg | 10599_evjvqa | [{"from": "human", "value": "where is the girl in the pink hat standing?\n<image>"}, {"from": "gpt", "value": "next to the fountain"}] |
00000010600.jpg | 10600_evjvqa | [{"from": "human", "value": "what color roof does this building have?\n<image>"}, {"from": "gpt", "value": "orange"}] |
00000010601.jpg | 10601_evjvqa | [{"from": "human", "value": "what color pants does the boy wear?\n<image>"}, {"from": "gpt", "value": "black"}] |
00000010602.jpg | 10602_evjvqa | [{"from": "human", "value": "in which hand is the man in the white coat holding the water bottle?\n<image>"}, {"from": "gpt", "value": "in the left hand"}] |
00000010603.jpg | 10603_evjvqa | [{"from": "human", "value": "what is the main color of this counter?\n<image>"}, {"from": "gpt", "value": "green"}] |
00000010604.jpg | 10604_evjvqa | [{"from": "human", "value": "what is this?\n<image>"}, {"from": "gpt", "value": "it is the map of this area"}] |
00000010605.jpg | 10605_evjvqa | [{"from": "human", "value": "how many motorbikes are there?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000010608.jpg | 10608_evjvqa | [{"from": "human", "value": "what is the woman in ao dai doing?\n<image>"}, {"from": "gpt", "value": "she is giving a presentation about the item on display"}] |
00000010609.jpg | 10609_evjvqa | [{"from": "human", "value": "where are people going?\n<image>"}, {"from": "gpt", "value": "going shopping"}] |
00000010610.jpg | 10610_evjvqa | [{"from": "human", "value": "what products are these people looking at?\n<image>"}, {"from": "gpt", "value": "the agricultural products"}] |
00000010611.jpg | 10611_evjvqa | [{"from": "human", "value": "what do people have to do before entering this store?\n<image>"}, {"from": "gpt", "value": "washing their hands"}] |
00000010613.jpg | 10613_evjvqa | [{"from": "human", "value": "what color is the chili sauce bottle?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000010614.jpg | 10614_evjvqa | [{"from": "human", "value": "where are the two women on the left going?\n<image>"}, {"from": "gpt", "value": "going to the supermarket"}] |
00000010617.jpg | 10617_evjvqa | [{"from": "human", "value": "what is the man in the black cap doing?\n<image>"}, {"from": "gpt", "value": "he is taking the temperature"}] |
00000010619.jpg | 10619_evjvqa | [{"from": "human", "value": "what are people doing?\n<image>"}, {"from": "gpt", "value": "choosing books"}] |
00000010622.jpg | 10622_evjvqa | [{"from": "human", "value": "what do people put in the glass?\n<image>"}, {"from": "gpt", "value": "the vase"}] |
00000010625.jpg | 10625_evjvqa | [{"from": "human", "value": "where are these two people going?\n<image>"}, {"from": "gpt", "value": "they are going to the supermarket"}] |
00000010626.jpg | 10626_evjvqa | [{"from": "human", "value": "what is the area of this house?\n<image>"}, {"from": "gpt", "value": "the area of this house is 230 m²"}] |
00000010628.jpg | 10628_evjvqa | [{"from": "human", "value": "what color shirt do most tourists wear?\n<image>"}, {"from": "gpt", "value": "orange"}] |
00000010631.jpg | 10631_evjvqa | [{"from": "human", "value": "what are these two people doing?\n<image>"}, {"from": "gpt", "value": "participating in a meeting"}] |
00000010632.jpg | 10632_evjvqa | [{"from": "human", "value": "what color clothes are the two girls wearing?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000010633.jpg | 10633_evjvqa | [{"from": "human", "value": "what color mask is the woman in black wearing?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000010634.jpg | 10634_evjvqa | [{"from": "human", "value": "what does this stall sell?\n<image>"}, {"from": "gpt", "value": "meat"}] |
00000010636.jpg | 10636_evjvqa | [{"from": "human", "value": "what color are the motorcycles?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000010640.jpg | 10640_evjvqa | [{"from": "human", "value": "in which hand is this woman holding the leaf?\n<image>"}, {"from": "gpt", "value": "in the right hand"}] |
00000010641.jpg | 10641_evjvqa | [{"from": "human", "value": "what color pants does this person wear?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000010642.jpg | 10642_evjvqa | [{"from": "human", "value": "what is the person in the black shirt with a blue mask choosing?\n<image>"}, {"from": "gpt", "value": "choosing vegetables"}] |
00000010643.jpg | 10643_evjvqa | [{"from": "human", "value": "how many potted plants are in this room?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000010645.jpg | 10645_evjvqa | [{"from": "human", "value": "what is the man in white holding?\n<image>"}, {"from": "gpt", "value": "the bag"}] |
00000010647.jpg | 10647_evjvqa | [{"from": "human", "value": "what is the woman pulling?\n<image>"}, {"from": "gpt", "value": "the trolley"}] |
00000010648.jpg | 10648_evjvqa | [{"from": "human", "value": "what is the guard doing?\n<image>"}, {"from": "gpt", "value": "taking the temperature of the customer"}] |
00000010652.jpg | 10652_evjvqa | [{"from": "human", "value": "what color is this ship?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000010654.jpg | 10654_evjvqa | [{"from": "human", "value": "what color are the road markings?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000010655.jpg | 10655_evjvqa | [{"from": "human", "value": "whose cars are these outside the shop?\n<image>"}, {"from": "gpt", "value": "they are owned by employees and customers"}] |
00000010656.jpg | 10656_evjvqa | [{"from": "human", "value": "what is this person carrying on his shoulder?\n<image>"}, {"from": "gpt", "value": "the yoke"}] |
00000010657.jpg | 10657_evjvqa | [{"from": "human", "value": "what is the person doing?\n<image>"}, {"from": "gpt", "value": "this person is taking a photo"}] |
00000010662.jpg | 10662_evjvqa | [{"from": "human", "value": "how many bulbs are there in each lamp?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000010663.jpg | 10663_evjvqa | [{"from": "human", "value": "what color is the shopping cart of this supermarket?\n<image>"}, {"from": "gpt", "value": "blue"}] |
00000010664.jpg | 10664_evjvqa | [{"from": "human", "value": "what color shirt is this person wearing?\n<image>"}, {"from": "gpt", "value": "black"}] |
00000010666.jpg | 10666_evjvqa | [{"from": "human", "value": "what color are the performers wearing?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000010667.jpg | 10667_evjvqa | [{"from": "human", "value": "what is the boy doing?\n<image>"}, {"from": "gpt", "value": "he is going to get the vaccine"}] |
00000010669.jpg | 10669_evjvqa | [{"from": "human", "value": "where are the people going?\n<image>"}, {"from": "gpt", "value": "they are going to the market"}] |
00000010670.jpg | 10670_evjvqa | [{"from": "human", "value": "how many glasses of water did this person buy?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000010671.jpg | 10671_evjvqa | [{"from": "human", "value": "what does this stall sell?\n<image>"}, {"from": "gpt", "value": "vegetables"}] |
00000010672.jpg | 10672_evjvqa | [{"from": "human", "value": "what color is this woman's hair?\n<image>"}, {"from": "gpt", "value": "she has blonde hair"}] |
00000010673.jpg | 10673_evjvqa | [{"from": "human", "value": "what color is the floor of this supermarket?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000010674.jpg | 10674_evjvqa | [{"from": "human", "value": "what color shirt is the girl wearing?\n<image>"}, {"from": "gpt", "value": "white"}] |
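
Each `conversations` cell is a JSON-encoded list of chat turns in the `{"from": ..., "value": ...}` format, where the `<image>` token inside a human turn marks where the image is injected. Below is a minimal Python sketch, using only the standard library, of how one cell can be decoded into (question, answer) pairs. The sample row is copied from the table above; the `to_qa_pairs` helper is an illustrative name, not part of any library.

```python
import json

# One sample row from the preview table above: image file name, sample id,
# and the raw JSON string stored in the conversations column.
image = "00000000298.jpg"
sample_id = "298_evjvqa"
conversations = (
    '[{"from": "human", "value": "what structures do people build on the water?'
    '\\n<image>"}, {"from": "gpt", "value": "the houses"}]'
)

def to_qa_pairs(raw: str) -> list[tuple[str, str]]:
    """Decode one conversations cell into (question, answer) pairs.

    Turns alternate human -> gpt; the "<image>" placeholder in the human
    turn only marks where the image is spliced in, so it is stripped here.
    """
    turns = json.loads(raw)
    pairs = []
    for human, gpt in zip(turns[::2], turns[1::2]):
        assert human["from"] == "human" and gpt["from"] == "gpt"
        question = human["value"].replace("<image>", "").strip()
        pairs.append((question, gpt["value"].strip()))
    return pairs

print(image, sample_id, to_qa_pairs(conversations))
# 00000000298.jpg 298_evjvqa [('what structures do people build on the water?', 'the houses')]
```

Every row shown in this preview contains exactly one human/gpt pair, but the pairing loop above would also handle multi-turn cells if they occur elsewhere in the dataset.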