image (string, length 15) | id (string, length 8-12) | conversations (string, length 79-268)
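The rows below repeat these three fields in order, one field per line. A minimal sketch of how the records might be grouped and parsed, assuming exactly three lines per record and that each conversations cell is a JSON list of {"from", "value"} turns; the parse_records helper is hypothetical, not part of any official loader:

import json

def parse_records(lines):
    # Hypothetical helper: group the flat dump into records of
    # (image, id, conversations), three lines per record.
    for i in range(0, len(lines), 3):
        image, record_id, raw = lines[i:i + 3]
        yield {
            "image": image.strip(),
            "id": record_id.strip(),
            # each conversations cell is a JSON list of {"from", "value"} turns
            "conversations": json.loads(raw),
        }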
00000003886.jpg
3886_evjvqa
[{"from": "human", "value": "how many people are standing in front of the clothes store?\n<image>"}, {"from": "gpt", "value": "there is one person standing in front of the clothes store"}]
00000003886.jpg
3886_evjvqa
[{"from": "human", "value": "what is the time in the day of this place?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000003889.jpg
3889_evjvqa
[{"from": "human", "value": "how many cars are moving on the road?\n<image>"}, {"from": "gpt", "value": "there is one car moving on the road"}]
00000003889.jpg
3889_evjvqa
[{"from": "human", "value": "what color is the car on the street?\n<image>"}, {"from": "gpt", "value": "the car on the street is black"}]
00000003889.jpg
3889_evjvqa
[{"from": "human", "value": "what destination is the car passing through?\n<image>"}, {"from": "gpt", "value": "the car is passing a school"}]
00000003890.jpg
3890_evjvqa
[{"from": "human", "value": "how many people are sitting in front of the decoration store?\n<image>"}, {"from": "gpt", "value": "there is one person sitting in front of the decoration store"}]
00000003891.jpg
3891_evjvqa
[{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "she is observing a product in the supermarket"}]
00000003891.jpg
3891_evjvqa
[{"from": "human", "value": "what color is the shirt of the woman?\n<image>"}, {"from": "gpt", "value": "the woman wears a grey shirt"}]
00000003891.jpg
3891_evjvqa
[{"from": "human", "value": "what color is the water inside the bottle which the woman is holding?\n<image>"}, {"from": "gpt", "value": "the water is blue"}]
00000003892.jpg
3892_evjvqa
[{"from": "human", "value": "what is the main color of this house?\n<image>"}, {"from": "gpt", "value": "the main color of this house is white"}]
00000003892.jpg
3892_evjvqa
[{"from": "human", "value": "how many pots of decorative plants on the table in front of the sofa?\n<image>"}, {"from": "gpt", "value": "there is one pot of decorative plant on the table"}]
00000003892.jpg
3892_evjvqa
[{"from": "human", "value": "what direction is the fan heading to?\n<image>"}, {"from": "gpt", "value": "the fan is heading to the sofa"}]
00000003893.jpg
3893_evjvqa
[{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this is a supermarket"}]
00000003893.jpg
3893_evjvqa
[{"from": "human", "value": "how was the scene of the supermarket?\n<image>"}, {"from": "gpt", "value": "it is very crowded"}]
00000003894.jpg
3894_evjvqa
[{"from": "human", "value": "what color is the shirt of the girl?\n<image>"}, {"from": "gpt", "value": "the girl wears yellow shirt"}]
00000003894.jpg
3894_evjvqa
[{"from": "human", "value": "what is she handling on her right hand?\n<image>"}, {"from": "gpt", "value": "she is holding a camera"}]
00000003894.jpg
3894_evjvqa
[{"from": "human", "value": "what area is she standing?\n<image>"}, {"from": "gpt", "value": "she is standing near the traffic lights"}]
00000000800.jpg
800_evjvqa
[{"from": "human", "value": "what fruit is next to the woman wearing red t-shirt?\n<image>"}, {"from": "gpt", "value": "coconut"}]
00000000800.jpg
800_evjvqa
[{"from": "human", "value": "what is the man wearing brown t-shirt doing at the left aisle?\n<image>"}, {"from": "gpt", "value": "drinking coconut"}]
00000000801.jpg
801_evjvqa
[{"from": "human", "value": "how many sugarcane mills are there in front of the store?\n<image>"}, {"from": "gpt", "value": "four mills"}]
00000000802.jpg
802_evjvqa
[{"from": "human", "value": "where is this stone put?\n<image>"}, {"from": "gpt", "value": "in front of an old house"}]
00000000803.jpg
803_evjvqa
[{"from": "human", "value": "where are the two boys wearing green jacket sitting?\n<image>"}, {"from": "gpt", "value": "to the right, in front of the woman wearing red jacket"}]
00000000804.jpg
804_evjvqa
[{"from": "human", "value": "how many people are there on the stage?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000805.jpg
805_evjvqa
[{"from": "human", "value": "how many motorcycles are coming from the left?\n<image>"}, {"from": "gpt", "value": "there are two motorcycles coming from the left"}]
00000000809.jpg
809_evjvqa
[{"from": "human", "value": "how many cars are presented?\n<image>"}, {"from": "gpt", "value": "six cars"}]
00000000809.jpg
809_evjvqa
[{"from": "human", "value": "how many orange care are presented?\n<image>"}, {"from": "gpt", "value": "one car"}]
00000000815.jpg
815_evjvqa
[{"from": "human", "value": "what does this area exhibit?\n<image>"}, {"from": "gpt", "value": "the aircraft models"}]
00000000815.jpg
815_evjvqa
[{"from": "human", "value": "what is this room exhibiting?\n<image>"}, {"from": "gpt", "value": "the aircraft models"}]
00000000816.jpg
816_evjvqa
[{"from": "human", "value": "what color is the wall of the house at the corner painted?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000000817.jpg
817_evjvqa
[{"from": "human", "value": "what is this store selling?\n<image>"}, {"from": "gpt", "value": "mainly sold seafood"}]
00000000823.jpg
823_evjvqa
[{"from": "human", "value": "where are these two people standing?\n<image>"}, {"from": "gpt", "value": "by the side of a river"}]
00000000825.jpg
825_evjvqa
[{"from": "human", "value": "where is this scene at?\n<image>"}, {"from": "gpt", "value": "in a historical destination"}]
00000000826.jpg
826_evjvqa
[{"from": "human", "value": "where is the girl wearing glasses standing at?\n<image>"}, {"from": "gpt", "value": "on a bus"}]
00000000826.jpg
826_evjvqa
[{"from": "human", "value": "how many people wearing black jacket on the bus?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000826.jpg
826_evjvqa
[{"from": "human", "value": "where is the girl in black jacket sitting?\n<image>"}, {"from": "gpt", "value": "on a bus"}]
00000000828.jpg
828_evjvqa
[{"from": "human", "value": "how many paintings of human are there on the wall?\n<image>"}, {"from": "gpt", "value": "two pictures"}]
00000000828.jpg
828_evjvqa
[{"from": "human", "value": "how many people are eating at the inner room?\n<image>"}, {"from": "gpt", "value": "three people"}]
00000000828.jpg
828_evjvqa
[{"from": "human", "value": "how many table are not used at the inner room?\n<image>"}, {"from": "gpt", "value": "two tables"}]
00000000829.jpg
829_evjvqa
[{"from": "human", "value": "how many fans are opened?\n<image>"}, {"from": "gpt", "value": "one fan"}]
00000000835.jpg
835_evjvqa
[{"from": "human", "value": "how many people are walking on the railway?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000835.jpg
835_evjvqa
[{"from": "human", "value": "what color is the man's shirt?\n<image>"}, {"from": "gpt", "value": "white"}]
00000000835.jpg
835_evjvqa
[{"from": "human", "value": "how many people are walking next to the railway?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000835.jpg
835_evjvqa
[{"from": "human", "value": "how many motorcycles are placed near the railway?\n<image>"}, {"from": "gpt", "value": "one motorcycle"}]
00000000836.jpg
836_evjvqa
[{"from": "human", "value": "which aisle are the woman in black t-shirt standing at?\n<image>"}, {"from": "gpt", "value": "meat aisle"}]
00000000836.jpg
836_evjvqa
[{"from": "human", "value": "what are the woman in black t-shirt doing in front of the meat aisle?\n<image>"}, {"from": "gpt", "value": "choosing meat"}]
00000000836.jpg
836_evjvqa
[{"from": "human", "value": "what are the woman in black t-shirt doing behind the meat aisle?\n<image>"}, {"from": "gpt", "value": "selling meat"}]
00000000837.jpg
837_evjvqa
[{"from": "human", "value": "where are the man and the woman holding hand at?\n<image>"}, {"from": "gpt", "value": "at an avenue"}]
00000000837.jpg
837_evjvqa
[{"from": "human", "value": "who is the man holding hand with?\n<image>"}, {"from": "gpt", "value": "the woman walking next to him"}]
00000000837.jpg
837_evjvqa
[{"from": "human", "value": "who is the woman holding hand with?\n<image>"}, {"from": "gpt", "value": "the man walking next to him"}]
00000000837.jpg
837_evjvqa
[{"from": "human", "value": "what are the man and the woman doing?\n<image>"}, {"from": "gpt", "value": "holding hands and walking"}]
00000000840.jpg
840_evjvqa
[{"from": "human", "value": "where are the belt aisle?\n<image>"}, {"from": "gpt", "value": "next to the cookies aisle"}]
00000000840.jpg
840_evjvqa
[{"from": "human", "value": "where is the cookies aisle?\n<image>"}, {"from": "gpt", "value": "next to the belt aisle"}]
00000000840.jpg
840_evjvqa
[{"from": "human", "value": "how many people are buying at the store?\n<image>"}, {"from": "gpt", "value": "nobody is buying"}]
00000000841.jpg
841_evjvqa
[{"from": "human", "value": "what aisle are those people at?\n<image>"}, {"from": "gpt", "value": "flower aisle"}]
00000000841.jpg
841_evjvqa
[{"from": "human", "value": "what is the man in black t-shirt who does not wear helmet putting behind his motorcycle?\n<image>"}, {"from": "gpt", "value": "bunches of flowers"}]
00000000841.jpg
841_evjvqa
[{"from": "human", "value": "what is thw girl in black shirt with red striped doing?\n<image>"}, {"from": "gpt", "value": "buying flowers"}]
00000000842.jpg
842_evjvqa
[{"from": "human", "value": "where are the fish sauce selling at?\n<image>"}, {"from": "gpt", "value": "at the stores"}]
00000000842.jpg
842_evjvqa
[{"from": "human", "value": "what is exhibited on the aisles?\n<image>"}, {"from": "gpt", "value": "soy sauce, fish sauce"}]
00000000842.jpg
842_evjvqa
[{"from": "human", "value": "what is selling under the fish sauce aisle?\n<image>"}, {"from": "gpt", "value": "coffee"}]
00000000844.jpg
844_evjvqa
[{"from": "human", "value": "what is this area exhibited with?\n<image>"}, {"from": "gpt", "value": "exhibiting the historical statues"}]
00000000844.jpg
844_evjvqa
[{"from": "human", "value": "where is this picture taken?\n<image>"}, {"from": "gpt", "value": "in the exhibition of a museum"}]
00000000844.jpg
844_evjvqa
[{"from": "human", "value": "what is this room mainly exhibited with?\n<image>"}, {"from": "gpt", "value": "historical statues"}]
00000000845.jpg
845_evjvqa
[{"from": "human", "value": "where is the shop that the girl is sitting at?\n<image>"}, {"from": "gpt", "value": "next to the railway"}]
00000000845.jpg
845_evjvqa
[{"from": "human", "value": "where is the girl sitting at?\n<image>"}, {"from": "gpt", "value": "at a coffee store"}]
00000000845.jpg
845_evjvqa
[{"from": "human", "value": "how many lanterns are used to decorate this place?\n<image>"}, {"from": "gpt", "value": "two lanterns"}]
00000000849.jpg
849_evjvqa
[{"from": "human", "value": "how many motorbikes are there at the market's entrance?\n<image>"}, {"from": "gpt", "value": "three motorbikes"}]
00000000849.jpg
849_evjvqa
[{"from": "human", "value": "how many people are standing at the market's entrance?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000000853.jpg
853_evjvqa
[{"from": "human", "value": "where is this picture taken?\n<image>"}, {"from": "gpt", "value": "in a park"}]
00000000864.jpg
864_evjvqa
[{"from": "human", "value": "how many christmas trees are decorating in front of the building?\n<image>"}, {"from": "gpt", "value": "two christmas trees"}]
00000000864.jpg
864_evjvqa
[{"from": "human", "value": "what ocassion are people going out on?\n<image>"}, {"from": "gpt", "value": "christmas"}]
00000000865.jpg
865_evjvqa
[{"from": "human", "value": "where are the three women choosing food at?\n<image>"}, {"from": "gpt", "value": "at the supermarket"}]
00000000865.jpg
865_evjvqa
[{"from": "human", "value": "what are the three women doing?\n<image>"}, {"from": "gpt", "value": "choosing meat"}]
00000000865.jpg
865_evjvqa
[{"from": "human", "value": "what is the girl next to the woman in white shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing meat"}]
00000000865.jpg
865_evjvqa
[{"from": "human", "value": "where are the three women choosing food at?\n<image>"}, {"from": "gpt", "value": "at the meat aisle"}]
00000000865.jpg
865_evjvqa
[{"from": "human", "value": "what is sold above the fresh meat aisle?\n<image>"}, {"from": "gpt", "value": "chilli sauce"}]
00000000875.jpg
875_evjvqa
[{"from": "human", "value": "how many people are using the cyclo?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000875.jpg
875_evjvqa
[{"from": "human", "value": "what color is the woman walking next to the cyclo's shirt?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000883.jpg
883_evjvqa
[{"from": "human", "value": "what are the two people in blue uniforms doing?\n<image>"}, {"from": "gpt", "value": "curing for a patient"}]
00000000883.jpg
883_evjvqa
[{"from": "human", "value": "who are the people in blue uniforms standing next to the patient's bed?\n<image>"}, {"from": "gpt", "value": "doctors"}]
00000000883.jpg
883_evjvqa
[{"from": "human", "value": "how many doctors are curing for the patience?\n<image>"}, {"from": "gpt", "value": "two doctors are curing for the patient"}]
00000000890.jpg
890_evjvqa
[{"from": "human", "value": "what costumes are these two girls wearing?\n<image>"}, {"from": "gpt", "value": "they are both wearing white ao dai"}]
00000000890.jpg
890_evjvqa
[{"from": "human", "value": "where are these two girls sitting at?\n<image>"}, {"from": "gpt", "value": "next to the river, under the flamboyant"}]
00000000890.jpg
890_evjvqa
[{"from": "human", "value": "what is put in the girls' conical hats?\n<image>"}, {"from": "gpt", "value": "flamboyant flowers"}]
00000000896.jpg
896_evjvqa
[{"from": "human", "value": "how many kids are going with the woman in red jacket?\n<image>"}, {"from": "gpt", "value": "two kids"}]
00000000896.jpg
896_evjvqa
[{"from": "human", "value": "how many people are using the trolleys?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000896.jpg
896_evjvqa
[{"from": "human", "value": "where are these aisle locating at?\n<image>"}, {"from": "gpt", "value": "supermarket"}]
00000000899.jpg
899_evjvqa
[{"from": "human", "value": "what vehicles are used by the women?\n<image>"}, {"from": "gpt", "value": "they are moving by bike"}]
00000000899.jpg
899_evjvqa
[{"from": "human", "value": "where are these women biking at?\n<image>"}, {"from": "gpt", "value": "on the road at an countryside"}]
00000000899.jpg
899_evjvqa
[{"from": "human", "value": "how many people are moving by bikes?\n<image>"}, {"from": "gpt", "value": "four people are moving by bikes"}]
00000002100.jpg
2100_evjvqa
[{"from": "human", "value": "what is the man in blue shirt who is sitting doing?\n<image>"}, {"from": "gpt", "value": "drinking wine"}]
00000002100.jpg
2100_evjvqa
[{"from": "human", "value": "who is drinking with the man in blue shirt?\n<image>"}, {"from": "gpt", "value": "the woman in red t-shirt is drinking with the man"}]
00000002100.jpg
2100_evjvqa
[{"from": "human", "value": "how many people are wearing blue shirts?\n<image>"}, {"from": "gpt", "value": "there are two people wearing blue shirts"}]
00000002104.jpg
2104_evjvqa
[{"from": "human", "value": "what is the aisle on the right side of the woman wearing yellow dress selling?\n<image>"}, {"from": "gpt", "value": "that aisle sells instant noodles"}]
00000002104.jpg
2104_evjvqa
[{"from": "human", "value": "what cubic-shaped box is put in the trolley?\n<image>"}, {"from": "gpt", "value": "a box of instant noodles"}]
00000002104.jpg
2104_evjvqa
[{"from": "human", "value": "what is the girl touching her hair wearing?\n<image>"}, {"from": "gpt", "value": "that woman wears a yellow and black dress"}]
00000002107.jpg
2107_evjvqa
[{"from": "human", "value": "what is she wearing on her face?\n<image>"}, {"from": "gpt", "value": "she is wearing sunglasses"}]
00000002108.jpg
2108_evjvqa
[{"from": "human", "value": "what is the women wearing white mask doing in the supermarket?\n<image>"}, {"from": "gpt", "value": "she is choosing pork"}]
00000002108.jpg
2108_evjvqa
[{"from": "human", "value": "what product is chosen by the woman wearing white mask?\n<image>"}, {"from": "gpt", "value": "pork"}]
00000002108.jpg
2108_evjvqa
[{"from": "human", "value": "what product is sold at this aisle?\n<image>"}, {"from": "gpt", "value": "pork"}]
00000002112.jpg
2112_evjvqa
[{"from": "human", "value": "how many electric power pole are there in front of the store?\n<image>"}, {"from": "gpt", "value": "there are three poles in front of the store"}]