Columns: image (string, 15 characters), id (string, 8 to 12 characters), conversations (string, 79 to 268 characters)
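Each row below is an (image, id, conversations) triple: an image filename, a record id, and a JSON-encoded list of alternating human/gpt turns in which the <image> token marks where the image is injected into the prompt. The following is a minimal sketch of how such a row could be decoded into question/answer pairs; the parse_record helper is a hypothetical name, not part of the dataset's tooling, and only Python's standard json module is assumed.

import json

def parse_record(image, record_id, conversations):
    """Decode one (image, id, conversations) row into question/answer pairs.

    Illustrative assumption: `conversations` is a JSON-encoded list of
    alternating {"from": "human"} and {"from": "gpt"} turns, as in the
    rows below.
    """
    turns = json.loads(conversations)
    pairs = []
    for human, gpt in zip(turns[::2], turns[1::2]):
        # Each human turn should be answered by the gpt turn that follows it.
        assert human["from"] == "human" and gpt["from"] == "gpt"
        # Strip the <image> placeholder marking where the image is inserted.
        question = human["value"].replace("<image>", "").strip()
        pairs.append({"image": image, "id": record_id,
                      "question": question, "answer": gpt["value"]})
    return pairs

# Example with the first row of this section:
pairs = parse_record(
    "00000005198.jpg",
    "5198_evjvqa",
    '[{"from": "human", "value": "what is the woman wearing a white and black '
    'striped shirt doing?\\n<image>"}, '
    '{"from": "gpt", "value": "shopping for vegetables"}]',
)
print(pairs[0]["question"], "->", pairs[0]["answer"])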
00000005198.jpg
5198_evjvqa
[{"from": "human", "value": "what is the woman wearing white and black striped shirt doing?\n<image>"}, {"from": "gpt", "value": "shopping for vegetable"}]
00000005198.jpg
5198_evjvqa
[{"from": "human", "value": "how many youths are wearing white t-shirt?\n<image>"}, {"from": "gpt", "value": "there is one youth wearing white t-shirt"}]
00000003800.jpg
3800_evjvqa
[{"from": "human", "value": "what is the man wearing red t-shirt holding on his hand?\n<image>"}, {"from": "gpt", "value": "he is holding a phone"}]
00000003800.jpg
3800_evjvqa
[{"from": "human", "value": "what is the man holding the phone for?\n<image>"}, {"from": "gpt", "value": "he is holding the phone to take the photo of the baby"}]
00000003800.jpg
3800_evjvqa
[{"from": "human", "value": "how many people are there in front of the man wearing red t-shirt?\n<image>"}, {"from": "gpt", "value": "there are two people in front of the man wearing red t-shirt"}]
00000003801.jpg
3801_evjvqa
[{"from": "human", "value": "what is the time of the day in the picture?\n<image>"}, {"from": "gpt", "value": "it is the evening"}]
00000003801.jpg
3801_evjvqa
[{"from": "human", "value": "how does the walking avenue look like?\n<image>"}, {"from": "gpt", "value": "the walking avenue is very crowded with pedestrians"}]
00000003801.jpg
3801_evjvqa
[{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this is a china town"}]
00000003802.jpg
3802_evjvqa
[{"from": "human", "value": "what is the man wearing blue t-shirt doing?\n<image>"}, {"from": "gpt", "value": "he is introduce the products to the woman"}]
00000003802.jpg
3802_evjvqa
[{"from": "human", "value": "how many colors are there on the dress of the staff?\n<image>"}, {"from": "gpt", "value": "the dress has two colors"}]
00000003802.jpg
3802_evjvqa
[{"from": "human", "value": "how many men are there on the right side of the woman wearing flowery shirt?\n<image>"}, {"from": "gpt", "value": "there is one man standing on the right side of the woman wearing flowery shirt"}]
00000003803.jpg
3803_evjvqa
[{"from": "human", "value": "which side of the boy wearing yellow t-shirt are the girl walking on?\n<image>"}, {"from": "gpt", "value": "the girl is walking on the right side of the boy wearing yellow t-shirt"}]
00000003803.jpg
3803_evjvqa
[{"from": "human", "value": "how many boys wearing white t-shirt are there behind the girl?\n<image>"}, {"from": "gpt", "value": "there are two men wearing white t-shirt moving behind the girl"}]
00000003803.jpg
3803_evjvqa
[{"from": "human", "value": "what color is the shirt of the by walking next to the girl?\n<image>"}, {"from": "gpt", "value": "the boy walking next to the girl wears a yellow t-shirt"}]
00000003804.jpg
3804_evjvqa
[{"from": "human", "value": "how many guards are there in front of the market?\n<image>"}, {"from": "gpt", "value": "there is one guard in front of the market"}]
00000003804.jpg
3804_evjvqa
[{"from": "human", "value": "what color is the shirt of the girl walking towards the market?\n<image>"}, {"from": "gpt", "value": "she wears a yellow shirt"}]
00000003805.jpg
3805_evjvqa
[{"from": "human", "value": "what animal's statues are placed in front of the historical destination?\n<image>"}, {"from": "gpt", "value": "there are two statues of lion in front on the historical destintion"}]
00000003806.jpg
3806_evjvqa
[{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this is a food aisle in the supermarket"}]
00000003806.jpg
3806_evjvqa
[{"from": "human", "value": "how many people are standing in front of the cabbage aisle?\n<image>"}, {"from": "gpt", "value": "there is a woman standing in frony of the cabbage aisle"}]
00000003807.jpg
3807_evjvqa
[{"from": "human", "value": "how many cars are there in front of the bookstore?\n<image>"}, {"from": "gpt", "value": "there is a car parking in front of the bookstore"}]
00000003809.jpg
3809_evjvqa
[{"from": "human", "value": "how was the traffic at this area?\n<image>"}, {"from": "gpt", "value": "it is very crowded"}]
00000003813.jpg
3813_evjvqa
[{"from": "human", "value": "where are the boys standing at?\n<image>"}, {"from": "gpt", "value": "they are standing in front of the beverage aisle"}]
00000003813.jpg
3813_evjvqa
[{"from": "human", "value": "what are the staff doing?\n<image>"}, {"from": "gpt", "value": "she is introducing the product to the boys"}]
00000003813.jpg
3813_evjvqa
[{"from": "human", "value": "what color is the shirt of the boy wearing bag?\n<image>"}, {"from": "gpt", "value": "the boy wearing bag wears a red t-shirt"}]
00000003814.jpg
3814_evjvqa
[{"from": "human", "value": "what is the staff of this store doing?\n<image>"}, {"from": "gpt", "value": "he is using the laptop"}]
00000003814.jpg
3814_evjvqa
[{"from": "human", "value": "what is the color of the staff's uniform?\n<image>"}, {"from": "gpt", "value": "the staff's uniform is blue"}]
00000003814.jpg
3814_evjvqa
[{"from": "human", "value": "how many chairs in front of the table where the staff is sitting?\n<image>"}, {"from": "gpt", "value": "there are three chairs in front of the table"}]
00000003817.jpg
3817_evjvqa
[{"from": "human", "value": "where is the woman standing at?\n<image>"}, {"from": "gpt", "value": " the woman is standing next to an aisle"}]
00000003817.jpg
3817_evjvqa
[{"from": "human", "value": "what color is the shirt of the woman?\n<image>"}, {"from": "gpt", "value": "the woman wears a red shirt"}]
00000003817.jpg
3817_evjvqa
[{"from": "human", "value": "how many trollys are there in front of the woman?\n<image>"}, {"from": "gpt", "value": "there is one trolley in front of the woman"}]
00000003821.jpg
3821_evjvqa
[{"from": "human", "value": "how many breads are there on the dish?\n<image>"}, {"from": "gpt", "value": "there are two breads on the dish"}]
00000003822.jpg
3822_evjvqa
[{"from": "human", "value": "how many traveling cars are there on the road?\n<image>"}, {"from": "gpt", "value": "there are two traveling cars on the road"}]
00000003823.jpg
3823_evjvqa
[{"from": "human", "value": "where are those people at?\n<image>"}, {"from": "gpt", "value": "they are in a restaurant"}]
00000003823.jpg
3823_evjvqa
[{"from": "human", "value": "how mnay girls are there in front of the man wearing black shirt?\n<image>"}, {"from": "gpt", "value": "there are two girls sitting in front of the man wearing black shirt"}]
00000003823.jpg
3823_evjvqa
[{"from": "human", "value": "what is the roof of the restaurant decorated with?\n<image>"}, {"from": "gpt", "value": "it is decorated with the lights"}]
00000003824.jpg
3824_evjvqa
[{"from": "human", "value": "what is the color of the jacket of the girl who is carrying a handbag?\n<image>"}, {"from": "gpt", "value": "the girl wears a yellow jacket"}]
00000003824.jpg
3824_evjvqa
[{"from": "human", "value": "how many people are ther wearing red ao dai?\n<image>"}, {"from": "gpt", "value": "there are two people wearing red ao dai"}]
00000003824.jpg
3824_evjvqa
[{"from": "human", "value": "what is the girl wearing white shirt with black stripe doing?\n<image>"}, {"from": "gpt", "value": "she is taking phototgraph"}]
00000003825.jpg
3825_evjvqa
[{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this is a market next to the sea"}]
00000003825.jpg
3825_evjvqa
[{"from": "human", "value": "how was the market?\n<image>"}, {"from": "gpt", "value": "the market is very busy"}]
00000003825.jpg
3825_evjvqa
[{"from": "human", "value": "what products are mainly sold in this market?\n<image>"}, {"from": "gpt", "value": "this market mainly sell seafood"}]
00000003826.jpg
3826_evjvqa
[{"from": "human", "value": "which hand are the girl raising up?\n<image>"}, {"from": "gpt", "value": "she is raising up the left hand"}]
00000003826.jpg
3826_evjvqa
[{"from": "human", "value": "what color is the bag of the girl?\n<image>"}, {"from": "gpt", "value": "her bag is black"}]
00000003826.jpg
3826_evjvqa
[{"from": "human", "value": "how many people are taking pictures near the river?\n<image>"}, {"from": "gpt", "value": "there are two people taking pictures next to the river"}]
00000003828.jpg
3828_evjvqa
[{"from": "human", "value": "what color is the car?\n<image>"}, {"from": "gpt", "value": "the car is blue"}]
00000003828.jpg
3828_evjvqa
[{"from": "human", "value": "where is this car parked?\n<image>"}, {"from": "gpt", "value": "this car is parked next to a river"}]
00000003829.jpg
3829_evjvqa
[{"from": "human", "value": "where are everyone sitting at?\n<image>"}, {"from": "gpt", "value": "they are sitting at a coffee store"}]
00000003829.jpg
3829_evjvqa
[{"from": "human", "value": "how many motorbikes are there next to the man wearing black t-shirt?\n<image>"}, {"from": "gpt", "value": "there is a car next to the man"}]
00000003829.jpg
3829_evjvqa
[{"from": "human", "value": "what color is the shirt of the man sitting next to the motorbike?\n<image>"}, {"from": "gpt", "value": "he wears a black shirt"}]
00000003831.jpg
3831_evjvqa
[{"from": "human", "value": "what is the main ingredient in this food?\n<image>"}, {"from": "gpt", "value": "the main ingredients are fruits"}]
00000003832.jpg
3832_evjvqa
[{"from": "human", "value": "where are those people traveling at?\n<image>"}, {"from": "gpt", "value": "they are playing in an amusement park"}]
00000003832.jpg
3832_evjvqa
[{"from": "human", "value": "how many people wearing yellow t-shirt standing in the back ?\n<image>"}, {"from": "gpt", "value": "there are two people wearing yellow shirt in the back"}]
00000003832.jpg
3832_evjvqa
[{"from": "human", "value": "what color is the shirt of the left-most boy who is sitting?\n<image>"}, {"from": "gpt", "value": "the boy wears a black shirt"}]
00000003834.jpg
3834_evjvqa
[{"from": "human", "value": "what is this region?\n<image>"}, {"from": "gpt", "value": "this is the place to exhibit the scientific products"}]
00000003835.jpg
3835_evjvqa
[{"from": "human", "value": "how many girls are wearing black t-shirt?\n<image>"}, {"from": "gpt", "value": "there is one girl wearing black t-shirt"}]
00000003835.jpg
3835_evjvqa
[{"from": "human", "value": "what side of the two girls are the girl in black t-shirt standing at?\n<image>"}, {"from": "gpt", "value": "she is standing on the left side of the two girls"}]
00000003835.jpg
3835_evjvqa
[{"from": "human", "value": "what color is the basket in front of the aisle?\n<image>"}, {"from": "gpt", "value": "the basket is red"}]
00000003838.jpg
3838_evjvqa
[{"from": "human", "value": "what is this region?\n<image>"}, {"from": "gpt", "value": "this region is at the seasoning aisle in a supermarket"}]
00000003838.jpg
3838_evjvqa
[{"from": "human", "value": "what hand is the woman using to get the food high on the aisle?\n<image>"}, {"from": "gpt", "value": "she is using left hand to reach higher"}]
00000003839.jpg
3839_evjvqa
[{"from": "human", "value": "what hand does the man use to hold the phone while riding motorbike?\n<image>"}, {"from": "gpt", "value": "the man uses his left hand to handle the phone"}]
00000003840.jpg
3840_evjvqa
[{"from": "human", "value": "what is the vegetables aisle owner doing?\n<image>"}, {"from": "gpt", "value": "she is packaging the vegetables for the customers"}]
00000003840.jpg
3840_evjvqa
[{"from": "human", "value": "how many customers are buying at the vegetables aisle?\n<image>"}, {"from": "gpt", "value": "there are three people buying at the vegetables aisle"}]
00000003840.jpg
3840_evjvqa
[{"from": "human", "value": "what color is the shirt of the kid sitting in frony of the aisle?\n<image>"}, {"from": "gpt", "value": "he is wearing a red shirt"}]
00000003841.jpg
3841_evjvqa
[{"from": "human", "value": "what is the girl doing?\n<image>"}, {"from": "gpt", "value": "she is getting some toys from the stall"}]
00000003841.jpg
3841_evjvqa
[{"from": "human", "value": "what is the girl getting at the stall?\n<image>"}, {"from": "gpt", "value": "she is getting toys"}]
00000003841.jpg
3841_evjvqa
[{"from": "human", "value": "what is the region the girl is standing at?\n<image>"}, {"from": "gpt", "value": "the girl is standing at the toy aisle in the supermarket"}]
00000003842.jpg
3842_evjvqa
[{"from": "human", "value": "how many girls are there in the group of pedestrians?\n<image>"}, {"from": "gpt", "value": "there are two girls in the group of pedestrians"}]
00000003842.jpg
3842_evjvqa
[{"from": "human", "value": "how many cars are there moving on the road?\n<image>"}, {"from": "gpt", "value": "there are three cars moving on the road"}]
00000003842.jpg
3842_evjvqa
[{"from": "human", "value": "what hand does the boy carrying the bag use to handle the phone?\n<image>"}, {"from": "gpt", "value": "he is handling the phone by his right hand"}]
00000003843.jpg
3843_evjvqa
[{"from": "human", "value": "how many employees are there standing in the nail store?\n<image>"}, {"from": "gpt", "value": "there are two employees in the nail store"}]
00000003843.jpg
3843_evjvqa
[{"from": "human", "value": "what color is the car opposite to the store's glasses?\n<image>"}, {"from": "gpt", "value": "the car is white"}]
00000003844.jpg
3844_evjvqa
[{"from": "human", "value": "what color is the shirt of the girl standing for photograph?\n<image>"}, {"from": "gpt", "value": "the girl who is taking pictures wears a white shirt"}]
00000003849.jpg
3849_evjvqa
[{"from": "human", "value": "how many people are wearing blue shirt?\n<image>"}, {"from": "gpt", "value": "there are three people wearing blue shirt"}]
00000003849.jpg
3849_evjvqa
[{"from": "human", "value": "where are the kids standing at?\n<image>"}, {"from": "gpt", "value": "the kids are standing at the road leading from house to the road"}]
00000003849.jpg
3849_evjvqa
[{"from": "human", "value": "how many people are walking with the bicycles?\n<image>"}, {"from": "gpt", "value": "there is one person walking with the bicycle"}]
00000003850.jpg
3850_evjvqa
[{"from": "human", "value": "what is the food owner doing?\n<image>"}, {"from": "gpt", "value": "the shopw owner is hiving the food to the shipper"}]
00000003850.jpg
3850_evjvqa
[{"from": "human", "value": "what hand does the man wearing red jacket use to handle the phone?\n<image>"}, {"from": "gpt", "value": "he is handling the phone on his left hand"}]
00000003850.jpg
3850_evjvqa
[{"from": "human", "value": "how many people wearing red shirt are there?\n<image>"}, {"from": "gpt", "value": "there are two people wearing red shirt"}]
00000003852.jpg
3852_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is an ancient town along the river"}]
00000003852.jpg
3852_evjvqa
[{"from": "human", "value": "what is the time in the day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000003852.jpg
3852_evjvqa
[{"from": "human", "value": "how many peoples are sitting on the boat?\n<image>"}, {"from": "gpt", "value": "there is one person sitting on the boat"}]
00000003854.jpg
3854_evjvqa
[{"from": "human", "value": "how many people are riding on the field?\n<image>"}, {"from": "gpt", "value": "there are four people riding on the road"}]
00000003854.jpg
3854_evjvqa
[{"from": "human", "value": "what is the last people doing?\n<image>"}, {"from": "gpt", "value": "carrying a pedlar"}]
00000003854.jpg
3854_evjvqa
[{"from": "human", "value": "how many trees are there on the field?\n<image>"}, {"from": "gpt", "value": "there are three trees on the field"}]
00000003855.jpg
3855_evjvqa
[{"from": "human", "value": "how many people are standing near the welcome gate?\n<image>"}, {"from": "gpt", "value": "there are two people standing near the welcome gate"}]
00000003855.jpg
3855_evjvqa
[{"from": "human", "value": "what color are the flowwers decorated around the welcome gate?\n<image>"}, {"from": "gpt", "value": "those flowers are yellow"}]
00000003857.jpg
3857_evjvqa
[{"from": "human", "value": "what side of the girl is the man standing?\n<image>"}, {"from": "gpt", "value": "the man is standing on the left side of the girl"}]
00000003857.jpg
3857_evjvqa
[{"from": "human", "value": "what stores does the girl in white t-shirt pass? \n<image>"}, {"from": "gpt", "value": "she passes the books and souvenir store"}]
00000003865.jpg
3865_evjvqa
[{"from": "human", "value": "how was the view of this place?\n<image>"}, {"from": "gpt", "value": "this place looks crowded"}]
00000003869.jpg
3869_evjvqa
[{"from": "human", "value": "what is the time in the day of this scene?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000003869.jpg
3869_evjvqa
[{"from": "human", "value": "how many staffs are standing in the store?\n<image>"}, {"from": "gpt", "value": "there is one staff standing in the store"}]
00000003871.jpg
3871_evjvqa
[{"from": "human", "value": "what color is the house in the picture?\n<image>"}, {"from": "gpt", "value": "the houses in the picture are yellow"}]
00000003871.jpg
3871_evjvqa
[{"from": "human", "value": "how mnay motorbikes are their parking next to the picture?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes parking next to th e picture"}]
00000003871.jpg
3871_evjvqa
[{"from": "human", "value": "how many people are walking on the street in the picture?\n<image>"}, {"from": "gpt", "value": "there are two people walking on the street in the picture"}]
00000003880.jpg
3880_evjvqa
[{"from": "human", "value": "what is this region?\n<image>"}, {"from": "gpt", "value": "this region is a town next to the river"}]
00000003880.jpg
3880_evjvqa
[{"from": "human", "value": "what is the time in day of this picture?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000003880.jpg
3880_evjvqa
[{"from": "human", "value": "what colors are the lanterns in those avenue?\n<image>"}, {"from": "gpt", "value": "the lanterns are yellow"}]
00000003884.jpg
3884_evjvqa
[{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "this area is a market"}]
00000003884.jpg
3884_evjvqa
[{"from": "human", "value": "what is the time in a day is this place?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000003884.jpg
3884_evjvqa
[{"from": "human", "value": "how was the customer conditions of the market?\n<image>"}, {"from": "gpt", "value": "the market is quite quiet"}]