| image (string · 15 chars) | id (string · 8–12 chars) | conversations (string · 79–268 chars) |
|---|---|---|
| 00000003064.jpg | 3064_evjvqa | `[{"from": "human", "value": "where is the girl standing?\n<image>"}, {"from": "gpt", "value": "standing on the street corner"}]` |
| 00000003064.jpg | 3064_evjvqa | `[{"from": "human", "value": "what is the little girl doing?\n<image>"}, {"from": "gpt", "value": "standing and posing for the man to take a photo"}]` |
| 00000003064.jpg | 3064_evjvqa | `[{"from": "human", "value": "where is the man sitting taking the photo?\n<image>"}, {"from": "gpt", "value": "in the middle of the street"}]` |
| 00000003065.jpg | 3065_evjvqa | `[{"from": "human", "value": "what are the men standing in line to purchase?\n<image>"}, {"from": "gpt", "value": "buy cooking oil"}]` |
| 00000003065.jpg | 3065_evjvqa | `[{"from": "human", "value": "what is the woman in blue doing?\n<image>"}, {"from": "gpt", "value": "preparing cooking oil for customers"}]` |
| 00000003065.jpg | 3065_evjvqa | `[{"from": "human", "value": "what did the man in the black shirt buy just now?\n<image>"}, {"from": "gpt", "value": "just bought a bottle of cooking oil"}]` |
| 00000003065.jpg | 3065_evjvqa | `[{"from": "human", "value": "how many men are there standing in queue to buy cooking oil?\n<image>"}, {"from": "gpt", "value": "there are two men queuing to buy cooking oil"}]` |
| 00000003066.jpg | 3066_evjvqa | `[{"from": "human", "value": "how many cars are moving on the roadway?\n<image>"}, {"from": "gpt", "value": "one car"}]` |
| 00000003067.jpg | 3067_evjvqa | `[{"from": "human", "value": "where is this stream of people going?\n<image>"}, {"from": "gpt", "value": "go up to a historic site"}]` |
| 00000003069.jpg | 3069_evjvqa | `[{"from": "human", "value": "where are these two girls standing?\n<image>"}, {"from": "gpt", "value": "near a santa claus statue"}]` |
| 00000003069.jpg | 3069_evjvqa | `[{"from": "human", "value": "who is the girl in the white t-shirt hanging out with?\n<image>"}, {"from": "gpt", "value": "with a female friend wearing a red striped shirt"}]` |
| 00000003069.jpg | 3069_evjvqa | `[{"from": "human", "value": "what is the girl in the white shirt doing?\n<image>"}, {"from": "gpt", "value": "using the phone"}]` |
| 00000003070.jpg | 3070_evjvqa | `[{"from": "human", "value": "what store is this?\n<image>"}, {"from": "gpt", "value": "fashion shops"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "sitting on a boat"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "where is this group of people going?\n<image>"}, {"from": "gpt", "value": "traveling on the river"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "what is the man in red doing?\n<image>"}, {"from": "gpt", "value": "rowing"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "how many people are on the boat?\n<image>"}, {"from": "gpt", "value": "there are four people on the boat"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "how many people are sitting on the boat?\n<image>"}, {"from": "gpt", "value": "there are three people sitting on the boat"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "what is the hat man sitting on the boat doing?\n<image>"}, {"from": "gpt", "value": "holding the phone to take a picture"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "with whom does the woman in the blue dress travel?\n<image>"}, {"from": "gpt", "value": "with two men in the back"}]` |
| 00000003071.jpg | 3071_evjvqa | `[{"from": "human", "value": "what color is the boatman wearing?\n<image>"}, {"from": "gpt", "value": "red shirt"}]` |
| 00000003074.jpg | 3074_evjvqa | `[{"from": "human", "value": "where is this girl standing?\n<image>"}, {"from": "gpt", "value": "on the side of a long wall"}]` |
| 00000003076.jpg | 3076_evjvqa | `[{"from": "human", "value": "what items does this shelf display?\n<image>"}, {"from": "gpt", "value": "display kinds of snacks"}]` |
| 00000003077.jpg | 3077_evjvqa | `[{"from": "human", "value": "what do people build on the inner side of the road ?\n<image>"}, {"from": "gpt", "value": "the booths"}]` |
| 00000003077.jpg | 3077_evjvqa | `[{"from": "human", "value": "where are these people walking?\n<image>"}, {"from": "gpt", "value": "at a fairground"}]` |
| 00000003077.jpg | 3077_evjvqa | `[{"from": "human", "value": "what is the woman in red doing?\n<image>"}, {"from": "gpt", "value": "standing and watching at a booth"}]` |
| 00000003078.jpg | 3078_evjvqa | `[{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "around a well"}]` |
| 00000003078.jpg | 3078_evjvqa | `[{"from": "human", "value": "what is this group of people doing?\n<image>"}, {"from": "gpt", "value": "gathered around a well"}]` |
| 00000003078.jpg | 3078_evjvqa | `[{"from": "human", "value": "where is this group of people traveling?\n<image>"}, {"from": "gpt", "value": "at an eco-tourism area"}]` |
| 00000003083.jpg | 3083_evjvqa | `[{"from": "human", "value": "who is the woman holding the phone taking photos for?\n<image>"}, {"from": "gpt", "value": "for a girl"}]` |
| 00000003083.jpg | 3083_evjvqa | `[{"from": "human", "value": "what is the woman with the phone doing?\n<image>"}, {"from": "gpt", "value": "taking a picture of a little girl"}]` |
| 00000003083.jpg | 3083_evjvqa | `[{"from": "human", "value": "who is the girl going for a walk with?\n<image>"}, {"from": "gpt", "value": "with the woman holding a phone"}]` |
| 00000003083.jpg | 3083_evjvqa | `[{"from": "human", "value": "where is the little girl standing?\n<image>"}, {"from": "gpt", "value": "at a decoration stall"}]` |
| 00000003083.jpg | 3083_evjvqa | `[{"from": "human", "value": "what is the girl holding in her hand?\n<image>"}, {"from": "gpt", "value": "the girl is holding a dummy"}]` |
| 00000003084.jpg | 3084_evjvqa | `[{"from": "human", "value": "where is the man on the red motorbike about to pull over?\n<image>"}, {"from": "gpt", "value": "the curb"}]` |
| 00000003084.jpg | 3084_evjvqa | `[{"from": "human", "value": "where is the woman in red standing?\n<image>"}, {"from": "gpt", "value": "near a shop selling christmas ornaments"}]` |
| 00000003084.jpg | 3084_evjvqa | `[{"from": "human", "value": "what kinds of goods does the corner-shop sell?\n<image>"}, {"from": "gpt", "value": "christmas ornaments"}]` |
| 00000003085.jpg | 3085_evjvqa | `[{"from": "human", "value": "what is this area?\n<image>"}, {"from": "gpt", "value": "a market"}]` |
| 00000003086.jpg | 3086_evjvqa | `[{"from": "human", "value": "how many people are on the ladder?\n<image>"}, {"from": "gpt", "value": "a woman and a kid"}]` |
| 00000003086.jpg | 3086_evjvqa | `[{"from": "human", "value": "whom is the woman walking down the ladder with?\n<image>"}, {"from": "gpt", "value": "with a child"}]` |
| 00000003086.jpg | 3086_evjvqa | `[{"from": "human", "value": "whom is the kid walking down the ladder with?\n<image>"}, {"from": "gpt", "value": "with a woman"}]` |
| 00000003086.jpg | 3086_evjvqa | `[{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "walking the child down the ladder"}]` |
| 00000003086.jpg | 3086_evjvqa | `[{"from": "human", "value": "what is the boy doing?\n<image>"}, {"from": "gpt", "value": "walking down the stairs with the woman"}]` |
| 00000003086.jpg | 3086_evjvqa | `[{"from": "human", "value": "who is the boy going with?\n<image>"}, {"from": "gpt", "value": "with a woman"}]` |
| 00000003086.jpg | 3086_evjvqa | `[{"from": "human", "value": "where is the woman taking the boy?\n<image>"}, {"from": "gpt", "value": "going down the stairs"}]` |
| 00000003087.jpg | 3087_evjvqa | `[{"from": "human", "value": "how many men are riding motorbikes?\n<image>"}, {"from": "gpt", "value": "there are four men riding motorbikes"}]` |
| 00000003087.jpg | 3087_evjvqa | `[{"from": "human", "value": "what means of transport do these four people use to travel?\n<image>"}, {"from": "gpt", "value": "motorbikes"}]` |
| 00000003087.jpg | 3087_evjvqa | `[{"from": "human", "value": "what are these four people carrying on the backs of their motorbikes?\n<image>"}, {"from": "gpt", "value": "blue barrels"}]` |
| 00000003093.jpg | 3093_evjvqa | `[{"from": "human", "value": "how many lanterns is the house decorated with?\n<image>"}, {"from": "gpt", "value": "two lanterns"}]` |
| 00000003098.jpg | 3098_evjvqa | `[{"from": "human", "value": "how many beds are in this flat?\n<image>"}, {"from": "gpt", "value": "there is a bed"}]` |
| 00000001901.jpg | 1901_evjvqa | `[{"from": "human", "value": "what is hanging above the pedestrian's head?\n<image>"}, {"from": "gpt", "value": "the lanterns"}]` |
| 00000001901.jpg | 1901_evjvqa | `[{"from": "human", "value": "what is the girl standing next to the man in the hat holding the phone up for?\n<image>"}, {"from": "gpt", "value": "take pictures of the lanterns"}]` |
| 00000001901.jpg | 1901_evjvqa | `[{"from": "human", "value": "what is the girl standing next to the man in the hat holding?\n<image>"}, {"from": "gpt", "value": "smartphone"}]` |
| 00000001901.jpg | 1901_evjvqa | `[{"from": "human", "value": "what is the girl wearing the white shirt holding up?\n<image>"}, {"from": "gpt", "value": "selfie stick"}]` |
| 00000001901.jpg | 1901_evjvqa | `[{"from": "human", "value": "what is the girl holding a selfie stick and wearing a white shirt doing?\n<image>"}, {"from": "gpt", "value": "taking pictures"}]` |
| 00000001903.jpg | 1903_evjvqa | `[{"from": "human", "value": "who are the people wearing the uniforms?\n<image>"}, {"from": "gpt", "value": "the students"}]` |
| 00000001903.jpg | 1903_evjvqa | `[{"from": "human", "value": "what are the students doing?\n<image>"}, {"from": "gpt", "value": "listening to the speech"}]` |
| 00000001903.jpg | 1903_evjvqa | `[{"from": "human", "value": "where are the students in uniform sitting?\n<image>"}, {"from": "gpt", "value": "on the brick yard"}]` |
| 00000001903.jpg | 1903_evjvqa | `[{"from": "human", "value": "what is the speaker doing?\n<image>"}, {"from": "gpt", "value": "standing giving a speech for the students"}]` |
| 00000001904.jpg | 1904_evjvqa | `[{"from": "human", "value": "how many motorbikes are parked?\n<image>"}, {"from": "gpt", "value": "two motorbikes"}]` |
| 00000001909.jpg | 1909_evjvqa | `[{"from": "human", "value": "what products are displayed on the shelves?\n<image>"}, {"from": "gpt", "value": "types of mobile phones"}]` |
| 00000001909.jpg | 1909_evjvqa | `[{"from": "human", "value": "how many people are standing beside the shelves in the store?\n<image>"}, {"from": "gpt", "value": "no one"}]` |
| 00000001910.jpg | 1910_evjvqa | `[{"from": "human", "value": "what items does this shop sell?\n<image>"}, {"from": "gpt", "value": "clothing"}]` |
| 00000001910.jpg | 1910_evjvqa | `[{"from": "human", "value": "what is the woman in the black shirt and yellow pants holding in her hand?\n<image>"}, {"from": "gpt", "value": "yellow bag"}]` |
| 00000001910.jpg | 1910_evjvqa | `[{"from": "human", "value": "how is the woman standing behind the person holding the yellow bag wearing the mask?\n<image>"}, {"from": "gpt", "value": "take off the mask below the chin"}]` |
| 00000001912.jpg | 1912_evjvqa | `[{"from": "human", "value": "what is the man wearing the white shirt leading into the market?\n<image>"}, {"from": "gpt", "value": "bicycle"}]` |
| 00000001912.jpg | 1912_evjvqa | `[{"from": "human", "value": "what does the woman wearing the green shirt sell?\n<image>"}, {"from": "gpt", "value": "vegetable"}]` |
| 00000001912.jpg | 1912_evjvqa | `[{"from": "human", "value": "what is the woman wearing yellow and standing on the motorcycle wearing on her head?\n<image>"}, {"from": "gpt", "value": "does not wear anything"}]` |
| 00000001922.jpg | 1922_evjvqa | `[{"from": "human", "value": "what pattern is the shirt the girl wearing?\n<image>"}, {"from": "gpt", "value": "red polka dot white shirt"}]` |
| 00000001922.jpg | 1922_evjvqa | `[{"from": "human", "value": "what is the girl wearing on her head?\n<image>"}, {"from": "gpt", "value": "black short bucket hat"}]` |
| 00000001922.jpg | 1922_evjvqa | `[{"from": "human", "value": "what scene is the girl taking the picture at?\n<image>"}, {"from": "gpt", "value": "a sea bay"}]` |
| 00000001922.jpg | 1922_evjvqa | `[{"from": "human", "value": "what is placed next to the girl?\n<image>"}, {"from": "gpt", "value": "white bag"}]` |
| 00000001923.jpg | 1923_evjvqa | `[{"from": "human", "value": "what does spring water look like?\n<image>"}, {"from": "gpt", "value": "very clear"}]` |
| 00000001923.jpg | 1923_evjvqa | `[{"from": "human", "value": "how many boats are moving on the stream?\n<image>"}, {"from": "gpt", "value": "no boats"}]` |
| 00000001927.jpg | 1927_evjvqa | `[{"from": "human", "value": "how many people are on the dirt road?\n<image>"}, {"from": "gpt", "value": "one person"}]` |
| 00000001927.jpg | 1927_evjvqa | `[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "riding a bicycle"}]` |
| 00000001927.jpg | 1927_evjvqa | `[{"from": "human", "value": "what are on both sides of the road?\n<image>"}, {"from": "gpt", "value": "fences and trees"}]` |
| 00000001927.jpg | 1927_evjvqa | `[{"from": "human", "value": "what is the person riding a bicycle wearing on their head?\n<image>"}, {"from": "gpt", "value": "a blue bucket hat"}]` |
| 00000001928.jpg | 1928_evjvqa | `[{"from": "human", "value": "how many motorbikes are parked on the sidewalk?\n<image>"}, {"from": "gpt", "value": "six motorbikes"}]` |
| 00000001928.jpg | 1928_evjvqa | `[{"from": "human", "value": "what is the woman sitting on the brown chair on the sidewalk doing?\n<image>"}, {"from": "gpt", "value": "looking after the store"}]` |
| 00000001931.jpg | 1931_evjvqa | `[{"from": "human", "value": "what is the order of the stores from left to right?\n<image>"}, {"from": "gpt", "value": "grocery store, pharmacy and fresh beef stall"}]` |
| 00000001931.jpg | 1931_evjvqa | `[{"from": "human", "value": "what is behind the bicycle parked by the roadside?\n<image>"}, {"from": "gpt", "value": "a white and black cat"}]` |
| 00000001931.jpg | 1931_evjvqa | `[{"from": "human", "value": "what is a woman sitting in front of the grocery store on the left selling?\n<image>"}, {"from": "gpt", "value": "fruit"}]` |
| 00000001931.jpg | 1931_evjvqa | `[{"from": "human", "value": "how many vehicles are on moving the road?\n<image>"}, {"from": "gpt", "value": "no vehicle"}]` |
| 00000001939.jpg | 1939_evjvqa | `[{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "park"}]` |
| 00000001939.jpg | 1939_evjvqa | `[{"from": "human", "value": "how many people are sitting in the park?\n<image>"}, {"from": "gpt", "value": "eight people"}]` |
| 00000001943.jpg | 1943_evjvqa | `[{"from": "human", "value": "how many people are standing and buying medicine?\n<image>"}, {"from": "gpt", "value": "one person"}]` |
| 00000001945.jpg | 1945_evjvqa | `[{"from": "human", "value": "what does the girl standing on the left with a white cap and talking with a microphone do?\n<image>"}, {"from": "gpt", "value": "tour guide"}]` |
| 00000001945.jpg | 1945_evjvqa | `[{"from": "human", "value": "what is the tour guide girl doing?\n<image>"}, {"from": "gpt", "value": "introducing the tourist attraction to tourists"}]` |
| 00000001945.jpg | 1945_evjvqa | `[{"from": "human", "value": "who is the rest except the girl standing on the left?\n<image>"}, {"from": "gpt", "value": "foreign tourists"}]` |
| 00000001946.jpg | 1946_evjvqa | `[{"from": "human", "value": "what are the boy in the light blue shirt and the girl in the white dress doing?\n<image>"}, {"from": "gpt", "value": "walking hand in hand"}]` |
| 00000001946.jpg | 1946_evjvqa | `[{"from": "human", "value": "what is the girl in the white dress holding in her hand?\n<image>"}, {"from": "gpt", "value": "bouquet"}]` |
| 00000001946.jpg | 1946_evjvqa | `[{"from": "human", "value": "what color shoes is the man in the dark blue shirt wearing?\n<image>"}, {"from": "gpt", "value": "yellow"}]` |
| 00000001947.jpg | 1947_evjvqa | `[{"from": "human", "value": "what does the man wear on his hand?\n<image>"}, {"from": "gpt", "value": "watch and ring"}]` |
| 00000001947.jpg | 1947_evjvqa | `[{"from": "human", "value": "what is the man holding in his hand?\n<image>"}, {"from": "gpt", "value": "binoculars"}]` |
| 00000001947.jpg | 1947_evjvqa | `[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "using binoculars to observe ships at sea"}]` |
| 00000001958.jpg | 1958_evjvqa | `[{"from": "human", "value": "what color is the notice board?\n<image>"}, {"from": "gpt", "value": "yellow text on red background"}]` |
| 00000001958.jpg | 1958_evjvqa | `[{"from": "human", "value": "what does the man standing on the left next to the notice board do?\n<image>"}, {"from": "gpt", "value": "police"}]` |
| 00000001959.jpg | 1959_evjvqa | `[{"from": "human", "value": "what does the store sell?\n<image>"}, {"from": "gpt", "value": "christmas decorations"}]` |
| 00000001959.jpg | 1959_evjvqa | `[{"from": "human", "value": "how many floors does the house have except the first floor?\n<image>"}, {"from": "gpt", "value": "one floor"}]` |
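Each `conversations` cell is a JSON-encoded list of chat turns: a `human` question whose `<image>` token marks where the image is spliced into the prompt, followed by a `gpt` answer, and a single image/id pair can appear in several rows (one row per question–answer pair). Below is a minimal parsing sketch in Python; the field names (`from`, `value`) and the `<image>` token come from the rows above, while the row tuple and variable names are purely illustrative.

```python
import json

# One row from the table above: (image, id, conversations-as-JSON-string).
row = (
    "00000003064.jpg",
    "3064_evjvqa",
    '[{"from": "human", "value": "where is the girl standing?\\n<image>"},'
    ' {"from": "gpt", "value": "standing on the street corner"}]',
)

image_file, sample_id, conversations_json = row
turns = json.loads(conversations_json)  # list of {"from": ..., "value": ...} dicts

for turn in turns:
    if turn["from"] == "human":
        # Strip the <image> placeholder that marks where the image is injected.
        question = turn["value"].replace("<image>", "").strip()
        print(f"Q [{sample_id} / {image_file}]: {question}")
    elif turn["from"] == "gpt":
        print(f"A: {turn['value']}")
```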