| image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268) |
|---|---|---|
| 00000004785.jpg | 4785_evjvqa | [{"from": "human", "value": "what is the stall behind the woman sitting?\n<image>"}, {"from": "gpt", "value": "in the back is a clothing store"}] |
| 00000004785.jpg | 4785_evjvqa | [{"from": "human", "value": "which area is this?\n<image>"}, {"from": "gpt", "value": "this is a shopping mall"}] |
| 00000004786.jpg | 4786_evjvqa | [{"from": "human", "value": "where is the girl shopping?\n<image>"}, {"from": "gpt", "value": "shopping in a supermarket"}] |
| 00000004786.jpg | 4786_evjvqa | [{"from": "human", "value": "what color is the girl wearing?\n<image>"}, {"from": "gpt", "value": "wearing a blue shirt"}] |
| 00000004786.jpg | 4786_evjvqa | [{"from": "human", "value": "what products are displayed on the girl's left?\n<image>"}, {"from": "gpt", "value": "on the left is a stall selling all kinds of milk"}] |
| 00000004787.jpg | 4787_evjvqa | [{"from": "human", "value": "how many people are shopping in the supermarket?\n<image>"}, {"from": "gpt", "value": "there are three people shopping in the supermarket"}] |
| 00000004787.jpg | 4787_evjvqa | [{"from": "human", "value": "what area are the three girls standing in?\n<image>"}, {"from": "gpt", "value": "standing in the milk area"}] |
| 00000004787.jpg | 4787_evjvqa | [{"from": "human", "value": "what color is the t-shirt of the girl holding the basket?\n<image>"}, {"from": "gpt", "value": "the girl is in the red t-shirt"}] |
| 00000004788.jpg | 4788_evjvqa | [{"from": "human", "value": "how many flags are there in the center of the gate?\n<image>"}, {"from": "gpt", "value": "there are two flags in the center of the gate"}] |
| 00000004788.jpg | 4788_evjvqa | [{"from": "human", "value": "how many signs are in front of the gate?\n<image>"}, {"from": "gpt", "value": "there are two signs in front of the gate"}] |
| 00000004788.jpg | 4788_evjvqa | [{"from": "human", "value": "how many people are heading to the citadel gate?\n<image>"}, {"from": "gpt", "value": "there is a person walking towards the gate"}] |
| 00000004789.jpg | 4789_evjvqa | [{"from": "human", "value": "what is the weather like in this area?\n<image>"}, {"from": "gpt", "value": "it is raining heavily"}] |
| 00000004789.jpg | 4789_evjvqa | [{"from": "human", "value": "what is the condition of the street?\n<image>"}, {"from": "gpt", "value": "the street is seriously flooded"}] |
| 00000004789.jpg | 4789_evjvqa | [{"from": "human", "value": "what color clothes do traffic controllers wear?\n<image>"}, {"from": "gpt", "value": "wearing yellow clothes"}] |
| 00000004790.jpg | 4790_evjvqa | [{"from": "human", "value": "how many trucks are there in the area?\n<image>"}, {"from": "gpt", "value": "there are two trucks in the area"}] |
| 00000004790.jpg | 4790_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a spontaneous market"}] |
| 00000004790.jpg | 4790_evjvqa | [{"from": "human", "value": "what is above the market?\n<image>"}, {"from": "gpt", "value": "above the market is a bridge"}] |
| 00000004791.jpg | 4791_evjvqa | [{"from": "human", "value": "which area is this?\n<image>"}, {"from": "gpt", "value": "this is a historical site"}] |
| 00000004791.jpg | 4791_evjvqa | [{"from": "human", "value": "how many entrances does the gate have?\n<image>"}, {"from": "gpt", "value": "the gate has three entrances"}] |
| 00000004791.jpg | 4791_evjvqa | [{"from": "human", "value": "how many dragon statues are there on the stairs?\n<image>"}, {"from": "gpt", "value": "there are two dragon statues on the stairs"}] |
| 00000004796.jpg | 4796_evjvqa | [{"from": "human", "value": "what type of ship is it?\n<image>"}, {"from": "gpt", "value": "the ship is of the aircraft carrier type"}] |
| 00000004796.jpg | 4796_evjvqa | [{"from": "human", "value": "what type of aircraft is the ship used as a landing spot for?\n<image>"}, {"from": "gpt", "value": "the ship is used as a landing spot for helicopters"}] |
| 00000004799.jpg | 4799_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "using a personal computer"}] |
| 00000004799.jpg | 4799_evjvqa | [{"from": "human", "value": "what color is the man's shirt?\n<image>"}, {"from": "gpt", "value": "the shirt is black"}] |
| 00000004799.jpg | 4799_evjvqa | [{"from": "human", "value": "how many certificates are there behind the man?\n<image>"}, {"from": "gpt", "value": "there are two certificates behind the man"}] |
| 00000008601.jpg | 8601_evjvqa | [{"from": "human", "value": "where is this girl standing?\n<image>"}, {"from": "gpt", "value": "next to a shrine"}] |
| 00000008603.jpg | 8603_evjvqa | [{"from": "human", "value": "what is the first row of people sitting holding in their hands?\n<image>"}, {"from": "gpt", "value": "the umbrellas"}] |
| 00000008604.jpg | 8604_evjvqa | [{"from": "human", "value": "what are these two people doing?\n<image>"}, {"from": "gpt", "value": "giving gifts to each other"}] |
| 00000008605.jpg | 8605_evjvqa | [{"from": "human", "value": "what are these people doing?\n<image>"}, {"from": "gpt", "value": "waiting in line to scan code"}] |
| 00000008608.jpg | 8608_evjvqa | [{"from": "human", "value": "where are these people?\n<image>"}, {"from": "gpt", "value": "on a boat in a mangrove forest"}] |
| 00000008610.jpg | 8610_evjvqa | [{"from": "human", "value": "where is this cart placed?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}] |
| 00000008615.jpg | 8615_evjvqa | [{"from": "human", "value": "what is this man doing?\n<image>"}, {"from": "gpt", "value": "taking a photo with the angle of view from the back"}] |
| 00000008616.jpg | 8616_evjvqa | [{"from": "human", "value": "what is the content on the two glowing boards?\n<image>"}, {"from": "gpt", "value": "menu of dishes in a restaurant"}] |
| 00000008617.jpg | 8617_evjvqa | [{"from": "human", "value": "where is this girl sitting?\n<image>"}, {"from": "gpt", "value": "in a newspaper stall"}] |
| 00000008619.jpg | 8619_evjvqa | [{"from": "human", "value": "how many people are displaying goods on the street to sell?\n<image>"}, {"from": "gpt", "value": "there are two women"}] |
| 00000008622.jpg | 8622_evjvqa | [{"from": "human", "value": "how do these two men travel?\n<image>"}, {"from": "gpt", "value": "by bicycle"}] |
| 00000008623.jpg | 8623_evjvqa | [{"from": "human", "value": "what does this stall sell?\n<image>"}, {"from": "gpt", "value": "fabrics"}] |
| 00000008624.jpg | 8624_evjvqa | [{"from": "human", "value": "what items do the stalls here sell?\n<image>"}, {"from": "gpt", "value": "these stalls sell toys"}] |
| 00000008628.jpg | 8628_evjvqa | [{"from": "human", "value": "where is the motorbike standing?\n<image>"}, {"from": "gpt", "value": "by a river"}] |
| 00000008629.jpg | 8629_evjvqa | [{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "exercising in the park"}] |
| 00000008630.jpg | 8630_evjvqa | [{"from": "human", "value": "where are these people visiting?\n<image>"}, {"from": "gpt", "value": "at a historical relic"}] |
| 00000008634.jpg | 8634_evjvqa | [{"from": "human", "value": "what are these two people doing?\n<image>"}, {"from": "gpt", "value": "acting in a play underwater"}] |
| 00000008637.jpg | 8637_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "kissing the girl"}] |
| 00000008638.jpg | 8638_evjvqa | [{"from": "human", "value": "what are these baskets used for?\n<image>"}, {"from": "gpt", "value": "for containing the goods in the supermarket"}] |
| 00000008639.jpg | 8639_evjvqa | [{"from": "human", "value": "what bag is the woman wearing a blue face mask holding in her hand?\n<image>"}, {"from": "gpt", "value": "a fruit bag"}] |
| 00000008640.jpg | 8640_evjvqa | [{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "on a canoe"}] |
| 00000008643.jpg | 8643_evjvqa | [{"from": "human", "value": "what are these flower stands used for?\n<image>"}, {"from": "gpt", "value": "to offer remembrance about the person who was carved into the statue"}] |
| 00000008647.jpg | 8647_evjvqa | [{"from": "human", "value": "where do these men come from?\n<image>"}, {"from": "gpt", "value": "from under the manholes"}] |
| 00000008648.jpg | 8648_evjvqa | [{"from": "human", "value": "what is this cabinet used for?\n<image>"}, {"from": "gpt", "value": "containing fast food"}] |
| 00000008649.jpg | 8649_evjvqa | [{"from": "human", "value": "what is this car being used for?\n<image>"}, {"from": "gpt", "value": "for display"}] |
| 00000008651.jpg | 8651_evjvqa | [{"from": "human", "value": "what are the soldiers doing?\n<image>"}, {"from": "gpt", "value": "distributing food"}] |
| 00000008654.jpg | 8654_evjvqa | [{"from": "human", "value": "what do people build in this area?\n<image>"}, {"from": "gpt", "value": "the wind turbines"}] |
| 00000008656.jpg | 8656_evjvqa | [{"from": "human", "value": "what is the woman carrying on her head?\n<image>"}, {"from": "gpt", "value": "a basket of lotuses"}] |
| 00000008657.jpg | 8657_evjvqa | [{"from": "human", "value": "where is this man standing?\n<image>"}, {"from": "gpt", "value": "near the stalls selling fruits in the market"}] |
| 00000008674.jpg | 8674_evjvqa | [{"from": "human", "value": "what is the young man in red shirt doing?\n<image>"}, {"from": "gpt", "value": "charging the woman in black shirt"}] |
| 00000008675.jpg | 8675_evjvqa | [{"from": "human", "value": "what are these people doing?\n<image>"}, {"from": "gpt", "value": "looking at the paintings hanging in the gallery"}] |
| 00000008676.jpg | 8676_evjvqa | [{"from": "human", "value": "where is this girl walking?\n<image>"}, {"from": "gpt", "value": "the girl is walking past an ancient structure"}] |
| 00000008677.jpg | 8677_evjvqa | [{"from": "human", "value": "what are these people sitting to do?\n<image>"}, {"from": "gpt", "value": "to drink water at a pavement cafe"}] |
| 00000008680.jpg | 8680_evjvqa | [{"from": "human", "value": "what item is on display in the room?\n<image>"}, {"from": "gpt", "value": "a bronze drum"}] |
| 00000008681.jpg | 8681_evjvqa | [{"from": "human", "value": "what does this stall display for sale?\n<image>"}, {"from": "gpt", "value": "decorative items"}] |
| 00000008684.jpg | 8684_evjvqa | [{"from": "human", "value": "what area is the man in the white shirt pushing the cart into?\n<image>"}, {"from": "gpt", "value": "the clothing sale area"}] |
| 00000008687.jpg | 8687_evjvqa | [{"from": "human", "value": "what item is the cashier charging?\n<image>"}, {"from": "gpt", "value": "the durian"}] |
| 00000008689.jpg | 8689_evjvqa | [{"from": "human", "value": "where are these canoes parked?\n<image>"}, {"from": "gpt", "value": "in a lotus pond"}] |
| 00000008697.jpg | 8697_evjvqa | [{"from": "human", "value": "how many people are walking on the pavement?\n<image>"}, {"from": "gpt", "value": "there is one person walking on the pavement"}] |
| 00000009502.jpg | 9502_evjvqa | [{"from": "human", "value": "whom is the girl in white shirt going with?\n<image>"}, {"from": "gpt", "value": "the girl in white shirt is going with the girl in black shirt"}] |
| 00000009503.jpg | 9503_evjvqa | [{"from": "human", "value": "what color is the hat that the girl in the long-sleeved shirt riding a motorbike is wearing?\n<image>"}, {"from": "gpt", "value": "the girl in the long-sleeved shirt riding a motorbike is wearing a white hat"}] |
| 00000009504.jpg | 9504_evjvqa | [{"from": "human", "value": "what are these people holding?\n<image>"}, {"from": "gpt", "value": "these people are holding bags"}] |
| 00000009505.jpg | 9505_evjvqa | [{"from": "human", "value": "what color are the shirts that the people lining up are wearing?\n<image>"}, {"from": "gpt", "value": "the people lining up are wearing red shirts"}] |
| 00000009506.jpg | 9506_evjvqa | [{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "this woman is thinking about what to buy"}] |
| 00000009507.jpg | 9507_evjvqa | [{"from": "human", "value": "what color are the hats that these people are wearing?\n<image>"}, {"from": "gpt", "value": "these people are wearing white hats"}] |
| 00000009508.jpg | 9508_evjvqa | [{"from": "human", "value": "what color is the boat that this girl is riding?\n<image>"}, {"from": "gpt", "value": "this girl is riding the blue boat"}] |
| 00000009511.jpg | 9511_evjvqa | [{"from": "human", "value": "what color are the shirts that the waiters at this restaurant wear?\n<image>"}, {"from": "gpt", "value": "the waiters at this restaurant wear yellow shirts"}] |
| 00000009512.jpg | 9512_evjvqa | [{"from": "human", "value": "what color is the face mask that the man in black shirt wears?\n<image>"}, {"from": "gpt", "value": "the man in black shirt wears the white face mask"}] |
| 00000009513.jpg | 9513_evjvqa | [{"from": "human", "value": "what does this person want to buy?\n<image>"}, {"from": "gpt", "value": "this person wants to buy bananas"}] |
| 00000009514.jpg | 9514_evjvqa | [{"from": "human", "value": "what color is the inner shirt that the man raising his hand up is wearing?\n<image>"}, {"from": "gpt", "value": "the man raising his hand up is wearing the white inner shirt"}] |
| 00000009515.jpg | 9515_evjvqa | [{"from": "human", "value": "what color is the tie that the man is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing the blue tie"}] |
| 00000009516.jpg | 9516_evjvqa | [{"from": "human", "value": "what accident has just happened in this area?\n<image>"}, {"from": "gpt", "value": "there is a house in this area that caught fire"}] |
| 00000009517.jpg | 9517_evjvqa | [{"from": "human", "value": "what color is the dress that the woman shopping wears?\n<image>"}, {"from": "gpt", "value": "the woman shopping wears blue dress"}] |
| 00000009518.jpg | 9518_evjvqa | [{"from": "human", "value": "what color are the shirts that the employees of this company wear?\n<image>"}, {"from": "gpt", "value": "the employees of this company wear red shirts"}] |
| 00000009520.jpg | 9520_evjvqa | [{"from": "human", "value": "what is the woman wearing the pink hairpin doing?\n<image>"}, {"from": "gpt", "value": "the woman wearing the pink hairpin is shopping"}] |
| 00000009521.jpg | 9521_evjvqa | [{"from": "human", "value": "what color is the roof of this house?\n<image>"}, {"from": "gpt", "value": "the roof of this house is brown"}] |
| 00000009522.jpg | 9522_evjvqa | [{"from": "human", "value": "what is this area exhibiting?\n<image>"}, {"from": "gpt", "value": "this area is exhibiting the skeleton of an animal"}] |
| 00000009523.jpg | 9523_evjvqa | [{"from": "human", "value": "what is the main color of this store?\n<image>"}, {"from": "gpt", "value": "the main color of this store is red"}] |
| 00000009524.jpg | 9524_evjvqa | [{"from": "human", "value": "what does this woman want to buy?\n<image>"}, {"from": "gpt", "value": "this woman wants to buy fruits"}] |
| 00000009525.jpg | 9525_evjvqa | [{"from": "human", "value": "what color are the walls of the houses here?\n<image>"}, {"from": "gpt", "value": "the walls of the houses here are white"}] |
| 00000009527.jpg | 9527_evjvqa | [{"from": "human", "value": "what color is the shirt that the man is wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing a black shirt"}] |
| 00000009528.jpg | 9528_evjvqa | [{"from": "human", "value": "what is the main color of this market?\n<image>"}, {"from": "gpt", "value": "the main color of this market is yellow"}] |
| 00000009529.jpg | 9529_evjvqa | [{"from": "human", "value": "what are people buying?\n<image>"}, {"from": "gpt", "value": "people are buying flowers"}] |
| 00000009530.jpg | 9530_evjvqa | [{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "on a beach"}] |
| 00000009531.jpg | 9531_evjvqa | [{"from": "human", "value": "what are the ladies buying?\n<image>"}, {"from": "gpt", "value": "the ladies are buying clothes"}] |
| 00000009532.jpg | 9532_evjvqa | [{"from": "human", "value": "what are the women on the left-hand corner buying?\n<image>"}, {"from": "gpt", "value": "the women on the left-hand corner are buying flowers"}] |
| 00000009535.jpg | 9535_evjvqa | [{"from": "human", "value": "what color is the shirt that this person is wearing?\n<image>"}, {"from": "gpt", "value": "this person is wearing the white shirt"}] |
| 00000009536.jpg | 9536_evjvqa | [{"from": "human", "value": "how many storeys high is the tallest building in the area?\n<image>"}, {"from": "gpt", "value": "the tallest building in the area is eight storeys high"}] |
| 00000009540.jpg | 9540_evjvqa | [{"from": "human", "value": "where is this woman sitting?\n<image>"}, {"from": "gpt", "value": "this woman is sitting in front of a house"}] |
| 00000009541.jpg | 9541_evjvqa | [{"from": "human", "value": "what color are the chairs of this store?\n<image>"}, {"from": "gpt", "value": "the chairs of this store are white"}] |
| 00000009546.jpg | 9546_evjvqa | [{"from": "human", "value": "what color is the fence here?\n<image>"}, {"from": "gpt", "value": "the fence here is yellow"}] |
| 00000009547.jpg | 9547_evjvqa | [{"from": "human", "value": "how many police comrades are selecting vegetables?\n<image>"}, {"from": "gpt", "value": "there are two police comrades selecting vegetables"}] |
| 00000009548.jpg | 9548_evjvqa | [{"from": "human", "value": "what colro are the parasols?\n<image>"}, {"from": "gpt", "value": "the parasols are white"}] |
| 00000009549.jpg | 9549_evjvqa | [{"from": "human", "value": "how many women are selling goods?\n<image>"}, {"from": "gpt", "value": "there are three women selling goods"}] |
| 00000009550.jpg | 9550_evjvqa | [{"from": "human", "value": "what color is the shopping basket of this supermarket?\n<image>"}, {"from": "gpt", "value": "the shopping basket of this supermarket is red"}] |
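As the rows above suggest, each `conversations` cell is a JSON-encoded list of turns, where `"from"` alternates between `"human"` and `"gpt"` and the literal `<image>` token marks where the image is injected into the prompt. Below is a minimal sketch of decoding one row; the record literal is copied from the first row of the table, and treating the column as a JSON string (rather than an already-parsed list) is an assumption based on how the cells render:

```python
import json

# One record, copied verbatim from the first table row above.
# NOTE: storing `conversations` as a raw JSON string is an assumption.
record = {
    "image": "00000004785.jpg",
    "id": "4785_evjvqa",
    "conversations": '[{"from": "human", "value": "what is the stall behind the woman sitting?\\n<image>"}, {"from": "gpt", "value": "in the back is a clothing store"}]',
}

# Decode the JSON string into a list of {"from", "value"} turns.
turns = json.loads(record["conversations"])

for turn in turns:
    # "<image>" is a placeholder for the image input; strip it to get plain text.
    text = turn["value"].replace("<image>", "").strip()
    print(f'{turn["from"]}: {text}')
```

Running this prints the question/answer pair (`human: what is the stall behind the woman sitting?` / `gpt: in the back is a clothing store`), which is the structure every row in this split follows.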