image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268) |
---|---|---|
00000003575.jpg | 3575_evjvqa | [{"from": "human", "value": "where is the girl in the pink shirt?\n<image>"}, {"from": "gpt", "value": "the girl in the pink shirt is sitting on the left rail"}] |
00000003575.jpg | 3575_evjvqa | [{"from": "human", "value": "which side rail is the girl in the pink shirt sitting on?\n<image>"}, {"from": "gpt", "value": "she is sitting on the left rail"}] |
00000003578.jpg | 3578_evjvqa | [{"from": "human", "value": "who is standing opposite the two girls in red shirts?\n<image>"}, {"from": "gpt", "value": "standing opposite the two girls in red shirts is a woman in a white floral shirt"}] |
00000003578.jpg | 3578_evjvqa | [{"from": "human", "value": "how many girls in all wear red shirts at this counter?\n<image>"}, {"from": "gpt", "value": "three girls wear red shirts at this counter"}] |
00000003578.jpg | 3578_evjvqa | [{"from": "human", "value": "where is the young man in the cyan shirt standing?\n<image>"}, {"from": "gpt", "value": "the young man in the cyan shirt is standing next to the one in the yellow shirt"}] |
00000003579.jpg | 3579_evjvqa | [{"from": "human", "value": "what objects do people hang on electric wires across the street?\n<image>"}, {"from": "gpt", "value": "people hang light bulbs on electric wires across the street"}] |
00000003579.jpg | 3579_evjvqa | [{"from": "human", "value": "what is hanging on the strung wires?\n<image>"}, {"from": "gpt", "value": "the light bulbs"}] |
00000003579.jpg | 3579_evjvqa | [{"from": "human", "value": "how many street lights are there at this street corner?\n<image>"}, {"from": "gpt", "value": "there is one street light at this street corner"}] |
00000003580.jpg | 3580_evjvqa | [{"from": "human", "value": "what does this store specialize in?\n<image>"}, {"from": "gpt", "value": "this store specializes in selling clothes"}] |
00000003580.jpg | 3580_evjvqa | [{"from": "human", "value": "what is the blonde woman shopping for at this store?\n<image>"}, {"from": "gpt", "value": "the blonde woman is shopping for clothes at this store"}] |
00000003580.jpg | 3580_evjvqa | [{"from": "human", "value": "what is the black haired woman doing?\n<image>"}, {"from": "gpt", "value": "she is choosing clothes"}] |
00000003581.jpg | 3581_evjvqa | [{"from": "human", "value": "what kinds of vehicles are parked in front of this store?\n<image>"}, {"from": "gpt", "value": "cars and motorbikes"}] |
00000003582.jpg | 3582_evjvqa | [{"from": "human", "value": "who is the person standing inside the house?\n<image>"}, {"from": "gpt", "value": "the person standing inside the house is a woman in a white shirt"}] |
00000003582.jpg | 3582_evjvqa | [{"from": "human", "value": "where is the woman in the gray shirt standing?\n<image>"}, {"from": "gpt", "value": "she is standing outside the store, next to the blue chair"}] |
00000003582.jpg | 3582_evjvqa | [{"from": "human", "value": "what kind of pants is the woman in the white shirt wearing?\n<image>"}, {"from": "gpt", "value": "the pants that the woman in the white shirt is wearing are jeans"}] |
00000003585.jpg | 3585_evjvqa | [{"from": "human", "value": "what do all these men tie at the collar of their shirts?\n<image>"}, {"from": "gpt", "value": "they tie ties"}] |
00000003585.jpg | 3585_evjvqa | [{"from": "human", "value": "which way is the tallest man looking?\n<image>"}, {"from": "gpt", "value": "he is looking at the hand of the man to the right"}] |
00000003585.jpg | 3585_evjvqa | [{"from": "human", "value": "what does a man reach out his hand for?\n<image>"}, {"from": "gpt", "value": "to point to each word on the sign"}] |
00000003586.jpg | 3586_evjvqa | [{"from": "human", "value": "what is this man holding in his hand?\n<image>"}, {"from": "gpt", "value": "he is holding a lychee box"}] |
00000003586.jpg | 3586_evjvqa | [{"from": "human", "value": "what items does this man wear on his face?\n<image>"}, {"from": "gpt", "value": "he wears a mask and glasses"}] |
00000003586.jpg | 3586_evjvqa | [{"from": "human", "value": "what is the man in the black shirt looking at?\n<image>"}, {"from": "gpt", "value": "the man in the black shirt is looking at the lychee box in his hands"}] |
00000003589.jpg | 3589_evjvqa | [{"from": "human", "value": "what is hanging on the wall?\n<image>"}, {"from": "gpt", "value": "masks"}] |
00000003589.jpg | 3589_evjvqa | [{"from": "human", "value": "what vehicles are moving in a line on the road?\n<image>"}, {"from": "gpt", "value": "the cyclos are moving in a line on the road"}] |
00000003589.jpg | 3589_evjvqa | [{"from": "human", "value": "how many cyclos are there on this road?\n<image>"}, {"from": "gpt", "value": "four cyclos are on this road"}] |
00000003700.jpg | 3700_evjvqa | [{"from": "human", "value": "what is the girl sitting at the stall wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a white ao dai"}] |
00000003700.jpg | 3700_evjvqa | [{"from": "human", "value": "what color are the bottle caps of the bottles exhibited at the stall?\n<image>"}, {"from": "gpt", "value": "the bottle caps are green"}] |
00000003700.jpg | 3700_evjvqa | [{"from": "human", "value": "what is the girl wearing on her neck?\n<image>"}, {"from": "gpt", "value": "she is wearing a yellow ribbon"}] |
00000003702.jpg | 3702_evjvqa | [{"from": "human", "value": "what color is the sofa in this house?\n<image>"}, {"from": "gpt", "value": "the sofa in this house is black"}] |
00000003702.jpg | 3702_evjvqa | [{"from": "human", "value": "what is the banister made from?\n<image>"}, {"from": "gpt", "value": "the banister is made from wood"}] |
00000003702.jpg | 3702_evjvqa | [{"from": "human", "value": "where is the blue bucket?\n<image>"}, {"from": "gpt", "value": "the blue bucket is in front of the glass door"}] |
00000003707.jpg | 3707_evjvqa | [{"from": "human", "value": "what vehicle is the woman wearing an orange jacket using?\n<image>"}, {"from": "gpt", "value": "she is moving on a white motorbike"}] |
00000003708.jpg | 3708_evjvqa | [{"from": "human", "value": "who is standing opposite to the man wearing red helmet?\n<image>"}, {"from": "gpt", "value": "the man wearing blue t-shirt"}] |
00000003708.jpg | 3708_evjvqa | [{"from": "human", "value": "what is the man standing opposite to the woman in red shirt wearing on his hand?\n<image>"}, {"from": "gpt", "value": "a watch"}] |
00000003708.jpg | 3708_evjvqa | [{"from": "human", "value": "what is the man wearing red helmet coming to this store for?\n<image>"}, {"from": "gpt", "value": "he is coming to buy gold"}] |
00000003709.jpg | 3709_evjvqa | [{"from": "human", "value": "what time is it in this area?\n<image>"}, {"from": "gpt", "value": "it is evening in this area"}] |
00000003709.jpg | 3709_evjvqa | [{"from": "human", "value": "what vehicles are there in front of the store?\n<image>"}, {"from": "gpt", "value": "there are motorbikes in front of the store"}] |
00000003712.jpg | 3712_evjvqa | [{"from": "human", "value": "what color are the stripes on the man's shirt?\n<image>"}, {"from": "gpt", "value": "white, grey and beige"}] |
00000003712.jpg | 3712_evjvqa | [{"from": "human", "value": "what color is the girl's shirt?\n<image>"}, {"from": "gpt", "value": "the girl wears a red shirt"}] |
00000003712.jpg | 3712_evjvqa | [{"from": "human", "value": "what is the man wearing striped shirt wearing on his hand?\n<image>"}, {"from": "gpt", "value": "he is wearing a watch"}] |
00000003713.jpg | 3713_evjvqa | [{"from": "human", "value": "what color are the decorations hanging above the road?\n<image>"}, {"from": "gpt", "value": "the decorations are yellow"}] |
00000003713.jpg | 3713_evjvqa | [{"from": "human", "value": "who is the man in black t-shirt walking with?\n<image>"}, {"from": "gpt", "value": "he is walking with the woman in red and brown t-shirt"}] |
00000003713.jpg | 3713_evjvqa | [{"from": "human", "value": "what is the man in black t-shirt wearing on his face?\n<image>"}, {"from": "gpt", "value": "he is wearing sunglasses"}] |
00000003714.jpg | 3714_evjvqa | [{"from": "human", "value": "how many people are wearing white shirts in this crowd?\n<image>"}, {"from": "gpt", "value": "there are two people wearing white shirts in this crowd"}] |
00000003715.jpg | 3715_evjvqa | [{"from": "human", "value": "what is the man wearing red shirt and white hat doing?\n<image>"}, {"from": "gpt", "value": "he is drawing on a blue umbrella"}] |
00000003715.jpg | 3715_evjvqa | [{"from": "human", "value": "what is the woman sitting on the left of the man wearing hat doing?\n<image>"}, {"from": "gpt", "value": "she is drawing on a pink umbrella"}] |
00000003715.jpg | 3715_evjvqa | [{"from": "human", "value": "what is the man wearing cap holding the paddle for?\n<image>"}, {"from": "gpt", "value": "to row the boat"}] |
00000003716.jpg | 3716_evjvqa | [{"from": "human", "value": "how many motorbikes are there in front of the store?\n<image>"}, {"from": "gpt", "value": "there are three motorbikes in front of the store"}] |
00000003716.jpg | 3716_evjvqa | [{"from": "human", "value": "where is the bicycle?\n<image>"}, {"from": "gpt", "value": "the bicycle is to the right of the white motorbike"}] |
00000003716.jpg | 3716_evjvqa | [{"from": "human", "value": "where is the white motorbike parked?\n<image>"}, {"from": "gpt", "value": "the white motorbike is to the left of the bicycle"}] |
00000003721.jpg | 3721_evjvqa | [{"from": "human", "value": "what is the woman wearing conical hat doing?\n<image>"}, {"from": "gpt", "value": "she is carrying the goods"}] |
00000003721.jpg | 3721_evjvqa | [{"from": "human", "value": "how many motorbikes are there on the street?\n<image>"}, {"from": "gpt", "value": "there is one motorbike on the street"}] |
00000003721.jpg | 3721_evjvqa | [{"from": "human", "value": "what kind of hat is the pedlar wearing?\n<image>"}, {"from": "gpt", "value": "conical hat"}] |
00000003723.jpg | 3723_evjvqa | [{"from": "human", "value": "what was used to contain the cabbages?\n<image>"}, {"from": "gpt", "value": "the green plastic baskets"}] |
00000003723.jpg | 3723_evjvqa | [{"from": "human", "value": "what is the woman using to store the goods?\n<image>"}, {"from": "gpt", "value": "the trolley"}] |
00000003727.jpg | 3727_evjvqa | [{"from": "human", "value": "what fruit is the man in blue shirt choosing?\n<image>"}, {"from": "gpt", "value": "he is choosing durian"}] |
00000003727.jpg | 3727_evjvqa | [{"from": "human", "value": "what kind of fruit is on the aisle where the woman in the black shirt is standing?\n<image>"}, {"from": "gpt", "value": "the woman is standing at the avocado aisle"}] |
00000003727.jpg | 3727_evjvqa | [{"from": "human", "value": "what is the man in blue shirt wearing on his hand?\n<image>"}, {"from": "gpt", "value": "he is wearing blue gloves"}] |
00000003728.jpg | 3728_evjvqa | [{"from": "human", "value": "what is the woman using to contain the milk she chose?\n<image>"}, {"from": "gpt", "value": "she is using the blue basket"}] |
00000003728.jpg | 3728_evjvqa | [{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "she is shopping"}] |
00000003728.jpg | 3728_evjvqa | [{"from": "human", "value": "what is the woman putting in her basket?\n<image>"}, {"from": "gpt", "value": "she is putting milk in her basket"}] |
00000003733.jpg | 3733_evjvqa | [{"from": "human", "value": "what is the sign used to prohibit?\n<image>"}, {"from": "gpt", "value": "cars"}] |
00000003733.jpg | 3733_evjvqa | [{"from": "human", "value": "what vehicles are not allowed to move on this road?\n<image>"}, {"from": "gpt", "value": "cars are not allowed to move on this road"}] |
00000003739.jpg | 3739_evjvqa | [{"from": "human", "value": "where is the entrance?\n<image>"}, {"from": "gpt", "value": "the right side road"}] |
00000003742.jpg | 3742_evjvqa | [{"from": "human", "value": "what is this view?\n<image>"}, {"from": "gpt", "value": "this is the view of the city from above"}] |
00000003742.jpg | 3742_evjvqa | [{"from": "human", "value": "how is the sky in this city?\n<image>"}, {"from": "gpt", "value": "the sky is blue and clear"}] |
00000003742.jpg | 3742_evjvqa | [{"from": "human", "value": "how is the weather in this city?\n<image>"}, {"from": "gpt", "value": "it is sunny"}] |
00000003752.jpg | 3752_evjvqa | [{"from": "human", "value": "what decorations are placed along the left walkway of the river?\n<image>"}, {"from": "gpt", "value": "pots of decorative plants"}] |
00000003752.jpg | 3752_evjvqa | [{"from": "human", "value": "how is the weather at this tourism destination?\n<image>"}, {"from": "gpt", "value": "it is sunny"}] |
00000003754.jpg | 3754_evjvqa | [{"from": "human", "value": "how many light bulbs are there?\n<image>"}, {"from": "gpt", "value": "there are two light bulbs"}] |
00000003754.jpg | 3754_evjvqa | [{"from": "human", "value": "what decorations are placed next to the light bulbs?\n<image>"}, {"from": "gpt", "value": "the santa claus decoration"}] |
00000003754.jpg | 3754_evjvqa | [{"from": "human", "value": "which light bulb is hung higher?\n<image>"}, {"from": "gpt", "value": "the bulb on the left is hung higher"}] |
00000003758.jpg | 3758_evjvqa | [{"from": "human", "value": "whose hand is the man in white shirt holding?\n<image>"}, {"from": "gpt", "value": "he is holding the hand of the girl in the white ao dai"}] |
00000003758.jpg | 3758_evjvqa | [{"from": "human", "value": "who is next to the man wearing white shirt?\n<image>"}, {"from": "gpt", "value": "the girl wearing white ao dai"}] |
00000003758.jpg | 3758_evjvqa | [{"from": "human", "value": "what is the outfit of the man who is holding the woman's hand?\n<image>"}, {"from": "gpt", "value": "he wears a white shirt and black pants"}] |
00000003763.jpg | 3763_evjvqa | [{"from": "human", "value": "what vehicle is flashing its red light?\n<image>"}, {"from": "gpt", "value": "the ambulance is flashing its red light"}] |
00000003763.jpg | 3763_evjvqa | [{"from": "human", "value": "how many motorbikes are there on the sidewalk?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes parked on the sidewalk"}] |
00000003763.jpg | 3763_evjvqa | [{"from": "human", "value": "what vehicles are moving on the road?\n<image>"}, {"from": "gpt", "value": "motorbikes and an ambulance are moving on the road"}] |
00000003764.jpg | 3764_evjvqa | [{"from": "human", "value": "what color is the sneakers of the girl wearing brown clothes?\n<image>"}, {"from": "gpt", "value": "she is wearing white sneakers"}] |
00000003764.jpg | 3764_evjvqa | [{"from": "human", "value": "what color is the pants of the girl wearing white sneakers?\n<image>"}, {"from": "gpt", "value": "she is wearing brown pants"}] |
00000003768.jpg | 3768_evjvqa | [{"from": "human", "value": "what are placed on the table behind the sitting woman?\n<image>"}, {"from": "gpt", "value": "the towels are placed on the table behind the sitting woman"}] |
00000003768.jpg | 3768_evjvqa | [{"from": "human", "value": "where is the blue plastic chair?\n<image>"}, {"from": "gpt", "value": "the blue plastic chair is under the table"}] |
00000003768.jpg | 3768_evjvqa | [{"from": "human", "value": "where is the bag of sweet potatoes?\n<image>"}, {"from": "gpt", "value": "it is on the ground, on the left side of the blue plastic chair"}] |
00000003769.jpg | 3769_evjvqa | [{"from": "human", "value": "what is the woman sitting doing?\n<image>"}, {"from": "gpt", "value": "she is rowing the boat"}] |
00000003769.jpg | 3769_evjvqa | [{"from": "human", "value": "what is the man standing doing?\n<image>"}, {"from": "gpt", "value": "he is catching the fish"}] |
00000003769.jpg | 3769_evjvqa | [{"from": "human", "value": "where is the baby on the boat?\n<image>"}, {"from": "gpt", "value": "the baby is sitting in front of the woman"}] |
00000003771.jpg | 3771_evjvqa | [{"from": "human", "value": "what is the time in this area?\n<image>"}, {"from": "gpt", "value": "it is evening in this area"}] |
00000003772.jpg | 3772_evjvqa | [{"from": "human", "value": "what is the woman wearing pink jacket doing?\n<image>"}, {"from": "gpt", "value": "she is choosing the dragonfruits"}] |
00000003772.jpg | 3772_evjvqa | [{"from": "human", "value": "what is the woman wearing black jacket doing?\n<image>"}, {"from": "gpt", "value": "she is choosing dragonfruits"}] |
00000003772.jpg | 3772_evjvqa | [{"from": "human", "value": "what is on the aisle behind the woman wearing black jacket?\n<image>"}, {"from": "gpt", "value": "it is the watermelon aisle behind the woman wearing black jacket"}] |
00000003773.jpg | 3773_evjvqa | [{"from": "human", "value": "what are those people gathering for?\n<image>"}, {"from": "gpt", "value": "taking a picture"}] |
00000003773.jpg | 3773_evjvqa | [{"from": "human", "value": "what is the time of the day in this area?\n<image>"}, {"from": "gpt", "value": "it is noon"}] |
00000003777.jpg | 3777_evjvqa | [{"from": "human", "value": "where does the motorbike stop?\n<image>"}, {"from": "gpt", "value": "the motorbike stops by the person selling goods on the sidewalk"}] |
00000003779.jpg | 3779_evjvqa | [{"from": "human", "value": "what does the woman carrying diapers wear on her face?\n<image>"}, {"from": "gpt", "value": "she is wearing a medical mask and a face shield"}] |
00000003779.jpg | 3779_evjvqa | [{"from": "human", "value": "what is the woman wearing the face shield carrying in her arms?\n<image>"}, {"from": "gpt", "value": "she is carrying a bag of diapers"}] |
00000003779.jpg | 3779_evjvqa | [{"from": "human", "value": "who is standing next to the woman wearing face shield?\n<image>"}, {"from": "gpt", "value": "the woman wearing white t-shirt"}] |
00000003782.jpg | 3782_evjvqa | [{"from": "human", "value": "what is on the aisle which is next to the woman wearing white mask?\n<image>"}, {"from": "gpt", "value": "she is standing next to the cantaloupe"}] |
00000003782.jpg | 3782_evjvqa | [{"from": "human", "value": "what is the girl wearing mask pushing?\n<image>"}, {"from": "gpt", "value": "she is pushing the trolley"}] |
00000003782.jpg | 3782_evjvqa | [{"from": "human", "value": "what is the woman wearing mask doing?\n<image>"}, {"from": "gpt", "value": "she is going shopping"}] |
00000003783.jpg | 3783_evjvqa | [{"from": "human", "value": "what is the woman wearing red t-shirt doing?\n<image>"}, {"from": "gpt", "value": "she is putting the carrots into the plastic bags"}] |
00000003783.jpg | 3783_evjvqa | [{"from": "human", "value": "what is the woman putting into the plastic bags?\n<image>"}, {"from": "gpt", "value": "the woman is putting carrots into the plastic bags"}] |
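Each row's `conversations` cell is a JSON-encoded list of `{"from", "value"}` turns, where `from` is `"human"` (the question) or `"gpt"` (the answer) and the `<image>` token marks where the image is injected into the prompt. Below is a minimal sketch of parsing one such cell with the standard `json` module; the row literal is copied from the first table row, and no particular dataset loader is assumed.

```python
import json

# "conversations" cell from the first row above: a JSON list of turns.
row_conversations = (
    '[{"from": "human", "value": "where is the girl in the pink shirt?\\n<image>"}, '
    '{"from": "gpt", "value": "the girl in the pink shirt is sitting on the left rail"}]'
)

turns = json.loads(row_conversations)
for turn in turns:
    # Strip the <image> placeholder so only the plain text remains.
    text = turn["value"].replace("<image>", "").strip()
    print(f'{turn["from"]}: {text}')
```

Running this prints the question and answer as two labeled lines, which is typically all that is needed to pair each sample with its image file (the `image` column) for training or evaluation.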