Columns:
image: string, length 15
id: string, length 8 to 12
conversations: string, length 79 to 268
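
Each record below repeats this three-field pattern: an image filename, a record id, and a LLaVA-style conversations list serialized as JSON, with an <image> placeholder marking where the picture is injected into the human turn. Below is a minimal parsing sketch in Python; parse_record and its call are illustrative assumptions, not part of the dataset.

    import json

    # Minimal sketch: parse one flat record into a dict. The three arguments
    # mirror the schema above; parse_record itself is hypothetical.
    def parse_record(image, record_id, conversations_json):
        turns = json.loads(conversations_json)  # list of {"from": ..., "value": ...}
        return {"image": image, "id": record_id, "conversations": turns}

    row = parse_record(
        "00000005249.jpg",
        "5249_evjvqa",
        '[{"from": "human", "value": "how many trucks are there?\\n<image>"},'
        ' {"from": "gpt", "value": "there is one truck"}]',
    )
    # The <image> token marks where the image is injected into the prompt.
    question = row["conversations"][0]["value"].replace("<image>", "").strip()
    answer = row["conversations"][1]["value"]
    print(question, "->", answer)  # how many trucks are there? -> there is one truck
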
00000005249.jpg
5249_evjvqa
[{"from": "human", "value": "how many trucks are there?\n<image>"}, {"from": "gpt", "value": "there is one truck"}]
00000005249.jpg
5249_evjvqa
[{"from": "human", "value": "what color shirt are the uniforms of all the people being photographed?\n<image>"}, {"from": "gpt", "value": "gray and red"}]
00000005252.jpg
5252_evjvqa
[{"from": "human", "value": "how many motorbikes are parked in front of the cafe?\n<image>"}, {"from": "gpt", "value": "two motorbikes are parked in front of the cafe"}]
00000005254.jpg
5254_evjvqa
[{"from": "human", "value": "what is the main color of the room?\n<image>"}, {"from": "gpt", "value": "white"}]
00000005254.jpg
5254_evjvqa
[{"from": "human", "value": "how many blue pillows are there?\n<image>"}, {"from": "gpt", "value": "there are two blue pillows"}]
00000005254.jpg
5254_evjvqa
[{"from": "human", "value": "how many yellow pillows are there?\n<image>"}, {"from": "gpt", "value": "there are two yellow pillows"}]
00000005257.jpg
5257_evjvqa
[{"from": "human", "value": "what color is the table top?\n<image>"}, {"from": "gpt", "value": "the table top is black"}]
00000005257.jpg
5257_evjvqa
[{"from": "human", "value": "how many phones are there?\n<image>"}, {"from": "gpt", "value": "there is one phone"}]
00000005257.jpg
5257_evjvqa
[{"from": "human", "value": "how many glasses of water are on the table?\n<image>"}, {"from": "gpt", "value": "one glass of water"}]
00000005258.jpg
5258_evjvqa
[{"from": "human", "value": "what color is the lamp post?\n<image>"}, {"from": "gpt", "value": "the lamp post is painted black"}]
00000005258.jpg
5258_evjvqa
[{"from": "human", "value": "what is the main color of the building?\n<image>"}, {"from": "gpt", "value": "the main color of the building is white"}]
00000005258.jpg
5258_evjvqa
[{"from": "human", "value": "what color is the flag on the building?\n<image>"}, {"from": "gpt", "value": "the flag on the building is red"}]
00000005260.jpg
5260_evjvqa
[{"from": "human", "value": "how many motorbikes are parked in front of the cafe?\n<image>"}, {"from": "gpt", "value": "two motorbikes are parked in front of the cafe"}]
00000005260.jpg
5260_evjvqa
[{"from": "human", "value": "what color backpack is the girl in the white shirt wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing black backpack"}]
00000005266.jpg
5266_evjvqa
[{"from": "human", "value": "how is the weather outside?\n<image>"}, {"from": "gpt", "value": "it is raining outside"}]
00000005266.jpg
5266_evjvqa
[{"from": "human", "value": "how many people are using bicycles on the road?\n<image>"}, {"from": "gpt", "value": "there is one bicycle"}]
00000005266.jpg
5266_evjvqa
[{"from": "human", "value": "how many cars are moving on the road?\n<image>"}, {"from": "gpt", "value": "one car"}]
00000005268.jpg
5268_evjvqa
[{"from": "human", "value": "how many young men wear red shoes?\n<image>"}, {"from": "gpt", "value": "one young men wear red shoes"}]
00000005268.jpg
5268_evjvqa
[{"from": "human", "value": "what is the main color of the building?\n<image>"}, {"from": "gpt", "value": "the main color of the building is yellow?"}]
00000005268.jpg
5268_evjvqa
[{"from": "human", "value": "how many girls in black dresses are standing?\n<image>"}, {"from": "gpt", "value": "one girl in black dress is standing"}]
00000005272.jpg
5272_evjvqa
[{"from": "human", "value": "what is the standing man holding?\n<image>"}, {"from": "gpt", "value": "the standing man is holding a micro"}]
00000005272.jpg
5272_evjvqa
[{"from": "human", "value": "what color shirt is the standing man wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing white shirt"}]
00000005274.jpg
5274_evjvqa
[{"from": "human", "value": "what color shirt is the man standing in the cafe wearing?\n<image>"}, {"from": "gpt", "value": "red shirt"}]
00000005279.jpg
5279_evjvqa
[{"from": "human", "value": "what color helmet is the man on the motorcycle wearing?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000005279.jpg
5279_evjvqa
[{"from": "human", "value": "what color shirt is the girl sitting on the wine red motorbike wearing?\n<image>"}, {"from": "gpt", "value": "white shirt"}]
00000005279.jpg
5279_evjvqa
[{"from": "human", "value": "how many motorbikes are there?\n<image>"}, {"from": "gpt", "value": "there is one motorbike"}]
00000005280.jpg
5280_evjvqa
[{"from": "human", "value": "what color pants is the girl in the white shawl wearing?\n<image>"}, {"from": "gpt", "value": "red pants"}]
00000005280.jpg
5280_evjvqa
[{"from": "human", "value": "how many girls wear yellow shirts?\n<image>"}, {"from": "gpt", "value": "one girl wears yellow shirt"}]
00000005280.jpg
5280_evjvqa
[{"from": "human", "value": "how many men in wine red shirts are standing?\n<image>"}, {"from": "gpt", "value": "one man is in wine red shirt"}]
00000005284.jpg
5284_evjvqa
[{"from": "human", "value": "what color mask is the standing man wearing?\n<image>"}, {"from": "gpt", "value": "wearing a gray mask"}]
00000005284.jpg
5284_evjvqa
[{"from": "human", "value": "how many cars are parked on the roadside?\n<image>"}, {"from": "gpt", "value": "one car is parked on the roadside"}]
00000005284.jpg
5284_evjvqa
[{"from": "human", "value": "what color jacket is the standing man wearing?\n<image>"}, {"from": "gpt", "value": "gray jacket"}]
00000005286.jpg
5286_evjvqa
[{"from": "human", "value": "what color is the motorcycle?\n<image>"}, {"from": "gpt", "value": "green"}]
00000005286.jpg
5286_evjvqa
[{"from": "human", "value": "what color sandals is the standing man wearing?\n<image>"}, {"from": "gpt", "value": "black sandals"}]
00000005286.jpg
5286_evjvqa
[{"from": "human", "value": "how many girls are wearing red shirts?\n<image>"}, {"from": "gpt", "value": "one girl is wearing red shirt"}]
00000005287.jpg
5287_evjvqa
[{"from": "human", "value": "where was the photo taken?\n<image>"}, {"from": "gpt", "value": "the photo was taken at the market"}]
00000005287.jpg
5287_evjvqa
[{"from": "human", "value": "how many motorbikes are carrying vegetables?\n<image>"}, {"from": "gpt", "value": "one motorbike is carrying vegetables"}]
00000005287.jpg
5287_evjvqa
[{"from": "human", "value": "what is the woman in the red jacket doing?\n<image>"}, {"from": "gpt", "value": "choosing vegetables"}]
00000005288.jpg
5288_evjvqa
[{"from": "human", "value": "how many women wear black jackets?\n<image>"}, {"from": "gpt", "value": "one woman wears black jacket"}]
00000005288.jpg
5288_evjvqa
[{"from": "human", "value": "what color dress is the woman in the black jacket wearing?\n<image>"}, {"from": "gpt", "value": "the woman in the black jacket is wearing a red dress"}]
00000005288.jpg
5288_evjvqa
[{"from": "human", "value": "how many girls wear yellow shirts?\n<image>"}, {"from": "gpt", "value": "one girl wears yellow shirt"}]
00000005289.jpg
5289_evjvqa
[{"from": "human", "value": "what color shirt is the man on the motorbike wearing?\n<image>"}, {"from": "gpt", "value": "white shirt"}]
00000005289.jpg
5289_evjvqa
[{"from": "human", "value": "how many white vehicles are there?\n<image>"}, {"from": "gpt", "value": "there is one white vehicle"}]
00000005291.jpg
5291_evjvqa
[{"from": "human", "value": "how many pots of banana trees are there?\n<image>"}, {"from": "gpt", "value": "there are two pots of banana trees"}]
00000005291.jpg
5291_evjvqa
[{"from": "human", "value": "how many people are sitting in front of the house?\n<image>"}, {"from": "gpt", "value": "one person is sitting in front of the house"}]
00000005291.jpg
5291_evjvqa
[{"from": "human", "value": "what is the young man sitting in front of the house doing?\n<image>"}, {"from": "gpt", "value": "surfing the phone"}]
00000005296.jpg
5296_evjvqa
[{"from": "human", "value": "what time of day was the photo taken?\n<image>"}, {"from": "gpt", "value": "the photo was taken in the morning"}]
00000005296.jpg
5296_evjvqa
[{"from": "human", "value": "what color roof does the building have?\n<image>"}, {"from": "gpt", "value": "red roof"}]
00000005296.jpg
5296_evjvqa
[{"from": "human", "value": "what is the main color of the walls of the building?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000005297.jpg
5297_evjvqa
[{"from": "human", "value": "what is the main color of the store's discount board?\n<image>"}, {"from": "gpt", "value": "red"}]
00000003301.jpg
3301_evjvqa
[{"from": "human", "value": "what color mask is the standing young man wearing the protective suit wearing?\n<image>"}, {"from": "gpt", "value": "he is wearing a white mask"}]
00000003303.jpg
3303_evjvqa
[{"from": "human", "value": "which side is the red motorcycle parked on the roadside?\n<image>"}, {"from": "gpt", "value": "the red motorcycle is parked on the left roadside"}]
00000003303.jpg
3303_evjvqa
[{"from": "human", "value": "what vehicle is moving right behind the man riding a black motorbike?\n<image>"}, {"from": "gpt", "value": "right behind the man riding a black motorbike is a moving car"}]
00000003306.jpg
3306_evjvqa
[{"from": "human", "value": "what color chair is there a young man sitting on?\n<image>"}, {"from": "gpt", "value": "there is a young man sitting on a red chair"}]
00000003306.jpg
3306_evjvqa
[{"from": "human", "value": "what color light do the bulbs used at this flower market emit?\n<image>"}, {"from": "gpt", "value": "the bulbs used at this flower market emit white light"}]
00000003306.jpg
3306_evjvqa
[{"from": "human", "value": "what clothes is the young man sitting on the chair wearing?\n<image>"}, {"from": "gpt", "value": "the young man sitting on a chair is wearing a dark blue t-shirt and black pants"}]
00000003309.jpg
3309_evjvqa
[{"from": "human", "value": "what is the woman wearing the ao dai holding?\n<image>"}, {"from": "gpt", "value": "the woman wearing the ao dai is holding the bouquet"}]
00000003309.jpg
3309_evjvqa
[{"from": "human", "value": "where is the man in the military uniform standing?\n<image>"}, {"from": "gpt", "value": "the man in the military uniform is standing between the woman in the red ao dai and the man in the navy jacket"}]
00000003309.jpg
3309_evjvqa
[{"from": "human", "value": "where is the fan?\n<image>"}, {"from": "gpt", "value": "the fan was hung on the wall between the glass doors"}]
00000003310.jpg
3310_evjvqa
[{"from": "human", "value": "what material are the dining table tops made of?\n<image>"}, {"from": "gpt", "value": "the dining table tops are made of wood"}]
00000003310.jpg
3310_evjvqa
[{"from": "human", "value": "what color are the chairs at the dining table?\n<image>"}, {"from": "gpt", "value": "the chairs at the dining table are black"}]
00000003310.jpg
3310_evjvqa
[{"from": "human", "value": "what color chairs does this restaurant arrange at each table?\n<image>"}, {"from": "gpt", "value": "this restaurant arranges black chairs at each table"}]
00000003312.jpg
3312_evjvqa
[{"from": "human", "value": "what clothes does the working supermarket staff wear?\n<image>"}, {"from": "gpt", "value": "the supermarket staff wears blue t-shirt and black pants"}]
00000003312.jpg
3312_evjvqa
[{"from": "human", "value": "what is the supermarket staff doing at the milk shelf?\n<image>"}, {"from": "gpt", "value": "the supermarket staff is checking the products on the milk shelf"}]
00000003312.jpg
3312_evjvqa
[{"from": "human", "value": "what kind of products are sold on the shelf right where the staff is standing?\n<image>"}, {"from": "gpt", "value": "milk products"}]
00000003313.jpg
3313_evjvqa
[{"from": "human", "value": "which cell on the vegetable counter that the carrots are for sale?\n<image>"}, {"from": "gpt", "value": "the carrots are for sale in the second cell from the right in the bottom row"}]
00000003313.jpg
3313_evjvqa
[{"from": "human", "value": "which vegetables on the vegetable counter are in the cell located directly to the left of the cell with carrots?\n<image>"}, {"from": "gpt", "value": "garlic"}]
00000003313.jpg
3313_evjvqa
[{"from": "human", "value": "which material is the vegetable counter made of?\n<image>"}, {"from": "gpt", "value": "the vegetable counter is made of wood"}]
00000003317.jpg
3317_evjvqa
[{"from": "human", "value": "what does the person riding the motorbike wear on their back?\n<image>"}, {"from": "gpt", "value": "the person riding the motorbike wears a backpack on their back"}]
00000003317.jpg
3317_evjvqa
[{"from": "human", "value": "what color helmet does the person riding the motorbike wear?\n<image>"}, {"from": "gpt", "value": "the person riding the motorbike wears a red helmet"}]
00000003317.jpg
3317_evjvqa
[{"from": "human", "value": "what means is the person wearing the red jacket going to the market?\n<image>"}, {"from": "gpt", "value": "the person wearing the red jacket is going to the market by motorbike"}]
00000003320.jpg
3320_evjvqa
[{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "she is speaking in front of the press"}]
00000003320.jpg
3320_evjvqa
[{"from": "human", "value": "what objects are placed on the podium tribune rostrum stand?\n<image>"}, {"from": "gpt", "value": "microphones are placed on the podium tribune rostrum stand"}]
00000003320.jpg
3320_evjvqa
[{"from": "human", "value": "what is this woman doing with the microphones?\n<image>"}, {"from": "gpt", "value": "this woman is speaking"}]
00000003323.jpg
3323_evjvqa
[{"from": "human", "value": "where is the sign?\n<image>"}, {"from": "gpt", "value": "the sign is next to the wall of the building on the right"}]
00000003323.jpg
3323_evjvqa
[{"from": "human", "value": "what do tourists sit on to drink beer in this street?\n<image>"}, {"from": "gpt", "value": "they sit on plastic chairs"}]
00000003326.jpg
3326_evjvqa
[{"from": "human", "value": "what is this man holding in his hands?\n<image>"}, {"from": "gpt", "value": "he is holding a house model in his hands"}]
00000003326.jpg
3326_evjvqa
[{"from": "human", "value": "what does this man wear on his head?\n<image>"}, {"from": "gpt", "value": "he wears a blue cap"}]
00000003326.jpg
3326_evjvqa
[{"from": "human", "value": "what color is his jacket?\n<image>"}, {"from": "gpt", "value": "this man is wearing a gray and yellow-green jacket"}]
00000003327.jpg
3327_evjvqa
[{"from": "human", "value": "what color is the road markings on this street?\n<image>"}, {"from": "gpt", "value": "the road markings on this street are white"}]
00000003327.jpg
3327_evjvqa
[{"from": "human", "value": "what vehicle is the person in black and wearing a black helmet traveling by?\n<image>"}, {"from": "gpt", "value": "the person in black and wearing a black helmet is traveling by a motorbike"}]
00000003327.jpg
3327_evjvqa
[{"from": "human", "value": "who is the man in the white shirt and black pants walking with?\n<image>"}, {"from": "gpt", "value": "the man in the white shirt and black pants is walking with a child in the black shirt"}]
00000003330.jpg
3330_evjvqa
[{"from": "human", "value": "what outfit is the woman holding hands wearing?\n<image>"}, {"from": "gpt", "value": "the woman holding hands is wearing an ao dai"}]
00000003330.jpg
3330_evjvqa
[{"from": "human", "value": "what color shirt is the person behind the woman holding hands wearing?\n<image>"}, {"from": "gpt", "value": "the person behind the woman holding hands is wearing white shirt"}]
00000003330.jpg
3330_evjvqa
[{"from": "human", "value": "what is the posture of the woman in the ao dai?\n<image>"}, {"from": "gpt", "value": "she is standing"}]
00000003334.jpg
3334_evjvqa
[{"from": "human", "value": "what color shirt is the woman wearing?\n<image>"}, {"from": "gpt", "value": "the woman is wearing a gray shirt"}]
00000003334.jpg
3334_evjvqa
[{"from": "human", "value": "what color shirts are both men wearing?\n<image>"}, {"from": "gpt", "value": "both men are wearing white shirts"}]
00000003338.jpg
3338_evjvqa
[{"from": "human", "value": "who is holding the bouquet?\n<image>"}, {"from": "gpt", "value": "the girl in the white ao dai"}]
00000003338.jpg
3338_evjvqa
[{"from": "human", "value": "which side does the girl in the white ao dai stand compared to the young man?\n<image>"}, {"from": "gpt", "value": "on the right side of the young man"}]
00000003338.jpg
3338_evjvqa
[{"from": "human", "value": "what is the girl in the white ao dai holding in her hands?\n<image>"}, {"from": "gpt", "value": "the girl in the white ao dai is holding a bouqet"}]
00000003340.jpg
3340_evjvqa
[{"from": "human", "value": "what color is the text on the wall?\n<image>"}, {"from": "gpt", "value": "the text on the wall is black"}]
00000003340.jpg
3340_evjvqa
[{"from": "human", "value": "where is the woman in the black jacket looking?\n<image>"}, {"from": "gpt", "value": "she is looking at the text on the wall"}]
00000003340.jpg
3340_evjvqa
[{"from": "human", "value": "what is the girl in the black jacket wearing by her side?\n<image>"}, {"from": "gpt", "value": "she is wearing a bag"}]
00000003341.jpg
3341_evjvqa
[{"from": "human", "value": "what are the students sitting together on the steps to?\n<image>"}, {"from": "gpt", "value": "to take a group photo"}]
00000003341.jpg
3341_evjvqa
[{"from": "human", "value": "what does the flag hanging on the tower look like?\n<image>"}, {"from": "gpt", "value": "the flag has a red background and a yellow star"}]
00000003341.jpg
3341_evjvqa
[{"from": "human", "value": "where is the woman in the pink shirt standing?\n<image>"}, {"from": "gpt", "value": "the woman in the pink shirt is standing to the leftmost of the top row"}]
00000003342.jpg
3342_evjvqa
[{"from": "human", "value": "how is the girl sitting on the high chair dressed?\n<image>"}, {"from": "gpt", "value": "she is wearing a white t-shirt and jeans"}]
00000003342.jpg
3342_evjvqa
[{"from": "human", "value": "what color is the chair on which the girl is sitting stroking her hair?\n<image>"}, {"from": "gpt", "value": "the chair on which the girl is sitting stroking her hair is white"}]
00000003342.jpg
3342_evjvqa
[{"from": "human", "value": "how many buildings are painted cream beige?\n<image>"}, {"from": "gpt", "value": "two buildings are painted cream beige"}]
00000003346.jpg
3346_evjvqa
[{"from": "human", "value": "what is hanging on high ropes across the street?\n<image>"}, {"from": "gpt", "value": "lanterns"}]