image (string, length 15) | id (string, length 8-12) | conversations (string, length 79-268)
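Each conversations cell below is a JSON-encoded list of human/gpt turns, with an <image> placeholder marking where the picture enters the prompt. The following is a minimal parsing sketch: the row dict literal is copied from the first sample below, while the storage format around it (parquet, JSONL, a pandas frame, ...) is an assumption, not something this preview documents.

```python
import json

# One row copied from the first sample below; only the three fields and the
# conversations schema are taken from this preview -- how rows are actually
# stored on disk is an assumption.
row = {
    "image": "00000006444.jpg",
    "id": "6444_evjvqa",
    "conversations": (
        '[{"from": "human", "value": "what is the man in blue doing?\\n<image>"},'
        ' {"from": "gpt", "value": "he is driving the motorbike on the street."}]'
    ),
}

turns = json.loads(row["conversations"])  # -> list of {"from", "value"} dicts
for turn in turns:
    # "<image>" inside a human turn marks where the image is spliced in.
    print(f"{turn['from']}: {turn['value'].strip()}")
```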
00000006444.jpg
6444_evjvqa
[{"from": "human", "value": "what is the man in blue doing?\n<image>"}, {"from": "gpt", "value": "he is driving the motorbike on the street. "}]
00000006447.jpg
6447_evjvqa
[{"from": "human", "value": "what is the woman in the blue top doing?\n<image>"}, {"from": "gpt", "value": "the woman in the blue top is sitting selling rice noodles."}]
00000006447.jpg
6447_evjvqa
[{"from": "human", "value": "how many people are sitting selling rice noodles?\n<image>"}, {"from": "gpt", "value": "there is one person sitting selling rice noodles."}]
00000006447.jpg
6447_evjvqa
[{"from": "human", "value": "what color is the top that the woman selling rice noodles is wearing?\n<image>"}, {"from": "gpt", "value": "it is blue. "}]
00000006452.jpg
6452_evjvqa
[{"from": "human", "value": "what is she doing?\n<image>"}, {"from": "gpt", "value": "she is stacking the shelves with goods."}]
00000006452.jpg
6452_evjvqa
[{"from": "human", "value": "what color is her shirt?\n<image>"}, {"from": "gpt", "value": "it is red."}]
00000006459.jpg
6459_evjvqa
[{"from": "human", "value": "what are they doing?\n<image>"}, {"from": "gpt", "value": "they are conducting trade on the river. "}]
00000006459.jpg
6459_evjvqa
[{"from": "human", "value": "what kind of vehicles are they sitting on?\n<image>"}, {"from": "gpt", "value": "they are sitting on punts. "}]
00000006459.jpg
6459_evjvqa
[{"from": "human", "value": "what means of transport do they use for trade?\n<image>"}, {"from": "gpt", "value": "they use punts for transport. "}]
00000006463.jpg
6463_evjvqa
[{"from": "human", "value": "what color is the woman in the white pants' handbag?\n<image>"}, {"from": "gpt", "value": "it is black."}]
00000006463.jpg
6463_evjvqa
[{"from": "human", "value": "what color are the woman in the blue shirt's pants?\n<image>"}, {"from": "gpt", "value": "they are white. "}]
00000006463.jpg
6463_evjvqa
[{"from": "human", "value": "how many men are sitting selling vegetables?\n<image>"}, {"from": "gpt", "value": "there is one man sitting selling vegetables. "}]
00000006465.jpg
6465_evjvqa
[{"from": "human", "value": "how many men are driving cyclos?\n<image>"}, {"from": "gpt", "value": "there is one man driving the cyclo."}]
00000006465.jpg
6465_evjvqa
[{"from": "human", "value": "what color is the skirt that she is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a black skirt."}]
00000006465.jpg
6465_evjvqa
[{"from": "human", "value": "what is the girl in the black skirt doing?\n<image>"}, {"from": "gpt", "value": "she is walking on the railway."}]
00000006469.jpg
6469_evjvqa
[{"from": "human", "value": "what is she doing?\n<image>"}, {"from": "gpt", "value": "she is riding the bicycle."}]
00000006469.jpg
6469_evjvqa
[{"from": "human", "value": "how many women are riding bicycles?\n<image>"}, {"from": "gpt", "value": "there is one woman riding the bicycle. "}]
00000006469.jpg
6469_evjvqa
[{"from": "human", "value": "what color is the woman riding the bicycle's shirt?\n<image>"}, {"from": "gpt", "value": "it is white."}]
00000006471.jpg
6471_evjvqa
[{"from": "human", "value": "how many men are wearing black and red shirts?\n<image>"}, {"from": "gpt", "value": "there is one man wearing the black and red shirt."}]
00000006471.jpg
6471_evjvqa
[{"from": "human", "value": "how many girls are shopping?\n<image>"}, {"from": "gpt", "value": "there are two girls shopping."}]
00000006471.jpg
6471_evjvqa
[{"from": "human", "value": "how many men in white are shopping?\n<image>"}, {"from": "gpt", "value": "there are two men in white shopping."}]
00000006473.jpg
6473_evjvqa
[{"from": "human", "value": "what is she doing?\n<image>"}, {"from": "gpt", "value": "she is standing by the bicycle."}]
00000006473.jpg
6473_evjvqa
[{"from": "human", "value": "what color are the pants that the woman standing by the bicycle is wearing?\n<image>"}, {"from": "gpt", "value": "the woman standing by the bicycle is wearing black pants."}]
00000006473.jpg
6473_evjvqa
[{"from": "human", "value": "how many women are standing by the bicycle?\n<image>"}, {"from": "gpt", "value": "there is one woman standing by the bicycle."}]
00000006478.jpg
6478_evjvqa
[{"from": "human", "value": "what color is her ao dai?\n<image>"}, {"from": "gpt", "value": "it is blue."}]
00000006478.jpg
6478_evjvqa
[{"from": "human", "value": "what is the expression on her face?\n<image>"}, {"from": "gpt", "value": "she is smiling."}]
00000006478.jpg
6478_evjvqa
[{"from": "human", "value": "what kind of clothing is she wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a blue ao dai. "}]
00000006479.jpg
6479_evjvqa
[{"from": "human", "value": "how many women are sitting?\n<image>"}, {"from": "gpt", "value": "there is one woman sitting."}]
00000006479.jpg
6479_evjvqa
[{"from": "human", "value": "how many men are standing in the booth?\n<image>"}, {"from": "gpt", "value": "there is one man standing in the booth. "}]
00000006480.jpg
6480_evjvqa
[{"from": "human", "value": "what are the police officers doing?\n<image>"}, {"from": "gpt", "value": "they are taking part in a memorial service."}]
00000006480.jpg
6480_evjvqa
[{"from": "human", "value": "what color are the police officers' uniforms?\n<image>"}, {"from": "gpt", "value": "they are green."}]
00000006480.jpg
6480_evjvqa
[{"from": "human", "value": "how many men in black are there?\n<image>"}, {"from": "gpt", "value": "there is one man in black. "}]
00000006481.jpg
6481_evjvqa
[{"from": "human", "value": "what is the man in green doing?\n<image>"}, {"from": "gpt", "value": "the man in green is selling products to the woman in the blue coat."}]
00000006481.jpg
6481_evjvqa
[{"from": "human", "value": "how many men in green are there?\n<image>"}, {"from": "gpt", "value": "there is one man in the green t-shirt."}]
00000006481.jpg
6481_evjvqa
[{"from": "human", "value": "what color is the t-shirt that the man standing in the booth is wearing?\n<image>"}, {"from": "gpt", "value": "it is green. "}]
00000006482.jpg
6482_evjvqa
[{"from": "human", "value": "how many girls are riding bicycles?\n<image>"}, {"from": "gpt", "value": "there is one girl riding the bicycle."}]
00000006482.jpg
6482_evjvqa
[{"from": "human", "value": "what is the girl in white doing?\n<image>"}, {"from": "gpt", "value": "the girl in white is riding the bicycle."}]
00000006482.jpg
6482_evjvqa
[{"from": "human", "value": "what color is the dress that the female cyclist is wearing?\n<image>"}, {"from": "gpt", "value": "it is white. "}]
00000006485.jpg
6485_evjvqa
[{"from": "human", "value": "what color is her top?\n<image>"}, {"from": "gpt", "value": "it is white."}]
00000006485.jpg
6485_evjvqa
[{"from": "human", "value": "what color is her hair band?\n<image>"}, {"from": "gpt", "value": "it is red."}]
00000006485.jpg
6485_evjvqa
[{"from": "human", "value": "what color is the top that she is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a white top. "}]
00000006488.jpg
6488_evjvqa
[{"from": "human", "value": "where is she standing?\n<image>"}, {"from": "gpt", "value": "she is standing on the riverbank."}]
00000006488.jpg
6488_evjvqa
[{"from": "human", "value": "what color is her top?\n<image>"}, {"from": "gpt", "value": "it is black."}]
00000006491.jpg
6491_evjvqa
[{"from": "human", "value": "what color is the jacket that she is wearing?\n<image>"}, {"from": "gpt", "value": "it is orange."}]
00000006491.jpg
6491_evjvqa
[{"from": "human", "value": "what is the woman in orange doing?\n<image>"}, {"from": "gpt", "value": "she is walking the bicycle."}]
00000006491.jpg
6491_evjvqa
[{"from": "human", "value": "how many women are walking bicycles?\n<image>"}, {"from": "gpt", "value": "there is one woman walking the bicycle."}]
00000006498.jpg
6498_evjvqa
[{"from": "human", "value": "how many women in white are there?\n<image>"}, {"from": "gpt", "value": "there is one woman in white."}]
00000006498.jpg
6498_evjvqa
[{"from": "human", "value": "how many women in black are there?\n<image>"}, {"from": "gpt", "value": "there is one woman in black."}]
00000006498.jpg
6498_evjvqa
[{"from": "human", "value": "what is the woman in white doing?\n<image>"}, {"from": "gpt", "value": "the woman in white is sitting fixing the clothes. "}]
00000005200.jpg
5200_evjvqa
[{"from": "human", "value": "what is the main color of the interface?\n<image>"}, {"from": "gpt", "value": "the main color of the interface is blue"}]
00000005203.jpg
5203_evjvqa
[{"from": "human", "value": "what color is the content of the food printed on the restaurant's signboard?\n<image>"}, {"from": "gpt", "value": "printed in red"}]
00000005204.jpg
5204_evjvqa
[{"from": "human", "value": "what is the girl in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "the girl in the blue shirt is packing for a customer"}]
00000005204.jpg
5204_evjvqa
[{"from": "human", "value": "what color shirt is the woman with the glasses on her head wearing?\n<image>"}, {"from": "gpt", "value": "the woman with the glasses on her head is wearing a yellow shirt"}]
00000005204.jpg
5204_evjvqa
[{"from": "human", "value": "how are the feelings of the girl and the woman?\n<image>"}, {"from": "gpt", "value": "both of them are happy"}]
00000005207.jpg
5207_evjvqa
[{"from": "human", "value": "whose hand is the boy holding?\n<image>"}, {"from": "gpt", "value": "the boy is holding the girl's hand"}]
00000005207.jpg
5207_evjvqa
[{"from": "human", "value": "what color shoes is the boy wearing?\n<image>"}, {"from": "gpt", "value": "the boy is wearing black shoes"}]
00000005207.jpg
5207_evjvqa
[{"from": "human", "value": "what color jacket is the girl wearing?\n<image>"}, {"from": "gpt", "value": "pink"}]
00000005214.jpg
5214_evjvqa
[{"from": "human", "value": "what color is the iron door?\n<image>"}, {"from": "gpt", "value": "green"}]
00000005214.jpg
5214_evjvqa
[{"from": "human", "value": "how many gray shirts are on display in the closet?\n<image>"}, {"from": "gpt", "value": "one gray shirt is on display in the closet"}]
00000005214.jpg
5214_evjvqa
[{"from": "human", "value": "what is the color of the brick floor of the house?\n<image>"}, {"from": "gpt", "value": "yellow and white"}]
00000005215.jpg
5215_evjvqa
[{"from": "human", "value": "how many men are riding motorbikes?\n<image>"}, {"from": "gpt", "value": "one man is riding a motorbike"}]
00000005216.jpg
5216_evjvqa
[{"from": "human", "value": "what means of transport is the woman using?\n<image>"}, {"from": "gpt", "value": "the woman is using bicycle"}]
00000005216.jpg
5216_evjvqa
[{"from": "human", "value": "what is the woman carrying?\n<image>"}, {"from": "gpt", "value": "the woman is carrying flowers"}]
00000005216.jpg
5216_evjvqa
[{"from": "human", "value": "what color is the shirt the woman is wearing?\n<image>"}, {"from": "gpt", "value": "black"}]
00000005219.jpg
5219_evjvqa
[{"from": "human", "value": "what color shirt is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a red shirt"}]
00000005219.jpg
5219_evjvqa
[{"from": "human", "value": "what is the posture of the man in the light blue shirt?\n<image>"}, {"from": "gpt", "value": "the man in the light blue shirt is sitting"}]
00000005219.jpg
5219_evjvqa
[{"from": "human", "value": "how many people are wearing red shirt in the picture?\n<image>"}, {"from": "gpt", "value": "two people are wearing red shirts"}]
00000005220.jpg
5220_evjvqa
[{"from": "human", "value": "how many white passenger cars are there?\n<image>"}, {"from": "gpt", "value": "there is one white passenger car"}]
00000005220.jpg
5220_evjvqa
[{"from": "human", "value": "how many red passenger cars are there?\n<image>"}, {"from": "gpt", "value": "there is one red passenger car"}]
00000005220.jpg
5220_evjvqa
[{"from": "human", "value": "how many blue passenger cars are there?\n<image>"}, {"from": "gpt", "value": "there is one blue passenger car"}]
00000005223.jpg
5223_evjvqa
[{"from": "human", "value": "what color shirt is the man wearing?\n<image>"}, {"from": "gpt", "value": "brown shirt"}]
00000005223.jpg
5223_evjvqa
[{"from": "human", "value": "what color hat is the man wearing?\n<image>"}, {"from": "gpt", "value": "brown hat"}]
00000005225.jpg
5225_evjvqa
[{"from": "human", "value": "what color is the tablecloth?\n<image>"}, {"from": "gpt", "value": "white tablecloth"}]
00000005225.jpg
5225_evjvqa
[{"from": "human", "value": "what products does the stall sell?\n<image>"}, {"from": "gpt", "value": "jewelry"}]
00000005225.jpg
5225_evjvqa
[{"from": "human", "value": "how many men are wearing black shirts?\n<image>"}, {"from": "gpt", "value": "one man is wearing black shirt"}]
00000005227.jpg
5227_evjvqa
[{"from": "human", "value": "how many young men wearing black are on the street?\n<image>"}, {"from": "gpt", "value": "one man wearing black is on the street"}]
00000005227.jpg
5227_evjvqa
[{"from": "human", "value": "how many boys are controling the model car?\n<image>"}, {"from": "gpt", "value": "one boy is using the model car"}]
00000005227.jpg
5227_evjvqa
[{"from": "human", "value": "what color is the boy's model car?\n<image>"}, {"from": "gpt", "value": "red"}]
00000005228.jpg
5228_evjvqa
[{"from": "human", "value": "what happened to the house?\n<image>"}, {"from": "gpt", "value": "the house has collapsed roof"}]
00000005228.jpg
5228_evjvqa
[{"from": "human", "value": "how many men are standing in front of the house?\n<image>"}, {"from": "gpt", "value": "two men are standing in front of the house"}]
00000005228.jpg
5228_evjvqa
[{"from": "human", "value": "where does the man in the blue shirt stand compared to the rest?\n<image>"}, {"from": "gpt", "value": "stands in front of"}]
00000005231.jpg
5231_evjvqa
[{"from": "human", "value": "what is the main color of the restaurant?\n<image>"}, {"from": "gpt", "value": "brown"}]
00000005231.jpg
5231_evjvqa
[{"from": "human", "value": "how many flower stands are there in front of the store?\n<image>"}, {"from": "gpt", "value": "three flower stands are in front of the store"}]
00000005232.jpg
5232_evjvqa
[{"from": "human", "value": "how is the girl feeling?\n<image>"}, {"from": "gpt", "value": "she is smiling"}]
00000005232.jpg
5232_evjvqa
[{"from": "human", "value": "what color jacket is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a white jacket"}]
00000005232.jpg
5232_evjvqa
[{"from": "human", "value": "what color are the girl's shoes?\n<image>"}, {"from": "gpt", "value": "black"}]
00000005233.jpg
5233_evjvqa
[{"from": "human", "value": "how many young men in white shirt are there in the photo?\n<image>"}, {"from": "gpt", "value": "there are three young men wearing white shirts"}]
00000005233.jpg
5233_evjvqa
[{"from": "human", "value": "how many young men wear hats?\n<image>"}, {"from": "gpt", "value": "one young man wear hat"}]
00000005233.jpg
5233_evjvqa
[{"from": "human", "value": "what color shoes do the two girls wear?\n<image>"}, {"from": "gpt", "value": "black"}]
00000005234.jpg
5234_evjvqa
[{"from": "human", "value": "how many police soldiers wear blue uniforms?\n<image>"}, {"from": "gpt", "value": "two soldiers wear blue uniforms"}]
00000005234.jpg
5234_evjvqa
[{"from": "human", "value": "how many people are clapping?\n<image>"}, {"from": "gpt", "value": "three people are clapping"}]
00000005234.jpg
5234_evjvqa
[{"from": "human", "value": "what color mask does the man wearing white slippers have?\n<image>"}, {"from": "gpt", "value": "white mask"}]
00000005241.jpg
5241_evjvqa
[{"from": "human", "value": "how many men wear dark glasses?\n<image>"}, {"from": "gpt", "value": "one man is wearing dark glasses"}]
00000005245.jpg
5245_evjvqa
[{"from": "human", "value": "what color shirt is the man riding the black motorbike wearing?\n<image>"}, {"from": "gpt", "value": "blue shirt"}]
00000005245.jpg
5245_evjvqa
[{"from": "human", "value": "what color helmet is the man riding the motorbike in a blue shirt wearing?\n<image>"}, {"from": "gpt", "value": "yellow helmet"}]
00000005245.jpg
5245_evjvqa
[{"from": "human", "value": "how many red vehicles are moving on the road?\n<image>"}, {"from": "gpt", "value": "one red vehicle"}]
00000005246.jpg
5246_evjvqa
[{"from": "human", "value": "where is the girl standing being photographed?\n<image>"}, {"from": "gpt", "value": "in front of the brick wall"}]
00000005246.jpg
5246_evjvqa
[{"from": "human", "value": "what color is the girl's shirt?\n<image>"}, {"from": "gpt", "value": "the girl's shirt is blue"}]
00000005246.jpg
5246_evjvqa
[{"from": "human", "value": "what color shoes is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing gray shoes"}]
00000005247.jpg
5247_evjvqa
[{"from": "human", "value": "what color is the roof of the stalls?\n<image>"}, {"from": "gpt", "value": "green"}]