Columns: image (string, length 15), id (string, length 8-12), conversations (JSON string, length 79-268)
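Each row pairs an image file with an id and a conversations field holding a JSON list of human/gpt turns, where the <image> token marks where the picture is injected into the prompt. The sketch below shows how one such row might be parsed in Python; the Record dataclass and parse_row helper are illustrative assumptions, only the three column names and the conversation layout come from the data itself.

import json
from dataclasses import dataclass

@dataclass
class Record:
    # Illustrative container; only these three column names appear in the table above.
    image: str           # e.g. "00000000340.jpg"
    id: str              # e.g. "340_evjvqa"
    conversations: list  # list of {"from": "human"|"gpt", "value": ...} turns

def parse_row(image: str, id_: str, conversations_json: str) -> Record:
    # The conversations column is stored as a JSON string, so decode it here.
    return Record(image=image, id=id_, conversations=json.loads(conversations_json))

row = parse_row(
    "00000000340.jpg",
    "340_evjvqa",
    '[{"from": "human", "value": "where are all three motorbikes parked?\\n<image>"}, '
    '{"from": "gpt", "value": "pavement"}]',
)
question = row.conversations[0]["value"].replace("\n<image>", "").strip()
answer = row.conversations[1]["value"]
print(question, "->", answer)  # where are all three motorbikes parked? -> pavement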
00000000340.jpg
340_evjvqa
[{"from": "human", "value": "where are all three motorbikes parked?\n<image>"}, {"from": "gpt", "value": "pavement"}]
00000000344.jpg
344_evjvqa
[{"from": "human", "value": "what color shirt are the people walking outside the supermarket wearing?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000344.jpg
344_evjvqa
[{"from": "human", "value": "how many motorbikes are running around the supermarket?\n<image>"}, {"from": "gpt", "value": "zero"}]
00000000344.jpg
344_evjvqa
[{"from": "human", "value": "what item is there between the two people in red shirts?\n<image>"}, {"from": "gpt", "value": "supermarket trolley"}]
00000000346.jpg
346_evjvqa
[{"from": "human", "value": "what kind of fruit is the man in the red jacket choosing?\n<image>"}, {"from": "gpt", "value": "tomato"}]
00000000346.jpg
346_evjvqa
[{"from": "human", "value": "what is the man in the red shirt standing next to the counter for?\n<image>"}, {"from": "gpt", "value": "choosing tomatoes"}]
00000000346.jpg
346_evjvqa
[{"from": "human", "value": "what is the woman wearing the orange jacket holding in her bag?\n<image>"}, {"from": "gpt", "value": "lime"}]
00000000350.jpg
350_evjvqa
[{"from": "human", "value": "how is the boy with the shorter height dressed?\n<image>"}, {"from": "gpt", "value": "blue jacket, red t-shirt, gray jeans and brown shoes"}]
00000000350.jpg
350_evjvqa
[{"from": "human", "value": "what theme souvenirs does this shop sell?\n<image>"}, {"from": "gpt", "value": "christmas"}]
00000000350.jpg
350_evjvqa
[{"from": "human", "value": "how many people are watching in the souvenir shop?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000350.jpg
350_evjvqa
[{"from": "human", "value": "on which side does the taller boy stand compared to the shorter boy?\n<image>"}, {"from": "gpt", "value": "left"}]
00000000350.jpg
350_evjvqa
[{"from": "human", "value": "which side of the shorter boy's arm is the taller boy standing?\n<image>"}, {"from": "gpt", "value": "right arm"}]
00000000351.jpg
351_evjvqa
[{"from": "human", "value": "how many spoons are in the bowl of pho?\n<image>"}, {"from": "gpt", "value": "one"}]
00000000351.jpg
351_evjvqa
[{"from": "human", "value": "what is this bowl of pho placed on?\n<image>"}, {"from": "gpt", "value": "plate"}]
00000000351.jpg
351_evjvqa
[{"from": "human", "value": "what color plate is the tissue placed on?\n<image>"}, {"from": "gpt", "value": "green"}]
00000000356.jpg
356_evjvqa
[{"from": "human", "value": "what product does this store sellin?\n<image>"}, {"from": "gpt", "value": "clothes"}]
00000000356.jpg
356_evjvqa
[{"from": "human", "value": "how is the man raising his hand dressed?\n<image>"}, {"from": "gpt", "value": "white t-shirt and blue jeans"}]
00000000356.jpg
356_evjvqa
[{"from": "human", "value": "which way is the woman in white shirt standing facing?\n<image>"}, {"from": "gpt", "value": "the shop"}]
00000000359.jpg
359_evjvqa
[{"from": "human", "value": "how many people are riding motorbikes passing the store?\n<image>"}, {"from": "gpt", "value": "zero"}]
00000000359.jpg
359_evjvqa
[{"from": "human", "value": "what color shirt is the person standing inside the store wearing?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000000362.jpg
362_evjvqa
[{"from": "human", "value": "what is in the back of the car?\n<image>"}, {"from": "gpt", "value": "garbage truck"}]
00000000362.jpg
362_evjvqa
[{"from": "human", "value": "who is the man on the scooter carrying?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000362.jpg
362_evjvqa
[{"from": "human", "value": "what is the purpose of the car on the side of the road?\n<image>"}, {"from": "gpt", "value": "parking"}]
00000000362.jpg
362_evjvqa
[{"from": "human", "value": "what status are all the shops here?\n<image>"}, {"from": "gpt", "value": "closure"}]
00000000362.jpg
362_evjvqa
[{"from": "human", "value": "what types of transport are there on the street?\n<image>"}, {"from": "gpt", "value": "car and scooter"}]
00000000365.jpg
365_evjvqa
[{"from": "human", "value": "where are the customers of this shop sitting?\n<image>"}, {"from": "gpt", "value": "inside the shop"}]
00000000365.jpg
365_evjvqa
[{"from": "human", "value": "who are sitting outside the shop drinking coffee?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000366.jpg
366_evjvqa
[{"from": "human", "value": "what material is the furniture in this living room made of?\n<image>"}, {"from": "gpt", "value": "wood"}]
00000000366.jpg
366_evjvqa
[{"from": "human", "value": "how many fans are on the wall?\n<image>"}, {"from": "gpt", "value": "one"}]
00000000366.jpg
366_evjvqa
[{"from": "human", "value": "which object is hanging on the wall?\n<image>"}, {"from": "gpt", "value": "fan"}]
00000000366.jpg
366_evjvqa
[{"from": "human", "value": "what is in the corner next to the curtain?\n<image>"}, {"from": "gpt", "value": "potted plant"}]
00000000372.jpg
372_evjvqa
[{"from": "human", "value": "what kind of clothes is the girl wearing?\n<image>"}, {"from": "gpt", "value": "swimsuit"}]
00000000372.jpg
372_evjvqa
[{"from": "human", "value": "how many boats are there at sea?\n<image>"}, {"from": "gpt", "value": "zero"}]
00000000372.jpg
372_evjvqa
[{"from": "human", "value": "who is the girl standing with?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000373.jpg
373_evjvqa
[{"from": "human", "value": "how is the traffic on this street?\n<image>"}, {"from": "gpt", "value": "deserted"}]
00000000373.jpg
373_evjvqa
[{"from": "human", "value": "what is the person riding a motorbike intending to?\n<image>"}, {"from": "gpt", "value": "turn left"}]
00000000373.jpg
373_evjvqa
[{"from": "human", "value": "how many cars are traveling on this route?\n<image>"}, {"from": "gpt", "value": "zero"}]
00000000374.jpg
374_evjvqa
[{"from": "human", "value": "who is holding the flower out of these two?\n<image>"}, {"from": "gpt", "value": "the man"}]
00000000374.jpg
374_evjvqa
[{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "white dress"}]
00000000374.jpg
374_evjvqa
[{"from": "human", "value": "what animal is standing between the couple?\n<image>"}, {"from": "gpt", "value": "horse"}]
00000000374.jpg
374_evjvqa
[{"from": "human", "value": "how is the man dressed?\n<image>"}, {"from": "gpt", "value": "black vest, black pants and black shoes"}]
00000000374.jpg
374_evjvqa
[{"from": "human", "value": "where is the horse standing compared to the couple?\n<image>"}, {"from": "gpt", "value": "in the middle"}]
00000000376.jpg
376_evjvqa
[{"from": "human", "value": "what vehicles are people on the road using?\n<image>"}, {"from": "gpt", "value": "bicycle and cyclo"}]
00000000376.jpg
376_evjvqa
[{"from": "human", "value": "how many floors are the houses across the street?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000376.jpg
376_evjvqa
[{"from": "human", "value": "what is lying next to side road on the opposite side of the buildings?\n<image>"}, {"from": "gpt", "value": "boats"}]
00000000377.jpg
377_evjvqa
[{"from": "human", "value": "how many people are standing in front of the bao nhu store?\n<image>"}, {"from": "gpt", "value": "zero"}]
00000000380.jpg
380_evjvqa
[{"from": "human", "value": "what are these people doing?\n<image>"}, {"from": "gpt", "value": "shopping"}]
00000000380.jpg
380_evjvqa
[{"from": "human", "value": "where is the little girl in red shirt sitting on?\n<image>"}, {"from": "gpt", "value": "supermarket trolley"}]
00000000380.jpg
380_evjvqa
[{"from": "human", "value": "which of the people here is female?\n<image>"}, {"from": "gpt", "value": "everyone"}]
00000000383.jpg
383_evjvqa
[{"from": "human", "value": "where is the man in white shirt?\n<image>"}, {"from": "gpt", "value": "in the park"}]
00000000383.jpg
383_evjvqa
[{"from": "human", "value": "how is the standing man dressed?\n<image>"}, {"from": "gpt", "value": "white shirt and black pants"}]
00000000384.jpg
384_evjvqa
[{"from": "human", "value": "what does this store mainly sell?\n<image>"}, {"from": "gpt", "value": "dairy products"}]
00000000384.jpg
384_evjvqa
[{"from": "human", "value": "who is shopping for milk?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000385.jpg
385_evjvqa
[{"from": "human", "value": "what is the man in the white shirt sitting outside the store for?\n<image>"}, {"from": "gpt", "value": "looking after the vehicles"}]
00000000385.jpg
385_evjvqa
[{"from": "human", "value": "how is the sitting man dressed?\n<image>"}, {"from": "gpt", "value": "white t-shirt and blue jeans"}]
00000000385.jpg
385_evjvqa
[{"from": "human", "value": "how many motorbikes have no one sitting on?\n<image>"}, {"from": "gpt", "value": "three"}]
00000000387.jpg
387_evjvqa
[{"from": "human", "value": "where can the garbage can be found?\n<image>"}, {"from": "gpt", "value": "at the foot of the lamp post"}]
00000000387.jpg
387_evjvqa
[{"from": "human", "value": "how many people are walking up the stairs?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000387.jpg
387_evjvqa
[{"from": "human", "value": "what color shirt is the man walking behind the other wearing?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000397.jpg
397_evjvqa
[{"from": "human", "value": "what kind of vehicle is parked right in front of the restaurant?\n<image>"}, {"from": "gpt", "value": "scooter"}]
00000000399.jpg
399_evjvqa
[{"from": "human", "value": "what mean do the customers travel to this store?\n<image>"}, {"from": "gpt", "value": "motorbike"}]
00000000399.jpg
399_evjvqa
[{"from": "human", "value": "what does this store give customers on opening day?\n<image>"}, {"from": "gpt", "value": "balloon"}]
00000011601.jpg
11601_evjvqa
[{"from": "human", "value": "what products do the shelves on the right display?\n<image>"}, {"from": "gpt", "value": "shelves displaying all kinds of diapers"}]
00000011603.jpg
11603_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this area is an oil field"}]
00000011604.jpg
11604_evjvqa
[{"from": "human", "value": "how many people is the cyclo driver carrying?\n<image>"}, {"from": "gpt", "value": "the cyclo driver is carrying two people"}]
00000011605.jpg
11605_evjvqa
[{"from": "human", "value": "which area is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000011606.jpg
11606_evjvqa
[{"from": "human", "value": "how many people wearing ao dai are in the market?\n<image>"}, {"from": "gpt", "value": "there are three people wearing ao dai in the market"}]
00000011607.jpg
11607_evjvqa
[{"from": "human", "value": "what color is the paddle?\n<image>"}, {"from": "gpt", "value": "the paddle is yellow"}]
00000011610.jpg
11610_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "the man is taking photo with camera"}]
00000011612.jpg
11612_evjvqa
[{"from": "human", "value": "what color is the motorcycle that is about to run?\n<image>"}, {"from": "gpt", "value": "the motorcycle is blue"}]
00000011613.jpg
11613_evjvqa
[{"from": "human", "value": "how many employees are standing in front of the store?\n<image>"}, {"from": "gpt", "value": "there is an employee standing in front of the store"}]
00000011615.jpg
11615_evjvqa
[{"from": "human", "value": "how many floors does the row of houses have?\n<image>"}, {"from": "gpt", "value": "the row of houses has three stories"}]
00000011616.jpg
11616_evjvqa
[{"from": "human", "value": "how many white cars are there?\n<image>"}, {"from": "gpt", "value": "there are two white cars on the road"}]
00000011618.jpg
11618_evjvqa
[{"from": "human", "value": "what color is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is in white t-shirt"}]
00000011619.jpg
11619_evjvqa
[{"from": "human", "value": "what are people wearing?\n<image>"}, {"from": "gpt", "value": "everyone is wearing south vietnamese pyjamas"}]
00000011620.jpg
11620_evjvqa
[{"from": "human", "value": "how many similar pictures are there?\n<image>"}, {"from": "gpt", "value": "there are two similar pictures"}]
00000011621.jpg
11621_evjvqa
[{"from": "human", "value": "how many people use supermarket trolleys?\n<image>"}, {"from": "gpt", "value": "there are two people using a supermarket trolley"}]
00000011623.jpg
11623_evjvqa
[{"from": "human", "value": "where is here?\n<image>"}, {"from": "gpt", "value": "this is the roundabout"}]
00000011624.jpg
11624_evjvqa
[{"from": "human", "value": "on which side of the boy does the girl stand?\n<image>"}, {"from": "gpt", "value": "the girl stands to the right of the boy"}]
00000011626.jpg
11626_evjvqa
[{"from": "human", "value": "how many loaves of bread are there?\n<image>"}, {"from": "gpt", "value": "there are two loaves of bread"}]
00000011630.jpg
11630_evjvqa
[{"from": "human", "value": "how many people are visiting the museum?\n<image>"}, {"from": "gpt", "value": "there are four people visiting the museum"}]
00000011631.jpg
11631_evjvqa
[{"from": "human", "value": "which side of the boy does the girl stand on?\n<image>"}, {"from": "gpt", "value": "the girl stands to the right of the boy"}]
00000011632.jpg
11632_evjvqa
[{"from": "human", "value": "what color shirt is the man walking on the street wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing blue shirt"}]
00000011636.jpg
11636_evjvqa
[{"from": "human", "value": "what color are the lanterns?\n<image>"}, {"from": "gpt", "value": "the lanterns are white"}]
00000011637.jpg
11637_evjvqa
[{"from": "human", "value": "what color is the cat's fur?\n<image>"}, {"from": "gpt", "value": "the cat has white fur"}]
00000011638.jpg
11638_evjvqa
[{"from": "human", "value": "how many paramedics are behind the car?\n<image>"}, {"from": "gpt", "value": "there are two paramedics"}]
00000011639.jpg
11639_evjvqa
[{"from": "human", "value": "what is the person wearing the helmet doing?\n<image>"}, {"from": "gpt", "value": "the person wearing a helmet is leading a vehicle"}]
00000011642.jpg
11642_evjvqa
[{"from": "human", "value": "how many people are pulling a vegetable cart?\n<image>"}, {"from": "gpt", "value": "there are three people pulling a vegetable cart"}]
00000011643.jpg
11643_evjvqa
[{"from": "human", "value": "how many tiers does a shelf have?\n<image>"}, {"from": "gpt", "value": "the shelf is four-tiered "}]
00000011644.jpg
11644_evjvqa
[{"from": "human", "value": "what color shirt is the first girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a yellow t-shirt"}]
00000011645.jpg
11645_evjvqa
[{"from": "human", "value": "what is the guy in the white t-shirt doing?\n<image>"}, {"from": "gpt", "value": "the boy is taking pictures"}]
00000011646.jpg
11646_evjvqa
[{"from": "human", "value": "what color shirt is the woman pushing the cart?\n<image>"}, {"from": "gpt", "value": "the woman is wearing a red top"}]
00000011648.jpg
11648_evjvqa
[{"from": "human", "value": "how many boats are on the river?\n<image>"}, {"from": "gpt", "value": "there are four boats on the river"}]
00000011650.jpg
11650_evjvqa
[{"from": "human", "value": "what is the girl wearing on her head?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a conical hat"}]
00000011651.jpg
11651_evjvqa
[{"from": "human", "value": "what color top is the baby wearing?\n<image>"}, {"from": "gpt", "value": "the baby wears pink shirt"}]
00000011652.jpg
11652_evjvqa
[{"from": "human", "value": "which hand does the guy hold the phone with?\n<image>"}, {"from": "gpt", "value": "the boy is holding phone with his right hand"}]
00000011654.jpg
11654_evjvqa
[{"from": "human", "value": "how many people are in the park?\n<image>"}, {"from": "gpt", "value": "there are five people exercising in the park"}]
00000011656.jpg
11656_evjvqa
[{"from": "human", "value": "how many stone pillars are there in the room?\n<image>"}, {"from": "gpt", "value": "there are five stone pillars"}]
00000011657.jpg
11657_evjvqa
[{"from": "human", "value": "what color top is the woman wearing?\n<image>"}, {"from": "gpt", "value": "the woman is wearing black top"}]
00000011658.jpg
11658_evjvqa
[{"from": "human", "value": "what color are the banners hanging on the lampposts?\n<image>"}, {"from": "gpt", "value": "the banner hanging on the lamp post is red"}]