image (string, 15 chars) | id (string, 8-12 chars) | conversations (string, 79-268 chars)
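Each record below has three fields: the image filename, the record id, and a conversations value holding ShareGPT-style turns, where the human turn carries the question plus an <image> placeholder and the gpt turn carries the answer. A minimal parsing sketch in Python, using the first record below as sample data (nothing beyond the table itself is assumed):

    import json

    # The "conversations" field as it appears in this table: a JSON list of
    # alternating human/gpt turns, with <image> marking the image slot.
    conversations = (
        '[{"from": "human", "value": "which hand does the man raise?\\n<image>"},'
        ' {"from": "gpt", "value": "the man is raising his right hand"}]'
    )

    turns = json.loads(conversations)
    question = turns[0]["value"].replace("<image>", "").strip()
    answer = turns[1]["value"]
    print(question, "->", answer)
    # which hand does the man raise? -> the man is raising his right hand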
00000011661.jpg
11661_evjvqa
[{"from": "human", "value": "which hand does the man raise?\n<image>"}, {"from": "gpt", "value": "the man is raising his right hand"}]
00000011663.jpg
11663_evjvqa
[{"from": "human", "value": "how many people are pushing carts?\n<image>"}, {"from": "gpt", "value": "there are two people pushing a cart"}]
00000011666.jpg
11666_evjvqa
[{"from": "human", "value": "what color is the shirt of the woman carrying two bags of clothes?\n<image>"}, {"from": "gpt", "value": "the woman is wearing black shirt"}]
00000011668.jpg
11668_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this area is a market"}]
00000011670.jpg
11670_evjvqa
[{"from": "human", "value": "what is the woman in black holding in her hand?\n<image>"}, {"from": "gpt", "value": "woman in black is holding a hat in her hand"}]
00000011671.jpg
11671_evjvqa
[{"from": "human", "value": "how many bulletin boards are there on the lawn?\n<image>"}, {"from": "gpt", "value": "there is a notice board on the lawn"}]
00000011672.jpg
11672_evjvqa
[{"from": "human", "value": "what is the girl in white holding in her hand?\n<image>"}, {"from": "gpt", "value": "the girl in white is holding two boxes of grapes"}]
00000011674.jpg
11674_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "the girl is sitting near the pool"}]
00000011675.jpg
11675_evjvqa
[{"from": "human", "value": "how many boats are on the river?\n<image>"}, {"from": "gpt", "value": "there is a boat on the river"}]
00000011677.jpg
11677_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this area is a night market"}]
00000011680.jpg
11680_evjvqa
[{"from": "human", "value": "what clothes do the women in the back row wear?\n<image>"}, {"from": "gpt", "value": "the women in the back row are wearing red ao dai"}]
00000011681.jpg
11681_evjvqa
[{"from": "human", "value": "what is the bike carrying?\n<image>"}, {"from": "gpt", "value": "the bicycle is carrying bouquets of flowers"}]
00000011683.jpg
11683_evjvqa
[{"from": "human", "value": "what is the woman wearing?\n<image>"}, {"from": "gpt", "value": "the woman is wearing white dress"}]
00000011684.jpg
11684_evjvqa
[{"from": "human", "value": "what color hat is the man in black wearing?\n<image>"}, {"from": "gpt", "value": "the man in black is wearing a black hat"}]
00000011685.jpg
11685_evjvqa
[{"from": "human", "value": "how many boats are on the river?\n<image>"}, {"from": "gpt", "value": "there are three boats on the river"}]
00000011690.jpg
11690_evjvqa
[{"from": "human", "value": "what color is the coffee shop sign?\n<image>"}, {"from": "gpt", "value": "the coffee shop sign is pink"}]
00000011691.jpg
11691_evjvqa
[{"from": "human", "value": "which shelf is the girl in yellow standing near?\n<image>"}, {"from": "gpt", "value": "the girl in yellow is standing near the shelves of instant noodles"}]
00000011692.jpg
11692_evjvqa
[{"from": "human", "value": "how many anthropomorphic statues are there on the monument?\n<image>"}, {"from": "gpt", "value": "there are three anthropomorphic statues on the monument"}]
00000011695.jpg
11695_evjvqa
[{"from": "human", "value": "what time of day is the scene?\n<image>"}, {"from": "gpt", "value": "the scene is evening"}]
00000011697.jpg
11697_evjvqa
[{"from": "human", "value": "which side of the man in the yellow shirt does the red dress woman stand?\n<image>"}, {"from": "gpt", "value": "the woman is standing in front of the man in yellow"}]
00000011698.jpg
11698_evjvqa
[{"from": "human", "value": "what is the white hat woman holding in her hand?\n<image>"}, {"from": "gpt", "value": "the woman is holding a dragon fruit in her hand"}]
00000002905.jpg
2905_evjvqa
[{"from": "human", "value": "what color shirt is the woman without the envelope wearing?\n<image>"}, {"from": "gpt", "value": "purple"}]
00000002907.jpg
2907_evjvqa
[{"from": "human", "value": "what kind of vehicle is used by the woman in purple?\n<image>"}, {"from": "gpt", "value": "the electric bicycle"}]
00000002907.jpg
2907_evjvqa
[{"from": "human", "value": "what is the woman in purple shirt stopping the electric bike for?\n<image>"}, {"from": "gpt", "value": "buying products at the stall in the market"}]
00000002907.jpg
2907_evjvqa
[{"from": "human", "value": "what is the purpose of the woman in the dark blue short-sleeved shirt holding out the bill?\n<image>"}, {"from": "gpt", "value": "to pay"}]
00000002908.jpg
2908_evjvqa
[{"from": "human", "value": "what items are hung on high for decoration?\n<image>"}, {"from": "gpt", "value": "the umbrellas"}]
00000002908.jpg
2908_evjvqa
[{"from": "human", "value": "what are attached on the high ropes for decoration?\n<image>"}, {"from": "gpt", "value": "the umbrellas"}]
00000002908.jpg
2908_evjvqa
[{"from": "human", "value": "what are chosen to decor on the high ropes for decoration?\n<image>"}, {"from": "gpt", "value": "the umbrellas"}]
00000002913.jpg
2913_evjvqa
[{"from": "human", "value": "what black items are left on the neck of the motorcycle?\n<image>"}, {"from": "gpt", "value": "the bag"}]
00000002914.jpg
2914_evjvqa
[{"from": "human", "value": "what is the outdoor exhibit here?\n<image>"}, {"from": "gpt", "value": "the tank has been taken down"}]
00000002915.jpg
2915_evjvqa
[{"from": "human", "value": "who is holding boxes of cakes in hands?\n<image>"}, {"from": "gpt", "value": "the woman is wearing the pink overcoat"}]
00000002915.jpg
2915_evjvqa
[{"from": "human", "value": "which side is the woman in the pink coat on compared to the other woman?\n<image>"}, {"from": "gpt", "value": "on the right"}]
00000002915.jpg
2915_evjvqa
[{"from": "human", "value": "what is the woman in pink intending to buy>?\n<image>"}, {"from": "gpt", "value": "some boxes of cakes"}]
00000002917.jpg
2917_evjvqa
[{"from": "human", "value": "how many cars are parked on the side of the road?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002918.jpg
2918_evjvqa
[{"from": "human", "value": "what color of mask is the young man in the red jacket wearing?\n<image>"}, {"from": "gpt", "value": "sky blue"}]
00000002918.jpg
2918_evjvqa
[{"from": "human", "value": "what is the young man in the red overcoat doing?\n<image>"}, {"from": "gpt", "value": "he is lining up to pay"}]
00000002918.jpg
2918_evjvqa
[{"from": "human", "value": "which side does the man in the brown t-shirt stand against the man in the light gray t-shirt with glasses?\n<image>"}, {"from": "gpt", "value": "in front"}]
00000002920.jpg
2920_evjvqa
[{"from": "human", "value": "what is the employee in the red shirt doing at this supermarket?\n<image>"}, {"from": "gpt", "value": "paying the customer"}]
00000002920.jpg
2920_evjvqa
[{"from": "human", "value": "who is being charged for the items purchased?\n<image>"}, {"from": "gpt", "value": "the man is wearing the blue shirt with floral pattern"}]
00000002924.jpg
2924_evjvqa
[{"from": "human", "value": "what color hat is the man wearing who is standing opposite the woman in the yellow shirt?\n<image>"}, {"from": "gpt", "value": "he is wearing the blue helmet"}]
00000002924.jpg
2924_evjvqa
[{"from": "human", "value": "which side is the woman in the yellow shirt on compared to the man in the blue helmet?\n<image>"}, {"from": "gpt", "value": "opposite"}]
00000002924.jpg
2924_evjvqa
[{"from": "human", "value": "what is the man wearing the blue helmet selling?\n<image>"}, {"from": "gpt", "value": "dried fish and sausage"}]
00000002925.jpg
2925_evjvqa
[{"from": "human", "value": "how is the woman with the handbag standing near the rows of store shelves dressed?\n<image>"}, {"from": "gpt", "value": "she is wearing the yellow shirt and black skirt"}]
00000002925.jpg
2925_evjvqa
[{"from": "human", "value": "which side is the fruit stand compared to the fruit and vegetable stall on?\n<image>"}, {"from": "gpt", "value": "the fruit stall is behind the vegetable stall"}]
00000002925.jpg
2925_evjvqa
[{"from": "human", "value": "which side is the fruit stand compared to the dry food stall?\n<image>"}, {"from": "gpt", "value": "on the right"}]
00000002931.jpg
2931_evjvqa
[{"from": "human", "value": "what color is the trolley of the supermarket?\n<image>"}, {"from": "gpt", "value": "red"}]
00000002931.jpg
2931_evjvqa
[{"from": "human", "value": "where is the shopping of the people going on?\n<image>"}, {"from": "gpt", "value": "in te supermarket"}]
00000002931.jpg
2931_evjvqa
[{"from": "human", "value": "what's behind the woman in the blue shirt?\n<image>"}, {"from": "gpt", "value": "the trolley"}]
00000002933.jpg
2933_evjvqa
[{"from": "human", "value": "what is the man in the white shirt doing with the children?\n<image>"}, {"from": "gpt", "value": "he is giving gifts"}]
00000002933.jpg
2933_evjvqa
[{"from": "human", "value": "which students are not received gifts?\n<image>"}, {"from": "gpt", "value": "two students on the right"}]
00000002933.jpg
2933_evjvqa
[{"from": "human", "value": "what is the third student from the right leaning forward for?\n<image>"}, {"from": "gpt", "value": "to receive the gift"}]
00000002938.jpg
2938_evjvqa
[{"from": "human", "value": "what is the cashier doing for the customer?\n<image>"}, {"from": "gpt", "value": "she is keeping the receipt for the customer to scan the qr code"}]
00000002938.jpg
2938_evjvqa
[{"from": "human", "value": "what is the woman in the white hat using the phone for?\n<image>"}, {"from": "gpt", "value": "to scan the qr"}]
00000002938.jpg
2938_evjvqa
[{"from": "human", "value": "who is the cashier at the convenience store?\n<image>"}, {"from": "gpt", "value": "the girl is wearing the red shirt and black mask"}]
00000002942.jpg
2942_evjvqa
[{"from": "human", "value": "how many chairs are being sit?\n<image>"}, {"from": "gpt", "value": "four"}]
00000002942.jpg
2942_evjvqa
[{"from": "human", "value": "what posing is the person wearing the green helmet?\n<image>"}, {"from": "gpt", "value": "sitting"}]
00000002942.jpg
2942_evjvqa
[{"from": "human", "value": "what does the weather look like on the street?\n<image>"}, {"from": "gpt", "value": "it is sunny"}]
00000002943.jpg
2943_evjvqa
[{"from": "human", "value": "where is the woman in the purple overcoat going?\n<image>"}, {"from": "gpt", "value": "she is going up the stairs"}]
00000002943.jpg
2943_evjvqa
[{"from": "human", "value": "who is the girl in the blue overcoat standing to the left?\n<image>"}, {"from": "gpt", "value": "she is on the left of the woman in the black pant and without mask"}]
00000002944.jpg
2944_evjvqa
[{"from": "human", "value": "where is the man standing?\n<image>"}, {"from": "gpt", "value": "he is standing to the left of the yellow flag on the shore"}]
00000002944.jpg
2944_evjvqa
[{"from": "human", "value": "what color is the bow of the canoe painted?\n<image>"}, {"from": "gpt", "value": "red"}]
00000002945.jpg
2945_evjvqa
[{"from": "human", "value": "which side does the man in the purple tie stand against the man in the orange tie?\n<image>"}, {"from": "gpt", "value": "on the left"}]
00000002945.jpg
2945_evjvqa
[{"from": "human", "value": "which direction is the man in the orange tie looking?\n<image>"}, {"from": "gpt", "value": "the fruit baskets are on display"}]
00000002945.jpg
2945_evjvqa
[{"from": "human", "value": "which side does the man in the orange tie stand against the man in the purple tie?\n<image>"}, {"from": "gpt", "value": "on the right"}]
00000002946.jpg
2946_evjvqa
[{"from": "human", "value": "what color helmet is the woman in the pink overcoat wearing?\n<image>"}, {"from": "gpt", "value": "red"}]
00000002946.jpg
2946_evjvqa
[{"from": "human", "value": "what color letters are used to represent the market name?\n<image>"}, {"from": "gpt", "value": "the red letters"}]
00000002948.jpg
2948_evjvqa
[{"from": "human", "value": "who is the one walking with a purple backpack?\n<image>"}, {"from": "gpt", "value": "the man is wearing shirt and shorts"}]
00000002948.jpg
2948_evjvqa
[{"from": "human", "value": "who are following the man holding a purple backpack?\n<image>"}, {"from": "gpt", "value": "the boy and the woman is carrying the baby"}]
00000002948.jpg
2948_evjvqa
[{"from": "human", "value": "what color of flowers is bloomed on the grass field?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000002950.jpg
2950_evjvqa
[{"from": "human", "value": "what does the plate on the left of the bowl of pho contain?\n<image>"}, {"from": "gpt", "value": "herbs"}]
00000002950.jpg
2950_evjvqa
[{"from": "human", "value": "what can be taken from the bowl to the right of the bowl of pho?\n<image>"}, {"from": "gpt", "value": "the lemon slices"}]
00000002950.jpg
2950_evjvqa
[{"from": "human", "value": "what ingredients are inside this bowl of pho?\n<image>"}, {"from": "gpt", "value": "noodle, green onions and beef"}]
00000002951.jpg
2951_evjvqa
[{"from": "human", "value": "where does the person in the green uniform stand?\n<image>"}, {"from": "gpt", "value": "the fourth from the left"}]
00000002951.jpg
2951_evjvqa
[{"from": "human", "value": "who is holding a bag of tomatoes in his hand?\n<image>"}, {"from": "gpt", "value": "the man is standing on the left side"}]
00000002951.jpg
2951_evjvqa
[{"from": "human", "value": "what are the three people on the left doing at the supermarket?\n<image>"}, {"from": "gpt", "value": "checking product information"}]
00000002957.jpg
2957_evjvqa
[{"from": "human", "value": "what color is the scooter parked in front of the coffee store?\n<image>"}, {"from": "gpt", "value": "red"}]
00000002957.jpg
2957_evjvqa
[{"from": "human", "value": "how many trash cans are there in front of the bar?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002957.jpg
2957_evjvqa
[{"from": "human", "value": "which side does the arrow symbol drawn on the pavement point?\n<image>"}, {"from": "gpt", "value": "to the left"}]
00000002958.jpg
2958_evjvqa
[{"from": "human", "value": "what does the woman in the checkered shirt sell in the market?\n<image>"}, {"from": "gpt", "value": "vegetable"}]
00000002958.jpg
2958_evjvqa
[{"from": "human", "value": "what is the person on the motorcycle wearing on his wrist?\n<image>"}, {"from": "gpt", "value": "the watch"}]
00000002958.jpg
2958_evjvqa
[{"from": "human", "value": "what is the woman in black doing?\n<image>"}, {"from": "gpt", "value": "she is weighing products"}]
00000002962.jpg
2962_evjvqa
[{"from": "human", "value": "what color overcoat is the woman carrying the pole wearing?\n<image>"}, {"from": "gpt", "value": "purple"}]
00000002962.jpg
2962_evjvqa
[{"from": "human", "value": "what is the woman in the purple shirt carrying on her shoulder?\n<image>"}, {"from": "gpt", "value": "she is carrying the pole"}]
00000002962.jpg
2962_evjvqa
[{"from": "human", "value": "what kind of goods do the roadside vendors sell?\n<image>"}, {"from": "gpt", "value": "vegetable"}]
00000002964.jpg
2964_evjvqa
[{"from": "human", "value": "what are the things in the hands of the person in the green shirt?\n<image>"}, {"from": "gpt", "value": "left hand holds a phone, right hand holds a package of food"}]
00000002964.jpg
2964_evjvqa
[{"from": "human", "value": "where is the young man in the green shirt going?\n<image>"}, {"from": "gpt", "value": "he is going to deliver food to customers"}]
00000002964.jpg
2964_evjvqa
[{"from": "human", "value": "which transport is the young man in green helmet used to deliver goods?\n<image>"}, {"from": "gpt", "value": "by motorbike"}]
00000002969.jpg
2969_evjvqa
[{"from": "human", "value": "what is hung on the wall?\n<image>"}, {"from": "gpt", "value": "the arts and the printed board"}]
00000002969.jpg
2969_evjvqa
[{"from": "human", "value": "how many benches are there in the room?\n<image>"}, {"from": "gpt", "value": "two"}]
00000002969.jpg
2969_evjvqa
[{"from": "human", "value": "what is between the two benches?\n<image>"}, {"from": "gpt", "value": "a table"}]
00000002974.jpg
2974_evjvqa
[{"from": "human", "value": "what does a woman in a black dress with a jacket look like?\n<image>"}, {"from": "gpt", "value": "the red jecket"}]
00000002974.jpg
2974_evjvqa
[{"from": "human", "value": "what kind of products are on display here?\n<image>"}, {"from": "gpt", "value": "the crafts"}]
00000002974.jpg
2974_evjvqa
[{"from": "human", "value": "what is the pose of the woman in the blue suit?\n<image>"}, {"from": "gpt", "value": "the woman in blue suit is squatting"}]
00000002976.jpg
2976_evjvqa
[{"from": "human", "value": "what does the sky over the market look like?\n<image>"}, {"from": "gpt", "value": "it is cloudy"}]
00000002981.jpg
2981_evjvqa
[{"from": "human", "value": "where does the young man in purple shirt stand?\n<image>"}, {"from": "gpt", "value": "the third from the right"}]
00000002981.jpg
2981_evjvqa
[{"from": "human", "value": "what is the man in the white shirt and the mask standing the podium for?\n<image>"}, {"from": "gpt", "value": "to receive the gift"}]
00000002981.jpg
2981_evjvqa
[{"from": "human", "value": "where is the young man in the blue t-shirt standing among the recipients?\n<image>"}, {"from": "gpt", "value": "the young man in a blue t-shirt stands between the man in a white shirt and the young man in a purple shirt"}]
00000002983.jpg
2983_evjvqa
[{"from": "human", "value": "có tất cả bao nhiêu người đang đi bộ trên sân?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000002983.jpg
2983_evjvqa
[{"from": "human", "value": "how many trees are planted along the pond?\n<image>"}, {"from": "gpt", "value": "four"}]
00000002983.jpg
2983_evjvqa
[{"from": "human", "value": "what does the sky look like in this area?\n<image>"}, {"from": "gpt", "value": "blue but whithout clouds"}]