Columns: image (string, length 15), id (string, length 8–12), conversations (JSON string, length 79–268)
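Each row below is an (image, id, conversations) triple; the conversations column is a JSON string of LLaVA-style chat turns, where each turn is a dict with "from" ("human" or "gpt") and "value" (the text, with an <image> placeholder marking where the picture goes in the human turn). As a minimal sketch of how a row might be decoded, using only the Python standard library (the parse_record helper is hypothetical, not part of the dataset):

```python
import json

def parse_record(image: str, record_id: str, conversations: str) -> dict:
    # Hypothetical helper: decode the JSON "conversations" column into
    # a list of {"from": "human"|"gpt", "value": str} turns.
    turns = json.loads(conversations)
    return {"image": image, "id": record_id, "conversations": turns}

# Usage with the first row of this section:
row = parse_record(
    "00000000162.jpg",
    "162_evjvqa",
    '[{"from": "human", "value": "how is the walking man attired?\\n<image>"},'
    ' {"from": "gpt", "value": "a dark blue shirt, blue trousers and a red cap"}]',
)
question = row["conversations"][0]["value"].replace("<image>", "").strip()
answer = row["conversations"][1]["value"]
print(question, "->", answer)
```

Running this on the first row prints the question/answer pair for 00000000162.jpg.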
00000000162.jpg
162_evjvqa
[{"from": "human", "value": "how is the walking man attired?\n<image>"}, {"from": "gpt", "value": "a dark blue shirt, blue trousers and a red cap"}]
00000000163.jpg
163_evjvqa
[{"from": "human", "value": "what color of the apron is the cashier wearing?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000163.jpg
163_evjvqa
[{"from": "human", "value": "what is the costume of the man waiting to pay the bill?\n<image>"}, {"from": "gpt", "value": "a gray shirt, black pants and a white mask"}]
00000000163.jpg
163_evjvqa
[{"from": "human", "value": "where is the cashier working?\n<image>"}, {"from": "gpt", "value": "checkout counter"}]
00000000163.jpg
163_evjvqa
[{"from": "human", "value": "what is a man wearing a white mask doing at the cashier?\n<image>"}, {"from": "gpt", "value": "waiting to pay"}]
00000000163.jpg
163_evjvqa
[{"from": "human", "value": "what is the cashier doing for the customer?\n<image>"}, {"from": "gpt", "value": "tying a bag"}]
00000000165.jpg
165_evjvqa
[{"from": "human", "value": "what color of the coat does the long-haired woman viewing the goods have?\n<image>"}, {"from": "gpt", "value": "ruby"}]
00000000167.jpg
167_evjvqa
[{"from": "human", "value": "how many men are standing?\n<image>"}, {"from": "gpt", "value": "four"}]
00000000167.jpg
167_evjvqa
[{"from": "human", "value": "how does a man in a white mask wear clothes?\n<image>"}, {"from": "gpt", "value": "a grey shirt and black trousers"}]
00000000167.jpg
167_evjvqa
[{"from": "human", "value": "among the people standing, what color of the masks are three people wearing?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000000168.jpg
168_evjvqa
[{"from": "human", "value": "what is the woman carrying in the basket?\n<image>"}, {"from": "gpt", "value": "bananas"}]
00000000168.jpg
168_evjvqa
[{"from": "human", "value": "what color is the house with flowers in front of the door painted?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000000168.jpg
168_evjvqa
[{"from": "human", "value": "what is the woman holding in her left hand?\n<image>"}, {"from": "gpt", "value": "a bunch of bananas"}]
00000000169.jpg
169_evjvqa
[{"from": "human", "value": "how is the girl standing next to the motorbike dressed?\n<image>"}, {"from": "gpt", "value": "a yellow dress, a grey blue handbag and black shoes"}]
00000000169.jpg
169_evjvqa
[{"from": "human", "value": "what two objects is the girl standing between?\n<image>"}, {"from": "gpt", "value": "a motorcycle and a chair"}]
00000000169.jpg
169_evjvqa
[{"from": "human", "value": "which side of the motorcycle is the chair located on?\n<image>"}, {"from": "gpt", "value": "on the right side"}]
00000000169.jpg
169_evjvqa
[{"from": "human", "value": "what is in between the chair and the motorbike?\n<image>"}, {"from": "gpt", "value": "a girl in a yellow dress with a blue and gray handbag"}]
00000000174.jpg
174_evjvqa
[{"from": "human", "value": "what color of the hat is the girl standing wearing?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000174.jpg
174_evjvqa
[{"from": "human", "value": "on which hand side is the girl carrying the bag?\n<image>"}, {"from": "gpt", "value": "on the left hand side"}]
00000000174.jpg
174_evjvqa
[{"from": "human", "value": "how is the woman standing attired?\n<image>"}, {"from": "gpt", "value": "a red hat, a cream beige skirt and a brown handbag"}]
00000000182.jpg
182_evjvqa
[{"from": "human", "value": "what are the main colors of the houses in the street?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000000182.jpg
182_evjvqa
[{"from": "human", "value": "how many floors do these houses have?\n<image>"}, {"from": "gpt", "value": "one"}]
00000000182.jpg
182_evjvqa
[{"from": "human", "value": "how many storeys do these houses have?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000184.jpg
184_evjvqa
[{"from": "human", "value": "what kind of clothes are the three kids wearing?\n<image>"}, {"from": "gpt", "value": "wearing paints and topless"}]
00000000184.jpg
184_evjvqa
[{"from": "human", "value": "how are the children playing?\n<image>"}, {"from": "gpt", "value": "splashing water on each other"}]
00000000184.jpg
184_evjvqa
[{"from": "human", "value": "out of the three children, what same object are the two holding?\n<image>"}, {"from": "gpt", "value": "a bucket"}]
00000000192.jpg
192_evjvqa
[{"from": "human", "value": "what color are the letters on the restaurant signboard designed with?\n<image>"}, {"from": "gpt", "value": "green"}]
00000000193.jpg
193_evjvqa
[{"from": "human", "value": "what is the woman in non la doing?\n<image>"}, {"from": "gpt", "value": "rowing"}]
00000000193.jpg
193_evjvqa
[{"from": "human", "value": "what color of the shirt is the person rowing the boat wearing?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000000193.jpg
193_evjvqa
[{"from": "human", "value": "how is the woman rowing the boat attired?\n<image>"}, {"from": "gpt", "value": "a blue shirt and a non la"}]
00000000193.jpg
193_evjvqa
[{"from": "human", "value": "what is the woman in blue sitting on?\n<image>"}, {"from": "gpt", "value": "a boat"}]
00000000193.jpg
193_evjvqa
[{"from": "human", "value": "what color is the hair of the rower?\n<image>"}, {"from": "gpt", "value": "black"}]
00000000196.jpg
196_evjvqa
[{"from": "human", "value": "what color is the woman's mask?\n<image>"}, {"from": "gpt", "value": "white"}]
00000000196.jpg
196_evjvqa
[{"from": "human", "value": "on which side of the woman is the red tablecloth?\n<image>"}, {"from": "gpt", "value": "behind"}]
00000000196.jpg
196_evjvqa
[{"from": "human", "value": "on which hand is the handbag worn by the woman?\n<image>"}, {"from": "gpt", "value": "on the left hand side"}]
00000000196.jpg
196_evjvqa
[{"from": "human", "value": "what is the color of the woman's shirt and mask?\n<image>"}, {"from": "gpt", "value": "the shirt is brown and the mask is white"}]
00000000196.jpg
196_evjvqa
[{"from": "human", "value": "where is the balloon?\n<image>"}, {"from": "gpt", "value": "on the right side of the woman"}]
00000003500.jpg
3500_evjvqa
[{"from": "human", "value": "what time is the scene in this square?\n<image>"}, {"from": "gpt", "value": "evening scence"}]
00000003500.jpg
3500_evjvqa
[{"from": "human", "value": "how are the people standing alone separate from the crowd wearing?\n<image>"}, {"from": "gpt", "value": "the people standing alone separate from the crowd are wearing white military uniforms"}]
00000003500.jpg
3500_evjvqa
[{"from": "human", "value": "what color light does the light in the distance emit?\n<image>"}, {"from": "gpt", "value": "the light in the distance emits white light"}]
00000003503.jpg
3503_evjvqa
[{"from": "human", "value": "where are the flag bearers going?\n<image>"}, {"from": "gpt", "value": "they are walking to the gate on the stairs"}]
00000003503.jpg
3503_evjvqa
[{"from": "human", "value": "what do the people in the blue hats walking up the stairs hold in their hands?\n<image>"}, {"from": "gpt", "value": "the people in the blue hats walking up the stairs hold a flag in their hands"}]
00000003503.jpg
3503_evjvqa
[{"from": "human", "value": "what color hats do the flag bearers wear?\n<image>"}, {"from": "gpt", "value": "the flag bearers wear blue hats"}]
00000003504.jpg
3504_evjvqa
[{"from": "human", "value": "what was built around the stone statue?\n<image>"}, {"from": "gpt", "value": "the fence was built around the stone statue"}]
00000003504.jpg
3504_evjvqa
[{"from": "human", "value": "what is the sky like at this agoda area?\n<image>"}, {"from": "gpt", "value": "at this agoda area the sky is blue and there are white clouds"}]
00000003504.jpg
3504_evjvqa
[{"from": "human", "value": "what are some tourists holding umbrellas for?\n<image>"}, {"from": "gpt", "value": "they hold umbrellas to block the sun"}]
00000003506.jpg
3506_evjvqa
[{"from": "human", "value": "what kind of shirts do the staff at this store all wear?\n<image>"}, {"from": "gpt", "value": "the staff at this store all wear pink t-shirts"}]
00000003506.jpg
3506_evjvqa
[{"from": "human", "value": "what kind of pants do the staff at this store wear?\n<image>"}, {"from": "gpt", "value": "the staff at this store wear black pants"}]
00000003506.jpg
3506_evjvqa
[{"from": "human", "value": "how many people stand in this store?\n<image>"}, {"from": "gpt", "value": "four people stand in this store"}]
00000003509.jpg
3509_evjvqa
[{"from": "human", "value": "where is the tallest person standing?\n<image>"}, {"from": "gpt", "value": "the tallest person standing in the middle of the young people"}]
00000003509.jpg
3509_evjvqa
[{"from": "human", "value": "what is sparkling behind these young people?\n<image>"}, {"from": "gpt", "value": "behind these young people is a christmas tree"}]
00000003509.jpg
3509_evjvqa
[{"from": "human", "value": "who is the tallest of these people?\n<image>"}, {"from": "gpt", "value": "the young man standing third from the left is the tallest"}]
00000003515.jpg
3515_evjvqa
[{"from": "human", "value": "how many pictures are hung on the wall?\n<image>"}, {"from": "gpt", "value": "three pictures are hung on the wall"}]
00000003517.jpg
3517_evjvqa
[{"from": "human", "value": "what color is the wall of the house on the rightmost painted?\n<image>"}, {"from": "gpt", "value": "the wall of the house on the rightmost is painted with yellow paint"}]
00000003517.jpg
3517_evjvqa
[{"from": "human", "value": "what is the weather like in this town?\n<image>"}, {"from": "gpt", "value": "in this town it is sunny"}]
00000003517.jpg
3517_evjvqa
[{"from": "human", "value": "what time is it here?\n<image>"}, {"from": "gpt", "value": "morning"}]
00000003518.jpg
3518_evjvqa
[{"from": "human", "value": "what color are the chairs in the room?\n<image>"}, {"from": "gpt", "value": "in the room there are orange chairs and gray chairs"}]
00000003518.jpg
3518_evjvqa
[{"from": "human", "value": "how is the man standing in the room wearing?\n<image>"}, {"from": "gpt", "value": "he is wearing an orange t-shirt and black pants"}]
00000003518.jpg
3518_evjvqa
[{"from": "human", "value": "what is the man in the orange shirt wearing on his face?\n<image>"}, {"from": "gpt", "value": "the man in the orange shirt is wearing a white mask"}]
00000003520.jpg
3520_evjvqa
[{"from": "human", "value": "which side is the man in the blue shirt standing on compared to the woman in black?\n<image>"}, {"from": "gpt", "value": "he is standing to the right of the woman in black"}]
00000003520.jpg
3520_evjvqa
[{"from": "human", "value": "how many children are standing with the adults?\n<image>"}, {"from": "gpt", "value": "two children"}]
00000003520.jpg
3520_evjvqa
[{"from": "human", "value": "who is standing to the right of the man in the blue shirt?\n<image>"}, {"from": "gpt", "value": "the woman wearing yellow ao dai"}]
00000003522.jpg
3522_evjvqa
[{"from": "human", "value": "what is this woman intending to buy?\n<image>"}, {"from": "gpt", "value": "she is intending to buy a bag"}]
00000003522.jpg
3522_evjvqa
[{"from": "human", "value": "what is this woman looking at?\n<image>"}, {"from": "gpt", "value": "she is looking at a bag at the stall"}]
00000003522.jpg
3522_evjvqa
[{"from": "human", "value": "what item is being bought by women?\n<image>"}, {"from": "gpt", "value": "the bag"}]
00000003523.jpg
3523_evjvqa
[{"from": "human", "value": "where are some people standing at this sightseeing place?\n<image>"}, {"from": "gpt", "value": "they are standing around the windmill"}]
00000003523.jpg
3523_evjvqa
[{"from": "human", "value": "what is placed on each fence post along the path?\n<image>"}, {"from": "gpt", "value": "ornamental flower pot is placed on each fence post along the path"}]
00000003523.jpg
3523_evjvqa
[{"from": "human", "value": "what color is the fence along the path painted?\n<image>"}, {"from": "gpt", "value": "the fence along the path is painted black"}]
00000003525.jpg
3525_evjvqa
[{"from": "human", "value": "which young man wears a white t-shirt and no jacket?\n<image>"}, {"from": "gpt", "value": "the young man standing on the far left among the young men standing next to each other"}]
00000003525.jpg
3525_evjvqa
[{"from": "human", "value": "what is the young man not wearing a jacket doing?\n<image>"}, {"from": "gpt", "value": "he is reading book"}]
00000003525.jpg
3525_evjvqa
[{"from": "human", "value": "what is the young man standing on the left of the person not wearing a jacket doing?\n<image>"}, {"from": "gpt", "value": "that young man is reading book"}]
00000003533.jpg
3533_evjvqa
[{"from": "human", "value": "what color chair is this woman sitting on?\n<image>"}, {"from": "gpt", "value": "this woman is sitting on the green chair"}]
00000003533.jpg
3533_evjvqa
[{"from": "human", "value": "where is the helmet?\n<image>"}, {"from": "gpt", "value": "the helmet is next to the red water bottle on the table"}]
00000003533.jpg
3533_evjvqa
[{"from": "human", "value": "what material are the chairs behind the sitting woman?\n<image>"}, {"from": "gpt", "value": "behind the sitting woman are the chairs made of stainless steel"}]
00000003535.jpg
3535_evjvqa
[{"from": "human", "value": "where is a person walking?\n<image>"}, {"from": "gpt", "value": "a person is walking next to a red barrel"}]
00000003538.jpg
3538_evjvqa
[{"from": "human", "value": "what is the girl in the striped shirt going to buy?\n<image>"}, {"from": "gpt", "value": "she is going to buy fruit"}]
00000003538.jpg
3538_evjvqa
[{"from": "human", "value": "what does this stall sell on styrofoam boxes and baskets?\n<image>"}, {"from": "gpt", "value": "this stall sells fruit on styrofoam boxes and baskets"}]
00000003538.jpg
3538_evjvqa
[{"from": "human", "value": "what is the posture of the woman standing opposite the girl in the striped shirt?\n<image>"}, {"from": "gpt", "value": "that woman is bending down"}]
00000003540.jpg
3540_evjvqa
[{"from": "human", "value": "what is the pattern of the flag on the roof?\n<image>"}, {"from": "gpt", "value": "the flag has a red background and a yellow star"}]
00000003540.jpg
3540_evjvqa
[{"from": "human", "value": "what means of transport do people use on the road?\n<image>"}, {"from": "gpt", "value": "they walk"}]
00000003541.jpg
3541_evjvqa
[{"from": "human", "value": "what item is the man in the checkered shirt standing looking at?\n<image>"}, {"from": "gpt", "value": "he is looking at t-shirts"}]
00000003541.jpg
3541_evjvqa
[{"from": "human", "value": "which way is the woman with the handbag pushing the trolley looking?\n<image>"}, {"from": "gpt", "value": "she is looking at the t-shirt counter where the checkered man is standing"}]
00000003551.jpg
3551_evjvqa
[{"from": "human", "value": "what vehicle is moving on the water?\n<image>"}, {"from": "gpt", "value": "the canoe is moving on the water"}]
00000003551.jpg
3551_evjvqa
[{"from": "human", "value": "where is the canoe moving?\n<image>"}, {"from": "gpt", "value": "the canoe is moving on the sea"}]
00000003553.jpg
3553_evjvqa
[{"from": "human", "value": "how many signs are there on the sidewalk?\n<image>"}, {"from": "gpt", "value": "two signs are there on the sidewalk"}]
00000003553.jpg
3553_evjvqa
[{"from": "human", "value": "where is the motorbike parked?\n<image>"}, {"from": "gpt", "value": "the motorbike is parked on the left roadside"}]
00000003553.jpg
3553_evjvqa
[{"from": "human", "value": "how is the traffic on this road?\n<image>"}, {"from": "gpt", "value": "on this road the traffic is deserted"}]
00000003559.jpg
3559_evjvqa
[{"from": "human", "value": "what is this girl doing on the balcony?\n<image>"}, {"from": "gpt", "value": "she is drinking"}]
00000003559.jpg
3559_evjvqa
[{"from": "human", "value": "what is this girl holding in her hand?\n<image>"}, {"from": "gpt", "value": "she is holding a water bottle"}]
00000003559.jpg
3559_evjvqa
[{"from": "human", "value": "where is this girl standing?\n<image>"}, {"from": "gpt", "value": "this girl is standing on the balcony"}]
00000003563.jpg
3563_evjvqa
[{"from": "human", "value": "what is the blue thing behind the woman in the yellow shirt?\n<image>"}, {"from": "gpt", "value": "behind the woman in the yellow shirt is a blue plastic chair"}]
00000003564.jpg
3564_evjvqa
[{"from": "human", "value": "how many customers are sitting behind the window at this cafe?\n<image>"}, {"from": "gpt", "value": "two customers are sitting behind the window at this cafe"}]
00000003564.jpg
3564_evjvqa
[{"from": "human", "value": "what red things are hanging in front of this cafe?\n<image>"}, {"from": "gpt", "value": "lanterns"}]
00000003571.jpg
3571_evjvqa
[{"from": "human", "value": "what is the weather like in this town?\n<image>"}, {"from": "gpt", "value": "it is sunny in this town"}]
00000003571.jpg
3571_evjvqa
[{"from": "human", "value": "what is hanging on the rop across the road?\n<image>"}, {"from": "gpt", "value": "laterns"}]
00000003571.jpg
3571_evjvqa
[{"from": "human", "value": "what color are these houses painted for the facade walls?\n<image>"}, {"from": "gpt", "value": "the facades walls of these houses are painted yellow"}]
00000003573.jpg
3573_evjvqa
[{"from": "human", "value": "who is on the left of the person in the yellow shirt?\n<image>"}, {"from": "gpt", "value": "on the left of the person in the yellow shirt is the man in the red shirt"}]
00000003573.jpg
3573_evjvqa
[{"from": "human", "value": "what is the man in the red shirt doing?\n<image>"}, {"from": "gpt", "value": "the man in the red shirt is looking at pictures at the museum"}]
00000003573.jpg
3573_evjvqa
[{"from": "human", "value": "what is the person on the right of the man in the red shirt doing?\n<image>"}, {"from": "gpt", "value": "that person is looking at pictures at the museum"}]
00000003575.jpg
3575_evjvqa
[{"from": "human", "value": "where is the girl in the pink shirt sitting?\n<image>"}, {"from": "gpt", "value": "she is sitting on the railway track"}]