image (string, 15 chars) | id (string, 8-12 chars) | conversations (string, 79-268 chars) |
---|---|---|
00000003346.jpg | 3346_evjvqa | [{"from": "human", "value": "who is to the right of the woman in the black shirt?\n<image>"}, {"from": "gpt", "value": "to the right of the woman in the black shirt is the woman in the gray shirt"}] |
00000003346.jpg | 3346_evjvqa | [{"from": "human", "value": "what vehicle is moving behind the woman in the black shirt?\n<image>"}, {"from": "gpt", "value": "the cyclo is moving behind the woman in the black shirt"}] |
00000003350.jpg | 3350_evjvqa | [{"from": "human", "value": "what are these two doing?\n<image>"}, {"from": "gpt", "value": "they are hoeing the ground"}] |
00000003350.jpg | 3350_evjvqa | [{"from": "human", "value": "what do these two use to hoe the ground?\n<image>"}, {"from": "gpt", "value": "they use a hoe to hoe the ground"}] |
00000003350.jpg | 3350_evjvqa | [{"from": "human", "value": "what tool are these people using for their work?\n<image>"}, {"from": "gpt", "value": "the hoe"}] |
00000003356.jpg | 3356_evjvqa | [{"from": "human", "value": "what is the woman in the red jacket standing next to?\n<image>"}, {"from": "gpt", "value": "the woman in the red jacket is standing next to a white motorbike"}] |
00000003358.jpg | 3358_evjvqa | [{"from": "human", "value": "what is the man in the self-defense militia uniform holding in his hand?\n<image>"}, {"from": "gpt", "value": "he is holding a baton"}] |
00000003359.jpg | 3359_evjvqa | [{"from": "human", "value": "what items do these shelves display?\n<image>"}, {"from": "gpt", "value": "these shelves display tv models"}] |
00000003359.jpg | 3359_evjvqa | [{"from": "human", "value": "what kind of equipment is sold at these shelves?\n<image>"}, {"from": "gpt", "value": "tvs are sold at these shelves"}] |
00000003359.jpg | 3359_evjvqa | [{"from": "human", "value": "what items does this store sell at these shelves?\n<image>"}, {"from": "gpt", "value": "tv"}] |
00000003360.jpg | 3360_evjvqa | [{"from": "human", "value": "how many white stripes are on the back of the woman's gray-blue jacket?\n<image>"}, {"from": "gpt", "value": "the back of the woman's gray-blue jacket has two white stripes"}] |
00000003360.jpg | 3360_evjvqa | [{"from": "human", "value": "what pants is the woman in the blue shirt wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing black pants"}] |
00000003360.jpg | 3360_evjvqa | [{"from": "human", "value": "what is the woman in the jacket with two white stripes looking at?\n<image>"}, {"from": "gpt", "value": "the woman in the jacket with two white stripes is looking at beer boxes"}] |
00000003361.jpg | 3361_evjvqa | [{"from": "human", "value": "how many posters are painted on the wall?\n<image>"}, {"from": "gpt", "value": "three posters are painted on the wall"}] |
00000003361.jpg | 3361_evjvqa | [{"from": "human", "value": "what vehicle is the man moving on the road by?\n<image>"}, {"from": "gpt", "value": "the man is moving on the road by motorbike"}] |
00000003361.jpg | 3361_evjvqa | [{"from": "human", "value": "what is the man in the helmet doing?\n<image>"}, {"from": "gpt", "value": "he is riding a motorbike"}] |
00000003362.jpg | 3362_evjvqa | [{"from": "human", "value": "what kind of items does this store sell?\n<image>"}, {"from": "gpt", "value": "this store sells kids' clothes"}] |
00000003362.jpg | 3362_evjvqa | [{"from": "human", "value": "what kind of clothes can be bought at this store?\n<image>"}, {"from": "gpt", "value": "kids' clothes"}] |
00000003362.jpg | 3362_evjvqa | [{"from": "human", "value": "who do you want to buy clothes for if you visit this store?\n<image>"}, {"from": "gpt", "value": "you can visit this store if you want to buy clothes for kids"}] |
00000003369.jpg | 3369_evjvqa | [{"from": "human", "value": "how many cow figures are there in all?\n<image>"}, {"from": "gpt", "value": "there are three cow figures"}] |
00000003369.jpg | 3369_evjvqa | [{"from": "human", "value": "what is to the back right of the two small cow figures?\n<image>"}, {"from": "gpt", "value": "a windmill figure"}] |
00000003370.jpg | 3370_evjvqa | [{"from": "human", "value": "where are customers' motorbikes at this restaurant parked?\n<image>"}, {"from": "gpt", "value": "customers' motorbikes at this restaurant are parked on the sidewalk"}] |
00000003370.jpg | 3370_evjvqa | [{"from": "human", "value": "who looks after the customers' motorbikes at this restaurant?\n<image>"}, {"from": "gpt", "value": "the security guards wearing blue shirts"}] |
00000003370.jpg | 3370_evjvqa | [{"from": "human", "value": "what kind of vehicles are parked on the sidewalk in front of the shop?\n<image>"}, {"from": "gpt", "value": "motorbikes"}] |
00000003381.jpg | 3381_evjvqa | [{"from": "human", "value": "what is the sitting girl holding in her hand?\n<image>"}, {"from": "gpt", "value": "she is holding a hand fan"}] |
00000003381.jpg | 3381_evjvqa | [{"from": "human", "value": "what outfit is the girl holding the hand fan wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a red floral dress"}] |
00000003381.jpg | 3381_evjvqa | [{"from": "human", "value": "what is the weather like where the girl is sitting?\n<image>"}, {"from": "gpt", "value": "the girl is sitting in the sun"}] |
00000003383.jpg | 3383_evjvqa | [{"from": "human", "value": "what is the woman in the purple shirt doing?\n<image>"}, {"from": "gpt", "value": "the woman in the purple shirt is pouring cake batter"}] |
00000003383.jpg | 3383_evjvqa | [{"from": "human", "value": "what is the woman in the purple shirt doing by the fire stove?\n<image>"}, {"from": "gpt", "value": "she is pouring cake batter"}] |
00000003383.jpg | 3383_evjvqa | [{"from": "human", "value": "what does this woman use to line the cake that has been poured?\n<image>"}, {"from": "gpt", "value": "banana leaf"}] |
00000003386.jpg | 3386_evjvqa | [{"from": "human", "value": "what kind of agricultural product is sold at the counter that the woman in the black shirt on the left is standing next to?\n<image>"}, {"from": "gpt", "value": "the woman in the black shirt on the left is standing next to the garlic counter"}] |
00000003386.jpg | 3386_evjvqa | [{"from": "human", "value": "what color mask is the woman in the black shirt on the left of the garlic counter wearing?\n<image>"}, {"from": "gpt", "value": "the woman in the black shirt on the left of the garlic counter is wearing a white mask"}] |
00000003386.jpg | 3386_evjvqa | [{"from": "human", "value": "how many people wear white at this supermarket?\n<image>"}, {"from": "gpt", "value": "one person wears a white shirt at this supermarket"}] |
00000003387.jpg | 3387_evjvqa | [{"from": "human", "value": "what color shirt is this young man wearing?\n<image>"}, {"from": "gpt", "value": "this young man is wearing a yellow shirt"}] |
00000003387.jpg | 3387_evjvqa | [{"from": "human", "value": "what is the young man in the yellow shirt wearing on his head?\n<image>"}, {"from": "gpt", "value": "he is not wearing anything on his head"}] |
00000003387.jpg | 3387_evjvqa | [{"from": "human", "value": "where is the young man in the yellow shirt?\n<image>"}, {"from": "gpt", "value": "next to a counter in a supermarket"}] |
00000003391.jpg | 3391_evjvqa | [{"from": "human", "value": "what is the man in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "he is driving an electric car carrying passengers"}] |
00000003391.jpg | 3391_evjvqa | [{"from": "human", "value": "who is the driver of the yellow electric car?\n<image>"}, {"from": "gpt", "value": "the driver of the yellow electric car is the man in the blue shirt"}] |
00000003391.jpg | 3391_evjvqa | [{"from": "human", "value": "what vehicle is the man in the blue shirt driving?\n<image>"}, {"from": "gpt", "value": "the man in the blue shirt is driving an electric car"}] |
00000003396.jpg | 3396_evjvqa | [{"from": "human", "value": "who is the young man with?\n<image>"}, {"from": "gpt", "value": "the young man is with a girl"}] |
00000003396.jpg | 3396_evjvqa | [{"from": "human", "value": "what clothes does the young man walking next to the girl wear?\n<image>"}, {"from": "gpt", "value": "he wears a black jacket, black pants and black shoes"}] |
00000003396.jpg | 3396_evjvqa | [{"from": "human", "value": "what does the girl wear on her arm?\n<image>"}, {"from": "gpt", "value": "the girl wears a black bag on her arm"}] |
00000000002.jpg | 2_evjvqa | [{"from": "human", "value": "how many sailing boats are there on the river?\n<image>"}, {"from": "gpt", "value": "there are three sailing boats on the river"}] |
00000000003.jpg | 3_evjvqa | [{"from": "human", "value": "how many women are carrying a baby on their back?\n<image>"}, {"from": "gpt", "value": "there is one woman carrying a baby on her back"}] |
00000000004.jpg | 4_evjvqa | [{"from": "human", "value": "where is the woman standing?\n<image>"}, {"from": "gpt", "value": "at the lantern stall"}] |
00000000004.jpg | 4_evjvqa | [{"from": "human", "value": "where does the woman look?\n<image>"}, {"from": "gpt", "value": "towards the lanterns"}] |
00000000004.jpg | 4_evjvqa | [{"from": "human", "value": "what does the stall in front of the woman sell?\n<image>"}, {"from": "gpt", "value": "decorative lanterns"}] |
00000000005.jpg | 5_evjvqa | [{"from": "human", "value": "where was this scene recorded?\n<image>"}, {"from": "gpt", "value": "at the corner of a market"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "how many people are there eating cantonese noodles?\n<image>"}, {"from": "gpt", "value": "five people"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "how many people does the young man wearing a hat and eating cantonese noodles go with?\n<image>"}, {"from": "gpt", "value": "two people"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "what color does the owner wear?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "what stall is behind the cantonese noodles stall?\n<image>"}, {"from": "gpt", "value": "banh tet stall"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "what color does the owner of the banh tet stall wear?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "what color is the jacket of the woman standing behind the banh tet stall's owner?\n<image>"}, {"from": "gpt", "value": "green"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "how many fans are there in the banh tet stall?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "what stall is behind the banh tet stall?\n<image>"}, {"from": "gpt", "value": "the spring roll stall"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "how many people are sitting in the spring roll stall?\n<image>"}, {"from": "gpt", "value": "two people"}] |
00000000006.jpg | 6_evjvqa | [{"from": "human", "value": "what color does the girl sitting beside the woman wearing a non la wear?\n<image>"}, {"from": "gpt", "value": "blue polka dot"}] |
00000000009.jpg | 9_evjvqa | [{"from": "human", "value": "how many chairs are there along the path?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000000009.jpg | 9_evjvqa | [{"from": "human", "value": "from which direction does the sunlight come?\n<image>"}, {"from": "gpt", "value": "from the right"}] |
00000000009.jpg | 9_evjvqa | [{"from": "human", "value": "how many people are having their photo taken?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000000010.jpg | 10_evjvqa | [{"from": "human", "value": "what are the couple holding in their hands?\n<image>"}, {"from": "gpt", "value": "a bouquet of flowers"}] |
00000000017.jpg | 17_evjvqa | [{"from": "human", "value": "where was this view recorded?\n<image>"}, {"from": "gpt", "value": "in a museum"}] |
00000000020.jpg | 20_evjvqa | [{"from": "human", "value": "where are the ships docked?\n<image>"}, {"from": "gpt", "value": "by a coast"}] |
00000000022.jpg | 22_evjvqa | [{"from": "human", "value": "how many people are choosing clothes?\n<image>"}, {"from": "gpt", "value": "four people"}] |
00000000022.jpg | 22_evjvqa | [{"from": "human", "value": "what color does the woman carrying a purse wear?\n<image>"}, {"from": "gpt", "value": "black"}] |
00000000022.jpg | 22_evjvqa | [{"from": "human", "value": "what is the woman in pink doing at the far end?\n<image>"}, {"from": "gpt", "value": "sitting and using her phone"}] |
00000000023.jpg | 23_evjvqa | [{"from": "human", "value": "what is the man beside the yellow-raincoat guy doing?\n<image>"}, {"from": "gpt", "value": "taking photos"}] |
00000000023.jpg | 23_evjvqa | [{"from": "human", "value": "what does the stall opposite the man taking photos sell?\n<image>"}, {"from": "gpt", "value": "flowers"}] |
00000000025.jpg | 25_evjvqa | [{"from": "human", "value": "how many prepared chairs are in front of the restaurant?\n<image>"}, {"from": "gpt", "value": "five"}] |
00000000025.jpg | 25_evjvqa | [{"from": "human", "value": "how many prepared tables are in front of the restaurant?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000000026.jpg | 26_evjvqa | [{"from": "human", "value": "what does the girl wear to pose for pictures?\n<image>"}, {"from": "gpt", "value": "red shirt, black skirt and white shoes"}] |
00000000026.jpg | 26_evjvqa | [{"from": "human", "value": "what is the girl standing for?\n<image>"}, {"from": "gpt", "value": "posing for pictures"}] |
00000000031.jpg | 31_evjvqa | [{"from": "human", "value": "how many men are sitting?\n<image>"}, {"from": "gpt", "value": "there is one man sitting"}] |
00000000032.jpg | 32_evjvqa | [{"from": "human", "value": "how many people are standing by the seashore?\n<image>"}, {"from": "gpt", "value": "there are three people standing by the seashore"}] |
00000000032.jpg | 32_evjvqa | [{"from": "human", "value": "where is the group of three people standing?\n<image>"}, {"from": "gpt", "value": "on a beach"}] |
00000000037.jpg | 37_evjvqa | [{"from": "human", "value": "what is the color of the masks worn by two kids standing either side of santa claus?\n<image>"}, {"from": "gpt", "value": "black"}] |
00000000037.jpg | 37_evjvqa | [{"from": "human", "value": "how many standing kids are in the photo?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000000037.jpg | 37_evjvqa | [{"from": "human", "value": "what skirt is the girl wearing?\n<image>"}, {"from": "gpt", "value": "a pink polka dot skirt"}] |
00000000040.jpg | 40_evjvqa | [{"from": "human", "value": "which commodities is the woman choosing?\n<image>"}, {"from": "gpt", "value": "vegetables and fruits"}] |
00000000040.jpg | 40_evjvqa | [{"from": "human", "value": "what is the man in black doing?\n<image>"}, {"from": "gpt", "value": "choosing a bundle of vegetables"}] |
00000000043.jpg | 43_evjvqa | [{"from": "human", "value": "how many people are standing by the seafood stall?\n<image>"}, {"from": "gpt", "value": "six people"}] |
00000000043.jpg | 43_evjvqa | [{"from": "human", "value": "how many people are selecting the seafood?\n<image>"}, {"from": "gpt", "value": "three people"}] |
00000000043.jpg | 43_evjvqa | [{"from": "human", "value": "what is the guy in a white shirt standing across from the woman in a purple shirt doing?\n<image>"}, {"from": "gpt", "value": "selecting the seafood"}] |
00000000044.jpg | 44_evjvqa | [{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "at a market"}] |
00000000047.jpg | 47_evjvqa | [{"from": "human", "value": "how many people are in the store?\n<image>"}, {"from": "gpt", "value": "four people"}] |
00000000047.jpg | 47_evjvqa | [{"from": "human", "value": "what is the woman wearing a white shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing items"}] |
00000000047.jpg | 47_evjvqa | [{"from": "human", "value": "what is the woman next to the one in yellow doing?\n<image>"}, {"from": "gpt", "value": "choosing decorative items"}] |
00000000047.jpg | 47_evjvqa | [{"from": "human", "value": "what does this store sell?\n<image>"}, {"from": "gpt", "value": "souvenirs, ornaments"}] |
00000000048.jpg | 48_evjvqa | [{"from": "human", "value": "which district is this grocery store located at?\n<image>"}, {"from": "gpt", "value": "district 1"}] |
00000000048.jpg | 48_evjvqa | [{"from": "human", "value": "where is this grocery store?\n<image>"}, {"from": "gpt", "value": "district 1"}] |
00000000048.jpg | 48_evjvqa | [{"from": "human", "value": "what is the business of this store?\n<image>"}, {"from": "gpt", "value": "groceries"}] |
00000000048.jpg | 48_evjvqa | [{"from": "human", "value": "how many people are in the store?\n<image>"}, {"from": "gpt", "value": "one person"}] |
00000000048.jpg | 48_evjvqa | [{"from": "human", "value": "how many people are collecting products?\n<image>"}, {"from": "gpt", "value": "nobody is collecting"}] |
00000000051.jpg | 51_evjvqa | [{"from": "human", "value": "where are these people sightseeing?\n<image>"}, {"from": "gpt", "value": "at a historic attraction"}] |
00000000053.jpg | 53_evjvqa | [{"from": "human", "value": "which samples is the room exhibiting now?\n<image>"}, {"from": "gpt", "value": "a whale skeleton"}] |
00000000053.jpg | 53_evjvqa | [{"from": "human", "value": "where is the whale skeleton exhibited?\n<image>"}, {"from": "gpt", "value": "in an exhibition room in a museum"}] |
00000000053.jpg | 53_evjvqa | [{"from": "human", "value": "which samples does the room exhibit?\n<image>"}, {"from": "gpt", "value": "a whale skeleton"}] |
00000000055.jpg | 55_evjvqa | [{"from": "human", "value": "what scenes does this painting show?\n<image>"}, {"from": "gpt", "value": "the painting depicts a street corner, with girls dressed in ao dai wandering along the road"}] |
00000000055.jpg | 55_evjvqa | [{"from": "human", "value": "what kinds of clothes do people wear in the painting?\n<image>"}, {"from": "gpt", "value": "ao dai"}] |
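For reference, each `conversations` cell above is a JSON-encoded list of turns: each turn is an object with a `from` field (`human` or `gpt`) and a `value` field holding the text, and the `<image>` token in the human turn marks where the image is attached. Below is a minimal Python sketch of how one such cell could be decoded; the `row` dict simply copies the first entry of the table, and nothing beyond the standard-library `json` module is assumed about the loading pipeline.

```python
import json

# One row of the table above. The conversations column holds a JSON array of
# turns; "from" is the speaker ("human" or "gpt") and "value" is the text.
row = {
    "image": "00000003346.jpg",
    "id": "3346_evjvqa",
    "conversations": (
        '[{"from": "human", "value": "who is to the right of the woman in '
        'the black shirt?\\n<image>"}, {"from": "gpt", "value": "to the '
        'right of the woman in the black shirt is the woman in the gray '
        'shirt"}]'
    ),
}

turns = json.loads(row["conversations"])
for turn in turns:
    # Drop the "<image>" placeholder so only the plain question/answer remains.
    text = turn["value"].replace("<image>", "").strip()
    print(f"{turn['from']}: {text}")
```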