image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268) |
---|---|---|
00000003169.jpg | 3169_evjvqa | [{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "she is cutting paklov"}] |
00000003169.jpg | 3169_evjvqa | [{"from": "human", "value": "what is this woman cutting paklov with?\n<image>"}, {"from": "gpt", "value": "she is cutting paklov with scissors"}] |
00000003169.jpg | 3169_evjvqa | [{"from": "human", "value": "what is the woman wearing non la holding scissors for?\n<image>"}, {"from": "gpt", "value": "for cutting paklov"}] |
00000003172.jpg | 3172_evjvqa | [{"from": "human", "value": "what is the woman in the brown shirt doing?\n<image>"}, {"from": "gpt", "value": "she is going shopping"}] |
00000003172.jpg | 3172_evjvqa | [{"from": "human", "value": "what is the woman in brown pushing in the supermarket?\n<image>"}, {"from": "gpt", "value": "the woman in brown is pushing a supermarket trolley"}] |
00000003172.jpg | 3172_evjvqa | [{"from": "human", "value": "where is the girl in brown going shopping?\n<image>"}, {"from": "gpt", "value": "the girl in a brown shirt is shopping in the supermarket"}] |
00000003173.jpg | 3173_evjvqa | [{"from": "human", "value": "where is the woman wearing moss green ao dai?\n<image>"}, {"from": "gpt", "value": "the woman wearing moss green ao dai is standing on the rightmost side"}] |
00000003173.jpg | 3173_evjvqa | [{"from": "human", "value": "who is standing right before the woman with glasses on her collar?\n<image>"}, {"from": "gpt", "value": "is a man in a white shirt"}] |
00000003173.jpg | 3173_evjvqa | [{"from": "human", "value": "what is the woman standing on the rightmost wearing?\n<image>"}, {"from": "gpt", "value": "she wears a moss green ao dai"}] |
00000003176.jpg | 3176_evjvqa | [{"from": "human", "value": "what kind of clothes are these three people wearing?\n<image>"}, {"from": "gpt", "value": "they are wearing protective gear"}] |
00000003176.jpg | 3176_evjvqa | [{"from": "human", "value": "what are three people in protective gear doing?\n<image>"}, {"from": "gpt", "value": "three people wearing protective gear are spraying disinfectant at the supermarket"}] |
00000003176.jpg | 3176_evjvqa | [{"from": "human", "value": "what are these three people wearing to disinfect the supermarket?\n<image>"}, {"from": "gpt", "value": "they wear protective gear"}] |
00000003177.jpg | 3177_evjvqa | [{"from": "human", "value": "on which side of the railway line are the motorbikes parked?\n<image>"}, {"from": "gpt", "value": "the motorbikes are parked on the right side of the railway line"}] |
00000003177.jpg | 3177_evjvqa | [{"from": "human", "value": "on which side of the railroad are the wooden chairs placed?\n<image>"}, {"from": "gpt", "value": "left side of the railroad"}] |
00000003177.jpg | 3177_evjvqa | [{"from": "human", "value": "what color does the lightbulb give off above the chairs?\n<image>"}, {"from": "gpt", "value": "the lightbulb above the chairs gives off a yellow light"}] |
00000003179.jpg | 3179_evjvqa | [{"from": "human", "value": "what outfit is this girl wearing?\n<image>"}, {"from": "gpt", "value": "this girl is wearing a wedding dress"}] |
00000003179.jpg | 3179_evjvqa | [{"from": "human", "value": "what clothes does the man wear while walking with the woman?\n<image>"}, {"from": "gpt", "value": "the man is wearing a black vest"}] |
00000003179.jpg | 3179_evjvqa | [{"from": "human", "value": "what means of transport do this couple travel together?\n<image>"}, {"from": "gpt", "value": "they walk on foot together"}] |
00000003180.jpg | 3180_evjvqa | [{"from": "human", "value": "how does the walking man dress?\n<image>"}, {"from": "gpt", "value": "he wears a grey t-shirt and black pants"}] |
00000003180.jpg | 3180_evjvqa | [{"from": "human", "value": "what kind of vehicles are parked in front of this store?\n<image>"}, {"from": "gpt", "value": "motorcycles are parked in front of this shop"}] |
00000003181.jpg | 3181_evjvqa | [{"from": "human", "value": "what is the woman wearing non la doing?\n<image>"}, {"from": "gpt", "value": "the woman wearing non la is carrying street vendors to sell"}] |
00000003181.jpg | 3181_evjvqa | [{"from": "human", "value": "what is the woman wearing to carry the street vendors?\n<image>"}, {"from": "gpt", "value": "the woman is wearing a non la to carry street vendors"}] |
00000003181.jpg | 3181_evjvqa | [{"from": "human", "value": "on which side of the female hawker is the tree located?\n<image>"}, {"from": "gpt", "value": "the tree is to the left of the female hawker"}] |
00000003182.jpg | 3182_evjvqa | [{"from": "human", "value": "how many floors do these shelves have?\n<image>"}, {"from": "gpt", "value": "these shopping shelves have four floors"}] |
00000003182.jpg | 3182_evjvqa | [{"from": "human", "value": "what is on these shelves?\n<image>"}, {"from": "gpt", "value": "nothing at all"}] |
00000003182.jpg | 3182_evjvqa | [{"from": "human", "value": "what color are these shopping shelves painted?\n<image>"}, {"from": "gpt", "value": "these shelves are painted white"}] |
00000003185.jpg | 3185_evjvqa | [{"from": "human", "value": "by what means of transportation do the children move in the alley?\n<image>"}, {"from": "gpt", "value": "the children move in the alley by bicycles"}] |
00000003187.jpg | 3187_evjvqa | [{"from": "human", "value": "what product is this woman looking at?\n<image>"}, {"from": "gpt", "value": "this woman is looking at a carton of milk powder"}] |
00000003187.jpg | 3187_evjvqa | [{"from": "human", "value": "what shirt is the woman holding the milk carton wearing?\n<image>"}, {"from": "gpt", "value": "woman who is holding a milk carton is wearing white t-shirt"}] |
00000003187.jpg | 3187_evjvqa | [{"from": "human", "value": "what is in this woman's hand?\n<image>"}, {"from": "gpt", "value": "in this woman's hand is a powdered milk carton"}] |
00000003193.jpg | 3193_evjvqa | [{"from": "human", "value": "what ingredients do the chili trays contain to use with this dish?\n<image>"}, {"from": "gpt", "value": "there are lemon slices in the chili tray"}] |
00000003193.jpg | 3193_evjvqa | [{"from": "human", "value": "of what is the plate to the left of the bowl of pho?\n<image>"}, {"from": "gpt", "value": "to the left of the bowl of pho is a plate of sprouts "}] |
00000003193.jpg | 3193_evjvqa | [{"from": "human", "value": "on which plate are the bean sprouts placed?\n<image>"}, {"from": "gpt", "value": "the bean sprouts are placed on the plate to the left of the bowl of pho"}] |
00000003199.jpg | 3199_evjvqa | [{"from": "human", "value": "what is the girl standing on the balcony wearing?\n<image>"}, {"from": "gpt", "value": "the girl standing on the balcony is wearing a white ao dai"}] |
00000003199.jpg | 3199_evjvqa | [{"from": "human", "value": "what outfit does this girl wear to chill out on the roof terrace of this house?\n<image>"}, {"from": "gpt", "value": "she wears a white dress"}] |
00000003199.jpg | 3199_evjvqa | [{"from": "human", "value": "what color is the house walls painted in front of the girl?\n<image>"}, {"from": "gpt", "value": "the house right in front of the girl has walls painted in yellow"}] |
00000000100.jpg | 100_evjvqa | [{"from": "human", "value": "what color does the girl sitting on the trolley wear?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000100.jpg | 100_evjvqa | [{"from": "human", "value": "what tool do the couple use to take their daughter go with them in the supermarket?\n<image>"}, {"from": "gpt", "value": "the supermarket trolley"}] |
00000000100.jpg | 100_evjvqa | [{"from": "human", "value": "what is the man in white doing?\n<image>"}, {"from": "gpt", "value": "pushing the trolley"}] |
00000000100.jpg | 100_evjvqa | [{"from": "human", "value": "how many people are using the trolly in which the girl is sitting?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000000100.jpg | 100_evjvqa | [{"from": "human", "value": "what side of the man in white is the woman going on?\n<image>"}, {"from": "gpt", "value": "the left side"}] |
00000000103.jpg | 103_evjvqa | [{"from": "human", "value": "what color of the uniform are the guy wearing?\n<image>"}, {"from": "gpt", "value": "green"}] |
00000000103.jpg | 103_evjvqa | [{"from": "human", "value": "what is the color of the trousers of the girl wearing a yellow hat?\n<image>"}, {"from": "gpt", "value": "violet"}] |
00000000103.jpg | 103_evjvqa | [{"from": "human", "value": "how many females are wearing the military uniform?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000000110.jpg | 110_evjvqa | [{"from": "human", "value": "what color of the mask does the woman holding the product wear?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000000110.jpg | 110_evjvqa | [{"from": "human", "value": "what color is the plastic basket brought the the customer?\n<image>"}, {"from": "gpt", "value": "grey"}] |
00000000110.jpg | 110_evjvqa | [{"from": "human", "value": "what is the woman bringing the plastic basket for?\n<image>"}, {"from": "gpt", "value": "to add items to the basket"}] |
00000000113.jpg | 113_evjvqa | [{"from": "human", "value": "what color is the row of trees in front the block?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000113.jpg | 113_evjvqa | [{"from": "human", "value": "what color is the block painted?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
00000000116.jpg | 116_evjvqa | [{"from": "human", "value": "what is the color of the boots used by the woman wearing all red clothes?\n<image>"}, {"from": "gpt", "value": "black"}] |
00000000119.jpg | 119_evjvqa | [{"from": "human", "value": "what color is the gate's pillars of this attraction painted?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000119.jpg | 119_evjvqa | [{"from": "human", "value": "what color is the flowers of the two trees on both sides of the gate?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000000119.jpg | 119_evjvqa | [{"from": "human", "value": "how many doors are there at the main gate of the attraction?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000000122.jpg | 122_evjvqa | [{"from": "human", "value": "what vehicle is the person driving in the alley?\n<image>"}, {"from": "gpt", "value": "a bicycle"}] |
00000000122.jpg | 122_evjvqa | [{"from": "human", "value": "what is the boy doing for the seller?\n<image>"}, {"from": "gpt", "value": "tying her hair"}] |
00000000122.jpg | 122_evjvqa | [{"from": "human", "value": "why does the woman in white lay the fruits out?\n<image>"}, {"from": "gpt", "value": "for selling"}] |
00000000122.jpg | 122_evjvqa | [{"from": "human", "value": "where is the woman laying her goods in front of?\n<image>"}, {"from": "gpt", "value": "a temple entrance"}] |
00000000122.jpg | 122_evjvqa | [{"from": "human", "value": "what is the only person with a hat doing?\n<image>"}, {"from": "gpt", "value": "riding a bicycle"}] |
00000000124.jpg | 124_evjvqa | [{"from": "human", "value": "how many kids are wearing blue, grey and white striped shirts?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000000124.jpg | 124_evjvqa | [{"from": "human", "value": "how is the man wearing earphones attired?\n<image>"}, {"from": "gpt", "value": "a blue shirt and a red hat"}] |
00000000125.jpg | 125_evjvqa | [{"from": "human", "value": "what color is the helmet on the foam container?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000125.jpg | 125_evjvqa | [{"from": "human", "value": "what do the plastic bags on the sidewalks contain?\n<image>"}, {"from": "gpt", "value": "vegetables"}] |
00000000125.jpg | 125_evjvqa | [{"from": "human", "value": "what color is the car by the side of the street painted?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000000126.jpg | 126_evjvqa | [{"from": "human", "value": "what is the man holding the paddle for?\n<image>"}, {"from": "gpt", "value": "for rowing the boat"}] |
00000000126.jpg | 126_evjvqa | [{"from": "human", "value": "what kind of garden are these people rowing the boat in the middle of?\n<image>"}, {"from": "gpt", "value": "the rambutan garden"}] |
00000000126.jpg | 126_evjvqa | [{"from": "human", "value": "what color does the girl in front of the boat wear?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
00000000132.jpg | 132_evjvqa | [{"from": "human", "value": "what color does the woman in the sunlight wear?\n<image>"}, {"from": "gpt", "value": "a red shirt and a blue jean"}] |
00000000132.jpg | 132_evjvqa | [{"from": "human", "value": "how many people are wearing helmets inside the store?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000000133.jpg | 133_evjvqa | [{"from": "human", "value": "how does the man in green clothes form his posture?\n<image>"}, {"from": "gpt", "value": "sitting"}] |
00000000133.jpg | 133_evjvqa | [{"from": "human", "value": "what color is the bottom present box in front of the man in green clothes?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000133.jpg | 133_evjvqa | [{"from": "human", "value": "on which side of the standing man is the pine tree located?\n<image>"}, {"from": "gpt", "value": "ahead"}] |
00000000135.jpg | 135_evjvqa | [{"from": "human", "value": "what color is the barrier between the seller and her customers?\n<image>"}, {"from": "gpt", "value": "blue"}] |
00000000135.jpg | 135_evjvqa | [{"from": "human", "value": "on which side of the blue table is the yellow box lying?\n<image>"}, {"from": "gpt", "value": "the top side"}] |
00000000135.jpg | 135_evjvqa | [{"from": "human", "value": "what is the customer doing on the right-hand side of the person wearing a red helmet?\n<image>"}, {"from": "gpt", "value": "buying stuffs"}] |
00000000135.jpg | 135_evjvqa | [{"from": "human", "value": "how many active customers are not wearing masks?\n<image>"}, {"from": "gpt", "value": "none"}] |
00000000139.jpg | 139_evjvqa | [{"from": "human", "value": "what color are the shirts of both people in the supermarket?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000000139.jpg | 139_evjvqa | [{"from": "human", "value": "what is the heterosexual couple doing together?\n<image>"}, {"from": "gpt", "value": "shopping"}] |
00000000139.jpg | 139_evjvqa | [{"from": "human", "value": "which one is wearing a blue jacket?\n<image>"}, {"from": "gpt", "value": "the man"}] |
00000000139.jpg | 139_evjvqa | [{"from": "human", "value": "what products have the couple choosen to buy?\n<image>"}, {"from": "gpt", "value": "vegetables and bread"}] |
00000000139.jpg | 139_evjvqa | [{"from": "human", "value": "what is the couple pushing together?\n<image>"}, {"from": "gpt", "value": "the supermarket trolley"}] |
00000000143.jpg | 143_evjvqa | [{"from": "human", "value": "what two color does the guy holding a green plastic bag wear?\n<image>"}, {"from": "gpt", "value": "white and black"}] |
00000000143.jpg | 143_evjvqa | [{"from": "human", "value": "what color is the plastic bag brought by the person wearing a black-and-white jacket?\n<image>"}, {"from": "gpt", "value": "green"}] |
00000000143.jpg | 143_evjvqa | [{"from": "human", "value": "what are the main product type of this store?\n<image>"}, {"from": "gpt", "value": "clothes"}] |
00000000143.jpg | 143_evjvqa | [{"from": "human", "value": "among the two people holding a green bag, how does the female dress?\n<image>"}, {"from": "gpt", "value": "a violet jacket and a black trousers"}] |
00000000147.jpg | 147_evjvqa | [{"from": "human", "value": "which shape are the stripes of the man's shirt distributed into?\n<image>"}, {"from": "gpt", "value": "horizontal"}] |
00000000147.jpg | 147_evjvqa | [{"from": "human", "value": "what is on the head of the man wearing a horizontally striped shirt?\n<image>"}, {"from": "gpt", "value": "a helmet"}] |
00000000147.jpg | 147_evjvqa | [{"from": "human", "value": "how colorful is the non-landing motor scooter?\n<image>"}, {"from": "gpt", "value": "cream"}] |
00000000149.jpg | 149_evjvqa | [{"from": "human", "value": "what color are the hat and shirt of the man riding a scooter?\n<image>"}, {"from": "gpt", "value": "the shirt is blue and the hat is black"}] |
00000000149.jpg | 149_evjvqa | [{"from": "human", "value": "how many people is the scooter riding man carry?\n<image>"}, {"from": "gpt", "value": "none"}] |
00000000151.jpg | 151_evjvqa | [{"from": "human", "value": "what are the three teenagers doing?\n<image>"}, {"from": "gpt", "value": "riding bikes"}] |
00000000151.jpg | 151_evjvqa | [{"from": "human", "value": "what color of the hats do the three girls wear, left to right?\n<image>"}, {"from": "gpt", "value": "black, red and green"}] |
00000000151.jpg | 151_evjvqa | [{"from": "human", "value": "what color of the hat does the girl wear while cycling in the middle?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000000151.jpg | 151_evjvqa | [{"from": "human", "value": "how many bikes are there on the road?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000000151.jpg | 151_evjvqa | [{"from": "human", "value": "on which side of the girl with a black cap is the girl wearing a green cap cycling?\n<image>"}, {"from": "gpt", "value": "on the right side"}] |
00000000156.jpg | 156_evjvqa | [{"from": "human", "value": "how many floors does the tallest house have?\n<image>"}, {"from": "gpt", "value": "five"}] |
00000000156.jpg | 156_evjvqa | [{"from": "human", "value": "how many floors does the house on the rightmost have?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000000156.jpg | 156_evjvqa | [{"from": "human", "value": "what position is the house with the billboard in front of it from the right?\n<image>"}, {"from": "gpt", "value": "five"}] |
00000000156.jpg | 156_evjvqa | [{"from": "human", "value": "what floors does the third house from the right have?\n<image>"}, {"from": "gpt", "value": "one ground floor and three floors"}] |
00000000156.jpg | 156_evjvqa | [{"from": "human", "value": "how is the sixth house from the left stratified?\n<image>"}, {"from": "gpt", "value": "one ground floor and four floors"}] |
00000000162.jpg | 162_evjvqa | [{"from": "human", "value": "what color of the hat is the walking man wearing?\n<image>"}, {"from": "gpt", "value": "red"}] |
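
Each `conversations` cell above is a JSON-encoded list of turns in the common LLaVA-style format: a `"human"` turn carrying the question (with an `<image>` placeholder marking where the image is injected) followed by a `"gpt"` turn carrying the answer. As a minimal sketch of how one such cell could be parsed into (question, answer) pairs — the helper name `parse_conversations` is ours, not part of the dataset:

```python
import json

def parse_conversations(raw: str):
    """Parse a `conversations` cell into (question, answer) pairs.

    Assumes the cell is a JSON list of turns alternating between a
    "human" question (containing the `<image>` placeholder) and a
    "gpt" answer, as in the rows shown above.
    """
    turns = json.loads(raw)
    pairs = []
    for q, a in zip(turns[::2], turns[1::2]):
        assert q["from"] == "human" and a["from"] == "gpt"
        # Drop the image placeholder; it only marks the injection point.
        question = q["value"].replace("<image>", "").strip()
        pairs.append((question, a["value"]))
    return pairs

# Example using the first row shown above (00000003169.jpg, 3169_evjvqa).
row = ('[{"from": "human", "value": "what is this woman doing?\\n<image>"}, '
       '{"from": "gpt", "value": "she is cutting paklov"}]')
print(parse_conversations(row))
# [('what is this woman doing?', 'she is cutting paklov')]
```

Images sharing an id prefix (e.g. the three `3169_evjvqa` rows) are separate QA pairs over the same image, so a loader should group rows by image filename rather than assume one conversation per image.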