image (string, 15 chars) | id (string, 8-12 chars) | conversations (string, 79-268 chars)
---|---|---|
00000001082.jpg | 1082_evjvqa | [{"from": "human", "value": "where is the group of students sitting?\n<image>"}, {"from": "gpt", "value": "at a wooden house"}] |
00000001085.jpg | 1085_evjvqa | [{"from": "human", "value": "what department are the three men standing?\n<image>"}, {"from": "gpt", "value": "fruit department"}] |
00000001085.jpg | 1085_evjvqa | [{"from": "human", "value": "how many people are standing and watching at the fruit department?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000001085.jpg | 1085_evjvqa | [{"from": "human", "value": "what are the two men black shirts doing?\n<image>"}, {"from": "gpt", "value": "selecting fruits"}] |
00000001087.jpg | 1087_evjvqa | [{"from": "human", "value": "what is the main item that the stalls in this area are selling?\n<image>"}, {"from": "gpt", "value": "fresh seafood"}] |
00000001091.jpg | 1091_evjvqa | [{"from": "human", "value": "how many people are standing in front of the antique house?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001091.jpg | 1091_evjvqa | [{"from": "human", "value": "how many motorbikes are running on the road?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001091.jpg | 1091_evjvqa | [{"from": "human", "value": "what does the girl in front of the antique house wear?\n<image>"}, {"from": "gpt", "value": "a white ao dai"}] |
00000001092.jpg | 1092_evjvqa | [{"from": "human", "value": "where are the two women walking?\n<image>"}, {"from": "gpt", "value": "clothing stall"}] |
00000001092.jpg | 1092_evjvqa | [{"from": "human", "value": "what color is the shirt the man standing near the woman in red striped shirt wear?\n<image>"}, {"from": "gpt", "value": "brown"}] |
00000001092.jpg | 1092_evjvqa | [{"from": "human", "value": "what products do these stalls sell?\n<image>"}, {"from": "gpt", "value": "clothes, bags"}] |
00000001097.jpg | 1097_evjvqa | [{"from": "human", "value": "where are the lanterns hung?\n<image>"}, {"from": "gpt", "value": "above a street"}] |
00000001098.jpg | 1098_evjvqa | [{"from": "human", "value": "where is the woman in red coat standing?\n<image>"}, {"from": "gpt", "value": "a christmas shop"}] |
00000001098.jpg | 1098_evjvqa | [{"from": "human", "value": "how many people are standing in front of the christmas shop?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000001098.jpg | 1098_evjvqa | [{"from": "human", "value": "how many people are inside the christmas shop?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001098.jpg | 1098_evjvqa | [{"from": "human", "value": "where is the woman in red coat going?\n<image>"}, {"from": "gpt", "value": "going to buy christmas decorations"}] |
00000001108.jpg | 1108_evjvqa | [{"from": "human", "value": "what is essential when participating in traffic that the man in blue shirt riding a motorbike does not wear?\n<image>"}, {"from": "gpt", "value": "a helmet"}] |
00000001108.jpg | 1108_evjvqa | [{"from": "human", "value": "where is the unmanned motorbike parked?\n<image>"}, {"from": "gpt", "value": "on the pavement"}] |
00000001110.jpg | 1110_evjvqa | [{"from": "human", "value": "what kind of hats do these women wear?\n<image>"}, {"from": "gpt", "value": "conical hats"}] |
00000001110.jpg | 1110_evjvqa | [{"from": "human", "value": "what do they sell here?\n<image>"}, {"from": "gpt", "value": "green vegetables"}] |
00000001110.jpg | 1110_evjvqa | [{"from": "human", "value": "what do these women wear conical hats for?\n<image>"}, {"from": "gpt", "value": "shading from the sun"}] |
00000001111.jpg | 1111_evjvqa | [{"from": "human", "value": "how many people are walking up the stairs in total?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001114.jpg | 1114_evjvqa | [{"from": "human", "value": "who is being carried by the boy in white shirt?\n<image>"}, {"from": "gpt", "value": "the baby in pink polka dot shirt"}] |
00000001114.jpg | 1114_evjvqa | [{"from": "human", "value": "what does this stall in the market sell?\n<image>"}, {"from": "gpt", "value": "meat"}] |
00000001114.jpg | 1114_evjvqa | [{"from": "human", "value": "where is the baby carried?\n<image>"}, {"from": "gpt", "value": "in front of the meat stall in the market"}] |
00000001126.jpg | 1126_evjvqa | [{"from": "human", "value": "what is this woman doing in the garden?\n<image>"}, {"from": "gpt", "value": "picking rambutans"}] |
00000001126.jpg | 1126_evjvqa | [{"from": "human", "value": "what is this woman picking rambutans in?\n<image>"}, {"from": "gpt", "value": "a conical hat"}] |
00000001126.jpg | 1126_evjvqa | [{"from": "human", "value": "what is the woman in green shirt putting in her conical hat? \n<image>"}, {"from": "gpt", "value": "a bunch of rambutans"}] |
00000001127.jpg | 1127_evjvqa | [{"from": "human", "value": "what is the girl in white shirt standing in front of the door?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}] |
00000001127.jpg | 1127_evjvqa | [{"from": "human", "value": "by whom is the girl in white dress being photographed?\n<image>"}, {"from": "gpt", "value": "the girl wearing black skirt"}] |
00000001127.jpg | 1127_evjvqa | [{"from": "human", "value": "what is the girl in black skirt doing in front of the person in white dress?\n<image>"}, {"from": "gpt", "value": "taking photos"}] |
00000001129.jpg | 1129_evjvqa | [{"from": "human", "value": "what kind of product is this woman choosing to buy?\n<image>"}, {"from": "gpt", "value": "instant noodles"}] |
00000001129.jpg | 1129_evjvqa | [{"from": "human", "value": "what is this woman carrying?\n<image>"}, {"from": "gpt", "value": "a supermarket trolley"}] |
00000001129.jpg | 1129_evjvqa | [{"from": "human", "value": "who is standing behind the woman buying instant noodles?\n<image>"}, {"from": "gpt", "value": "a woman wearing blue outfit"}] |
00000001130.jpg | 1130_evjvqa | [{"from": "human", "value": "what occasion do the shops on this stretch of road sell items on?\n<image>"}, {"from": "gpt", "value": "christmas"}] |
00000001130.jpg | 1130_evjvqa | [{"from": "human", "value": "what is the young man walking next to the girl with the handbag holding in his hand?\n<image>"}, {"from": "gpt", "value": "a coat"}] |
00000001130.jpg | 1130_evjvqa | [{"from": "human", "value": "who is the girl with the yellow handbag going with?\n<image>"}, {"from": "gpt", "value": "the man in black hat holding a coat in his hand"}] |
00000001130.jpg | 1130_evjvqa | [{"from": "human", "value": "what is the girl in white skirt doing?\n<image>"}, {"from": "gpt", "value": "walking with a friend on the street"}] |
00000001133.jpg | 1133_evjvqa | [{"from": "human", "value": "what is the little girl in blue dress doing next to the shelf?\n<image>"}, {"from": "gpt", "value": "reaching for a snack bag"}] |
00000001133.jpg | 1133_evjvqa | [{"from": "human", "value": "who is the man in red shirt sitting next to?\n<image>"}, {"from": "gpt", "value": "the little girl in blue dress"}] |
00000001133.jpg | 1133_evjvqa | [{"from": "human", "value": "in this distance, what is the man pushing?\n<image>"}, {"from": "gpt", "value": "a supermarket trolley"}] |
00000001133.jpg | 1133_evjvqa | [{"from": "human", "value": "what does the man push the trolley for?\n<image>"}, {"from": "gpt", "value": "shopping"}] |
00000001136.jpg | 1136_evjvqa | [{"from": "human", "value": "where is the woman in red hat standing?\n<image>"}, {"from": "gpt", "value": "to the right of the woman in white shirt carrying handbag"}] |
00000001136.jpg | 1136_evjvqa | [{"from": "human", "value": "what are these people standing around?\n<image>"}, {"from": "gpt", "value": "a freezer"}] |
00000001136.jpg | 1136_evjvqa | [{"from": "human", "value": "how many people pin glasses onto their hair?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001140.jpg | 1140_evjvqa | [{"from": "human", "value": "what is this man doing on the staircase?\n<image>"}, {"from": "gpt", "value": "carrying the boxes upstairs"}] |
00000001140.jpg | 1140_evjvqa | [{"from": "human", "value": "what are on this man's back?\n<image>"}, {"from": "gpt", "value": "boxes and bags"}] |
00000001140.jpg | 1140_evjvqa | [{"from": "human", "value": "what does the porter wear on his head?\n<image>"}, {"from": "gpt", "value": "a black hat"}] |
00000001142.jpg | 1142_evjvqa | [{"from": "human", "value": "what are the young girls standing together on the railroad for?\n<image>"}, {"from": "gpt", "value": "taking a photo together"}] |
00000001142.jpg | 1142_evjvqa | [{"from": "human", "value": "what does each girl standing on the railroad holding in hand?\n<image>"}, {"from": "gpt", "value": "a conical hat"}] |
00000001142.jpg | 1142_evjvqa | [{"from": "human", "value": "how many girls wearing skirts hold conical hats in hands?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000001143.jpg | 1143_evjvqa | [{"from": "human", "value": "how can employees of the cosmetic stall be identified?\n<image>"}, {"from": "gpt", "value": "they wear green t-shirts, black trousers and white caps"}] |
00000001144.jpg | 1144_evjvqa | [{"from": "human", "value": "what does this bridge cross?\n<image>"}, {"from": "gpt", "value": "a channel"}] |
00000001147.jpg | 1147_evjvqa | [{"from": "human", "value": "for each seat, what is on the table there?\n<image>"}, {"from": "gpt", "value": "a teacup"}] |
00000001147.jpg | 1147_evjvqa | [{"from": "human", "value": "what outfit do the women wear at this event?\n<image>"}, {"from": "gpt", "value": "ao dai"}] |
00000001147.jpg | 1147_evjvqa | [{"from": "human", "value": "what do all the people here gather for?\n<image>"}, {"from": "gpt", "value": "taking group photos"}] |
00000001147.jpg | 1147_evjvqa | [{"from": "human", "value": "where are people here taking pictures together?\n<image>"}, {"from": "gpt", "value": "in the hall"}] |
00000001149.jpg | 1149_evjvqa | [{"from": "human", "value": "what is the woman in striped shirt doing?\n<image>"}, {"from": "gpt", "value": "putting things on the cashier counter"}] |
00000001149.jpg | 1149_evjvqa | [{"from": "human", "value": "what is the woman in striped shirt putting on the cashier counter?\n<image>"}, {"from": "gpt", "value": "a cooking oil bottle"}] |
00000001149.jpg | 1149_evjvqa | [{"from": "human", "value": "where is the woman in striped shirt putting the cooking oil bottle?\n<image>"}, {"from": "gpt", "value": "on the cashier counter"}] |
00000001158.jpg | 1158_evjvqa | [{"from": "human", "value": "what is the woman in yellow ao dai holding in her hand?\n<image>"}, {"from": "gpt", "value": "a handbag"}] |
00000001159.jpg | 1159_evjvqa | [{"from": "human", "value": "what kind of vehicles do diners come to this restaurant by?\n<image>"}, {"from": "gpt", "value": "motorbikes"}] |
00000001164.jpg | 1164_evjvqa | [{"from": "human", "value": "what types of vehicles are parked on the roadside?\n<image>"}, {"from": "gpt", "value": "cars and motorbikes"}] |
00000001164.jpg | 1164_evjvqa | [{"from": "human", "value": "what types of vehicles are moving on the road?\n<image>"}, {"from": "gpt", "value": "motorbikes, cars and cyclos"}] |
00000001164.jpg | 1164_evjvqa | [{"from": "human", "value": "what is the building at the end of this road?\n<image>"}, {"from": "gpt", "value": "a church"}] |
00000001164.jpg | 1164_evjvqa | [{"from": "human", "value": "what vehicles are going on the opposite direction?\n<image>"}, {"from": "gpt", "value": "the motorcycle close behind the cyclos"}] |
00000001170.jpg | 1170_evjvqa | [{"from": "human", "value": "where is this girl taking pictures?\n<image>"}, {"from": "gpt", "value": "in the middle of the railroad"}] |
00000001170.jpg | 1170_evjvqa | [{"from": "human", "value": "what are along the railroad?\n<image>"}, {"from": "gpt", "value": "cafes"}] |
00000001170.jpg | 1170_evjvqa | [{"from": "human", "value": "what is girl standing in the middle of the railroad for?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}] |
00000001172.jpg | 1172_evjvqa | [{"from": "human", "value": "which side is the girl in white shirt turning back?\n<image>"}, {"from": "gpt", "value": "the side of the road"}] |
00000001177.jpg | 1177_evjvqa | [{"from": "human", "value": "what types of vehicles are parked on the right side of the road?\n<image>"}, {"from": "gpt", "value": "motorbikes"}] |
00000001177.jpg | 1177_evjvqa | [{"from": "human", "value": "what types of vehicles are moving on this road?\n<image>"}, {"from": "gpt", "value": "bicycles"}] |
00000001177.jpg | 1177_evjvqa | [{"from": "human", "value": "what are hung on the string that is strung across the road?\n<image>"}, {"from": "gpt", "value": "lanterns"}] |
00000001177.jpg | 1177_evjvqa | [{"from": "human", "value": "what are the students in yellow coats doing?\n<image>"}, {"from": "gpt", "value": "cycling on the road"}] |
00000001179.jpg | 1179_evjvqa | [{"from": "human", "value": "how many windows are there on both sides of the main door?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001179.jpg | 1179_evjvqa | [{"from": "human", "value": "which leaf is closed at the main door?\n<image>"}, {"from": "gpt", "value": "the right one"}] |
00000001179.jpg | 1179_evjvqa | [{"from": "human", "value": "who is standing behind the windows?\n<image>"}, {"from": "gpt", "value": "nobody"}] |
00000001180.jpg | 1180_evjvqa | [{"from": "human", "value": "what is put on the saddle of the motorbike?\n<image>"}, {"from": "gpt", "value": "a gray helmet"}] |
00000001180.jpg | 1180_evjvqa | [{"from": "human", "value": "whose color is the front of the motorbike similar to?\n<image>"}, {"from": "gpt", "value": "the uniform of the man"}] |
00000001181.jpg | 1181_evjvqa | [{"from": "human", "value": "who puts the rules board on the beach?\n<image>"}, {"from": "gpt", "value": "the man wearing orange shirt"}] |
00000001187.jpg | 1187_evjvqa | [{"from": "human", "value": "what are the gatekeepers doing to the people driving into the market?\n<image>"}, {"from": "gpt", "value": "measuring body temperature"}] |
00000001187.jpg | 1187_evjvqa | [{"from": "human", "value": "what do gatekeepers erect barricades for?\n<image>"}, {"from": "gpt", "value": "controlling people entering the market"}] |
00000001188.jpg | 1188_evjvqa | [{"from": "human", "value": "what are some people holding incense sticks intending to do?\n<image>"}, {"from": "gpt", "value": "light the incense sticks"}] |
00000001188.jpg | 1188_evjvqa | [{"from": "human", "value": "what do the students wear on their collars?\n<image>"}, {"from": "gpt", "value": "red scarves"}] |
00000001188.jpg | 1188_evjvqa | [{"from": "human", "value": "what do the students in white uniforms wear on their heads?\n<image>"}, {"from": "gpt", "value": "hats"}] |
00000001196.jpg | 1196_evjvqa | [{"from": "human", "value": "how does the only person wearing glasses in the photo look compared to the others?\n<image>"}, {"from": "gpt", "value": "wearing blue jeans and a shirt"}] |
00000001196.jpg | 1196_evjvqa | [{"from": "human", "value": "how do people without glasses dress?\n<image>"}, {"from": "gpt", "value": "dark green military uniforms"}] |
00000004201.jpg | 4201_evjvqa | [{"from": "human", "value": "what are the green baskets used for?\n<image>"}, {"from": "gpt", "value": "to store bundles of vegetables"}] |
00000004201.jpg | 4201_evjvqa | [{"from": "human", "value": "what items does this stall sell?\n<image>"}, {"from": "gpt", "value": "vegetables and vermicelli"}] |
00000004204.jpg | 4204_evjvqa | [{"from": "human", "value": "what are the man and woman holding?\n<image>"}, {"from": "gpt", "value": "the box of medical masks"}] |
00000004204.jpg | 4204_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "he is giving the box of medical masks to the girl"}] |
00000004204.jpg | 4204_evjvqa | [{"from": "human", "value": "where is the computer screen?\n<image>"}, {"from": "gpt", "value": "on the desk"}] |
00000004204.jpg | 4204_evjvqa | [{"from": "human", "value": "what is at the top of the closet in the corner?\n<image>"}, {"from": "gpt", "value": "the bunch of flowers "}] |
00000004205.jpg | 4205_evjvqa | [{"from": "human", "value": "what items is the stall selling?\n<image>"}, {"from": "gpt", "value": "fruits"}] |
00000004205.jpg | 4205_evjvqa | [{"from": "human", "value": "what products does this area have to sell?\n<image>"}, {"from": "gpt", "value": "fruits"}] |
00000004205.jpg | 4205_evjvqa | [{"from": "human", "value": "where are these fruits sold?\n<image>"}, {"from": "gpt", "value": "at the stall in the supermarket"}] |
00000004208.jpg | 4208_evjvqa | [{"from": "human", "value": "how is the street decorated?\n<image>"}, {"from": "gpt", "value": "above the street is hung string lights"}] |
00000004209.jpg | 4209_evjvqa | [{"from": "human", "value": "what is the group of people gathered for?\n<image>"}, {"from": "gpt", "value": "to watch an outdoor show"}] |
00000004209.jpg | 4209_evjvqa | [{"from": "human", "value": "what is the show taking place on the road?\n<image>"}, {"from": "gpt", "value": "there is an outdoor art performance coming up"}] |
00000004209.jpg | 4209_evjvqa | [{"from": "human", "value": "what do people come here for?\n<image>"}, {"from": "gpt", "value": "to participate in the outdoor art show"}] |
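Each row above pairs an image file with a `conversations` list in a ShareGPT-style schema: alternating `human` and `gpt` turns, where the human turn holds the question text plus an `<image>` placeholder marking where the image is injected. Below is a minimal sketch of parsing such rows, assuming the table has been exported as a JSON Lines file; the filename `evjvqa_sample.jsonl` and the helper names are hypothetical, not part of the dataset itself.

```python
import json


def load_records(path):
    """Yield one record per line from a JSON Lines export of the table."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            if line.strip():
                yield json.loads(line)


def qa_pairs(record):
    """Pair consecutive human/gpt turns into (question, answer) tuples.

    The <image> placeholder is stripped so the plain question text
    is easier to read; the image filename lives in record["image"].
    """
    turns = record["conversations"]
    pairs = []
    for human, gpt in zip(turns[::2], turns[1::2]):
        # Rows in this split alternate strictly human -> gpt.
        assert human["from"] == "human" and gpt["from"] == "gpt"
        question = human["value"].replace("<image>", "").strip()
        pairs.append((question, gpt["value"]))
    return pairs


if __name__ == "__main__":
    for record in load_records("evjvqa_sample.jsonl"):
        for question, answer in qa_pairs(record):
            print(f"{record['image']} ({record['id']}): {question} -> {answer}")
```

Note that a single image can appear in several rows (e.g. `00000001085.jpg` above), each carrying one question-answer pair, so grouping by the `image` field is needed if you want all questions for one picture together.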