image (string, 15 chars) | id (string, 8–12 chars) | conversations (string, 79–268 chars) |
---|---|---|
00000009884.jpg | 9884_evjvqa | [{"from": "human", "value": "how many people are wearing yellow shirt?\n<image>"}, {"from": "gpt", "value": "there are two people wearing yellow shirt"}] |
00000009886.jpg | 9886_evjvqa | [{"from": "human", "value": "how many people are driving in the street?\n<image>"}, {"from": "gpt", "value": "there is one person driving in the street"}] |
00000009890.jpg | 9890_evjvqa | [{"from": "human", "value": "what is hanged in front of every house?\n<image>"}, {"from": "gpt", "value": "the flags are hanged in the front"}] |
00000009893.jpg | 9893_evjvqa | [{"from": "human", "value": "how many shelves does the store have?\n<image>"}, {"from": "gpt", "value": "the store has three shelves"}] |
00000009896.jpg | 9896_evjvqa | [{"from": "human", "value": "how many motorbikes are parked in front of the eatery?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes parked in front of the eatery"}] |
00000009899.jpg | 9899_evjvqa | [{"from": "human", "value": "what is the woman buying?\n<image>"}, {"from": "gpt", "value": "the woman is buying spices"}] |
00000009902.jpg | 9902_evjvqa | [{"from": "human", "value": "what color shirt is the man in the stall wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing black shirt"}] |
00000009904.jpg | 9904_evjvqa | [{"from": "human", "value": "what are the boys doing?\n<image>"}, {"from": "gpt", "value": "the boys are playing with the canoe toys"}] |
00000009906.jpg | 9906_evjvqa | [{"from": "human", "value": "what does the painting portray?\n<image>"}, {"from": "gpt", "value": "the painting portrays the scenery of an old town"}] |
00000009908.jpg | 9908_evjvqa | [{"from": "human", "value": "which hand is the woman in green shirt pushing the cart with?\n<image>"}, {"from": "gpt", "value": "pushing the cart with right hand"}] |
00000009910.jpg | 9910_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a flower market"}] |
00000009912.jpg | 9912_evjvqa | [{"from": "human", "value": "which direction is the girl looking to?\n<image>"}, {"from": "gpt", "value": "the girl is looking to the right "}] |
00000009913.jpg | 9913_evjvqa | [{"from": "human", "value": "what means of transport is everyone moving by?\n<image>"}, {"from": "gpt", "value": "everyone is moving by boats"}] |
00000009916.jpg | 9916_evjvqa | [{"from": "human", "value": "how many people are decorating the signboard for the store?\n<image>"}, {"from": "gpt", "value": "there is one person decorating the signboard for the store"}] |
00000009918.jpg | 9918_evjvqa | [{"from": "human", "value": "how many people are standing in front of the store?\n<image>"}, {"from": "gpt", "value": "there is one person in front of the store"}] |
00000009920.jpg | 9920_evjvqa | [{"from": "human", "value": "which direction is the boy looking to?\n<image>"}, {"from": "gpt", "value": "the boy is looking at the girl"}] |
00000009922.jpg | 9922_evjvqa | [{"from": "human", "value": "how many boxes are on the cart?\n<image>"}, {"from": "gpt", "value": "there are four boxes on the cart"}] |
00000009923.jpg | 9923_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a museum"}] |
00000009925.jpg | 9925_evjvqa | [{"from": "human", "value": "what is everyone doing in front of the café?\n<image>"}, {"from": "gpt", "value": "everyone is decorating the front of the café"}] |
00000009928.jpg | 9928_evjvqa | [{"from": "human", "value": "what color are the chairs of the store?\n<image>"}, {"from": "gpt", "value": "the chairs are red"}] |
00000009929.jpg | 9929_evjvqa | [{"from": "human", "value": "how many cars are parked on the roadside?\n<image>"}, {"from": "gpt", "value": "there are three cars parked on the roadside"}] |
00000009931.jpg | 9931_evjvqa | [{"from": "human", "value": "how many hoses does the gas pump have?\n<image>"}, {"from": "gpt", "value": "the gas pump has two hoses"}] |
00000009933.jpg | 9933_evjvqa | [{"from": "human", "value": "how many medical staffs are standing in front of the house?\n<image>"}, {"from": "gpt", "value": "there is one medical staff standing in front of the house"}] |
00000009934.jpg | 9934_evjvqa | [{"from": "human", "value": "how many cars are parked in front of the store?\n<image>"}, {"from": "gpt", "value": "there are two cars parked in front of the store"}] |
00000009935.jpg | 9935_evjvqa | [{"from": "human", "value": "what goods is the boat where the man is standing on carrying?\n<image>"}, {"from": "gpt", "value": "the boat is carrying many watermelons"}] |
00000009937.jpg | 9937_evjvqa | [{"from": "human", "value": "what stall is the man standing at?\n<image>"}, {"from": "gpt", "value": "standing at the fruit stall"}] |
00000009938.jpg | 9938_evjvqa | [{"from": "human", "value": "what emotion is the woman expressing?\n<image>"}, {"from": "gpt", "value": "the woman is smiling"}] |
00000009939.jpg | 9939_evjvqa | [{"from": "human", "value": "how many people are selecting products at the tet candy stall?\n<image>"}, {"from": "gpt", "value": "at the stall is one person selecting products"}] |
00000009940.jpg | 9940_evjvqa | [{"from": "human", "value": "where is the laptop placed?\n<image>"}, {"from": "gpt", "value": "the laptop is placed in front of the girl"}] |
00000009943.jpg | 9943_evjvqa | [{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing red shirt and black pants"}] |
00000009945.jpg | 9945_evjvqa | [{"from": "human", "value": "what color is the shirt of the boy?\n<image>"}, {"from": "gpt", "value": "the shirt is white"}] |
00000009946.jpg | 9946_evjvqa | [{"from": "human", "value": "how many arms does the statue have?\n<image>"}, {"from": "gpt", "value": "the statue has two arms"}] |
00000009949.jpg | 9949_evjvqa | [{"from": "human", "value": "what color is the uniform of the supermarket staff?\n<image>"}, {"from": "gpt", "value": "the uniform of the supermarket staff has red shirt and black pants"}] |
00000009950.jpg | 9950_evjvqa | [{"from": "human", "value": "where is the group of students standing?\n<image>"}, {"from": "gpt", "value": "the group of students is standing at a martyrs memorial site"}] |
00000009951.jpg | 9951_evjvqa | [{"from": "human", "value": "where is everyone shopping at?\n<image>"}, {"from": "gpt", "value": "everyone is shopping at a supermarket"}] |
00000009954.jpg | 9954_evjvqa | [{"from": "human", "value": "what color is the car in front of the garage?\n<image>"}, {"from": "gpt", "value": "the car is red"}] |
00000009955.jpg | 9955_evjvqa | [{"from": "human", "value": "what color shirt is the man leading the way wearing?\n<image>"}, {"from": "gpt", "value": "wearing white shirt"}] |
00000009956.jpg | 9956_evjvqa | [{"from": "human", "value": "who is the man in yellow shirt going with?\n<image>"}, {"from": "gpt", "value": "going with the man in green jacket"}] |
00000009957.jpg | 9957_evjvqa | [{"from": "human", "value": "how many rows of shelf does the store have?\n<image>"}, {"from": "gpt", "value": "the store has three rows of shelf"}] |
00000009958.jpg | 9958_evjvqa | [{"from": "human", "value": "where is the young man looking to?\n<image>"}, {"from": "gpt", "value": "the young man is looking at the girl"}] |
00000009959.jpg | 9959_evjvqa | [{"from": "human", "value": "what is the girl in orange shirt doing?\n<image>"}, {"from": "gpt", "value": "the girl in orange shirt is using a cellphone"}] |
00000009961.jpg | 9961_evjvqa | [{"from": "human", "value": "who are on the left of the man wearing helmet?\n<image>"}, {"from": "gpt", "value": "on the left is one man wearing black face mask"}] |
00000009962.jpg | 9962_evjvqa | [{"from": "human", "value": "what is the woman in ao dai doing?\n<image>"}, {"from": "gpt", "value": "the woman is introducing the products to the two other people"}] |
00000009963.jpg | 9963_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a supermarket"}] |
00000009965.jpg | 9965_evjvqa | [{"from": "human", "value": "how many people are standing on the boat?\n<image>"}, {"from": "gpt", "value": "there is one person standing on the boat"}] |
00000009969.jpg | 9969_evjvqa | [{"from": "human", "value": "how many people are wearing red ao dai?\n<image>"}, {"from": "gpt", "value": "there is one person wearing red ao dai"}] |
00000009970.jpg | 9970_evjvqa | [{"from": "human", "value": "how many motorbikes are parked in front of the grocery store?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes"}] |
00000009972.jpg | 9972_evjvqa | [{"from": "human", "value": "how many cars are in front of the hair salon?\n<image>"}, {"from": "gpt", "value": "there is one car in front of the hair salon"}] |
00000009974.jpg | 9974_evjvqa | [{"from": "human", "value": "what is the smiling man selling?\n<image>"}, {"from": "gpt", "value": "the man is selling vegetable"}] |
00000009975.jpg | 9975_evjvqa | [{"from": "human", "value": "how many people are sitting in the stall?\n<image>"}, {"from": "gpt", "value": "there is one person sitting in the stall"}] |
00000009976.jpg | 9976_evjvqa | [{"from": "human", "value": "what is the woman wearing?\n<image>"}, {"from": "gpt", "value": "the woman is wearing pink south vietnamese pajama"}] |
00000009977.jpg | 9977_evjvqa | [{"from": "human", "value": "what is this stall selling?\n<image>"}, {"from": "gpt", "value": "the stall is selling souvenirs"}] |
00000009978.jpg | 9978_evjvqa | [{"from": "human", "value": "what are the group of performers wearing?\n<image>"}, {"from": "gpt", "value": "the group of performers are wearing red shirt and white pants"}] |
00000009979.jpg | 9979_evjvqa | [{"from": "human", "value": "how many staffs are standing at the cake introducing stall?\n<image>"}, {"from": "gpt", "value": "there is one staff"}] |
00000009981.jpg | 9981_evjvqa | [{"from": "human", "value": "how many people are walking past the store?\n<image>"}, {"from": "gpt", "value": "there is one person walking past the store"}] |
00000009986.jpg | 9986_evjvqa | [{"from": "human", "value": "how many cars are running in the street?\n<image>"}, {"from": "gpt", "value": "there is one car running in the street"}] |
00000009987.jpg | 9987_evjvqa | [{"from": "human", "value": "what color is the motorbike next to the jackfruit truck?\n<image>"}, {"from": "gpt", "value": "the motorbike is white"}] |
00000009989.jpg | 9989_evjvqa | [{"from": "human", "value": "how many ships are there?\n<image>"}, {"from": "gpt", "value": "there is one ship"}] |
00000009991.jpg | 9991_evjvqa | [{"from": "human", "value": "how many people are sitting opposite the girl in black shirt?\n<image>"}, {"from": "gpt", "value": "there are two people sitting opposite the girl"}] |
00000009995.jpg | 9995_evjvqa | [{"from": "human", "value": "how many chairs are around the table?\n<image>"}, {"from": "gpt", "value": "there are four chairs"}] |
00000009996.jpg | 9996_evjvqa | [{"from": "human", "value": "how many bowls are there?\n<image>"}, {"from": "gpt", "value": "there are two bowls"}] |
00000009997.jpg | 9997_evjvqa | [{"from": "human", "value": "what is the man in black shirt doing?\n<image>"}, {"from": "gpt", "value": "the man in black shirt is pushing a motorbike"}] |
00000009999.jpg | 9999_evjvqa | [{"from": "human", "value": "what color is the basket of the supermarket?\n<image>"}, {"from": "gpt", "value": "the basket is red"}] |
00000006400.jpg | 6400_evjvqa | [{"from": "human", "value": "what is the girl driving the blue vehicle doing?\n<image>"}, {"from": "gpt", "value": "she is buying goods. "}] |
00000006400.jpg | 6400_evjvqa | [{"from": "human", "value": "what color is the coat that the male vendor is wearing?\n<image>"}, {"from": "gpt", "value": "it is grey."}] |
00000006400.jpg | 6400_evjvqa | [{"from": "human", "value": "what color is the helmet that the girl driving the blue vehicle is wearing?\n<image>"}, {"from": "gpt", "value": "it is black."}] |
00000006402.jpg | 6402_evjvqa | [{"from": "human", "value": "what is he doing?\n<image>"}, {"from": "gpt", "value": "he is making a speech."}] |
00000006406.jpg | 6406_evjvqa | [{"from": "human", "value": "what is she doing?\n<image>"}, {"from": "gpt", "value": "she is sitting drinking coffee and using her phone."}] |
00000006406.jpg | 6406_evjvqa | [{"from": "human", "value": "what color is her shirt?\n<image>"}, {"from": "gpt", "value": "the shirt that she is wearing is black."}] |
00000006406.jpg | 6406_evjvqa | [{"from": "human", "value": "what is she looking at?\n<image>"}, {"from": "gpt", "value": "she is looking at the scene from the balcony. "}] |
00000006408.jpg | 6408_evjvqa | [{"from": "human", "value": "what color are her shoes?\n<image>"}, {"from": "gpt", "value": "they are wine red."}] |
00000006408.jpg | 6408_evjvqa | [{"from": "human", "value": "what color are her pants?\n<image>"}, {"from": "gpt", "value": "they are brown."}] |
00000006411.jpg | 6411_evjvqa | [{"from": "human", "value": "what is the man in the green shirt doing?\n<image>"}, {"from": "gpt", "value": "the man in the green shirt is working as a tourist guide. "}] |
00000006411.jpg | 6411_evjvqa | [{"from": "human", "value": "how many women are wearing red?\n<image>"}, {"from": "gpt", "value": "there is one woman wearing red. "}] |
00000006411.jpg | 6411_evjvqa | [{"from": "human", "value": "what color is the man holding the camera's jacket?\n<image>"}, {"from": "gpt", "value": "his jacket is green."}] |
00000006416.jpg | 6416_evjvqa | [{"from": "human", "value": "how many navy officers are wearing life jackets?\n<image>"}, {"from": "gpt", "value": "there is one navy officer wearing the life jacket."}] |
00000006416.jpg | 6416_evjvqa | [{"from": "human", "value": "what is the officer wearing the life jacket doing?\n<image>"}, {"from": "gpt", "value": "he is pointing at the sea. "}] |
00000006416.jpg | 6416_evjvqa | [{"from": "human", "value": "what color are the binoculars that the navy officer is holding?\n<image>"}, {"from": "gpt", "value": "they are black."}] |
00000006418.jpg | 6418_evjvqa | [{"from": "human", "value": "what are the two men doing?\n<image>"}, {"from": "gpt", "value": "they are fixing the billboard."}] |
00000006418.jpg | 6418_evjvqa | [{"from": "human", "value": "what color is the man standing' shirt?\n<image>"}, {"from": "gpt", "value": "it is blue."}] |
00000006418.jpg | 6418_evjvqa | [{"from": "human", "value": "what color are the man sitting's pants?\n<image>"}, {"from": "gpt", "value": "they are black. "}] |
00000006419.jpg | 6419_evjvqa | [{"from": "human", "value": "what color is the roof of the market?\n<image>"}, {"from": "gpt", "value": "it is orange. "}] |
00000006419.jpg | 6419_evjvqa | [{"from": "human", "value": "what is the dominant color of the market?\n<image>"}, {"from": "gpt", "value": "it is yellow."}] |
00000006425.jpg | 6425_evjvqa | [{"from": "human", "value": "what are they doing?\n<image>"}, {"from": "gpt", "value": "they are going on an excursion."}] |
00000006425.jpg | 6425_evjvqa | [{"from": "human", "value": "what color is his t-shirt?\n<image>"}, {"from": "gpt", "value": "it is white."}] |
00000006425.jpg | 6425_evjvqa | [{"from": "human", "value": "how many men are wearing white?\n<image>"}, {"from": "gpt", "value": "there is one man wearing white. "}] |
00000006426.jpg | 6426_evjvqa | [{"from": "human", "value": "what is the woman sitting with her back against the wall doing?\n<image>"}, {"from": "gpt", "value": "the woman sitting with her back against the wall is reading the newspaper."}] |
00000006426.jpg | 6426_evjvqa | [{"from": "human", "value": "how many people are ferrying flowers by bicycle?\n<image>"}, {"from": "gpt", "value": "there is one person ferrying flowers by bicycle."}] |
00000006426.jpg | 6426_evjvqa | [{"from": "human", "value": "how many bicycles are carrying flowers?\n<image>"}, {"from": "gpt", "value": "there are two bicycles carrying flowers."}] |
00000006429.jpg | 6429_evjvqa | [{"from": "human", "value": "what color is her dress?\n<image>"}, {"from": "gpt", "value": "it is purple."}] |
00000006429.jpg | 6429_evjvqa | [{"from": "human", "value": "what color are the sneakers that she is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a pair of white sneakers."}] |
00000006429.jpg | 6429_evjvqa | [{"from": "human", "value": "what color are her sneakers?\n<image>"}, {"from": "gpt", "value": "they are white."}] |
00000006433.jpg | 6433_evjvqa | [{"from": "human", "value": "what are the two women doing?\n<image>"}, {"from": "gpt", "value": "they are trading vegetables."}] |
00000006433.jpg | 6433_evjvqa | [{"from": "human", "value": "why does the woman in the pink jacket meet the woman in the flower top? \n<image>"}, {"from": "gpt", "value": "to buy vegetables."}] |
00000006433.jpg | 6433_evjvqa | [{"from": "human", "value": "what color is the man in the white shirt's mask?\n<image>"}, {"from": "gpt", "value": "it is blue. "}] |
00000006437.jpg | 6437_evjvqa | [{"from": "human", "value": "what are they doing?\n<image>"}, {"from": "gpt", "value": "they are lighting incense. "}] |
00000006437.jpg | 6437_evjvqa | [{"from": "human", "value": "how many girls are wearing pink?\n<image>"}, {"from": "gpt", "value": "there is one girl wearing pink."}] |
00000006437.jpg | 6437_evjvqa | [{"from": "human", "value": "how many people are wearing blue?\n<image>"}, {"from": "gpt", "value": "there is one person wearing blue. "}] |
00000006444.jpg | 6444_evjvqa | [{"from": "human", "value": "what color is the man driving the motorbike's outfit?\n<image>"}, {"from": "gpt", "value": "the man driving the motorbike's outfit is blue. "}] |
00000006444.jpg | 6444_evjvqa | [{"from": "human", "value": "how many men are driving motorbikes?\n<image>"}, {"from": "gpt", "value": "there is one man driving the motorbike. "}] |
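Each row pairs an image filename with a ShareGPT-style `conversations` list: alternating `human` and `gpt` turns, with an `<image>` placeholder marking where the image is injected into the prompt. Below is a minimal sketch of parsing one such row with plain Python; the row values are copied from the first table entry, and whether the `conversations` column arrives as a JSON string (as shown in this preview) or an already-decoded list depends on how the dataset is loaded.

```python
import json

# One raw row from the table above: image filename, id, and the
# conversations field as the JSON string shown in the preview.
row = {
    "image": "00000009884.jpg",
    "id": "9884_evjvqa",
    "conversations": '[{"from": "human", "value": "how many people are wearing yellow shirt?\\n<image>"}, {"from": "gpt", "value": "there are two people wearing yellow shirt"}]',
}

# Decode the ShareGPT-style turn list.
turns = json.loads(row["conversations"])

for turn in turns:
    # "human" turns carry the question plus the <image> placeholder;
    # "gpt" turns carry the answer.
    speaker = turn["from"]
    text = turn["value"].replace("<image>", "").strip()
    print(f"{speaker}: {text}")
```

Running this prints the question/answer pair for the row, e.g. `human: how many people are wearing yellow shirt?` followed by `gpt: there are two people wearing yellow shirt`.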