image (string, 15 chars) | id (string, 8–12 chars) | conversations (string, 79–268 chars) |
---|---|---|
00000011514.jpg | 11514_evjvqa | [{"from": "human", "value": "what color do the students wear uniforms?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000011519.jpg | 11519_evjvqa | [{"from": "human", "value": "where is this building built?\n<image>"}, {"from": "gpt", "value": "on the seaside"}] |
00000011520.jpg | 11520_evjvqa | [{"from": "human", "value": "where is the woman standing?\n<image>"}, {"from": "gpt", "value": "near a bell"}] |
00000011521.jpg | 11521_evjvqa | [{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "in a food fair"}] |
00000011523.jpg | 11523_evjvqa | [{"from": "human", "value": "where are these huts built?\n<image>"}, {"from": "gpt", "value": "in the terraced fields"}] |
00000011524.jpg | 11524_evjvqa | [{"from": "human", "value": "what commodity is the man with two children choosing?\n<image>"}, {"from": "gpt", "value": "choosing boxes of powdered milk"}] |
00000011530.jpg | 11530_evjvqa | [{"from": "human", "value": "where is the girl in the checkered skirt standing?\n<image>"}, {"from": "gpt", "value": "in an amusement park with a lot of snow"}] |
00000011536.jpg | 11536_evjvqa | [{"from": "human", "value": "what is this house made of?\n<image>"}, {"from": "gpt", "value": "of leaves"}] |
00000011537.jpg | 11537_evjvqa | [{"from": "human", "value": "what does a woman wearing a conical hat do?\n<image>"}, {"from": "gpt", "value": "the woman in conical hat is selling fruit"}] |
00000011541.jpg | 11541_evjvqa | [{"from": "human", "value": "what area is the man in the green shirt standing in?\n<image>"}, {"from": "gpt", "value": "restricted access area"}] |
00000011547.jpg | 11547_evjvqa | [{"from": "human", "value": "what does this stall sell?\n<image>"}, {"from": "gpt", "value": "selling fresh meats"}] |
00000011551.jpg | 11551_evjvqa | [{"from": "human", "value": "what are these books about?\n<image>"}, {"from": "gpt", "value": "these books have content about the sea and islands"}] |
00000011556.jpg | 11556_evjvqa | [{"from": "human", "value": "what goods does this store sell?\n<image>"}, {"from": "gpt", "value": "selling all kinds of clothes"}] |
00000011557.jpg | 11557_evjvqa | [{"from": "human", "value": "who is the female friend standing on the sidewalk with?\n<image>"}, {"from": "gpt", "value": "with a male friend"}] |
00000011559.jpg | 11559_evjvqa | [{"from": "human", "value": "what are these people doing?\n<image>"}, {"from": "gpt", "value": "sitting under a porch selling goods"}] |
00000011560.jpg | 11560_evjvqa | [{"from": "human", "value": "where is the girl friend holding the conical hat sitting?\n<image>"}, {"from": "gpt", "value": "in front of an old castle"}] |
00000011562.jpg | 11562_evjvqa | [{"from": "human", "value": "what stall is the woman standing at?\n<image>"}, {"from": "gpt", "value": "fresh food stall"}] |
00000011564.jpg | 11564_evjvqa | [{"from": "human", "value": "what is the pink woman sitting doing?\n<image>"}, {"from": "gpt", "value": "sitting washing dishes"}] |
00000011567.jpg | 11567_evjvqa | [{"from": "human", "value": "where are the two girls sitting?\n<image>"}, {"from": "gpt", "value": "on the side of a pedestal in a coffee shop"}] |
00000011570.jpg | 11570_evjvqa | [{"from": "human", "value": "what is the girl wearing ao dai holding in her hand?\n<image>"}, {"from": "gpt", "value": "a bouquet"}] |
00000011571.jpg | 11571_evjvqa | [{"from": "human", "value": "what artifacts are displayed in the middle of the exhibition hall?\n<image>"}, {"from": "gpt", "value": "a statue"}] |
00000011574.jpg | 11574_evjvqa | [{"from": "human", "value": "what are these parachutes hung up for?\n<image>"}, {"from": "gpt", "value": "to serve the organization of the shopping fair below"}] |
00000011575.jpg | 11575_evjvqa | [{"from": "human", "value": "how many people are standing talking?\n<image>"}, {"from": "gpt", "value": "there was a woman in red ao dai standing talking"}] |
00000011578.jpg | 11578_evjvqa | [{"from": "human", "value": "where do customers who come to dine here park their vehicles?\n<image>"}, {"from": "gpt", "value": "in front of the restaurant"}] |
00000011579.jpg | 11579_evjvqa | [{"from": "human", "value": "what are the main products these shelves sell?\n<image>"}, {"from": "gpt", "value": "mainly christmas decorations"}] |
00000011581.jpg | 11581_evjvqa | [{"from": "human", "value": "what business do these stores do?\n<image>"}, {"from": "gpt", "value": "business in fashion items"}] |
00000011582.jpg | 11582_evjvqa | [{"from": "human", "value": "where are the phoenix trees placed?\n<image>"}, {"from": "gpt", "value": "on either side of a road"}] |
00000011584.jpg | 11584_evjvqa | [{"from": "human", "value": "where is this group of people standing?\n<image>"}, {"from": "gpt", "value": "at the grocery stall"}] |
00000011585.jpg | 11585_evjvqa | [{"from": "human", "value": "what is the man in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing food"}] |
00000011586.jpg | 11586_evjvqa | [{"from": "human", "value": "what vehicle is the woman traveling by?\n<image>"}, {"from": "gpt", "value": "bikes"}] |
00000011587.jpg | 11587_evjvqa | [{"from": "human", "value": "what products does the shop next to the pub sell?\n<image>"}, {"from": "gpt", "value": "the shop next to the pub is a rice shop"}] |
00000011588.jpg | 11588_evjvqa | [{"from": "human", "value": "what are the three soldiers doing?\n<image>"}, {"from": "gpt", "value": "rearranging the packages"}] |
00000011590.jpg | 11590_evjvqa | [{"from": "human", "value": "what counter is the woman in the red dress standing at?\n<image>"}, {"from": "gpt", "value": "stall selling facial cleanser, cream"}] |
00000011591.jpg | 11591_evjvqa | [{"from": "human", "value": "where is the taxi parked?\n<image>"}, {"from": "gpt", "value": "right in front of the market"}] |
00000011593.jpg | 11593_evjvqa | [{"from": "human", "value": "how many motorbikes are crossing the bridge?\n<image>"}, {"from": "gpt", "value": "there is one running across the bridge"}] |
00000011594.jpg | 11594_evjvqa | [{"from": "human", "value": "what is the man in the blue jacket and the standing woman doing?\n<image>"}, {"from": "gpt", "value": "standing to calculate the price"}] |
00000011596.jpg | 11596_evjvqa | [{"from": "human", "value": "where are these bundles of vegetables placed?\n<image>"}, {"from": "gpt", "value": "on the shelf selling vegetables"}] |
00000011597.jpg | 11597_evjvqa | [{"from": "human", "value": "what are this area decorated?\n<image>"}, {"from": "gpt", "value": "this area is decorated with all kinds of christmas decorations"}] |
00000003106.jpg | 3106_evjvqa | [{"from": "human", "value": "what equipment is used to sail?\n<image>"}, {"from": "gpt", "value": "paddle"}] |
00000003106.jpg | 3106_evjvqa | [{"from": "human", "value": "what kind of vehicles are used to carry tourists here?\n<image>"}, {"from": "gpt", "value": "boats"}] |
00000003107.jpg | 3107_evjvqa | [{"from": "human", "value": "how many toy cars is the baby pushing?\n<image>"}, {"from": "gpt", "value": "the baby is pushing two toy cars"}] |
00000003107.jpg | 3107_evjvqa | [{"from": "human", "value": "what color of clothes does the baby pushing the toy cars wear?\n<image>"}, {"from": "gpt", "value": "the baby wears a pink top and brown pants"}] |
00000003107.jpg | 3107_evjvqa | [{"from": "human", "value": "what is the pink t-shirt baby doing?\n<image>"}, {"from": "gpt", "value": "the pink t-shirt baby is pushing toy cars"}] |
00000003109.jpg | 3109_evjvqa | [{"from": "human", "value": "what flags are hung in the row of houses on this street?\n<image>"}, {"from": "gpt", "value": "red flag with yellow stars"}] |
00000003110.jpg | 3110_evjvqa | [{"from": "human", "value": "what are people gathering for?\n<image>"}, {"from": "gpt", "value": "people are gathering to watch the fire"}] |
00000003110.jpg | 3110_evjvqa | [{"from": "human", "value": "what is a crowd watching?\n<image>"}, {"from": "gpt", "value": "they are watching a house fire"}] |
00000003110.jpg | 3110_evjvqa | [{"from": "human", "value": "is there anyone who are not keeping an eye on the fire?\n<image>"}, {"from": "gpt", "value": "there is a young man in a white shirt who doesn't keep an eye on the fire"}] |
00000003111.jpg | 3111_evjvqa | [{"from": "human", "value": "what is the man standing in the distance using to store purchased items in the supermarket?\n<image>"}, {"from": "gpt", "value": "the man standing in the distance is using a trolley to store purchased items in the supermarket"}] |
00000003111.jpg | 3111_evjvqa | [{"from": "human", "value": "what is the man in the deep blue shirt holding to store purchased items in the supermarket?\n<image>"}, {"from": "gpt", "value": "he is holding a plastic basket"}] |
00000003111.jpg | 3111_evjvqa | [{"from": "human", "value": "what is the man in blue jeans doing?\n<image>"}, {"from": "gpt", "value": "he is shopping in the supermarket"}] |
00000003116.jpg | 3116_evjvqa | [{"from": "human", "value": "how many customers are standing in this clothing store?\n<image>"}, {"from": "gpt", "value": "there are no customers in the store"}] |
00000003116.jpg | 3116_evjvqa | [{"from": "human", "value": "where can the ceiling fan be seen?\n<image>"}, {"from": "gpt", "value": "the ceiling fan is on the ceiling in the shop"}] |
00000003116.jpg | 3116_evjvqa | [{"from": "human", "value": "what color is the sliding door of this shop painted?\n<image>"}, {"from": "gpt", "value": "the sliding door of this shop is painted green"}] |
00000003118.jpg | 3118_evjvqa | [{"from": "human", "value": "what does the woman crossing the street hold in her hand?\n<image>"}, {"from": "gpt", "value": "the woman walking across the street is holding bags"}] |
00000003121.jpg | 3121_evjvqa | [{"from": "human", "value": "what is the weather like in this area?\n<image>"}, {"from": "gpt", "value": "it is sunny and little cloudy in this area"}] |
00000003121.jpg | 3121_evjvqa | [{"from": "human", "value": "what is decorated on the water?\n<image>"}, {"from": "gpt", "value": "lotus ornaments"}] |
00000003121.jpg | 3121_evjvqa | [{"from": "human", "value": "how many lotus ornaments can be seen on the river?\n<image>"}, {"from": "gpt", "value": "there are two lotus-shaped ornaments on the river"}] |
00000003125.jpg | 3125_evjvqa | [{"from": "human", "value": "what is the man in the black suit doing?\n<image>"}, {"from": "gpt", "value": "the man in black suit is giving a speech in front of the press"}] |
00000003127.jpg | 3127_evjvqa | [{"from": "human", "value": "where can the taxi be seen?\n<image>"}, {"from": "gpt", "value": "the taxi was driving in front of the truck on the road"}] |
00000003127.jpg | 3127_evjvqa | [{"from": "human", "value": "on which side of the taxi is the truck running?\n<image>"}, {"from": "gpt", "value": "the truck is behind the taxi"}] |
00000003127.jpg | 3127_evjvqa | [{"from": "human", "value": "on which side of the truck is the taxi running?\n<image>"}, {"from": "gpt", "value": "in front of the truck"}] |
00000003136.jpg | 3136_evjvqa | [{"from": "human", "value": "what is the man walking like dressed in?\n<image>"}, {"from": "gpt", "value": "he is wearing a blue shirt and black trousers"}] |
00000003136.jpg | 3136_evjvqa | [{"from": "human", "value": "on which side of the man is the woman in the black coat walking?\n<image>"}, {"from": "gpt", "value": "the woman in black coat is walking behind the man"}] |
00000003136.jpg | 3136_evjvqa | [{"from": "human", "value": "on which side of the black suit woman is the man in the blue shirt walking?\n<image>"}, {"from": "gpt", "value": "in front of the woman wearing a black suit"}] |
00000003140.jpg | 3140_evjvqa | [{"from": "human", "value": "how many boats are on the river?\n<image>"}, {"from": "gpt", "value": "there are three boats on the river"}] |
00000003140.jpg | 3140_evjvqa | [{"from": "human", "value": "what vehicles are on the river?\n<image>"}, {"from": "gpt", "value": "there are three moving boats on the river"}] |
00000003140.jpg | 3140_evjvqa | [{"from": "human", "value": "what kind of vehicle is used to travel on the river?\n<image>"}, {"from": "gpt", "value": "boats"}] |
00000003142.jpg | 3142_evjvqa | [{"from": "human", "value": "what does the man wearing the red coat equip himself to go out on the street?\n<image>"}, {"from": "gpt", "value": "he wears a helmet and carries a bag"}] |
00000003144.jpg | 3144_evjvqa | [{"from": "human", "value": "what is the man standing in the middle of the meeting room for?\n<image>"}, {"from": "gpt", "value": "to speak in front of everybody in the room"}] |
00000003145.jpg | 3145_evjvqa | [{"from": "human", "value": "who is the young man taking with him?\n<image>"}, {"from": "gpt", "value": "the young man is carrying a girl wearing a white dress"}] |
00000003145.jpg | 3145_evjvqa | [{"from": "human", "value": "who is the girl in the white dress holding hands with?\n<image>"}, {"from": "gpt", "value": "the girl in white dress is holding hands with young white t-shirt man"}] |
00000003145.jpg | 3145_evjvqa | [{"from": "human", "value": "what is the weather like in this area?\n<image>"}, {"from": "gpt", "value": "it is sunny here"}] |
00000003147.jpg | 3147_evjvqa | [{"from": "human", "value": "what is the weather like in this area?\n<image>"}, {"from": "gpt", "value": "it is sunny and cloudy in this area"}] |
00000003147.jpg | 3147_evjvqa | [{"from": "human", "value": "what color is the fabric of the flag hanging on the flagpole?\n<image>"}, {"from": "gpt", "value": "the flag is made of red cloth"}] |
00000003147.jpg | 3147_evjvqa | [{"from": "human", "value": "how many lamp posts are there?\n<image>"}, {"from": "gpt", "value": "there are two lampposts"}] |
00000003153.jpg | 3153_evjvqa | [{"from": "human", "value": "what is this man doing?\n<image>"}, {"from": "gpt", "value": "this man is hugging and kissing a woman"}] |
00000003153.jpg | 3153_evjvqa | [{"from": "human", "value": "what is the woman in white ao dai doing?\n<image>"}, {"from": "gpt", "value": "the woman in white ao dai is kissing the man"}] |
00000003153.jpg | 3153_evjvqa | [{"from": "human", "value": "what is this couple doing to each other?\n<image>"}, {"from": "gpt", "value": "they are kissing"}] |
00000003154.jpg | 3154_evjvqa | [{"from": "human", "value": "what tools are used to scoop crab soup?\n<image>"}, {"from": "gpt", "value": "the spoon"}] |
00000003154.jpg | 3154_evjvqa | [{"from": "human", "value": "what food is in this bowl?\n<image>"}, {"from": "gpt", "value": "this bowl is holding crab soup"}] |
00000003154.jpg | 3154_evjvqa | [{"from": "human", "value": "what tool is the crab soup contained in?\n<image>"}, {"from": "gpt", "value": "the crab soup is served in a white bowl"}] |
00000003155.jpg | 3155_evjvqa | [{"from": "human", "value": "on which side of the man riding a bike is the baby sitting?\n<image>"}, {"from": "gpt", "value": "the baby is sitting ahead"}] |
00000003155.jpg | 3155_evjvqa | [{"from": "human", "value": "what vehicle does the man use to carry the child?\n<image>"}, {"from": "gpt", "value": "the man is carrying the child on a bicycle"}] |
00000003155.jpg | 3155_evjvqa | [{"from": "human", "value": "what color is the baby on the bicycle wearing?\n<image>"}, {"from": "gpt", "value": "the baby on bicycle is wearing red t-shirt"}] |
00000003156.jpg | 3156_evjvqa | [{"from": "human", "value": "what is the man in white shirt doing while riding on a black scooter?\n<image>"}, {"from": "gpt", "value": "he is telephoning while driving"}] |
00000003156.jpg | 3156_evjvqa | [{"from": "human", "value": "what colors is the traffic sign post painted?\n<image>"}, {"from": "gpt", "value": "the traffic sign post painted red and white"}] |
00000003159.jpg | 3159_evjvqa | [{"from": "human", "value": "what is the blonde guy in the black jacket wearing on his back?\n<image>"}, {"from": "gpt", "value": "the person in the black jacket is carrying a black bag on his back"}] |
00000003159.jpg | 3159_evjvqa | [{"from": "human", "value": "where does the woman in the blue coat stand compared to the one who dyed his hair blonde?\n<image>"}, {"from": "gpt", "value": "the woman in the blue coat is standing to the right of the person who dyes his hair blonde"}] |
00000003159.jpg | 3159_evjvqa | [{"from": "human", "value": "what kind of fruit is the stall selling right behind the person with the black handbag?\n<image>"}, {"from": "gpt", "value": "right behind the person with the black bag is a mango stall"}] |
00000003160.jpg | 3160_evjvqa | [{"from": "human", "value": "where is this tour taking place?\n<image>"}, {"from": "gpt", "value": "in a museum room"}] |
00000003160.jpg | 3160_evjvqa | [{"from": "human", "value": "what is the woman in the white shirt doing?\n<image>"}, {"from": "gpt", "value": "she is taking pictures of the artifact"}] |
00000003160.jpg | 3160_evjvqa | [{"from": "human", "value": "who is standing right behind the woman in white shirt taking pictures?\n<image>"}, {"from": "gpt", "value": "standing right behind the woman in white shirt taking pictures is a man in black shirt and white pants"}] |
00000003162.jpg | 3162_evjvqa | [{"from": "human", "value": "where are these people trading?\n<image>"}, {"from": "gpt", "value": "in a market"}] |
00000003163.jpg | 3163_evjvqa | [{"from": "human", "value": "what is the shape of the bag of the woman in black standing next to the man with glasses?\n<image>"}, {"from": "gpt", "value": "the bag is white and black stripes"}] |
00000003165.jpg | 3165_evjvqa | [{"from": "human", "value": "how many cars are parked in front of this building?\n<image>"}, {"from": "gpt", "value": "there are no cars"}] |
00000003165.jpg | 3165_evjvqa | [{"from": "human", "value": "how is the sky in this area?\n<image>"}, {"from": "gpt", "value": "the sky is blue and there are no clouds"}] |
00000003165.jpg | 3165_evjvqa | [{"from": "human", "value": "what color is this building designed to be painted?\n<image>"}, {"from": "gpt", "value": "this building is designed with yellow paint"}] |
00000003167.jpg | 3167_evjvqa | [{"from": "human", "value": "what is this girl sitting on?\n<image>"}, {"from": "gpt", "value": "she is sitting on a chair"}] |
00000003167.jpg | 3167_evjvqa | [{"from": "human", "value": "what kind of building is in the far distance?\n<image>"}, {"from": "gpt", "value": "in the distance is a church"}] |
00000003167.jpg | 3167_evjvqa | [{"from": "human", "value": "what is the girl sitting at the bar wearing?\n<image>"}, {"from": "gpt", "value": "the girl is in white top and black pants"}] |
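
Each row pairs an image file with a JSON-encoded list of conversation turns in the common `{"from", "value"}` (LLaVA-style) format, where `<image>` marks the point in the prompt at which the image is inserted. Below is a minimal parsing sketch, assuming the `conversations` column is stored as a JSON string exactly as shown in the preview; the row values are copied from the first row above, and this is not an official loader for the dataset.

```python
import json

# One row from the preview table; `conversations` is a JSON string
# holding a list of {"from": ..., "value": ...} turns.
row = {
    "image": "00000011514.jpg",
    "id": "11514_evjvqa",
    "conversations": '[{"from": "human", "value": "what color do the students wear uniforms?\\n<image>"}, '
                     '{"from": "gpt", "value": "red"}]',
}

turns = json.loads(row["conversations"])
for turn in turns:
    # Map the dataset's "human"/"gpt" roles onto the usual chat roles.
    role = "user" if turn["from"] == "human" else "assistant"
    # "<image>" is a placeholder the training pipeline would replace
    # with the actual image features.
    print(f"{role}: {turn['value'].replace(chr(10), ' ')}")
```

Running this prints the two turns of the first example (`user: what color do the students wear uniforms? <image>` and `assistant: red`); the same loop applies unchanged to any other row in the table.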