Columns: image (string, 15 chars), id (string, 8–12 chars), conversations (string, 79–268 chars)
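For context, a minimal sketch of how rows with this schema could be consumed, assuming the dump is stored as JSON Lines with one object per row. The file name `data.jsonl` and the helper `load_records` are hypothetical, and since the viewer reports `conversations` as a string column, the sketch parses it as embedded JSON when needed:

```python
import json

def load_records(path):
    # Hypothetical loader: assumes one JSON object per line (JSON Lines)
    # with the columns listed above: "image", "id", "conversations".
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            record = json.loads(line)
            conv = record["conversations"]
            # The stats above describe "conversations" as a string, so the
            # turn list may itself be serialized JSON; parse it if so.
            if isinstance(conv, str):
                conv = json.loads(conv)
            record["conversations"] = conv
            yield record

# Usage sketch: print each question/answer pair, stripping the "<image>"
# placeholder that marks where the image is injected into the prompt.
for rec in load_records("data.jsonl"):
    question = rec["conversations"][0]["value"].replace("\n<image>", "")
    answer = rec["conversations"][1]["value"]
    print(rec["image"], rec["id"], question, "->", answer)
```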
00000000059.jpg
59_evjvqa
[{"from": "human", "value": "what is the masked man in the left corner doing?\n<image>"}, {"from": "gpt", "value": "choosing pitaya"}]
00000000059.jpg
59_evjvqa
[{"from": "human", "value": "what does the stand behind the bread basket show?\n<image>"}, {"from": "gpt", "value": "pitaya"}]
00000000062.jpg
62_evjvqa
[{"from": "human", "value": "how many people are sitting on the sidewalks?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000062.jpg
62_evjvqa
[{"from": "human", "value": "what is the man sitting on the sidewalks doing?\n<image>"}, {"from": "gpt", "value": "surfing the phone"}]
00000000062.jpg
62_evjvqa
[{"from": "human", "value": "how does the girl wearing non la dress?\n<image>"}, {"from": "gpt", "value": "white ao dai, hands holding a black bag"}]
00000000062.jpg
62_evjvqa
[{"from": "human", "value": "what things is the girl wearing a white ao dai holding?\n<image>"}, {"from": "gpt", "value": "a black bag"}]
00000000068.jpg
68_evjvqa
[{"from": "human", "value": "what does this area exhibit?\n<image>"}, {"from": "gpt", "value": "a skeleton of whale"}]
00000000068.jpg
68_evjvqa
[{"from": "human", "value": "where are this skeleton of whale exhibited?\n<image>"}, {"from": "gpt", "value": "in an exhibition room of a museum"}]
00000000069.jpg
69_evjvqa
[{"from": "human", "value": "which products does the discount price tag show the discount price for?\n<image>"}, {"from": "gpt", "value": "rice"}]
00000000069.jpg
69_evjvqa
[{"from": "human", "value": "what is the main product that this store shelf show?\n<image>"}, {"from": "gpt", "value": "rice"}]
00000000071.jpg
71_evjvqa
[{"from": "human", "value": "what is the color of the life jackets of those travelling on boats?\n<image>"}, {"from": "gpt", "value": "orange"}]
00000000071.jpg
71_evjvqa
[{"from": "human", "value": "how many people are sitting on the blue boat?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000071.jpg
71_evjvqa
[{"from": "human", "value": "how many people are sitting on the green boat?\n<image>"}, {"from": "gpt", "value": "six people"}]
00000000071.jpg
71_evjvqa
[{"from": "human", "value": "what color does the green boat's rower wear?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000000071.jpg
71_evjvqa
[{"from": "human", "value": "what color does the blue boat's rower wear?\n<image>"}, {"from": "gpt", "value": "red-brown green"}]
00000000071.jpg
71_evjvqa
[{"from": "human", "value": "how many boats are carrying tourists?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000075.jpg
75_evjvqa
[{"from": "human", "value": "what is the guy holding a peach branch doing?\n<image>"}, {"from": "gpt", "value": "telephoning"}]
00000000075.jpg
75_evjvqa
[{"from": "human", "value": "what is the shirt's color of the guy holding a peach branch and telephoning?\n<image>"}, {"from": "gpt", "value": "black"}]
00000000077.jpg
77_evjvqa
[{"from": "human", "value": "what products are being exhibited at the stalls in the picture?\n<image>"}, {"from": "gpt", "value": "bracelet, jewelleries"}]
00000000077.jpg
77_evjvqa
[{"from": "human", "value": "where is the woman standing next to the pink girl putting hand on?\n<image>"}, {"from": "gpt", "value": "the stomach of the girl wearing a black shirt"}]
00000000077.jpg
77_evjvqa
[{"from": "human", "value": "what is the woman wearing a red jacket in the corner doing?\n<image>"}, {"from": "gpt", "value": "putting hands up to choose jewelleries"}]
00000000077.jpg
77_evjvqa
[{"from": "human", "value": "how many people are choosing jewelleries next to the three masked women?\n<image>"}, {"from": "gpt", "value": "three people, two unmasked and one in a red jacket"}]
00000000077.jpg
77_evjvqa
[{"from": "human", "value": "is the jewellery stall's owner male or female?\n<image>"}, {"from": "gpt", "value": "male"}]
00000000077.jpg
77_evjvqa
[{"from": "human", "value": "what color does the jewellery stall's owner wear? \n<image>"}, {"from": "gpt", "value": "brown red"}]
00000000079.jpg
79_evjvqa
[{"from": "human", "value": "where is the clothes stand?\n<image>"}, {"from": "gpt", "value": "at the front of the economy and irrigation management institution"}]
00000000079.jpg
79_evjvqa
[{"from": "human", "value": "how many people are sitting on chairs?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000079.jpg
79_evjvqa
[{"from": "human", "value": "what are the two women in the left corner doing?\n<image>"}, {"from": "gpt", "value": "eating"}]
00000000079.jpg
79_evjvqa
[{"from": "human", "value": "how many people are choosing clothes?\n<image>"}, {"from": "gpt", "value": "five people"}]
00000000079.jpg
79_evjvqa
[{"from": "human", "value": "how many motor scooters are around the seating man?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000081.jpg
81_evjvqa
[{"from": "human", "value": "what is the woman in brown holding?\n<image>"}, {"from": "gpt", "value": "a peach branch, a coconut water bag and two stalks of sugarcane"}]
00000000081.jpg
81_evjvqa
[{"from": "human", "value": "how many branches of peach is the woman in brown holding?\n<image>"}, {"from": "gpt", "value": "the woman is holding a branch of peach"}]
00000000081.jpg
81_evjvqa
[{"from": "human", "value": "how many stalks of sugarcane is the woman in brown holding?\n<image>"}, {"from": "gpt", "value": "two"}]
00000000081.jpg
81_evjvqa
[{"from": "human", "value": "what is in the bag which the woman in brown are holding?\n<image>"}, {"from": "gpt", "value": "a coconut"}]
00000000083.jpg
83_evjvqa
[{"from": "human", "value": "where is these houses built?\n<image>"}, {"from": "gpt", "value": "alongside a river"}]
00000000089.jpg
89_evjvqa
[{"from": "human", "value": "how many bottles of water are put on the barricade?\n<image>"}, {"from": "gpt", "value": "two bottles"}]
00000000089.jpg
89_evjvqa
[{"from": "human", "value": "where is the barricade built?\n<image>"}, {"from": "gpt", "value": "on a bridge"}]
00000000089.jpg
89_evjvqa
[{"from": "human", "value": "how many landing motor scooters are on the bridge?\n<image>"}, {"from": "gpt", "value": "there are two motor scooters landing on the bridge"}]
00000000090.jpg
90_evjvqa
[{"from": "human", "value": "what color is the name of the place painted?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000091.jpg
91_evjvqa
[{"from": "human", "value": "where is the cyclo landing?\n<image>"}, {"from": "gpt", "value": "on the sidewalks, outside of a house"}]
00000000091.jpg
91_evjvqa
[{"from": "human", "value": "how many cyclos are there landing outside?\n<image>"}, {"from": "gpt", "value": "one cyclo"}]
00000000092.jpg
92_evjvqa
[{"from": "human", "value": "what is the girl in a red jacket doing?\n<image>"}, {"from": "gpt", "value": "surfing the phone"}]
00000000092.jpg
92_evjvqa
[{"from": "human", "value": "where is this scenery recorded?\n<image>"}, {"from": "gpt", "value": "at a billing area of a supermarket"}]
00000000094.jpg
94_evjvqa
[{"from": "human", "value": "by which painting topic is the wall at the side of the house decorated?\n<image>"}, {"from": "gpt", "value": "ocean topic"}]
00000000094.jpg
94_evjvqa
[{"from": "human", "value": "what scenery does the painting on the house wall depict?\n<image>"}, {"from": "gpt", "value": "the undersea scenery"}]
00000000094.jpg
94_evjvqa
[{"from": "human", "value": "where is the things in the painting on the wall?\n<image>"}, {"from": "gpt", "value": "undersea"}]
00000000094.jpg
94_evjvqa
[{"from": "human", "value": "what are the main creatures painted in the drawing on the wall?\n<image>"}, {"from": "gpt", "value": "sea creature"}]
00000000096.jpg
96_evjvqa
[{"from": "human", "value": "how many people are painting the paintings?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000096.jpg
96_evjvqa
[{"from": "human", "value": "how many craftsmen are wearing white hat?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000096.jpg
96_evjvqa
[{"from": "human", "value": "what are the two men doing?\n<image>"}, {"from": "gpt", "value": "painting"}]
00000000098.jpg
98_evjvqa
[{"from": "human", "value": "what is the blue-shirt man wearing a white mask doing?\n<image>"}, {"from": "gpt", "value": "receiving a gift from a soldier"}]
00000000098.jpg
98_evjvqa
[{"from": "human", "value": "what is the soldier doing?\n<image>"}, {"from": "gpt", "value": "giving a gift to the man in a blue shirt"}]
00000000099.jpg
99_evjvqa
[{"from": "human", "value": "what is the hat's color of the booth owner?\n<image>"}, {"from": "gpt", "value": "white"}]
00000000099.jpg
99_evjvqa
[{"from": "human", "value": "what hat does the customer wear at the booth?\n<image>"}, {"from": "gpt", "value": "a pink helmet"}]
00000000099.jpg
99_evjvqa
[{"from": "human", "value": "what product does the woman wearing a pink helmet intend to buy?\n<image>"}, {"from": "gpt", "value": "a dishwashing liquid"}]
00000000099.jpg
99_evjvqa
[{"from": "human", "value": "what products does the booth of the woman wearing a green jacket sell?\n<image>"}, {"from": "gpt", "value": "dishwashing liquid"}]
00000000903.jpg
903_evjvqa
[{"from": "human", "value": "what is the female employee stooping for ?\n<image>"}, {"from": "gpt", "value": "to put products on the shelves"}]
00000000903.jpg
903_evjvqa
[{"from": "human", "value": "what kind of product is displayed to sell on the shelf where the employee is putting products on?\n<image>"}, {"from": "gpt", "value": "cakes"}]
00000000903.jpg
903_evjvqa
[{"from": "human", "value": "what does the girl putting products on the shelf wear?\n<image>"}, {"from": "gpt", "value": "blue shirt and black pants"}]
00000000910.jpg
910_evjvqa
[{"from": "human", "value": "what event are the people here visiting?\n<image>"}, {"from": "gpt", "value": "book fair"}]
00000000910.jpg
910_evjvqa
[{"from": "human", "value": "what does this row of shelves mainly contain?\n<image>"}, {"from": "gpt", "value": "books and comics"}]
00000000910.jpg
910_evjvqa
[{"from": "human", "value": "what is the boy in the black and orange striped shirt doing at the book fair?\n<image>"}, {"from": "gpt", "value": "reading comics"}]
00000000911.jpg
911_evjvqa
[{"from": "human", "value": "which side of the saleswoman is the man in blue shirt standing?\n<image>"}, {"from": "gpt", "value": "opposite"}]
00000000911.jpg
911_evjvqa
[{"from": "human", "value": "what is the woman in white shirt standing there to do?\n<image>"}, {"from": "gpt", "value": "to consult the customer to sell products"}]
00000000911.jpg
911_evjvqa
[{"from": "human", "value": "who is the employee serving the customer in blue shirt?\n<image>"}, {"from": "gpt", "value": "the girl in white shirt"}]
00000000915.jpg
915_evjvqa
[{"from": "human", "value": "how many people are sitting and drinking coffee at the coffee club?\n<image>"}, {"from": "gpt", "value": "four people"}]
00000000915.jpg
915_evjvqa
[{"from": "human", "value": "what is the position of the coffee tables and chairs here?\n<image>"}, {"from": "gpt", "value": "along the windows, near the handrail"}]
00000000915.jpg
915_evjvqa
[{"from": "human", "value": "who is chatting with the woman in the cafe?\n<image>"}, {"from": "gpt", "value": "the man wearing gray shirt"}]
00000000915.jpg
915_evjvqa
[{"from": "human", "value": "what is the young man in white shirt holding in his hand?\n<image>"}, {"from": "gpt", "value": "mobile phone"}]
00000000918.jpg
918_evjvqa
[{"from": "human", "value": "what kind of vehicle is the man in yellow shirt riding?\n<image>"}, {"from": "gpt", "value": "bicycle"}]
00000000918.jpg
918_evjvqa
[{"from": "human", "value": "what is the man in yellow shirt going through?\n<image>"}, {"from": "gpt", "value": "temple gate"}]
00000000918.jpg
918_evjvqa
[{"from": "human", "value": "what does the man cycling through gate wear?\n<image>"}, {"from": "gpt", "value": "yellow t-shirt and gray trousers"}]
00000000924.jpg
924_evjvqa
[{"from": "human", "value": "what are these two young men wearing on their chests?\n<image>"}, {"from": "gpt", "value": "aprons"}]
00000000924.jpg
924_evjvqa
[{"from": "human", "value": "what are these two men holding on their left hands?\n<image>"}, {"from": "gpt", "value": "scanner"}]
00000000924.jpg
924_evjvqa
[{"from": "human", "value": "what are these two employees doing in the supermarket?\n<image>"}, {"from": "gpt", "value": "scanning products codes"}]
00000000925.jpg
925_evjvqa
[{"from": "human", "value": "what is the woman on gray shirt doing at the school gate?\n<image>"}, {"from": "gpt", "value": "measuring body temperature"}]
00000000925.jpg
925_evjvqa
[{"from": "human", "value": "what is the man being done by the woman in gray shirt?\n<image>"}, {"from": "gpt", "value": "measured body temperature"}]
00000000925.jpg
925_evjvqa
[{"from": "human", "value": "where does the body temperature measurement by the woman in gray shirt take place?\n<image>"}, {"from": "gpt", "value": "at the historical site gate"}]
00000000925.jpg
925_evjvqa
[{"from": "human", "value": "which side of the gate lies the rules board of visiting the historical site?\n<image>"}, {"from": "gpt", "value": "right side"}]
00000000927.jpg
927_evjvqa
[{"from": "human", "value": "where is the little girl standing?\n<image>"}, {"from": "gpt", "value": "in the supermarket trolley"}]
00000000927.jpg
927_evjvqa
[{"from": "human", "value": "what is the woman in red camisole doing to the customers?\n<image>"}, {"from": "gpt", "value": "promoting products"}]
00000000927.jpg
927_evjvqa
[{"from": "human", "value": "what is the woman in pink dress wearing behind her back?\n<image>"}, {"from": "gpt", "value": "handbag"}]
00000000927.jpg
927_evjvqa
[{"from": "human", "value": "by whom is the little girl taken out to go shopping?\n<image>"}, {"from": "gpt", "value": "the man in white shirt and the woman in pink dress"}]
00000000931.jpg
931_evjvqa
[{"from": "human", "value": "how many people are sitting on the pavement?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000931.jpg
931_evjvqa
[{"from": "human", "value": "where are the motorbikes parked?\n<image>"}, {"from": "gpt", "value": "on the pavement"}]
00000000937.jpg
937_evjvqa
[{"from": "human", "value": "what is the young man in restaurant uniform next to the one in blue shirt doing?\n<image>"}, {"from": "gpt", "value": "making noodles"}]
00000000937.jpg
937_evjvqa
[{"from": "human", "value": "what is the young man in blue shirt looking at?\n<image>"}, {"from": "gpt", "value": "the young man making noodles"}]
00000000937.jpg
937_evjvqa
[{"from": "human", "value": "what kind of ingredient is the person in white shirt with blue mask making? \n<image>"}, {"from": "gpt", "value": "noodles"}]
00000000938.jpg
938_evjvqa
[{"from": "human", "value": "what color are the outfits that this couple is wearing?\n<image>"}, {"from": "gpt", "value": "white"}]
00000000938.jpg
938_evjvqa
[{"from": "human", "value": "what is the young man doing to the woman?\n<image>"}, {"from": "gpt", "value": "kissing cheek"}]
00000000938.jpg
938_evjvqa
[{"from": "human", "value": "how does this couple pose for a photo?\n<image>"}, {"from": "gpt", "value": "the man kissing the woman's cheek"}]
00000000945.jpg
945_evjvqa
[{"from": "human", "value": "what kind of fruits are these women selling?\n<image>"}, {"from": "gpt", "value": "banana"}]
00000000945.jpg
945_evjvqa
[{"from": "human", "value": "what are these women arranging on the tarpaulins right in front of them?\n<image>"}, {"from": "gpt", "value": "bunches of bananas"}]
00000000945.jpg
945_evjvqa
[{"from": "human", "value": "what kind of hats do almost all the women in this market wear?\n<image>"}, {"from": "gpt", "value": "conical hats"}]
00000000953.jpg
953_evjvqa
[{"from": "human", "value": "how many upper floors does the building where the boys are sitting have?\n<image>"}, {"from": "gpt", "value": "three"}]
00000000953.jpg
953_evjvqa
[{"from": "human", "value": "where are the children sitting?\n<image>"}, {"from": "gpt", "value": "the staircase under a five-storey building"}]
00000000953.jpg
953_evjvqa
[{"from": "human", "value": "what vehicle is the person wearing white helmet intending to ride?\n<image>"}, {"from": "gpt", "value": "motorbike"}]
00000000955.jpg
955_evjvqa
[{"from": "human", "value": "what is the woman wearing blue face mask doing in the market? \n<image>"}, {"from": "gpt", "value": "buying flowers"}]
00000000955.jpg
955_evjvqa
[{"from": "human", "value": "what does the woman in gray outfit come to the market to do?\n<image>"}, {"from": "gpt", "value": "buy flowers"}]
00000000955.jpg
955_evjvqa
[{"from": "human", "value": "what does the hand of the woman wearing blue face mask and gray outfit hold?\n<image>"}, {"from": "gpt", "value": "a bunch of flowers"}]
00000000955.jpg
955_evjvqa
[{"from": "human", "value": "what does the woman holding a bunch flower wear?\n<image>"}, {"from": "gpt", "value": "gray shirt and trousers, black shoes"}]