image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268)
00000009709.jpg
9709_evjvqa
[{"from": "human", "value": "who is wearing the backpack on the chest?\n<image>"}, {"from": "gpt", "value": "the girl in white shirt"}]
00000009713.jpg
9713_evjvqa
[{"from": "human", "value": "what is this staff doing?\n<image>"}, {"from": "gpt", "value": "she is arranging the agricultural products on the shelf"}]
00000009714.jpg
9714_evjvqa
[{"from": "human", "value": "what is the woman in black shirt doing?\n<image>"}, {"from": "gpt", "value": "she is shopping"}]
00000009715.jpg
9715_evjvqa
[{"from": "human", "value": "what is this man doing in front of the cabinet?\n<image>"}, {"from": "gpt", "value": "he is sticking the announcement paper on the cabinet"}]
00000009716.jpg
9716_evjvqa
[{"from": "human", "value": "how many bird cages are hung on the porch?\n<image>"}, {"from": "gpt", "value": "there are four bird cages hung on the porch"}]
00000009717.jpg
9717_evjvqa
[{"from": "human", "value": "what animal is standing next to the motorcycle?\n<image>"}, {"from": "gpt", "value": "the dog is standing next to the motorcycle"}]
00000009718.jpg
9718_evjvqa
[{"from": "human", "value": "what is the woman in red shirt holding?\n<image>"}, {"from": "gpt", "value": "she is holding a bouquet"}]
00000009721.jpg
9721_evjvqa
[{"from": "human", "value": "what animal is walking next to the cow?\n<image>"}, {"from": "gpt", "value": "walking next to the cow is a calf"}]
00000009722.jpg
9722_evjvqa
[{"from": "human", "value": "what is the young man in black shirt holding in his hand?\n<image>"}, {"from": "gpt", "value": "the young man in black shirt is holding the food picker in his hand"}]
00000009724.jpg
9724_evjvqa
[{"from": "human", "value": "what is this activity?\n<image>"}, {"from": "gpt", "value": "a floating market on the river"}]
00000009726.jpg
9726_evjvqa
[{"from": "human", "value": "what color shirt is the person riding a bicycle wearing?\n<image>"}, {"from": "gpt", "value": "the person riding a bicycle is wearing a red shirt"}]
00000009728.jpg
9728_evjvqa
[{"from": "human", "value": "how many white patio umbrellas are near the pond?\n<image>"}, {"from": "gpt", "value": "there are two white patio umbrellas"}]
00000009729.jpg
9729_evjvqa
[{"from": "human", "value": "where are the two girls sitting?\n<image>"}, {"from": "gpt", "value": "the two girls are sitting on a bench under the tree"}]
00000009736.jpg
9736_evjvqa
[{"from": "human", "value": "where is this couple sitting?\n<image>"}, {"from": "gpt", "value": "they are sitting on the steps in front of the temple gate"}]
00000009739.jpg
9739_evjvqa
[{"from": "human", "value": "which line are the people in orange shirts standing in?\n<image>"}, {"from": "gpt", "value": "the people in orange shirts are standing in the line on the right"}]
00000009740.jpg
9740_evjvqa
[{"from": "human", "value": "what is the girl under the sun wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a black dress"}]
00000009742.jpg
9742_evjvqa
[{"from": "human", "value": "who is wearing a different color shirt from everyone else?\n<image>"}, {"from": "gpt", "value": "the youth in white shirt"}]
00000009746.jpg
9746_evjvqa
[{"from": "human", "value": "by what means of transport is the woman in purple shirt moving in the market?\n<image>"}, {"from": "gpt", "value": "she is walking"}]
00000009747.jpg
9747_evjvqa
[{"from": "human", "value": "which side of the power pole is the bicycle located?\n<image>"}, {"from": "gpt", "value": "the bicycle is located on the left of the power pole"}]
00000009750.jpg
9750_evjvqa
[{"from": "human", "value": "who is this girl standing with?\n<image>"}, {"from": "gpt", "value": "she is standing alone"}]
00000009751.jpg
9751_evjvqa
[{"from": "human", "value": "what color present box is the girl in red dress sitting on?\n<image>"}, {"from": "gpt", "value": "she is sitting on a cyan present box"}]
00000009752.jpg
9752_evjvqa
[{"from": "human", "value": "what tool is the woman wearing red clothes holding?\n<image>"}, {"from": "gpt", "value": "she is holding a shovel"}]
00000009753.jpg
9753_evjvqa
[{"from": "human", "value": "how is the weather in this area?\n<image>"}, {"from": "gpt", "value": "it is sunny in this area"}]
00000009754.jpg
9754_evjvqa
[{"from": "human", "value": "what does the traffic sign on the roadside prohibit?\n<image>"}, {"from": "gpt", "value": "prohibiting cars"}]
00000009758.jpg
9758_evjvqa
[{"from": "human", "value": "which car is not white like the other cars?\n<image>"}, {"from": "gpt", "value": "the yellow car on the left"}]
00000009759.jpg
9759_evjvqa
[{"from": "human", "value": "what is this woman wearing on her face?\n<image>"}, {"from": "gpt", "value": "she is wearing sunglasses"}]
00000009760.jpg
9760_evjvqa
[{"from": "human", "value": "what are the two women in orange coats standing for?\n<image>"}, {"from": "gpt", "value": "they are watching the displayed flowers"}]
00000009765.jpg
9765_evjvqa
[{"from": "human", "value": "how is the weather in this pagoda area?\n<image>"}, {"from": "gpt", "value": "it is sunny in this area"}]
00000009767.jpg
9767_evjvqa
[{"from": "human", "value": "what are the two students wearing on their collars?\n<image>"}, {"from": "gpt", "value": "they are wearing scarves"}]
00000009768.jpg
9768_evjvqa
[{"from": "human", "value": "how many people with firearms are standing guard?\n<image>"}, {"from": "gpt", "value": "there are two people with firearms standing guard"}]
00000009771.jpg
9771_evjvqa
[{"from": "human", "value": "what dish does the largest pot have?\n<image>"}, {"from": "gpt", "value": "the largest pot has shumai"}]
00000009772.jpg
9772_evjvqa
[{"from": "human", "value": "what color shoes is the girl standing outside the bookstore wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing white shoes"}]
00000009773.jpg
9773_evjvqa
[{"from": "human", "value": "what is the woman holding a bag behind her back doing?\n<image>"}, {"from": "gpt", "value": "she is waiting to buy stuff"}]
00000009775.jpg
9775_evjvqa
[{"from": "human", "value": "how many women are sitting next to the man in orange shirt?\n<image>"}, {"from": "gpt", "value": "there are two women sitting next to the man in orange shirt"}]
00000009776.jpg
9776_evjvqa
[{"from": "human", "value": "what is this supermarket selling?\n<image>"}, {"from": "gpt", "value": "housewares"}]
00000009777.jpg
9777_evjvqa
[{"from": "human", "value": "what is this stall selling?\n<image>"}, {"from": "gpt", "value": "electronics"}]
00000009783.jpg
9783_evjvqa
[{"from": "human", "value": "what color are the plastic chairs at this night market?\n<image>"}, {"from": "gpt", "value": "the plastic chairs are blue"}]
00000009787.jpg
9787_evjvqa
[{"from": "human", "value": "what event are these people attending?\n<image>"}, {"from": "gpt", "value": "they are attending a book fair"}]
00000009791.jpg
9791_evjvqa
[{"from": "human", "value": "who is wearing a white helmet?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a white helmet"}]
00000009792.jpg
9792_evjvqa
[{"from": "human", "value": "how many bikes are parked on the sidewalk?\n<image>"}, {"from": "gpt", "value": "there are five bikes parked on the sidewalk"}]
00000009793.jpg
9793_evjvqa
[{"from": "human", "value": "who is wearing torn jeans?\n<image>"}, {"from": "gpt", "value": "the boy standing second from the right"}]
00000009794.jpg
9794_evjvqa
[{"from": "human", "value": "what pet is the shirtless man walking?\n<image>"}, {"from": "gpt", "value": "that man is walking his dog"}]
00000009799.jpg
9799_evjvqa
[{"from": "human", "value": "what occasion is this store selling decoration items for?\n<image>"}, {"from": "gpt", "value": "lunar new year"}]
00000011404.jpg
11404_evjvqa
[{"from": "human", "value": "how many people are standing near the shrine?\n<image>"}, {"from": "gpt", "value": "there are two people standing near the shrine"}]
00000011405.jpg
11405_evjvqa
[{"from": "human", "value": "what time of day is the scene?\n<image>"}, {"from": "gpt", "value": "the scene is evening"}]
00000011406.jpg
11406_evjvqa
[{"from": "human", "value": "what material is the house built of?\n<image>"}, {"from": "gpt", "value": "the house is built of wood"}]
00000011408.jpg
11408_evjvqa
[{"from": "human", "value": "what color is the car in front of the house?\n<image>"}, {"from": "gpt", "value": "the car is white"}]
00000011414.jpg
11414_evjvqa
[{"from": "human", "value": "what time of day is the scene?\n<image>"}, {"from": "gpt", "value": "the scene is evening"}]
00000011415.jpg
11415_evjvqa
[{"from": "human", "value": "how many workers are installing the shop's sign?\n<image>"}, {"from": "gpt", "value": "there is one worker installing the shop's sign"}]
00000011417.jpg
11417_evjvqa
[{"from": "human", "value": "on which shoulder is the girl carrying the bag?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a handbag on her left shoulder"}]
00000011418.jpg
11418_evjvqa
[{"from": "human", "value": "how many people are sitting next to the street sign?\n<image>"}, {"from": "gpt", "value": "there is a person sitting next to the street sign"}]
00000011421.jpg
11421_evjvqa
[{"from": "human", "value": "what color are the flags?\n<image>"}, {"from": "gpt", "value": "the flags are red"}]
00000011422.jpg
11422_evjvqa
[{"from": "human", "value": "how many air conditioners are there in the room?\n<image>"}, {"from": "gpt", "value": "there is an air conditioner in the room"}]
00000011423.jpg
11423_evjvqa
[{"from": "human", "value": "on which side of the guy is the girl standing?\n<image>"}, {"from": "gpt", "value": "the girl is standing in front of the guy"}]
00000011424.jpg
11424_evjvqa
[{"from": "human", "value": "how many people are carrying the burden?\n<image>"}, {"from": "gpt", "value": "there is one person carrying the burden"}]
00000011425.jpg
11425_evjvqa
[{"from": "human", "value": "which province does the bridge belong to?\n<image>"}, {"from": "gpt", "value": "the bridge belongs to ca mau province"}]
00000011426.jpg
11426_evjvqa
[{"from": "human", "value": "how many bowls of food are there?\n<image>"}, {"from": "gpt", "value": "there are three bowls of food"}]
00000011427.jpg
11427_evjvqa
[{"from": "human", "value": "how many people are wearing helmets?\n<image>"}, {"from": "gpt", "value": "there is a person wearing a helmet"}]
00000011428.jpg
11428_evjvqa
[{"from": "human", "value": "where is the speaker located?\n<image>"}, {"from": "gpt", "value": "the speaker is placed in the corner of the room"}]
00000011429.jpg
11429_evjvqa
[{"from": "human", "value": "what time of the day is the scene?\n<image>"}, {"from": "gpt", "value": "the scene is evening"}]
00000011431.jpg
11431_evjvqa
[{"from": "human", "value": "what is the girl on the road doing?\n<image>"}, {"from": "gpt", "value": "the girl is jogging"}]
00000011433.jpg
11433_evjvqa
[{"from": "human", "value": "how many people are standing in front of the fruit shelves?\n<image>"}, {"from": "gpt", "value": "there are two people standing in front of the fruit shelves"}]
00000011435.jpg
11435_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this area is a market"}]
00000011437.jpg
11437_evjvqa
[{"from": "human", "value": "where are people sitting?\n<image>"}, {"from": "gpt", "value": "people are sitting in a hut"}]
00000011444.jpg
11444_evjvqa
[{"from": "human", "value": "which side of the boy is the girl standing on?\n<image>"}, {"from": "gpt", "value": "the girl is standing on the right-hand side of the boy"}]
00000011446.jpg
11446_evjvqa
[{"from": "human", "value": "where is the standing human statue placed?\n<image>"}, {"from": "gpt", "value": "the statue is placed in the center of the room"}]
00000011448.jpg
11448_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a park"}]
00000011451.jpg
11451_evjvqa
[{"from": "human", "value": "how many support soldiers are there in the supermarket?\n<image>"}, {"from": "gpt", "value": "there are two support soldiers in the supermarket"}]
00000011452.jpg
11452_evjvqa
[{"from": "human", "value": "how many trees are there in front of the gate?\n<image>"}, {"from": "gpt", "value": "there are two trees in front of the gate"}]
00000011455.jpg
11455_evjvqa
[{"from": "human", "value": "how many girls are there?\n<image>"}, {"from": "gpt", "value": "there are three girls"}]
00000011456.jpg
11456_evjvqa
[{"from": "human", "value": "how many pillars are there in front of the shrine?\n<image>"}, {"from": "gpt", "value": "there are four pillars in front of the shrine"}]
00000011458.jpg
11458_evjvqa
[{"from": "human", "value": "what time of day is the view?\n<image>"}, {"from": "gpt", "value": "the view is evening"}]
00000011461.jpg
11461_evjvqa
[{"from": "human", "value": "what color is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a white shirt"}]
00000011462.jpg
11462_evjvqa
[{"from": "human", "value": "what is the color of the car in front of the house?\n<image>"}, {"from": "gpt", "value": "the car is red"}]
00000011463.jpg
11463_evjvqa
[{"from": "human", "value": "how many people are standing next to the christmas pine tree?\n<image>"}, {"from": "gpt", "value": "there are three people standing next to the christmas tree"}]
00000011465.jpg
11465_evjvqa
[{"from": "human", "value": "how many pine trees are there in front of the building?\n<image>"}, {"from": "gpt", "value": "there are two pine trees in front of the building"}]
00000011466.jpg
11466_evjvqa
[{"from": "human", "value": "what color is the wallet?\n<image>"}, {"from": "gpt", "value": "the wallet is black"}]
00000011468.jpg
11468_evjvqa
[{"from": "human", "value": "how many people wearing police uniforms are in the supermarket?\n<image>"}, {"from": "gpt", "value": "there is a person wearing a police uniform"}]
00000011469.jpg
11469_evjvqa
[{"from": "human", "value": "how many white plates are there?\n<image>"}, {"from": "gpt", "value": "there are two white plates"}]
00000011473.jpg
11473_evjvqa
[{"from": "human", "value": "what kind of house is it?\n<image>"}, {"from": "gpt", "value": "it is a house on stilts"}]
00000011475.jpg
11475_evjvqa
[{"from": "human", "value": "how many entrances does the gate have?\n<image>"}, {"from": "gpt", "value": "the gate has three entrances"}]
00000011478.jpg
11478_evjvqa
[{"from": "human", "value": "what are the kids doing?\n<image>"}, {"from": "gpt", "value": "the kids are playing a group game"}]
00000011480.jpg
11480_evjvqa
[{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing an orange skirt"}]
00000011481.jpg
11481_evjvqa
[{"from": "human", "value": "where is the gravestone located?\n<image>"}, {"from": "gpt", "value": "the gravestone is placed in the center of the yard"}]
00000011482.jpg
11482_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "the girl is sitting at the barrier beside the railway"}]
00000011483.jpg
11483_evjvqa
[{"from": "human", "value": "what color dress is the walking girl in the middle of the street wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a violet dress"}]
00000011488.jpg
11488_evjvqa
[{"from": "human", "value": "what color coat is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a white coat"}]
00000011489.jpg
11489_evjvqa
[{"from": "human", "value": "how many people are standing in the fashion store?\n<image>"}, {"from": "gpt", "value": "there are two people standing in a fashion store"}]
00000011492.jpg
11492_evjvqa
[{"from": "human", "value": "what color is the bag the woman is holding?\n<image>"}, {"from": "gpt", "value": "the woman is holding a red bag"}]
00000011493.jpg
11493_evjvqa
[{"from": "human", "value": "what color are the girls standing near the glass cockpit wearing?\n<image>"}, {"from": "gpt", "value": "the girls are wearing white clothes"}]
00000011494.jpg
11494_evjvqa
[{"from": "human", "value": "how many gift boxes are there in front of the two children?\n<image>"}, {"from": "gpt", "value": "there are three gift boxes in front of the two children"}]
00000011495.jpg
11495_evjvqa
[{"from": "human", "value": "what color is the flag background?\n<image>"}, {"from": "gpt", "value": "the flag background is red"}]
00000011498.jpg
11498_evjvqa
[{"from": "human", "value": "what color is the boy sitting in the cart wearing?\n<image>"}, {"from": "gpt", "value": "the boy is wearing a red shirt"}]
00000011499.jpg
11499_evjvqa
[{"from": "human", "value": "how many billboards are there?\n<image>"}, {"from": "gpt", "value": "there are four billboards"}]
00000011505.jpg
11505_evjvqa
[{"from": "human", "value": "where is the guy standing?\n<image>"}, {"from": "gpt", "value": "in front of a big church"}]
00000011508.jpg
11508_evjvqa
[{"from": "human", "value": "how many taxis are going into this alley?\n<image>"}, {"from": "gpt", "value": "one taxi"}]
00000011509.jpg
11509_evjvqa
[{"from": "human", "value": "where is the boy lying?\n<image>"}, {"from": "gpt", "value": "on a canoe in the middle of a lotus lake"}]
00000011510.jpg
11510_evjvqa
[{"from": "human", "value": "what products do these stalls sell?\n<image>"}, {"from": "gpt", "value": "fruits and vegetables of all kinds"}]
00000011511.jpg
11511_evjvqa
[{"from": "human", "value": "where is this lady sitting?\n<image>"}, {"from": "gpt", "value": "at a spot facing the sea"}]
00000011512.jpg
11512_evjvqa
[{"from": "human", "value": "what are these groups of people gathering for?\n<image>"}, {"from": "gpt", "value": "these groups of people are having dinner"}]
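Each record above is an (image, id, conversations) triple, where conversations is a JSON list of alternating human/gpt turns and the human turn embeds an "<image>" token marking where the picture is spliced in. Below is a minimal parsing sketch, assuming the rows (with the header line stripped) are saved to a hypothetical plain-text file `evjvqa_rows.txt`; the published dataset may instead ship as Parquet or JSONL, in which case a standard loader applies.

```python
import json

def parse_rows(path):
    # Parse three-line records: image filename, record id, then a JSON
    # list of conversation turns. The filename and plain-text layout are
    # assumptions for illustration, not the dataset's official format.
    with open(path, encoding="utf-8") as f:
        lines = [ln.strip() for ln in f if ln.strip()]
    rows = []
    for i in range(0, len(lines), 3):
        image, rec_id, conv = lines[i], lines[i + 1], lines[i + 2]
        rows.append({
            "image": image,                     # e.g. "00000009709.jpg"
            "id": rec_id,                       # e.g. "9709_evjvqa"
            "conversations": json.loads(conv),  # [{"from": ..., "value": ...}, ...]
        })
    return rows

rows = parse_rows("evjvqa_rows.txt")
# The human turn carries the question plus the "<image>" placeholder;
# the gpt turn carries the answer string.
print(rows[0]["id"], rows[0]["conversations"][1]["value"])
```

The three-lines-per-record assumption holds for the listing shown here; a stray blank or wrapped line would shift the frame, so a production loader should validate that the second field matches the `<digits>_evjvqa` id pattern before trusting a record.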