Columns: image (string, length 15), id (string, length 8-12), conversations (string, length 79-268)
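Each row pairs an image file name and a sample id with a conversations field holding a JSON-encoded list of {"from", "value"} turns, where the <image> token marks where the picture is injected into the question. Below is a minimal sketch of how one such row could be decoded, assuming the rows are available as plain dicts with these three columns (the parse_row helper and the example dict are illustrative, not part of the dataset itself):

import json

def parse_row(row):
    # row is assumed to be a dict with the three columns above:
    # 'image' (file name), 'id' (sample id), 'conversations' (JSON string)
    turns = json.loads(row["conversations"])
    question = next(t["value"] for t in turns if t["from"] == "human")
    answer = next(t["value"] for t in turns if t["from"] == "gpt")
    # drop the <image> placeholder and surrounding whitespace from the question
    question = question.replace("<image>", "").strip()
    return row["image"], row["id"], question, answer

# example usage with one row copied from the preview below
example = {
    "image": "00000004211.jpg",
    "id": "4211_evjvqa",
    "conversations": '[{"from": "human", "value": "what items do the stalls on the left side sell?\\n<image>"}, {"from": "gpt", "value": "fruits and vegetables"}]',
}
print(parse_row(example))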
00000004211.jpg
4211_evjvqa
[{"from": "human", "value": "what items do the stalls on the left side sell?\n<image>"}, {"from": "gpt", "value": "fruits and vegetables"}]
00000004213.jpg
4213_evjvqa
[{"from": "human", "value": "which vehicle is the girl using?\n<image>"}, {"from": "gpt", "value": "the bicycle"}]
00000004216.jpg
4216_evjvqa
[{"from": "human", "value": "how many cars are running in the alley?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000004216.jpg
4216_evjvqa
[{"from": "human", "value": "where is the car running?\n<image>"}, {"from": "gpt", "value": "in the alley"}]
00000004216.jpg
4216_evjvqa
[{"from": "human", "value": "what color are the houses in this alley mainly painted?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000004222.jpg
4222_evjvqa
[{"from": "human", "value": "what is the woman in yellow doing?\n<image>"}, {"from": "gpt", "value": "she is sewing clothes"}]
00000004222.jpg
4222_evjvqa
[{"from": "human", "value": "where is the woman in yellow sewing?\n<image>"}, {"from": "gpt", "value": "in the clothes store"}]
00000004222.jpg
4222_evjvqa
[{"from": "human", "value": "what do the old woman come here for?\n<image>"}, {"from": "gpt", "value": "to wait the woman in yellow sewing clothes"}]
00000004222.jpg
4222_evjvqa
[{"from": "human", "value": "what items is the store sold?\n<image>"}, {"from": "gpt", "value": "clothes"}]
00000004222.jpg
4222_evjvqa
[{"from": "human", "value": "how many people are standing behind the woman sewing?\n<image>"}, {"from": "gpt", "value": "two"}]
00000004224.jpg
4224_evjvqa
[{"from": "human", "value": "what do the stall in the innermost sell?\n<image>"}, {"from": "gpt", "value": "types of milk"}]
00000004225.jpg
4225_evjvqa
[{"from": "human", "value": "what is the girl standing for?\n<image>"}, {"from": "gpt", "value": "to read the newspapers taped on the bulletin board"}]
00000004227.jpg
4227_evjvqa
[{"from": "human", "value": "where are these trees falling?\n<image>"}, {"from": "gpt", "value": "into the house on the street corner"}]
00000004227.jpg
4227_evjvqa
[{"from": "human", "value": "what happened with the house on the corner?\n<image>"}, {"from": "gpt", "value": "it is covered by the falling tree"}]
00000004227.jpg
4227_evjvqa
[{"from": "human", "value": "how many people are standing watching the house being crushed by the tree?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000004230.jpg
4230_evjvqa
[{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "she is rearranging fruit on the shelf"}]
00000004230.jpg
4230_evjvqa
[{"from": "human", "value": "where is the girl standing?\n<image>"}, {"from": "gpt", "value": "at the fruit store in the supermarket"}]
00000004233.jpg
4233_evjvqa
[{"from": "human", "value": "where are these two young people sitting?\n<image>"}, {"from": "gpt", "value": "on the train carriage"}]
00000004233.jpg
4233_evjvqa
[{"from": "human", "value": "where is the man in the white shirt sitting?\n<image>"}, {"from": "gpt", "value": "on a train carriage"}]
00000004233.jpg
4233_evjvqa
[{"from": "human", "value": "who is the girl sitting with?\n<image>"}, {"from": "gpt", "value": "with the boy in white shirt"}]
00000004236.jpg
4236_evjvqa
[{"from": "human", "value": "where is the man sitting?\n<image>"}, {"from": "gpt", "value": "in the room"}]
00000004237.jpg
4237_evjvqa
[{"from": "human", "value": "how many children are in this family?\n<image>"}, {"from": "gpt", "value": "two"}]
00000004237.jpg
4237_evjvqa
[{"from": "human", "value": "how many men are in this family?\n<image>"}, {"from": "gpt", "value": "two"}]
00000004237.jpg
4237_evjvqa
[{"from": "human", "value": "how many women are in this family?\n<image>"}, {"from": "gpt", "value": "two"}]
00000004238.jpg
4238_evjvqa
[{"from": "human", "value": "what does the ship carry?\n<image>"}, {"from": "gpt", "value": "the containers"}]
00000004238.jpg
4238_evjvqa
[{"from": "human", "value": "what is the ship used for?\n<image>"}, {"from": "gpt", "value": "to transport the containers"}]
00000004238.jpg
4238_evjvqa
[{"from": "human", "value": "how are the containers shipped?\n<image>"}, {"from": "gpt", "value": "by seaway"}]
00000004239.jpg
4239_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "he is driving to deliver"}]
00000004244.jpg
4244_evjvqa
[{"from": "human", "value": "what products do these stalls sell?\n<image>"}, {"from": "gpt", "value": "clothes"}]
00000004245.jpg
4245_evjvqa
[{"from": "human", "value": "what is this trolley carrying?\n<image>"}, {"from": "gpt", "value": "the presents"}]
00000004248.jpg
4248_evjvqa
[{"from": "human", "value": "how many cameras are being used?\n<image>"}, {"from": "gpt", "value": "two"}]
00000004248.jpg
4248_evjvqa
[{"from": "human", "value": "how many men are holding the pens?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000004248.jpg
4248_evjvqa
[{"from": "human", "value": "what scene is the camera in the right corner recording?\n<image>"}, {"from": "gpt", "value": "it is filming the scene of the man in the black shirt standing next to the man in the blue shirt"}]
00000004249.jpg
4249_evjvqa
[{"from": "human", "value": "what do people build on both sides of the road?\n<image>"}, {"from": "gpt", "value": "the stalls"}]
00000004249.jpg
4249_evjvqa
[{"from": "human", "value": "where are these stalls set up?\n<image>"}, {"from": "gpt", "value": "in the fairground"}]
00000004249.jpg
4249_evjvqa
[{"from": "human", "value": "where are these people going to visit?\n<image>"}, {"from": "gpt", "value": "in the fairground"}]
00000004250.jpg
4250_evjvqa
[{"from": "human", "value": "what means of transport do people use on the street?\n<image>"}, {"from": "gpt", "value": "bicycles, motorbikes and cyclos"}]
00000004250.jpg
4250_evjvqa
[{"from": "human", "value": "how many cyclos are carrying passengers?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000004250.jpg
4250_evjvqa
[{"from": "human", "value": "how many cyclos are running on the road?\n<image>"}, {"from": "gpt", "value": "two"}]
00000004253.jpg
4253_evjvqa
[{"from": "human", "value": "where are these two women standing?\n<image>"}, {"from": "gpt", "value": "near the vegetable stall"}]
00000004253.jpg
4253_evjvqa
[{"from": "human", "value": "what is the woman closest to the vegetable stand holding in her hand?\n<image>"}, {"from": "gpt", "value": "two bell peppers"}]
00000004253.jpg
4253_evjvqa
[{"from": "human", "value": "what is the woman holding the orange notebook doing?\n<image>"}, {"from": "gpt", "value": "she is discussing with the woman standing next to her"}]
00000004253.jpg
4253_evjvqa
[{"from": "human", "value": "wheat are these two women doingg?\n<image>"}, {"from": "gpt", "value": "they are discussing on two bell peppers"}]
00000004256.jpg
4256_evjvqa
[{"from": "human", "value": "who is the girl in pink standing with?\n<image>"}, {"from": "gpt", "value": "with the woman in pink"}]
00000004257.jpg
4257_evjvqa
[{"from": "human", "value": "what items does this store sell?\n<image>"}, {"from": "gpt", "value": "types of bicycle"}]
00000004258.jpg
4258_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "on the steps of a lobby in front of a large building"}]
00000004260.jpg
4260_evjvqa
[{"from": "human", "value": "what are these two women selling?\n<image>"}, {"from": "gpt", "value": "dry food"}]
00000004260.jpg
4260_evjvqa
[{"from": "human", "value": "where are the stalls on both sides of the road set up?\n<image>"}, {"from": "gpt", "value": "in the market"}]
00000004260.jpg
4260_evjvqa
[{"from": "human", "value": "where do these people sit?\n<image>"}, {"from": "gpt", "value": "at the stall in the market"}]
00000004263.jpg
4263_evjvqa
[{"from": "human", "value": "what are these boats used for?\n<image>"}, {"from": "gpt", "value": "to transport agricultural products"}]
00000004263.jpg
4263_evjvqa
[{"from": "human", "value": "how many boats do not carry any goods?\n<image>"}, {"from": "gpt", "value": "two"}]
00000004263.jpg
4263_evjvqa
[{"from": "human", "value": "what means of transport do these people use to transport agricultural gproducts?\n<image>"}, {"from": "gpt", "value": "boats"}]
00000004264.jpg
4264_evjvqa
[{"from": "human", "value": "where are the children visiting?\n<image>"}, {"from": "gpt", "value": "the museum"}]
00000004264.jpg
4264_evjvqa
[{"from": "human", "value": "where are the children standing?\n<image>"}, {"from": "gpt", "value": "in the museum"}]
00000004264.jpg
4264_evjvqa
[{"from": "human", "value": "where are these two skeletons displayed?\n<image>"}, {"from": "gpt", "value": "at the room i nthe museum"}]
00000004264.jpg
4264_evjvqa
[{"from": "human", "value": "what artifacts does this room display?\n<image>"}, {"from": "gpt", "value": "skeletons and arts"}]
00000004265.jpg
4265_evjvqa
[{"from": "human", "value": "what job does this man do?\n<image>"}, {"from": "gpt", "value": "he is selling lottery tickets"}]
00000004265.jpg
4265_evjvqa
[{"from": "human", "value": "what is this man holding in his hand?\n<image>"}, {"from": "gpt", "value": "the tray of lottery tickets"}]
00000004265.jpg
4265_evjvqa
[{"from": "human", "value": "what is the store behind the man selling lottery tickets?\n<image>"}, {"from": "gpt", "value": "the grocery store"}]
00000004266.jpg
4266_evjvqa
[{"from": "human", "value": "what does this picture depict?\n<image>"}, {"from": "gpt", "value": "this picture shows a person sitting in a car"}]
00000004268.jpg
4268_evjvqa
[{"from": "human", "value": "what are the women doing?\n<image>"}, {"from": "gpt", "value": "they are dancing"}]
00000004273.jpg
4273_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "he is wearing virtual reality headset"}]
00000004273.jpg
4273_evjvqa
[{"from": "human", "value": "what is this man wearing in front of his face?\n<image>"}, {"from": "gpt", "value": "he is wearing virtual reality headset"}]
00000004273.jpg
4273_evjvqa
[{"from": "human", "value": "where is the man going?\n<image>"}, {"from": "gpt", "value": "he is going to the supermarket"}]
00000004284.jpg
4284_evjvqa
[{"from": "human", "value": "what are these people lining up for?\n<image>"}, {"from": "gpt", "value": "to wait to buy meat"}]
00000004284.jpg
4284_evjvqa
[{"from": "human", "value": "where are these people lining up for?\n<image>"}, {"from": "gpt", "value": "in front of the raw meat "}]
00000004284.jpg
4284_evjvqa
[{"from": "human", "value": "what are these three people standing in line going to buy?\n<image>"}, {"from": "gpt", "value": "raw meat"}]
00000004284.jpg
4284_evjvqa
[{"from": "human", "value": "how many people are buying the raw meat?\n<image>"}, {"from": "gpt", "value": "three"}]
00000004285.jpg
4285_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "beside the river"}]
00000004285.jpg
4285_evjvqa
[{"from": "human", "value": "what is the decoration on the bicycle behind the girl?\n<image>"}, {"from": "gpt", "value": "with the flowers"}]
00000004289.jpg
4289_evjvqa
[{"from": "human", "value": "what do people decorate on the shelves?\n<image>"}, {"from": "gpt", "value": "peach blossom tree"}]
00000004289.jpg
4289_evjvqa
[{"from": "human", "value": "where are people standing?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000004289.jpg
4289_evjvqa
[{"from": "human", "value": "what does the rightmost shelf sell?\n<image>"}, {"from": "gpt", "value": "types of cake"}]
00000004291.jpg
4291_evjvqa
[{"from": "human", "value": "how many cyclos are running on the street?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000004291.jpg
4291_evjvqa
[{"from": "human", "value": "how many bicycles are running on the street?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000004292.jpg
4292_evjvqa
[{"from": "human", "value": "how many people are in the ditch?\n<image>"}, {"from": "gpt", "value": "three men"}]
00000004292.jpg
4292_evjvqa
[{"from": "human", "value": "what are the three men wading in the ditch for?\n<image>"}, {"from": "gpt", "value": "to catch the fish"}]
00000004292.jpg
4292_evjvqa
[{"from": "human", "value": "what do people gather on either side of the ditch?\n<image>"}, {"from": "gpt", "value": "to watch the three men catching the fish"}]
00000004294.jpg
4294_evjvqa
[{"from": "human", "value": "who is the girl standing with?\n<image>"}, {"from": "gpt", "value": "with the boy"}]
00000000600.jpg
600_evjvqa
[{"from": "human", "value": "how many people buying goods are there at the supermarket?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000600.jpg
600_evjvqa
[{"from": "human", "value": "how many people are walking along the right sidewalk?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000603.jpg
603_evjvqa
[{"from": "human", "value": "where are two girls standing?\n<image>"}, {"from": "gpt", "value": "in a garden"}]
00000000603.jpg
603_evjvqa
[{"from": "human", "value": "how many people wearing hat are there in the garden?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000603.jpg
603_evjvqa
[{"from": "human", "value": "what color is the shirt of the girl holding the hat?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000000603.jpg
603_evjvqa
[{"from": "human", "value": "what clothes are the two girls walking in the garden wearing?\n<image>"}, {"from": "gpt", "value": "south vietnamese pajamas"}]
00000000604.jpg
604_evjvqa
[{"from": "human", "value": "how many statues are put in the room?\n<image>"}, {"from": "gpt", "value": "one statue"}]
00000000604.jpg
604_evjvqa
[{"from": "human", "value": "how many people are sitting in the room?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000604.jpg
604_evjvqa
[{"from": "human", "value": "what costume is the girl sitting in front of the man wearing?\n<image>"}, {"from": "gpt", "value": "red ao dai"}]
00000000604.jpg
604_evjvqa
[{"from": "human", "value": "what shirt is the man sitting in front of the lady wearing?\n<image>"}, {"from": "gpt", "value": "white shirt"}]
00000000605.jpg
605_evjvqa
[{"from": "human", "value": "how many air conditioner are installed in the room?\n<image>"}, {"from": "gpt", "value": "two air conditioners"}]
00000000605.jpg
605_evjvqa
[{"from": "human", "value": "where are the staff standings?\n<image>"}, {"from": "gpt", "value": "coffee shop"}]
00000000605.jpg
605_evjvqa
[{"from": "human", "value": "what is the staff standing at the checkout doing?\n<image>"}, {"from": "gpt", "value": "cleaning"}]
00000000605.jpg
605_evjvqa
[{"from": "human", "value": "what is the staff standing at the aisle doing?\n<image>"}, {"from": "gpt", "value": "rearranging the aisle"}]
00000000611.jpg
611_evjvqa
[{"from": "human", "value": "how many pancakes are presented on the disks?\n<image>"}, {"from": "gpt", "value": "two pancakes"}]
00000000613.jpg
613_evjvqa
[{"from": "human", "value": "how many people are sitting on the basket in the middle of the river?\n<image>"}, {"from": "gpt", "value": "five people"}]
00000000613.jpg
613_evjvqa
[{"from": "human", "value": "what hats are the tourists sitting on the basket wearing?\n<image>"}, {"from": "gpt", "value": "conical hat"}]
00000000613.jpg
613_evjvqa
[{"from": "human", "value": "how many baskets are moving in the water?\n<image>"}, {"from": "gpt", "value": "four baskets"}]
00000000622.jpg
622_evjvqa
[{"from": "human", "value": "where is the lady standing at?\n<image>"}, {"from": "gpt", "value": "in a narrow alley"}]
00000000625.jpg
625_evjvqa
[{"from": "human", "value": "where are the workers working?\n<image>"}, {"from": "gpt", "value": "in a packing plant"}]
00000000625.jpg
625_evjvqa
[{"from": "human", "value": "what color are the clothes worn by the workers?\n<image>"}, {"from": "gpt", "value": "blue"}]