Columns: image (string, 15 chars), id (string, 8-12 chars), conversations (string, 79-268 chars)
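Each record below spans three lines: the image filename, the record id, and a JSON list of conversation turns (a human question containing the `<image>` placeholder, followed by a gpt answer). A minimal parsing sketch, assuming the records are saved to a plain-text file with the schema line above stripped; the path `records.txt` and the helper name `parse_records` are hypothetical:

```python
import json

def parse_records(path):
    """Parse the dump below: each record spans three consecutive lines
    (image filename, record id, conversations JSON). The path is
    hypothetical and assumes the schema line above has been stripped."""
    with open(path, encoding="utf-8") as f:
        lines = [ln.strip() for ln in f if ln.strip()]
    records = []
    for i in range(0, len(lines), 3):
        records.append({
            "image": lines[i],
            "id": lines[i + 1],
            # a list of {"from": ..., "value": ...} turns
            "conversations": json.loads(lines[i + 2]),
        })
    return records

# Usage: print the first question/answer pair.
for rec in parse_records("records.txt")[:1]:
    question = rec["conversations"][0]["value"].replace("\n<image>", "")
    print(rec["image"], "|", question, "->", rec["conversations"][1]["value"])
```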
00000000625.jpg
625_evjvqa
[{"from": "human", "value": "what are the workers doing?\n<image>"}, {"from": "gpt", "value": "preparing the food"}]
00000000627.jpg
627_evjvqa
[{"from": "human", "value": "what is the woman in pink t-shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing fruit"}]
00000000627.jpg
627_evjvqa
[{"from": "human", "value": "how many people are choosing fruit?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000634.jpg
634_evjvqa
[{"from": "human", "value": "where is the man standing next to the woman in black shirt at?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000000634.jpg
634_evjvqa
[{"from": "human", "value": "where is the man standing next to the woman in black shirt at?\n<image>"}, {"from": "gpt", "value": "at the fruit aisle"}]
00000000634.jpg
634_evjvqa
[{"from": "human", "value": "which aisle is the man standing next to the woman in black shirt at?\n<image>"}, {"from": "gpt", "value": "at the fruit aisle"}]
00000000634.jpg
634_evjvqa
[{"from": "human", "value": "what is the woman in black t-shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing a box of fruit"}]
00000000634.jpg
634_evjvqa
[{"from": "human", "value": "what is the man standing next to the woman in black t-shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing a box of fruit"}]
00000000636.jpg
636_evjvqa
[{"from": "human", "value": "how many people are the on the first basket?\n<image>"}, {"from": "gpt", "value": "three people"}]
00000000636.jpg
636_evjvqa
[{"from": "human", "value": "what hat is the driver of the first basket wearing?\n<image>"}, {"from": "gpt", "value": "conical hat"}]
00000000637.jpg
637_evjvqa
[{"from": "human", "value": "what is the shop owner doing?\n<image>"}, {"from": "gpt", "value": "packaging gift"}]
00000000637.jpg
637_evjvqa
[{"from": "human", "value": "how many people are there sitting in the shop?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000637.jpg
637_evjvqa
[{"from": "human", "value": "how many people are buying decorations?\n<image>"}, {"from": "gpt", "value": "no one"}]
00000000637.jpg
637_evjvqa
[{"from": "human", "value": "where is the old lady sitting?\n<image>"}, {"from": "gpt", "value": "in a decoration shop"}]
00000000637.jpg
637_evjvqa
[{"from": "human", "value": "what is the old lady's shop doing?\n<image>"}, {"from": "gpt", "value": "tet decorations"}]
00000000639.jpg
639_evjvqa
[{"from": "human", "value": "where is this scene taken?\n<image>"}, {"from": "gpt", "value": "in an alley of the night market"}]
00000000640.jpg
640_evjvqa
[{"from": "human", "value": "where is this scene taken?\n<image>"}, {"from": "gpt", "value": "in a corner of a market"}]
00000000641.jpg
641_evjvqa
[{"from": "human", "value": "where is the man wearing short standing?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000000641.jpg
641_evjvqa
[{"from": "human", "value": "where is the man wearing short standing?\n<image>"}, {"from": "gpt", "value": "at the fresh food aisle"}]
00000000641.jpg
641_evjvqa
[{"from": "human", "value": "what is the man wearing short doing?\n<image>"}, {"from": "gpt", "value": "choosing fresh food"}]
00000000641.jpg
641_evjvqa
[{"from": "human", "value": "how many shop assistants are standing near the man?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000641.jpg
641_evjvqa
[{"from": "human", "value": "what is the shop assistant standing next to the man doing?\n<image>"}, {"from": "gpt", "value": "rearranging the goods"}]
00000000644.jpg
644_evjvqa
[{"from": "human", "value": "how many people are coming to the store?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000646.jpg
646_evjvqa
[{"from": "human", "value": "what are these two people doing?\n<image>"}, {"from": "gpt", "value": "blowing bubbles"}]
00000000646.jpg
646_evjvqa
[{"from": "human", "value": "where are these two people standing?\n<image>"}, {"from": "gpt", "value": "on an avenue"}]
00000000647.jpg
647_evjvqa
[{"from": "human", "value": "where is this scene taken?\n<image>"}, {"from": "gpt", "value": "in front of a palace"}]
00000000647.jpg
647_evjvqa
[{"from": "human", "value": "what is built in front of the palace?\n<image>"}, {"from": "gpt", "value": "a lake"}]
00000000648.jpg
648_evjvqa
[{"from": "human", "value": "what is the woman with the conical hat doing?\n<image>"}, {"from": "gpt", "value": "rowing"}]
00000000648.jpg
648_evjvqa
[{"from": "human", "value": "where is the woman with the conical hat standing?\n<image>"}, {"from": "gpt", "value": "standing on a small boat"}]
00000000648.jpg
648_evjvqa
[{"from": "human", "value": "what vehicle does the woman move on?\n<image>"}, {"from": "gpt", "value": "move by boat"}]
00000000648.jpg
648_evjvqa
[{"from": "human", "value": "how many boats are there moving on the water?\n<image>"}, {"from": "gpt", "value": "there is one boat moving on the water"}]
00000000650.jpg
650_evjvqa
[{"from": "human", "value": "what is the man in blue shirt doing?\n<image>"}, {"from": "gpt", "value": "paying"}]
00000000650.jpg
650_evjvqa
[{"from": "human", "value": "how many people are there at the shop?\n<image>"}, {"from": "gpt", "value": "four people"}]
00000000650.jpg
650_evjvqa
[{"from": "human", "value": "how many people are carrying the bag?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000659.jpg
659_evjvqa
[{"from": "human", "value": "how many people are walking on the street?\n<image>"}, {"from": "gpt", "value": "there is one woman walking on the street"}]
00000000659.jpg
659_evjvqa
[{"from": "human", "value": "where is the woman walking watched from?\n<image>"}, {"from": "gpt", "value": "from a narrow alley"}]
00000000660.jpg
660_evjvqa
[{"from": "human", "value": "where is this scene taken?\n<image>"}, {"from": "gpt", "value": "at a cable car system"}]
00000000668.jpg
668_evjvqa
[{"from": "human", "value": "what is this picture drawing about?\n<image>"}, {"from": "gpt", "value": "drawing about the picture of the houses in an avenue"}]
00000000669.jpg
669_evjvqa
[{"from": "human", "value": "what are these shops selling?\n<image>"}, {"from": "gpt", "value": "clothes"}]
00000000669.jpg
669_evjvqa
[{"from": "human", "value": "which area are the buyers standing?\n<image>"}, {"from": "gpt", "value": "at the clothes selling area"}]
00000000670.jpg
670_evjvqa
[{"from": "human", "value": "where are those people coming from?\n<image>"}, {"from": "gpt", "value": "from the house made of wood"}]
00000000671.jpg
671_evjvqa
[{"from": "human", "value": "what is the girl hugging on her hand?\n<image>"}, {"from": "gpt", "value": "hugging a dog"}]
00000000671.jpg
671_evjvqa
[{"from": "human", "value": "what is behind the three standing people?\n<image>"}, {"from": "gpt", "value": "a cage of class decorated with three christmas trees"}]
00000000671.jpg
671_evjvqa
[{"from": "human", "value": "who is the girl hugging the dog haging out on christmas with?\n<image>"}, {"from": "gpt", "value": "a man and a woman"}]
00000000671.jpg
671_evjvqa
[{"from": "human", "value": "what is decorated behind the three standing people?\n<image>"}, {"from": "gpt", "value": "three christmas trees and the christmas decorations"}]
00000000674.jpg
674_evjvqa
[{"from": "human", "value": "where is the conical hat put?\n<image>"}, {"from": "gpt", "value": "on a branch of the tree that are next to the girl"}]
00000000674.jpg
674_evjvqa
[{"from": "human", "value": "where are the man and the woman sitting at?\n<image>"}, {"from": "gpt", "value": "on a branch of the tree"}]
00000000675.jpg
675_evjvqa
[{"from": "human", "value": "what is mostly sold at the left side aisle?\n<image>"}, {"from": "gpt", "value": "spring roll, meat loaf, cakes, sugar-coated fruit"}]
00000000675.jpg
675_evjvqa
[{"from": "human", "value": "how many people are coming at the right aisle?\n<image>"}, {"from": "gpt", "value": "no one is coming"}]
00000000675.jpg
675_evjvqa
[{"from": "human", "value": "what are the shop owners of the left side aisle doing?\n<image>"}, {"from": "gpt", "value": "using the phones"}]
00000000677.jpg
677_evjvqa
[{"from": "human", "value": "what is the rightmost aisle selling?\n<image>"}, {"from": "gpt", "value": "cakes and candies"}]
00000000677.jpg
677_evjvqa
[{"from": "human", "value": "what is the aisle that is next to the cakes and candies aisle selling?\n<image>"}, {"from": "gpt", "value": "shower gel, fabric softener, washing powder"}]
00000000677.jpg
677_evjvqa
[{"from": "human", "value": "how many people are choosing fabric softener and washing powder?\n<image>"}, {"from": "gpt", "value": "no one is choosing"}]
00000000678.jpg
678_evjvqa
[{"from": "human", "value": "what is the man in green shirt doing?\n<image>"}, {"from": "gpt", "value": "checking the goods"}]
00000000678.jpg
678_evjvqa
[{"from": "human", "value": "where is the man in green shirt standing at?\n<image>"}, {"from": "gpt", "value": "standing at a clothes store"}]
00000000680.jpg
680_evjvqa
[{"from": "human", "value": "how many people are sleeping?\n<image>"}, {"from": "gpt", "value": "one person is sleeping"}]
00000000680.jpg
680_evjvqa
[{"from": "human", "value": "where is the woman laying at?\n<image>"}, {"from": "gpt", "value": "on a rollaway bed next to the wall"}]
00000000681.jpg
681_evjvqa
[{"from": "human", "value": "what is the shop owner doing?\n<image>"}, {"from": "gpt", "value": "using the phone"}]
00000000681.jpg
681_evjvqa
[{"from": "human", "value": "how many people are eating at the shop?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000681.jpg
681_evjvqa
[{"from": "human", "value": "what is the man opposite to the girl doing?\n<image>"}, {"from": "gpt", "value": "eating"}]
00000000681.jpg
681_evjvqa
[{"from": "human", "value": "what is the woman opposite to the man doing?\n<image>"}, {"from": "gpt", "value": "eating"}]
00000000683.jpg
683_evjvqa
[{"from": "human", "value": "how many touring car are there on the street?\n<image>"}, {"from": "gpt", "value": "two cars"}]
00000000683.jpg
683_evjvqa
[{"from": "human", "value": "where are the tourist on this car visiting?\n<image>"}, {"from": "gpt", "value": "ha noi"}]
00000000684.jpg
684_evjvqa
[{"from": "human", "value": "how many people are coming to the store?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000685.jpg
685_evjvqa
[{"from": "human", "value": "what costume are the cashiers wearing?\n<image>"}, {"from": "gpt", "value": "orange ao dai"}]
00000000685.jpg
685_evjvqa
[{"from": "human", "value": "what is the middle store selling?\n<image>"}, {"from": "gpt", "value": "teddy bear"}]
00000000685.jpg
685_evjvqa
[{"from": "human", "value": "how many cashiers are there at the checkout?\n<image>"}, {"from": "gpt", "value": "two cashiers"}]
00000000687.jpg
687_evjvqa
[{"from": "human", "value": "where is the man walking with the girl in black t-shirt standing?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]
00000000687.jpg
687_evjvqa
[{"from": "human", "value": "where is the man walking with the girl in black t-shirt standing?\n<image>"}, {"from": "gpt", "value": "at the fruit aisle"}]
00000000687.jpg
687_evjvqa
[{"from": "human", "value": "which area is the man walking with the girl in black t-shirt standing at?\n<image>"}, {"from": "gpt", "value": "at the fruit area"}]
00000000687.jpg
687_evjvqa
[{"from": "human", "value": "how many people are choosing fruit?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000687.jpg
687_evjvqa
[{"from": "human", "value": "how many people are going with the man in black t-shirt?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000000687.jpg
687_evjvqa
[{"from": "human", "value": "who does the girl in black t-shirt going to the supermarket with?\n<image>"}, {"from": "gpt", "value": "the man in black t-shirt"}]
00000000691.jpg
691_evjvqa
[{"from": "human", "value": "what is hung above the abenues?\n<image>"}, {"from": "gpt", "value": "the lanterns"}]
00000000691.jpg
691_evjvqa
[{"from": "human", "value": "how many cars are moving on the street?\n<image>"}, {"from": "gpt", "value": "there is one car moving on the street"}]
00000000691.jpg
691_evjvqa
[{"from": "human", "value": "where is the car moving on?\n<image>"}, {"from": "gpt", "value": "on a street full of pedestrians"}]
00000000694.jpg
694_evjvqa
[{"from": "human", "value": "what is the woman with the plastic bag doing?\n<image>"}, {"from": "gpt", "value": "choosing fruit"}]
00000000694.jpg
694_evjvqa
[{"from": "human", "value": "how many people are there next to the woman with the plastic bag?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000694.jpg
694_evjvqa
[{"from": "human", "value": "what is the man in the black jacket doing?\n<image>"}, {"from": "gpt", "value": "choosing fruit"}]
00000000694.jpg
694_evjvqa
[{"from": "human", "value": "how many people are standing at the fruit aisle?\n<image>"}, {"from": "gpt", "value": "three people"}]
00000000699.jpg
699_evjvqa
[{"from": "human", "value": "where are these fout people at?\n<image>"}, {"from": "gpt", "value": "at an amusement park"}]
00000000699.jpg
699_evjvqa
[{"from": "human", "value": "where are these four youngster playing at?\n<image>"}, {"from": "gpt", "value": "at an amusement park"}]
00000000699.jpg
699_evjvqa
[{"from": "human", "value": "how many people are there with the girl in green t-shirt?\n<image>"}, {"from": "gpt", "value": "the girl in green t-shirt goes with three people"}]
00000000699.jpg
699_evjvqa
[{"from": "human", "value": "how many people are walking the boy in blue t-shirt?\n<image>"}, {"from": "gpt", "value": "the boy in blue t-shirt goes with three people"}]
00000002000.jpg
2000_evjvqa
[{"from": "human", "value": "what products does this counter sell?\n<image>"}, {"from": "gpt", "value": "agricultural products"}]
00000002000.jpg
2000_evjvqa
[{"from": "human", "value": "how many people are standing inside the supermarket?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000002000.jpg
2000_evjvqa
[{"from": "human", "value": "how many people are shopping in the store?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000002000.jpg
2000_evjvqa
[{"from": "human", "value": "how many people are buying at the vegetable counter?\n<image>"}, {"from": "gpt", "value": "no one is buying"}]
00000002003.jpg
2003_evjvqa
[{"from": "human", "value": "what vehicle do the girls wearing conical hats use?\n<image>"}, {"from": "gpt", "value": "bicycle"}]
00000002004.jpg
2004_evjvqa
[{"from": "human", "value": "what is the young man doing?\n<image>"}, {"from": "gpt", "value": "taking pictures"}]
00000002004.jpg
2004_evjvqa
[{"from": "human", "value": "who is the young man taking pictures for?\n<image>"}, {"from": "gpt", "value": "for the girl in white standing in front of"}]
00000002004.jpg
2004_evjvqa
[{"from": "human", "value": "what color shoes are both of these two wearing?\n<image>"}, {"from": "gpt", "value": "white"}]
00000002011.jpg
2011_evjvqa
[{"from": "human", "value": "how many gift boxes are there under the christmas tree?\n<image>"}, {"from": "gpt", "value": "four boxes"}]
00000002011.jpg
2011_evjvqa
[{"from": "human", "value": "where is the christmas tree placed?\n<image>"}, {"from": "gpt", "value": "between the chair and the reception counter"}]
00000002011.jpg
2011_evjvqa
[{"from": "human", "value": "how many people are sitting on the chair?\n<image>"}, {"from": "gpt", "value": "no one is sitting on the chair"}]
00000002014.jpg
2014_evjvqa
[{"from": "human", "value": "where is the woman in the green shirt pointing?\n<image>"}, {"from": "gpt", "value": "dinosaur skeleton"}]
00000002014.jpg
2014_evjvqa
[{"from": "human", "value": "where is this group of people visiting?\n<image>"}, {"from": "gpt", "value": "museum"}]
00000002014.jpg
2014_evjvqa
[{"from": "human", "value": "which area is this group of people standing at?\n<image>"}, {"from": "gpt", "value": "exhibition area of dinosaur artifacts"}]
00000002014.jpg
2014_evjvqa
[{"from": "human", "value": "what artifacts does this room mainly display?\n<image>"}, {"from": "gpt", "value": "fossils, dinosaur skeleton"}]
00000002017.jpg
2017_evjvqa
[{"from": "human", "value": "which counter is the woman standing at?\n<image>"}, {"from": "gpt", "value": "vegetable counter"}]