image (string, length 15)
id (string, length 8-12)
conversations (string, length 79-268)
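Each record below is a triple: the image filename, a record id, and a conversations list in chat format, where every turn carries a "from" field ("human" or "gpt") and a "value" field in which the <image> token marks where the image is spliced into the prompt. A minimal sketch of how such records might be read, assuming they are exported as JSON Lines with exactly these three fields (the path data.jsonl and the helper name load_records are hypothetical):

import json

# Minimal sketch: iterate (image, id, conversations) records from a
# hypothetical JSON Lines export of this table, one object per line.
def load_records(path="data.jsonl"):
    with open(path, encoding="utf-8") as f:
        for line in f:
            rec = json.loads(line)
            yield rec["image"], rec["id"], rec["conversations"]

# Usage: print every turn of every record, keeping the <image> placeholder.
for image_file, rec_id, turns in load_records():
    for turn in turns:
        print(f"{rec_id} [{turn['from']}]: {turn['value']}")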
00000003783.jpg
3783_evjvqa
[{"from": "human", "value": "where are the carrots?\n<image>"}, {"from": "gpt", "value": "the carrots are placed in the plastic bags and in th trolley"}]
00000003785.jpg
3785_evjvqa
[{"from": "human", "value": "what is exhibited on the wall?\n<image>"}, {"from": "gpt", "value": "there are drawings hung on the wall"}]
00000003785.jpg
3785_evjvqa
[{"from": "human", "value": "what are those people doing?\n<image>"}, {"from": "gpt", "value": "they are visiting the art museum"}]
00000003785.jpg
3785_evjvqa
[{"from": "human", "value": "what is this place?\n<image>"}, {"from": "gpt", "value": "this is an art museum"}]
00000003786.jpg
3786_evjvqa
[{"from": "human", "value": "what is the conditions of the left side market?\n<image>"}, {"from": "gpt", "value": "they are closed"}]
00000003786.jpg
3786_evjvqa
[{"from": "human", "value": "what are the conditions of the right side market?\n<image>"}, {"from": "gpt", "value": "they are opened"}]
00000003786.jpg
3786_evjvqa
[{"from": "human", "value": "which side of the market is being opened?\n<image>"}, {"from": "gpt", "value": "the right side of the road"}]
00000003790.jpg
3790_evjvqa
[{"from": "human", "value": "where is the voltage transformer?\n<image>"}, {"from": "gpt", "value": "it is placed above the store that have yellow advertisement"}]
00000003793.jpg
3793_evjvqa
[{"from": "human", "value": "what are those two man who are not wearing helmet doing?\n<image>"}, {"from": "gpt", "value": "they are giving out some informations"}]
00000003793.jpg
3793_evjvqa
[{"from": "human", "value": "how many people are wearing helmets?\n<image>"}, {"from": "gpt", "value": "there are three people wearing helmets"}]
00000003795.jpg
3795_evjvqa
[{"from": "human", "value": "what color is the shirt of the woman?\n<image>"}, {"from": "gpt", "value": "the woman is wearing purple shirt"}]
00000003795.jpg
3795_evjvqa
[{"from": "human", "value": "who is standing next to the man wearing blue shirt?\n<image>"}, {"from": "gpt", "value": "the woman wearing purple shirt"}]
00000003795.jpg
3795_evjvqa
[{"from": "human", "value": "who is standing on the left of the woman?\n<image>"}, {"from": "gpt", "value": "the man weaing blue shirt"}]
00000003797.jpg
3797_evjvqa
[{"from": "human", "value": "how many motorbikes are there in front of the convenience store?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes in front of the convenience store"}]
00000003797.jpg
3797_evjvqa
[{"from": "human", "value": "where is the woman walking out of?\n<image>"}, {"from": "gpt", "value": "the woman is walkin out of the convenience store"}]
00000003797.jpg
3797_evjvqa
[{"from": "human", "value": "what color is the person sitting in the convenience store?\n<image>"}, {"from": "gpt", "value": "the person sitting in the convenience store wears red t-shirt"}]
00000000703.jpg
703_evjvqa
[{"from": "human", "value": "what is the girl holding on her hand?\n<image>"}, {"from": "gpt", "value": "a confetti"}]
00000000705.jpg
705_evjvqa
[{"from": "human", "value": "what do customers mainly come to this market for?\n<image>"}, {"from": "gpt", "value": "buy agricultural product"}]
00000000705.jpg
705_evjvqa
[{"from": "human", "value": "what are those people doing?\n<image>"}, {"from": "gpt", "value": "joining an agricultural market"}]
00000000707.jpg
707_evjvqa
[{"from": "human", "value": "what is next to the wall which is in front of the girl?\n<image>"}, {"from": "gpt", "value": "a bicycle"}]
00000000707.jpg
707_evjvqa
[{"from": "human", "value": "what is hung under the tree?\n<image>"}, {"from": "gpt", "value": "the lanterns "}]
00000000707.jpg
707_evjvqa
[{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "white shirt, black skirt, white sneakers"}]
00000000707.jpg
707_evjvqa
[{"from": "human", "value": "what does the store in front of the girl sell ?\n<image>"}, {"from": "gpt", "value": "souvenir"}]
00000000707.jpg
707_evjvqa
[{"from": "human", "value": "what material was used to made the floor under the girl?\n<image>"}, {"from": "gpt", "value": "stone"}]
00000000712.jpg
712_evjvqa
[{"from": "human", "value": "where is the woman sitting compared to the two men?\n<image>"}, {"from": "gpt", "value": "middle"}]
00000000712.jpg
712_evjvqa
[{"from": "human", "value": "where is the man in blue shirt sitting at the delegates'table?\n<image>"}, {"from": "gpt", "value": "third from the left"}]
00000000712.jpg
712_evjvqa
[{"from": "human", "value": "how many teacup was prepared on the delegates' table?\n<image>"}, {"from": "gpt", "value": "four cups"}]
00000000713.jpg
713_evjvqa
[{"from": "human", "value": "how many people are walking on the street?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000713.jpg
713_evjvqa
[{"from": "human", "value": "who is walking with the man in white t-shirt?\n<image>"}, {"from": "gpt", "value": "the girl in white t-shirt with red striped"}]
00000000716.jpg
716_evjvqa
[{"from": "human", "value": "what is this aisle selling?\n<image>"}, {"from": "gpt", "value": "different kinds of vegetables"}]
00000000716.jpg
716_evjvqa
[{"from": "human", "value": "what are the people standing next to the aisle doing?\n<image>"}, {"from": "gpt", "value": "buying vegetables"}]
00000000716.jpg
716_evjvqa
[{"from": "human", "value": "what is the woman in white shirt with black flower doing?\n<image>"}, {"from": "gpt", "value": "choosing vegetables"}]
00000000721.jpg
721_evjvqa
[{"from": "human", "value": "what position is the tallest man standing starting on the left?\n<image>"}, {"from": "gpt", "value": "the fourth position"}]
00000000721.jpg
721_evjvqa
[{"from": "human", "value": "what position is the man with the scarf standing starting from the right?\n<image>"}, {"from": "gpt", "value": "the third position"}]
00000000723.jpg
723_evjvqa
[{"from": "human", "value": "is there anyone sitting on the chair?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000000723.jpg
723_evjvqa
[{"from": "human", "value": "which side of the alley was the chairs put?\n<image>"}, {"from": "gpt", "value": "right side"}]
00000000723.jpg
723_evjvqa
[{"from": "human", "value": "is there anyone walking in the alley?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000000726.jpg
726_evjvqa
[{"from": "human", "value": "what vehicle was parked on the left of the storage?\n<image>"}, {"from": "gpt", "value": "a mini truck"}]
00000000727.jpg
727_evjvqa
[{"from": "human", "value": "what is this girl holding on her hand?\n<image>"}, {"from": "gpt", "value": "a book"}]
00000000727.jpg
727_evjvqa
[{"from": "human", "value": "what is on the left side of the woman?\n<image>"}, {"from": "gpt", "value": "a bag"}]
00000000727.jpg
727_evjvqa
[{"from": "human", "value": "what color are the girl's sneakers?\n<image>"}, {"from": "gpt", "value": "white"}]
00000000729.jpg
729_evjvqa
[{"from": "human", "value": "what is the man in black t-shirt about to do?\n<image>"}, {"from": "gpt", "value": "going to the vote"}]
00000000729.jpg
729_evjvqa
[{"from": "human", "value": "how many people are wearing the medical suit?\n<image>"}, {"from": "gpt", "value": "four people"}]
00000000729.jpg
729_evjvqa
[{"from": "human", "value": "what is the man tending to put the vote to?\n<image>"}, {"from": "gpt", "value": "a red box"}]
00000000738.jpg
738_evjvqa
[{"from": "human", "value": "which position of the girl wearing white mask starting from the right?\n<image>"}, {"from": "gpt", "value": "the first one"}]
00000000738.jpg
738_evjvqa
[{"from": "human", "value": "what are the girls standing doing?\n<image>"}, {"from": "gpt", "value": "selling medicine"}]
00000000739.jpg
739_evjvqa
[{"from": "human", "value": "which car being cleaned up in the garage?\n<image>"}, {"from": "gpt", "value": "the black car"}]
00000000740.jpg
740_evjvqa
[{"from": "human", "value": "how many people sitting on the motorcycle are there?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000740.jpg
740_evjvqa
[{"from": "human", "value": "who is walking next to the cow?\n<image>"}, {"from": "gpt", "value": "two women in white ao dai"}]
00000000740.jpg
740_evjvqa
[{"from": "human", "value": "who is walking behind the cow?\n<image>"}, {"from": "gpt", "value": "the cowman"}]
00000000740.jpg
740_evjvqa
[{"from": "human", "value": "what is walking next to the girls in white ao dai?\n<image>"}, {"from": "gpt", "value": "the cow"}]
00000000740.jpg
740_evjvqa
[{"from": "human", "value": "what is the man with the conical hat doing on the road?\n<image>"}, {"from": "gpt", "value": "herding the cow"}]
00000000742.jpg
742_evjvqa
[{"from": "human", "value": "what color are the umbrella outside the store?\n<image>"}, {"from": "gpt", "value": "white and blue"}]
00000000742.jpg
742_evjvqa
[{"from": "human", "value": "who is sitting in this store?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000000742.jpg
742_evjvqa
[{"from": "human", "value": "how many stools are there in the store?\n<image>"}, {"from": "gpt", "value": "five stools"}]
00000000745.jpg
745_evjvqa
[{"from": "human", "value": "what products are put in the woman's trolley whose is wearing orange jackect?\n<image>"}, {"from": "gpt", "value": "fresh vegestable"}]
00000000748.jpg
748_evjvqa
[{"from": "human", "value": "what are the men sitting on the ground doing?\n<image>"}, {"from": "gpt", "value": "creating musical instruments"}]
00000000748.jpg
748_evjvqa
[{"from": "human", "value": "what is the man standing doing?\n<image>"}, {"from": "gpt", "value": "playing the musical instrument"}]
00000000748.jpg
748_evjvqa
[{"from": "human", "value": "what color is the man's clothes whose is playing musical instrument?\n<image>"}, {"from": "gpt", "value": "red"}]
00000000750.jpg
750_evjvqa
[{"from": "human", "value": "who is the woman wearing glasses handling?\n<image>"}, {"from": "gpt", "value": "the baby in yellow shirt"}]
00000000750.jpg
750_evjvqa
[{"from": "human", "value": "who is the man in front of the baby in yellow shirt cosplay of?\n<image>"}, {"from": "gpt", "value": "santa claus"}]
00000000750.jpg
750_evjvqa
[{"from": "human", "value": "what is the main content of this department store?\n<image>"}, {"from": "gpt", "value": "christmas"}]
00000000753.jpg
753_evjvqa
[{"from": "human", "value": "what is the old woman in dress jacket holding on her hand?\n<image>"}, {"from": "gpt", "value": "a bag"}]
00000000753.jpg
753_evjvqa
[{"from": "human", "value": "what is the man wearing black cap prepare the table and the tools for?\n<image>"}, {"from": "gpt", "value": "selling food"}]
00000000753.jpg
753_evjvqa
[{"from": "human", "value": "how many windows are there on the second floor of the house on the left?\n<image>"}, {"from": "gpt", "value": "three windows"}]
00000000758.jpg
758_evjvqa
[{"from": "human", "value": "what is the man touching his hair holding on his right hand?\n<image>"}, {"from": "gpt", "value": "a red basket from the supermarket"}]
00000000758.jpg
758_evjvqa
[{"from": "human", "value": "what are people on this road doing?\n<image>"}, {"from": "gpt", "value": "buying in the supermarket"}]
00000000758.jpg
758_evjvqa
[{"from": "human", "value": "where are the activities taken place?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000000759.jpg
759_evjvqa
[{"from": "human", "value": "what are this couple staanding like that for?\n<image>"}, {"from": "gpt", "value": "prepare for a couple picture"}]
00000000759.jpg
759_evjvqa
[{"from": "human", "value": "where are these two people standing for phototgraph?\n<image>"}, {"from": "gpt", "value": "on the bridge at the gate of the historical destination"}]
00000000759.jpg
759_evjvqa
[{"from": "human", "value": "what are this couple wearing?\n<image>"}, {"from": "gpt", "value": "the girl wears white pants, the boy wears blue pants"}]
00000000763.jpg
763_evjvqa
[{"from": "human", "value": "what are the girls about to do?\n<image>"}, {"from": "gpt", "value": "choosing tet decorated trees"}]
00000000763.jpg
763_evjvqa
[{"from": "human", "value": "which of the girls is holding the bag?\n<image>"}, {"from": "gpt", "value": "the girl in white shirt"}]
00000000763.jpg
763_evjvqa
[{"from": "human", "value": "who is keeping the handbag?\n<image>"}, {"from": "gpt", "value": "the girl in pink jacket"}]
00000000763.jpg
763_evjvqa
[{"from": "human", "value": "what are the girls looking at?\n<image>"}, {"from": "gpt", "value": "kumquat trees"}]
00000000767.jpg
767_evjvqa
[{"from": "human", "value": "what is selling with many different prices?\n<image>"}, {"from": "gpt", "value": "the dried shrimp"}]
00000000767.jpg
767_evjvqa
[{"from": "human", "value": "what is hanging at the aisle next to the dired shrimp aisle?\n<image>"}, {"from": "gpt", "value": "the sausages"}]
00000000782.jpg
782_evjvqa
[{"from": "human", "value": "how was the direction of the car and the motorcycle?\n<image>"}, {"from": "gpt", "value": "opposite"}]
00000000782.jpg
782_evjvqa
[{"from": "human", "value": "what vehicle is used by the man in blue costumes?\n<image>"}, {"from": "gpt", "value": "motorcycle"}]
00000000784.jpg
784_evjvqa
[{"from": "human", "value": "what device are used by the customers?\n<image>"}, {"from": "gpt", "value": "smartphones"}]
00000000784.jpg
784_evjvqa
[{"from": "human", "value": "where are these two people using phones at?\n<image>"}, {"from": "gpt", "value": "at the coffee store"}]
00000000784.jpg
784_evjvqa
[{"from": "human", "value": "what are the couple sitting on the chair doing?\n<image>"}, {"from": "gpt", "value": "using smartphone"}]
00000000784.jpg
784_evjvqa
[{"from": "human", "value": "what fruit is on the advetisement in front of the store?\n<image>"}, {"from": "gpt", "value": "coconut"}]
00000000784.jpg
784_evjvqa
[{"from": "human", "value": "how many chairs are unused in front of the store?\n<image>"}, {"from": "gpt", "value": "two chairs"}]
00000000789.jpg
789_evjvqa
[{"from": "human", "value": "what is the person standing and wearing beanie doing?\n<image>"}, {"from": "gpt", "value": "buying food"}]
00000000789.jpg
789_evjvqa
[{"from": "human", "value": "who is the old woman wearing beanie going with?\n<image>"}, {"from": "gpt", "value": "alone"}]
00000000789.jpg
789_evjvqa
[{"from": "human", "value": "where do these activities take place?\n<image>"}, {"from": "gpt", "value": "at the market"}]
00000000792.jpg
792_evjvqa
[{"from": "human", "value": "what colors are the police officers' closthes?\n<image>"}, {"from": "gpt", "value": "green, black, blue"}]
00000000792.jpg
792_evjvqa
[{"from": "human", "value": "what is the man in green suit holding to control the traffic?\n<image>"}, {"from": "gpt", "value": "bludgeons"}]
00000000795.jpg
795_evjvqa
[{"from": "human", "value": "what is this woman doing?\n<image>"}, {"from": "gpt", "value": "categorizing the mushrooms"}]
00000000795.jpg
795_evjvqa
[{"from": "human", "value": "what is put into the plastic bag by the woman?\n<image>"}, {"from": "gpt", "value": "mushrooms"}]
00000000795.jpg
795_evjvqa
[{"from": "human", "value": "what is included in the woman's plastic bag?\n<image>"}, {"from": "gpt", "value": "mushrooms"}]
00000000797.jpg
797_evjvqa
[{"from": "human", "value": "what are the adults doing with their child?\n<image>"}, {"from": "gpt", "value": "playing toy with him"}]
00000000797.jpg
797_evjvqa
[{"from": "human", "value": "what is sold in stores along the street?\n<image>"}, {"from": "gpt", "value": "books"}]
00000000797.jpg
797_evjvqa
[{"from": "human", "value": "who are playing with the child?\n<image>"}, {"from": "gpt", "value": "his parents"}]
00000000202.jpg
202_evjvqa
[{"from": "human", "value": "which hand is the girl sitting next to the girl in purple wearing a watch?\n<image>"}, {"from": "gpt", "value": "left hand"}]
00000000202.jpg
202_evjvqa
[{"from": "human", "value": "how many girls are sitting at the table with the girl in white?\n<image>"}, {"from": "gpt", "value": "three"}]
00000000202.jpg
202_evjvqa
[{"from": "human", "value": "how many people are there sitting on the same table with the purple shirt girl but not arriving?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000000205.jpg
205_evjvqa
[{"from": "human", "value": "what scene does this painting depict?\n<image>"}, {"from": "gpt", "value": "depicts a street crowded with people and cars"}]
00000000205.jpg
205_evjvqa
[{"from": "human", "value": "where is the scene in this picture drawn?\n<image>"}, {"from": "gpt", "value": "on a bustling street with an endless stream of passers-by in a city"}]