image | id | conversations |
---|---|---|
00000004329.jpg | 4329_evjvqa | [{"from": "human", "value": "what is this large yard area used for?\n<image>"}, {"from": "gpt", "value": "to walk"}] |
00000004332.jpg | 4332_evjvqa | [{"from": "human", "value": "how many employees are going out of the store?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000004337.jpg | 4337_evjvqa | [{"from": "human", "value": "what item is this man going to get?\n<image>"}, {"from": "gpt", "value": "the box of prepared food"}] |
00000004337.jpg | 4337_evjvqa | [{"from": "human", "value": "which stall is this man shopping at?\n<image>"}, {"from": "gpt", "value": "the ready-to-eat food stall"}] |
00000004337.jpg | 4337_evjvqa | [{"from": "human", "value": "where is the man going shopping?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}] |
00000004341.jpg | 4341_evjvqa | [{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "on the pedestal of a wall"}] |
00000004341.jpg | 4341_evjvqa | [{"from": "human", "value": "what do people put on the wall?\n<image>"}, {"from": "gpt", "value": "there are three mirrors"}] |
00000004342.jpg | 4342_evjvqa | [{"from": "human", "value": "what is the man's bike riding on the right hand side selling?\n<image>"}, {"from": "gpt", "value": "selling all kinds of handicrafts made from bamboo"}] |
00000004342.jpg | 4342_evjvqa | [{"from": "human", "value": "what does the woman in a conical hat driving a bicycle on the left sell?\n<image>"}, {"from": "gpt", "value": "plastic utensils"}] |
00000004342.jpg | 4342_evjvqa | [{"from": "human", "value": "what hats do cyclists wear?\n<image>"}, {"from": "gpt", "value": "the conical hats"}] |
00000004342.jpg | 4342_evjvqa | [{"from": "human", "value": "how many people are riding bicycle?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000004346.jpg | 4346_evjvqa | [{"from": "human", "value": "what store is the woman in the black hat standing at?\n<image>"}, {"from": "gpt", "value": "the clothes store"}] |
00000004346.jpg | 4346_evjvqa | [{"from": "human", "value": "how many people are visiting the clothes store?\n<image>"}, {"from": "gpt", "value": "there is the woman in black hat"}] |
00000004346.jpg | 4346_evjvqa | [{"from": "human", "value": "what item is the woman going to see?\n<image>"}, {"from": "gpt", "value": "clothes"}] |
00000004347.jpg | 4347_evjvqa | [{"from": "human", "value": "how is the street decorated?\n<image>"}, {"from": "gpt", "value": "the lanterns"}] |
00000004347.jpg | 4347_evjvqa | [{"from": "human", "value": "where are these lanterns decorated?\n<image>"}, {"from": "gpt", "value": "in the upper space of the pedestrian street"}] |
00000004347.jpg | 4347_evjvqa | [{"from": "human", "value": "how many people are using the conical hat?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000004349.jpg | 4349_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "he is speaking at a conference"}] |
00000004350.jpg | 4350_evjvqa | [{"from": "human", "value": "what are children doing?\n<image>"}, {"from": "gpt", "value": "dancing and singing"}] |
00000004351.jpg | 4351_evjvqa | [{"from": "human", "value": "what is the woman in red doing?\n<image>"}, {"from": "gpt", "value": "choosing fruit"}] |
00000004351.jpg | 4351_evjvqa | [{"from": "human", "value": "how many people are standing at the fruit stall?\n<image>"}, {"from": "gpt", "value": "five"}] |
00000004351.jpg | 4351_evjvqa | [{"from": "human", "value": "who is the boy sitting in the trolley going in the supermarket with?\n<image>"}, {"from": "gpt", "value": "the woman in the white shirt"}] |
00000004351.jpg | 4351_evjvqa | [{"from": "human", "value": "how many people are standing nearby wearing masks?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000004354.jpg | 4354_evjvqa | [{"from": "human", "value": "what is the person doing?\n<image>"}, {"from": "gpt", "value": "taking the medicine from the vaccine vial"}] |
00000004356.jpg | 4356_evjvqa | [{"from": "human", "value": "what are these two women doing?\n<image>"}, {"from": "gpt", "value": "they are peeling the fruit"}] |
00000004356.jpg | 4356_evjvqa | [{"from": "human", "value": "what do these two women do?\n<image>"}, {"from": "gpt", "value": "selling fruit"}] |
00000004356.jpg | 4356_evjvqa | [{"from": "human", "value": "where are these two women sitting?\n<image>"}, {"from": "gpt", "value": "at the fruit stall"}] |
00000004358.jpg | 4358_evjvqa | [{"from": "human", "value": "what is the woman in the green shirt doing?\n<image>"}, {"from": "gpt", "value": "checking the box of goods"}] |
00000004358.jpg | 4358_evjvqa | [{"from": "human", "value": "who is the man in the green shirt sitting with?\n<image>"}, {"from": "gpt", "value": "the woman in the green shirt"}] |
00000004358.jpg | 4358_evjvqa | [{"from": "human", "value": "how many people are sitting around the table?\n<image>"}, {"from": "gpt", "value": "three "}] |
00000004360.jpg | 4360_evjvqa | [{"from": "human", "value": "how many beds are there in this room?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000004362.jpg | 4362_evjvqa | [{"from": "human", "value": "how many teachers take these children to visit historical monument?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000004363.jpg | 4363_evjvqa | [{"from": "human", "value": "what is the woman have the son sitting in the stroller buying?\n<image>"}, {"from": "gpt", "value": "american beef"}] |
00000004364.jpg | 4364_evjvqa | [{"from": "human", "value": "what job does this man do?\n<image>"}, {"from": "gpt", "value": "he is a worker cleaning the streets"}] |
00000004364.jpg | 4364_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "he is pushing a garbage truck on the street"}] |
00000004364.jpg | 4364_evjvqa | [{"from": "human", "value": "where is the man going?\n<image>"}, {"from": "gpt", "value": "he is cleaning the streets"}] |
00000004367.jpg | 4367_evjvqa | [{"from": "human", "value": "how many people riding motorcycles wearing the helmets?\n<image>"}, {"from": "gpt", "value": "only the man"}] |
00000004367.jpg | 4367_evjvqa | [{"from": "human", "value": "what color shirt does the man riding a motorcycle wearing a helmet wear?\n<image>"}, {"from": "gpt", "value": "white"}] |
00000004369.jpg | 4369_evjvqa | [{"from": "human", "value": "what is the girl in pink ao dai holding in her hand?\n<image>"}, {"from": "gpt", "value": "she is holding a bouquet of fresh flowers"}] |
00000004369.jpg | 4369_evjvqa | [{"from": "human", "value": "where is the girl holding a bouquet of flowers standing?\n<image>"}, {"from": "gpt", "value": "she is standing on a busy street"}] |
00000004377.jpg | 4377_evjvqa | [{"from": "human", "value": "where does this water stall sell?\n<image>"}, {"from": "gpt", "value": "on the railway"}] |
00000004377.jpg | 4377_evjvqa | [{"from": "human", "value": "where are these people sitting and drinking water?\n<image>"}, {"from": "gpt", "value": "on the railway "}] |
00000004379.jpg | 4379_evjvqa | [{"from": "human", "value": "where is the girl in black shirt standing?\n<image>"}, {"from": "gpt", "value": "on the book street"}] |
00000004380.jpg | 4380_evjvqa | [{"from": "human", "value": "what items does this stall sell?\n<image>"}, {"from": "gpt", "value": "types of scarve"}] |
00000004380.jpg | 4380_evjvqa | [{"from": "human", "value": "how many people come to buy scarves?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000004380.jpg | 4380_evjvqa | [{"from": "human", "value": "what is the woman selling scarves doing?\n<image>"}, {"from": "gpt", "value": "returning excess money to customers"}] |
00000004380.jpg | 4380_evjvqa | [{"from": "human", "value": "what are these three women buying?\n<image>"}, {"from": "gpt", "value": "scarves"}] |
00000004381.jpg | 4381_evjvqa | [{"from": "human", "value": "what is the boy in orange doing?\n<image>"}, {"from": "gpt", "value": "he is walking the dog"}] |
00000004381.jpg | 4381_evjvqa | [{"from": "human", "value": "how many motorbikes are running behind the boy?\n<image>"}, {"from": "gpt", "value": "only one"}] |
00000004381.jpg | 4381_evjvqa | [{"from": "human", "value": "who is the boy in the orange shirt walking with?\n<image>"}, {"from": "gpt", "value": "the dog"}] |
00000004381.jpg | 4381_evjvqa | [{"from": "human", "value": "on which side of the road do people park their cars?\n<image>"}, {"from": "gpt", "value": "on the left"}] |
00000004382.jpg | 4382_evjvqa | [{"from": "human", "value": "what is this man carrying on his back?\n<image>"}, {"from": "gpt", "value": "the guitar"}] |
00000004382.jpg | 4382_evjvqa | [{"from": "human", "value": "what is the man doing? \n<image>"}, {"from": "gpt", "value": "he is burning incense and worshiping"}] |
00000004382.jpg | 4382_evjvqa | [{"from": "human", "value": "where is this man going sightseeing?\n<image>"}, {"from": "gpt", "value": "in the ancient pagoda"}] |
00000004389.jpg | 4389_evjvqa | [{"from": "human", "value": "how many ships are parked on the water surface?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000004395.jpg | 4395_evjvqa | [{"from": "human", "value": "what do people do at these stalls?\n<image>"}, {"from": "gpt", "value": "to pay"}] |
00000009800.jpg | 9800_evjvqa | [{"from": "human", "value": "how many people are walking on the street?\n<image>"}, {"from": "gpt", "value": "there is one person walking on the street"}] |
00000009801.jpg | 9801_evjvqa | [{"from": "human", "value": "how many people are standing in front of the blue house?\n<image>"}, {"from": "gpt", "value": "there is one person standing in front of the blue house"}] |
00000009807.jpg | 9807_evjvqa | [{"from": "human", "value": "what is above the painting?\n<image>"}, {"from": "gpt", "value": "above the painting is an air conditioner"}] |
00000009808.jpg | 9808_evjvqa | [{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "the man is watching a medicine bottle on the hand of the girl"}] |
00000009809.jpg | 9809_evjvqa | [{"from": "human", "value": "what is the main building material of this house?\n<image>"}, {"from": "gpt", "value": "the main building material of this house is wood"}] |
00000009811.jpg | 9811_evjvqa | [{"from": "human", "value": "what is the girl sitting leaning against?\n<image>"}, {"from": "gpt", "value": "the girl is sitting leaning against the lamppost"}] |
00000009812.jpg | 9812_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a floating market"}] |
00000009813.jpg | 9813_evjvqa | [{"from": "human", "value": "where is the woman sitting?\n<image>"}, {"from": "gpt", "value": "the woman is sitting at a seafood stall"}] |
00000009814.jpg | 9814_evjvqa | [{"from": "human", "value": "how many buses are in front of the motorbike taxi driver?\n<image>"}, {"from": "gpt", "value": "there is one bus in front of the motorbike taxi driver"}] |
00000009818.jpg | 9818_evjvqa | [{"from": "human", "value": "what color are the light bulbs on the tower by the lake?\n<image>"}, {"from": "gpt", "value": "the light bulbs are yellow"}] |
00000009819.jpg | 9819_evjvqa | [{"from": "human", "value": "how many people are shopping in the supermarket?\n<image>"}, {"from": "gpt", "value": "there are three people shopping in the supermarket"}] |
00000009820.jpg | 9820_evjvqa | [{"from": "human", "value": "what is the man holding in his hand?\n<image>"}, {"from": "gpt", "value": "the man is holding an apple in his hand"}] |
00000009822.jpg | 9822_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a war museum"}] |
00000009824.jpg | 9824_evjvqa | [{"from": "human", "value": "what color is the taxi?\n<image>"}, {"from": "gpt", "value": "the taxi is brown"}] |
00000009825.jpg | 9825_evjvqa | [{"from": "human", "value": "what color shirt is the man wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing yellow shirt"}] |
00000009826.jpg | 9826_evjvqa | [{"from": "human", "value": "how many people are shopping?\n<image>"}, {"from": "gpt", "value": "there are two people shopping"}] |
00000009827.jpg | 9827_evjvqa | [{"from": "human", "value": "how many paintings are hanged on the wall?\n<image>"}, {"from": "gpt", "value": "there are two paintings hanged on the wall"}] |
00000009828.jpg | 9828_evjvqa | [{"from": "human", "value": "what color is the truck parked on the roadside?\n<image>"}, {"from": "gpt", "value": "the truck is red"}] |
00000009832.jpg | 9832_evjvqa | [{"from": "human", "value": "where is the man standing next to the cyclo looking at?\n<image>"}, {"from": "gpt", "value": "the man is looking at a building across the street"}] |
00000009833.jpg | 9833_evjvqa | [{"from": "human", "value": "what scenery does the painting portray?\n<image>"}, {"from": "gpt", "value": "the painting portrays the river countryside scenery"}] |
00000009835.jpg | 9835_evjvqa | [{"from": "human", "value": "what color shirt is the man wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing white shirt"}] |
00000009837.jpg | 9837_evjvqa | [{"from": "human", "value": "how many young girls are being awarded?\n<image>"}, {"from": "gpt", "value": "there are two young girls being awarded"}] |
00000009839.jpg | 9839_evjvqa | [{"from": "human", "value": "what color shirt is the man standing wearing?\n<image>"}, {"from": "gpt", "value": "the man standing is wearing white shirt"}] |
00000009841.jpg | 9841_evjvqa | [{"from": "human", "value": "what color is the roof of the pagoda?\n<image>"}, {"from": "gpt", "value": "the roof of the pagoda is red"}] |
00000009845.jpg | 9845_evjvqa | [{"from": "human", "value": "how many women are in the room?\n<image>"}, {"from": "gpt", "value": "there are two women in the room"}] |
00000009849.jpg | 9849_evjvqa | [{"from": "human", "value": "what is everyone doing?\n<image>"}, {"from": "gpt", "value": "everyone is visiting a temple"}] |
00000009852.jpg | 9852_evjvqa | [{"from": "human", "value": "what is the girl holding the cellphone doing?\n<image>"}, {"from": "gpt", "value": "the girl holding the cellphone is using the cellphone to scan the qr code"}] |
00000009853.jpg | 9853_evjvqa | [{"from": "human", "value": "what stall is the girl standing at?\n<image>"}, {"from": "gpt", "value": "the girl is standing at the dairy stall"}] |
00000009854.jpg | 9854_evjvqa | [{"from": "human", "value": "how many medical staffs are there?\n<image>"}, {"from": "gpt", "value": "there are two medical staffs"}] |
00000009857.jpg | 9857_evjvqa | [{"from": "human", "value": "what color is the light of the church?\n<image>"}, {"from": "gpt", "value": "the light of the church is yellow"}] |
00000009859.jpg | 9859_evjvqa | [{"from": "human", "value": "what stall is on the left of the woman in white shirt?\n<image>"}, {"from": "gpt", "value": "on the left of the woman is durian stall"}] |
00000009860.jpg | 9860_evjvqa | [{"from": "human", "value": "how many tables are decorated with plant pot?\n<image>"}, {"from": "gpt", "value": "there is one table decorated with plant pot"}] |
00000009865.jpg | 9865_evjvqa | [{"from": "human", "value": "what is this area selling?\n<image>"}, {"from": "gpt", "value": "this area is selling school supplies"}] |
00000009869.jpg | 9869_evjvqa | [{"from": "human", "value": "how many bikes are on the right of the young man in purple shirt?\n<image>"}, {"from": "gpt", "value": "there is one bike"}] |
00000009870.jpg | 9870_evjvqa | [{"from": "human", "value": "what color are the chairs?\n<image>"}, {"from": "gpt", "value": "the chairs are red"}] |
00000009871.jpg | 9871_evjvqa | [{"from": "human", "value": "how many mannequins are behind the woman?\n<image>"}, {"from": "gpt", "value": "there are four mannequins behind the woman"}] |
00000009873.jpg | 9873_evjvqa | [{"from": "human", "value": "what is the supermarket staff doing?\n<image>"}, {"from": "gpt", "value": "the supermarket staff is arranging the products"}] |
00000009874.jpg | 9874_evjvqa | [{"from": "human", "value": "what color shirt is the woman on the right of the little girl wearing?\n<image>"}, {"from": "gpt", "value": "the woman is wearing black shirt"}] |
00000009875.jpg | 9875_evjvqa | [{"from": "human", "value": "what color shirt is the man riding white motorbike wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing red shirt"}] |
00000009876.jpg | 9876_evjvqa | [{"from": "human", "value": "how many people are heading to the book fair?\n<image>"}, {"from": "gpt", "value": "there are two people heading to the book fair"}] |
00000009877.jpg | 9877_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a market on the shore"}] |
00000009880.jpg | 9880_evjvqa | [{"from": "human", "value": "what is on the right of the man riding bicycle?\n<image>"}, {"from": "gpt", "value": "on the right of the man riding bicycle is a motorbike"}] |
00000009881.jpg | 9881_evjvqa | [{"from": "human", "value": "how many tea cups are there?\n<image>"}, {"from": "gpt", "value": "there are two tea cups"}] |
00000009882.jpg | 9882_evjvqa | [{"from": "human", "value": "what is everone playing?\n<image>"}, {"from": "gpt", "value": "everyone is going on the swing ride"}] |