image | id | conversations |
---|---|---|
00000001826.jpg | 1826_evjvqa | [{"from": "human", "value": "what is the young man in the white shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing vegetables"}] |
00000001826.jpg | 1826_evjvqa | [{"from": "human", "value": "what is the girl in the black shirt near the woman doing?\n<image>"}, {"from": "gpt", "value": "choosing vegetables"}] |
00000001826.jpg | 1826_evjvqa | [{"from": "human", "value": "how many people are standing near the vegetable counter?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001826.jpg | 1826_evjvqa | [{"from": "human", "value": "where is the young man in the white shirt standing?\n<image>"}, {"from": "gpt", "value": "vegetable counter"}] |
00000001828.jpg | 1828_evjvqa | [{"from": "human", "value": "what kinds of plants are planted on the pedestal near the bench?\n<image>"}, {"from": "gpt", "value": "cactus"}] |
00000001828.jpg | 1828_evjvqa | [{"from": "human", "value": "how many people are sitting on the bench?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001828.jpg | 1828_evjvqa | [{"from": "human", "value": "what is the woman holding in her hand?\n<image>"}, {"from": "gpt", "value": "conical hat"}] |
00000001828.jpg | 1828_evjvqa | [{"from": "human", "value": "how many women does the man sit with?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001828.jpg | 1828_evjvqa | [{"from": "human", "value": "who is the woman sitting next to?\n<image>"}, {"from": "gpt", "value": "a man wearing a blue shirt"}] |
00000001828.jpg | 1828_evjvqa | [{"from": "human", "value": "who is the man sitting next to?\n<image>"}, {"from": "gpt", "value": "a woman wearing a white shirt"}] |
00000001829.jpg | 1829_evjvqa | [{"from": "human", "value": "where is the woman with the basket on her back standing?\n<image>"}, {"from": "gpt", "value": "fabric stall"}] |
00000001829.jpg | 1829_evjvqa | [{"from": "human", "value": "how many women are carrying baskets on their backs?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001829.jpg | 1829_evjvqa | [{"from": "human", "value": "what color scarf is the owner of the fabric stall wearing?\n<image>"}, {"from": "gpt", "value": "green"}] |
00000001831.jpg | 1831_evjvqa | [{"from": "human", "value": "where is the car parked?\n<image>"}, {"from": "gpt", "value": "in the house"}] |
00000001831.jpg | 1831_evjvqa | [{"from": "human", "value": "how many cars are parked in the house?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001831.jpg | 1831_evjvqa | [{"from": "human", "value": "how many cars are there in the house?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001832.jpg | 1832_evjvqa | [{"from": "human", "value": "who is the young man being photographed with?\n<image>"}, {"from": "gpt", "value": "a girl in white standing with the bicycle"}] |
00000001832.jpg | 1832_evjvqa | [{"from": "human", "value": "what means of transport is the girl using?\n<image>"}, {"from": "gpt", "value": "bicycle"}] |
00000001835.jpg | 1835_evjvqa | [{"from": "human", "value": "what are the counters in this area selling?\n<image>"}, {"from": "gpt", "value": "vegetables"}] |
00000001835.jpg | 1835_evjvqa | [{"from": "human", "value": "what products are sold in this area?\n<image>"}, {"from": "gpt", "value": "fresh vegetables"}] |
00000001835.jpg | 1835_evjvqa | [{"from": "human", "value": "what products are mainly sold in this area?\n<image>"}, {"from": "gpt", "value": "fresh vegetables"}] |
00000001836.jpg | 1836_evjvqa | [{"from": "human", "value": "how many clocks are hanging on the wall?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000001836.jpg | 1836_evjvqa | [{"from": "human", "value": "how many men are in the room?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001836.jpg | 1836_evjvqa | [{"from": "human", "value": "how many people are standing at the reception?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001843.jpg | 1843_evjvqa | [{"from": "human", "value": "how many people are upstairs?\n<image>"}, {"from": "gpt", "value": "no one is upstairs"}] |
00000001845.jpg | 1845_evjvqa | [{"from": "human", "value": "what stall is the man standing at?\n<image>"}, {"from": "gpt", "value": "dry food stall"}] |
00000001845.jpg | 1845_evjvqa | [{"from": "human", "value": "what is the standing man doing?\n<image>"}, {"from": "gpt", "value": "selling dry food"}] |
00000001845.jpg | 1845_evjvqa | [{"from": "human", "value": "how many people are buying dry food?\n<image>"}, {"from": "gpt", "value": "no one is buying"}] |
00000001846.jpg | 1846_evjvqa | [{"from": "human", "value": "what color shirt does the owner wear?\n<image>"}, {"from": "gpt", "value": "blue"}] |
00000001846.jpg | 1846_evjvqa | [{"from": "human", "value": "what is the owner doing?\n<image>"}, {"from": "gpt", "value": "preparing food to sell to customers"}] |
00000001846.jpg | 1846_evjvqa | [{"from": "human", "value": "who is the owner selling with?\n<image>"}, {"from": "gpt", "value": "the owner is selling alone"}] |
00000001851.jpg | 1851_evjvqa | [{"from": "human", "value": "what is the group of people wearing helmets doing?\n<image>"}, {"from": "gpt", "value": "viewing christmas decorations"}] |
00000001851.jpg | 1851_evjvqa | [{"from": "human", "value": "where is the group of people wearing helmets standing?\n<image>"}, {"from": "gpt", "value": "christmas decorations shop"}] |
00000001851.jpg | 1851_evjvqa | [{"from": "human", "value": "how many people is the young man wearing a helmet viewing the goods with?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001852.jpg | 1852_evjvqa | [{"from": "human", "value": "where are the farmers working?\n<image>"}, {"from": "gpt", "value": "on the field"}] |
00000001852.jpg | 1852_evjvqa | [{"from": "human", "value": "what are the farmers doing?\n<image>"}, {"from": "gpt", "value": "working in the field"}] |
00000001852.jpg | 1852_evjvqa | [{"from": "human", "value": "what kind of crops are the farmers growing?\n<image>"}, {"from": "gpt", "value": "rice"}] |
00000001857.jpg | 1857_evjvqa | [{"from": "human", "value": "how many people are sitting near the letters a and 1?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001863.jpg | 1863_evjvqa | [{"from": "human", "value": "where is the man in the blue shirt standing?\n<image>"}, {"from": "gpt", "value": "food court"}] |
00000001863.jpg | 1863_evjvqa | [{"from": "human", "value": "what is the man in the blue shirt with the white bag doing?\n<image>"}, {"from": "gpt", "value": "choosing food"}] |
00000001863.jpg | 1863_evjvqa | [{"from": "human", "value": "what color jacket does the food stall owner wear?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000001864.jpg | 1864_evjvqa | [{"from": "human", "value": "on which side of the road is the motorbike the girl is sitting on parked?\n<image>"}, {"from": "gpt", "value": "right"}] |
00000001865.jpg | 1865_evjvqa | [{"from": "human", "value": "where does the woman wearing the red shirt go shopping?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}] |
00000001865.jpg | 1865_evjvqa | [{"from": "human", "value": "what counter is the woman wearing the red shirt looking at?\n<image>"}, {"from": "gpt", "value": "instant noodle counter"}] |
00000001865.jpg | 1865_evjvqa | [{"from": "human", "value": "how many supermarket staff are behind the woman?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001866.jpg | 1866_evjvqa | [{"from": "human", "value": "how many people are eating in the restaurant?\n<image>"}, {"from": "gpt", "value": "no one is eating in the restaurant yet"}] |
00000001868.jpg | 1868_evjvqa | [{"from": "human", "value": "how many people are sitting at the restaurant on the street corner?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001871.jpg | 1871_evjvqa | [{"from": "human", "value": "what is the supermarket staff doing?\n<image>"}, {"from": "gpt", "value": "giving the item to the woman"}] |
00000001871.jpg | 1871_evjvqa | [{"from": "human", "value": "what counter is the woman standing at?\n<image>"}, {"from": "gpt", "value": "household counter"}] |
00000001871.jpg | 1871_evjvqa | [{"from": "human", "value": "who is the woman standing next to?\n<image>"}, {"from": "gpt", "value": "supermarket staff"}] |
00000001871.jpg | 1871_evjvqa | [{"from": "human", "value": "what color shirt is the woman standing next to the supermarket staff wearing?\n<image>"}, {"from": "gpt", "value": "brown"}] |
00000001871.jpg | 1871_evjvqa | [{"from": "human", "value": "how many people are viewing the products at the household counter?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001871.jpg | 1871_evjvqa | [{"from": "human", "value": "who does the woman viewing the products at the household counter go with?\n<image>"}, {"from": "gpt", "value": "she goes alone"}] |
00000001875.jpg | 1875_evjvqa | [{"from": "human", "value": "how many taxis carry passengers to the supermarket?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001875.jpg | 1875_evjvqa | [{"from": "human", "value": "how many people travel by motorbike?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001875.jpg | 1875_evjvqa | [{"from": "human", "value": "where is the guy in the yellow shirt going?\n<image>"}, {"from": "gpt", "value": "going inside the supermarket"}] |
00000001875.jpg | 1875_evjvqa | [{"from": "human", "value": "how many cars are parked outside the supermarket?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001876.jpg | 1876_evjvqa | [{"from": "human", "value": "what products does the woman in the pink shirt sell?\n<image>"}, {"from": "gpt", "value": "fresh products"}] |
00000001876.jpg | 1876_evjvqa | [{"from": "human", "value": "how many people does the woman in the pink shirt sell with?\n<image>"}, {"from": "gpt", "value": "the woman in the pink shirt sells alone"}] |
00000001876.jpg | 1876_evjvqa | [{"from": "human", "value": "how many people come to see the fresh produce stall of the woman in the pink shirt?\n<image>"}, {"from": "gpt", "value": "no one comes to see"}] |
00000001883.jpg | 1883_evjvqa | [{"from": "human", "value": "what is the man in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "interviewing"}] |
00000001883.jpg | 1883_evjvqa | [{"from": "human", "value": "what is the man on the computer screen doing?\n<image>"}, {"from": "gpt", "value": "participating in the interview"}] |
00000001883.jpg | 1883_evjvqa | [{"from": "human", "value": "who is interviewing?\n<image>"}, {"from": "gpt", "value": "the man wearing the blue shirt"}] |
00000001883.jpg | 1883_evjvqa | [{"from": "human", "value": "how many people is the man interviewing now?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001890.jpg | 1890_evjvqa | [{"from": "human", "value": "what means of transport are the people wearing blue shirts traveling by?\n<image>"}, {"from": "gpt", "value": "by cyclo"}] |
00000001892.jpg | 1892_evjvqa | [{"from": "human", "value": "what products does this store sell?\n<image>"}, {"from": "gpt", "value": "fashion clothes"}] |
00000001892.jpg | 1892_evjvqa | [{"from": "human", "value": "how many people are coming to buy clothes?\n<image>"}, {"from": "gpt", "value": "no one is coming to buy"}] |
00000001892.jpg | 1892_evjvqa | [{"from": "human", "value": "how many customers are inside the clothing store?\n<image>"}, {"from": "gpt", "value": "no customers"}] |
00000001894.jpg | 1894_evjvqa | [{"from": "human", "value": "how many paintings are placed on the podium?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001894.jpg | 1894_evjvqa | [{"from": "human", "value": "how many paintings are not placed on the podium?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001894.jpg | 1894_evjvqa | [{"from": "human", "value": "how many people stand on either side of the painting not on the podium?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001894.jpg | 1894_evjvqa | [{"from": "human", "value": "how many paintings are on display?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001895.jpg | 1895_evjvqa | [{"from": "human", "value": "what counter is the woman next to the trolley standing at?\n<image>"}, {"from": "gpt", "value": "vegetable counter"}] |
00000001895.jpg | 1895_evjvqa | [{"from": "human", "value": "what is the woman in the orange shirt doing?\n<image>"}, {"from": "gpt", "value": "going to buy vegetables"}] |
00000001895.jpg | 1895_evjvqa | [{"from": "human", "value": "how many items are placed on the trolley of the woman in the orange shirt?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000004300.jpg | 4300_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a historical monument"}] |
00000004303.jpg | 4303_evjvqa | [{"from": "human", "value": "what is the woman in the black and white striped shirt doing?\n<image>"}, {"from": "gpt", "value": "she is going to the supermarket to buy fish"}] |
00000004303.jpg | 4303_evjvqa | [{"from": "human", "value": "what is the woman in blue doing?\n<image>"}, {"from": "gpt", "value": "she is selling fish to a woman in the white shirt with black stripes"}] |
00000004303.jpg | 4303_evjvqa | [{"from": "human", "value": "where is the woman in blue standing?\n<image>"}, {"from": "gpt", "value": "at the raw fish stall in the supermarket"}] |
00000004303.jpg | 4303_evjvqa | [{"from": "human", "value": "where is the woman in the white shirt with black stripes standing?\n<image>"}, {"from": "gpt", "value": "at the fish stall"}] |
00000004304.jpg | 4304_evjvqa | [{"from": "human", "value": "where are these people gathered?\n<image>"}, {"from": "gpt", "value": "in the courtyard in front of a house"}] |
00000004308.jpg | 4308_evjvqa | [{"from": "human", "value": "what color is this building mainly painted?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
00000004309.jpg | 4309_evjvqa | [{"from": "human", "value": "what does this stroller mainly contain?\n<image>"}, {"from": "gpt", "value": "vegetables and fruit"}] |
00000004314.jpg | 4314_evjvqa | [{"from": "human", "value": "how many motorbikes are parked in front of the house at the beginning of the alley?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000004316.jpg | 4316_evjvqa | [{"from": "human", "value": "where is the incense burner?\n<image>"}, {"from": "gpt", "value": "in the front of the yard"}] |
00000004320.jpg | 4320_evjvqa | [{"from": "human", "value": "what are the man and woman standing at the front of the line doing?\n<image>"}, {"from": "gpt", "value": "using the phone"}] |
00000004321.jpg | 4321_evjvqa | [{"from": "human", "value": "what is this area decorated with?\n<image>"}, {"from": "gpt", "value": "food"}] |
00000004321.jpg | 4321_evjvqa | [{"from": "human", "value": "where is the woman standing?\n<image>"}, {"from": "gpt", "value": "the food stall"}] |
00000004321.jpg | 4321_evjvqa | [{"from": "human", "value": "what does this woman in yellow do?\n<image>"}, {"from": "gpt", "value": "selling food"}] |
00000004322.jpg | 4322_evjvqa | [{"from": "human", "value": "what do people go to this area for?\n<image>"}, {"from": "gpt", "value": "to buy food"}] |
00000004322.jpg | 4322_evjvqa | [{"from": "human", "value": "what are the stalls in this area selling mainly?\n<image>"}, {"from": "gpt", "value": "food"}] |
00000004323.jpg | 4323_evjvqa | [{"from": "human", "value": "what is this girl wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a white ao dai"}] |
00000004323.jpg | 4323_evjvqa | [{"from": "human", "value": "where is the girl standing?\n<image>"}, {"from": "gpt", "value": "she is standing on an empty street"}] |
00000004323.jpg | 4323_evjvqa | [{"from": "human", "value": "what is the woman in the conical hat walking behind the girl carrying?\n<image>"}, {"from": "gpt", "value": "a yoke"}] |
00000004326.jpg | 4326_evjvqa | [{"from": "human", "value": "what are these two young people doing?\n<image>"}, {"from": "gpt", "value": "they are using their phones to scan a qr code"}] |
00000004326.jpg | 4326_evjvqa | [{"from": "human", "value": "what are the girls using their phones for?\n<image>"}, {"from": "gpt", "value": "to scan the qr code"}] |
00000004326.jpg | 4326_evjvqa | [{"from": "human", "value": "who is the girl standing at the store with?\n<image>"}, {"from": "gpt", "value": "a boy wearing a black jacket"}] |
00000004326.jpg | 4326_evjvqa | [{"from": "human", "value": "where is this man taking the girl?\n<image>"}, {"from": "gpt", "value": "into the store"}] |
00000004326.jpg | 4326_evjvqa | [{"from": "human", "value": "where is the paper containing the qr code pasted?\n<image>"}, {"from": "gpt", "value": "on the floor"}] |
00000004328.jpg | 4328_evjvqa | [{"from": "human", "value": "how is the weather?\n<image>"}, {"from": "gpt", "value": "it has just rained"}] |
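
Each `conversations` cell is a ShareGPT-style list of turns: a `human` question carrying an `<image>` placeholder, followed by a `gpt` answer. The sketch below is a minimal example (not an official loader) of pulling question–answer pairs out of one row; it uses the first row of the table as sample data and assumes the cell arrives as JSON-encoded text, so drop the `json.loads` if your loader already yields parsed lists.

```python
import json

# First row of the preview above, with "conversations" stored as a JSON
# string (an assumption; adjust if your loader returns a parsed list).
row = {
    "image": "00000001826.jpg",
    "id": "1826_evjvqa",
    "conversations": json.dumps([
        {"from": "human", "value": "what is the young man in the white shirt doing?\n<image>"},
        {"from": "gpt", "value": "choosing vegetables"},
    ]),
}

def qa_pairs(row):
    """Yield (question, answer) pairs from one row, stripping the <image> tag."""
    turns = json.loads(row["conversations"])
    # Turns alternate: even indices are "human" questions, odd are "gpt" answers.
    for q, a in zip(turns[::2], turns[1::2]):
        assert q["from"] == "human" and a["from"] == "gpt"
        yield q["value"].replace("<image>", "").strip(), a["value"]

for question, answer in qa_pairs(row):
    print(f"{row['image']}: {question} -> {answer}")
```

The same `qa_pairs` loop would apply unchanged to records loaded with the Hugging Face `datasets` library once the dataset's actual hub path is substituted.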