image (stringlengths 15) | id (stringlengths 8–12) | conversations (stringlengths 79–268) |
---|---|---|
00000002984.jpg | 2984_evjvqa | [{"from": "human", "value": "what is the man in the suit with the blue tie looking at?\n<image>"}, {"from": "gpt", "value": "vegetable on the shelves"}] |
00000002984.jpg | 2984_evjvqa | [{"from": "human", "value": "what is the person behind the man in the suit with the blue tie wearing?\n<image>"}, {"from": "gpt", "value": "he is wearing a suit with the pink tie"}] |
00000002984.jpg | 2984_evjvqa | [{"from": "human", "value": "what shirt is the man wearing the glasses?\n<image>"}, {"from": "gpt", "value": "the vertical stripe shirt"}] |
00000002986.jpg | 2986_evjvqa | [{"from": "human", "value": "which product is the stall selling?\n<image>"}, {"from": "gpt", "value": "the brocade products"}] |
00000002986.jpg | 2986_evjvqa | [{"from": "human", "value": "what does the woman in the white overcoat wear on her waist?\n<image>"}, {"from": "gpt", "value": "the red handbag"}] |
00000002986.jpg | 2986_evjvqa | [{"from": "human", "value": "what do the pants that the man in the long-sleeve shirt wear look like?\n<image>"}, {"from": "gpt", "value": "brown"}] |
00000002992.jpg | 2992_evjvqa | [{"from": "human", "value": "what is the man in white shirt with red tie doing?\n<image>"}, {"from": "gpt", "value": "he is speaking at the conference"}] |
00000002993.jpg | 2993_evjvqa | [{"from": "human", "value": "what is the girl in the yellow suit wearing on her face?\n<image>"}, {"from": "gpt", "value": "the sunglasses"}] |
00000002993.jpg | 2993_evjvqa | [{"from": "human", "value": "what color dress is the girl with sunglasses standing on the wooden surface?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
00000002993.jpg | 2993_evjvqa | [{"from": "human", "value": "what pink things are hanging across the street below the block?\n<image>"}, {"from": "gpt", "value": "the lanterns"}] |
00000002998.jpg | 2998_evjvqa | [{"from": "human", "value": "who is sitting in the front seat of the antique fan shop?\n<image>"}, {"from": "gpt", "value": "the woman is holding a baby"}] |
00000002999.jpg | 2999_evjvqa | [{"from": "human", "value": "what color is this supermarket trolley painted?\n<image>"}, {"from": "gpt", "value": "red"}] |
00000002999.jpg | 2999_evjvqa | [{"from": "human", "value": "what is the man in the dark gray overcoat holding?\n<image>"}, {"from": "gpt", "value": "the handle of the bag"}] |
00000002999.jpg | 2999_evjvqa | [{"from": "human", "value": "who is the man in gray black overcoat standing with?\n<image>"}, {"from": "gpt", "value": "with the woman in black jacket"}] |
00000001002.jpg | 1002_evjvqa | [{"from": "human", "value": "where are these products sold?\n<image>"}, {"from": "gpt", "value": "in a grocery store"}] |
00000001002.jpg | 1002_evjvqa | [{"from": "human", "value": "where is this scene recorded?\n<image>"}, {"from": "gpt", "value": "in a corner of the grocery store"}] |
00000001003.jpg | 1003_evjvqa | [{"from": "human", "value": "where are the boats docked?\n<image>"}, {"from": "gpt", "value": "on the water"}] |
00000001003.jpg | 1003_evjvqa | [{"from": "human", "value": "how many empty boats are docked on the water?\n<image>"}, {"from": "gpt", "value": "four empty boats are docked on the water"}] |
00000001008.jpg | 1008_evjvqa | [{"from": "human", "value": "how many motorbikes are running outside the market?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000001009.jpg | 1009_evjvqa | [{"from": "human", "value": "what does this store mainly sell?\n<image>"}, {"from": "gpt", "value": "mainly fabric"}] |
00000001010.jpg | 1010_evjvqa | [{"from": "human", "value": "where are the two women?\n<image>"}, {"from": "gpt", "value": "at a supermarket"}] |
00000001010.jpg | 1010_evjvqa | [{"from": "human", "value": "what are the two women doing?\n<image>"}, {"from": "gpt", "value": "going in the supermarket"}] |
00000001010.jpg | 1010_evjvqa | [{"from": "human", "value": "which department are the two women standing at?\n<image>"}, {"from": "gpt", "value": "the chewing gum department"}] |
00000001011.jpg | 1011_evjvqa | [{"from": "human", "value": "what is the woman in black coat standing near the one in pink shirt doing?\n<image>"}, {"from": "gpt", "value": "serving food to the customers"}] |
00000001012.jpg | 1012_evjvqa | [{"from": "human", "value": "where is this house built?\n<image>"}, {"from": "gpt", "value": "at the edge of an ocean"}] |
00000001014.jpg | 1014_evjvqa | [{"from": "human", "value": "how many vehicles are going into the restaurant?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001015.jpg | 1015_evjvqa | [{"from": "human", "value": "how many people are counting money?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000001015.jpg | 1015_evjvqa | [{"from": "human", "value": "how many employees are standing at the cashier counter?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001015.jpg | 1015_evjvqa | [{"from": "human", "value": "how many people are standing at the cashier counter?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000001015.jpg | 1015_evjvqa | [{"from": "human", "value": "what is the man in white shirt next to the trolley doing?\n<image>"}, {"from": "gpt", "value": "charging"}] |
00000001015.jpg | 1015_evjvqa | [{"from": "human", "value": "where is the man in white shirt next to the trolley standing?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}] |
00000001021.jpg | 1021_evjvqa | [{"from": "human", "value": "what color is the open car?\n<image>"}, {"from": "gpt", "value": "the open car is orange"}] |
00000001023.jpg | 1023_evjvqa | [{"from": "human", "value": "what items are sold in this area?\n<image>"}, {"from": "gpt", "value": "fashion clothing"}] |
00000001023.jpg | 1023_evjvqa | [{"from": "human", "value": "where is the woman in pink and white striped shirt next to the man in black shirt with the backpack?\n<image>"}, {"from": "gpt", "value": "in the market"}] |
00000001023.jpg | 1023_evjvqa | [{"from": "human", "value": "where is the woman in pink and white striped shirt next to the man in black shirt with the backpack?\n<image>"}, {"from": "gpt", "value": "at the clothing stall"}] |
00000001024.jpg | 1024_evjvqa | [{"from": "human", "value": "what stall are these people standing at?\n<image>"}, {"from": "gpt", "value": "the vegetable stall"}] |
00000001024.jpg | 1024_evjvqa | [{"from": "human", "value": "how many people are standing at the vegetable stall?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000001024.jpg | 1024_evjvqa | [{"from": "human", "value": "how many people in blue shirts are standing at the vegetable stall?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001024.jpg | 1024_evjvqa | [{"from": "human", "value": "how many people in orange shirts are standing at the vegetable stall?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001026.jpg | 1026_evjvqa | [{"from": "human", "value": "where are these two people going?\n<image>"}, {"from": "gpt", "value": "these two are going on a mountainside"}] |
00000001026.jpg | 1026_evjvqa | [{"from": "human", "value": "how many people are walking?\n<image>"}, {"from": "gpt", "value": "two people are walking"}] |
00000001028.jpg | 1028_evjvqa | [{"from": "human", "value": "where are these people standing at?\n<image>"}, {"from": "gpt", "value": "in a forest"}] |
00000001028.jpg | 1028_evjvqa | [{"from": "human", "value": "how many people in pink shirts are standing?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001032.jpg | 1032_evjvqa | [{"from": "human", "value": "where is the woman in yellow shirt standing?\n<image>"}, {"from": "gpt", "value": "a market"}] |
00000001032.jpg | 1032_evjvqa | [{"from": "human", "value": "which stall is the woman in yellow shirt standing at?\n<image>"}, {"from": "gpt", "value": "butcher stall"}] |
00000001032.jpg | 1032_evjvqa | [{"from": "human", "value": "how many people come to buy at the butcher stall?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001032.jpg | 1032_evjvqa | [{"from": "human", "value": "what stall is next to the butcher stall?\n<image>"}, {"from": "gpt", "value": "vegetables stall"}] |
00000001032.jpg | 1032_evjvqa | [{"from": "human", "value": "what is the woman at the vegetables stall doing?\n<image>"}, {"from": "gpt", "value": "cutting pumpkin to sell to a customer"}] |
00000001033.jpg | 1033_evjvqa | [{"from": "human", "value": "where are these tourists visiting?\n<image>"}, {"from": "gpt", "value": "a historical site"}] |
00000001033.jpg | 1033_evjvqa | [{"from": "human", "value": "what is this group of people lining up for?\n<image>"}, {"from": "gpt", "value": "for preparing to enter a tunnel"}] |
00000001033.jpg | 1033_evjvqa | [{"from": "human", "value": "where is this group of people going?\n<image>"}, {"from": "gpt", "value": "into a tunnel"}] |
00000001034.jpg | 1034_evjvqa | [{"from": "human", "value": "what is the woman in white shirt doing?\n<image>"}, {"from": "gpt", "value": "going in the supermarket"}] |
00000001034.jpg | 1034_evjvqa | [{"from": "human", "value": "what department is the woman in yellow shirt standing at?\n<image>"}, {"from": "gpt", "value": "the department selling washing powder, softener"}] |
00000001034.jpg | 1034_evjvqa | [{"from": "human", "value": "what color is the shirt of the woman pushing the trolley in the supermarket?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
00000001042.jpg | 1042_evjvqa | [{"from": "human", "value": "where is this road built?\n<image>"}, {"from": "gpt", "value": "along a river"}] |
00000001043.jpg | 1043_evjvqa | [{"from": "human", "value": "where are the students sitting?\n<image>"}, {"from": "gpt", "value": "in an antique house"}] |
00000001044.jpg | 1044_evjvqa | [{"from": "human", "value": "how many cyclo riders does this picture draw?\n<image>"}, {"from": "gpt", "value": "this picture draws one cyclo rider"}] |
00000001047.jpg | 1047_evjvqa | [{"from": "human", "value": "how many people are going out?\n<image>"}, {"from": "gpt", "value": "two people are going out"}] |
00000001047.jpg | 1047_evjvqa | [{"from": "human", "value": "where are these two people going?\n<image>"}, {"from": "gpt", "value": "on a road in the village"}] |
00000001047.jpg | 1047_evjvqa | [{"from": "human", "value": "how many people are walking?\n<image>"}, {"from": "gpt", "value": "two people are walking"}] |
00000001049.jpg | 1049_evjvqa | [{"from": "human", "value": "how many people are sitting on the balcony?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001049.jpg | 1049_evjvqa | [{"from": "human", "value": "how many women are sitting on the balcony?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001049.jpg | 1049_evjvqa | [{"from": "human", "value": "how many people is the woman on white shirt sitting with?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001049.jpg | 1049_evjvqa | [{"from": "human", "value": "how many people does the woman wearing glasses go to drink with?\n<image>"}, {"from": "gpt", "value": "one person"}] |
00000001056.jpg | 1056_evjvqa | [{"from": "human", "value": "what color is the restaurant name decorated with?\n<image>"}, {"from": "gpt", "value": "yellow"}] |
00000001057.jpg | 1057_evjvqa | [{"from": "human", "value": "where is the woman standing?\n<image>"}, {"from": "gpt", "value": "a lantern shop"}] |
00000001057.jpg | 1057_evjvqa | [{"from": "human", "value": "what is the woman holding in her hand?\n<image>"}, {"from": "gpt", "value": "a paper fan"}] |
00000001057.jpg | 1057_evjvqa | [{"from": "human", "value": "what is the woman with a paper fan doing?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}] |
00000001058.jpg | 1058_evjvqa | [{"from": "human", "value": "how many people does the woman carrying the handbag walk with?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000001058.jpg | 1058_evjvqa | [{"from": "human", "value": "how many people does the man in red shirt go with?\n<image>"}, {"from": "gpt", "value": "three"}] |
00000001058.jpg | 1058_evjvqa | [{"from": "human", "value": "how many people are walking on the street?\n<image>"}, {"from": "gpt", "value": "four"}] |
00000001059.jpg | 1059_evjvqa | [{"from": "human", "value": "where are the two women standing?\n<image>"}, {"from": "gpt", "value": "in front of the village gate"}] |
00000001059.jpg | 1059_evjvqa | [{"from": "human", "value": "how many women are standing in front of the village gate?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001059.jpg | 1059_evjvqa | [{"from": "human", "value": "how many women are standing on the street?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001063.jpg | 1063_evjvqa | [{"from": "human", "value": "how many people are carrying peddlers in the picture?\n<image>"}, {"from": "gpt", "value": "the picture shows one woman carrying peddlers"}] |
00000001063.jpg | 1063_evjvqa | [{"from": "human", "value": "how many trees at the street corner does this picture show?\n<image>"}, {"from": "gpt", "value": "this picture shows one tree at the street corner"}] |
00000001064.jpg | 1064_evjvqa | [{"from": "human", "value": "where is the man and the woman going?\n<image>"}, {"from": "gpt", "value": "a museum"}] |
00000001064.jpg | 1064_evjvqa | [{"from": "human", "value": "where is the man and the woman?\n<image>"}, {"from": "gpt", "value": "in a museum"}] |
00000001064.jpg | 1064_evjvqa | [{"from": "human", "value": "how many people are walking on the exhibition corridor area?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001064.jpg | 1064_evjvqa | [{"from": "human", "value": "what are the man and the woman doing?\n<image>"}, {"from": "gpt", "value": "visiting the exhibition area"}] |
00000001067.jpg | 1067_evjvqa | [{"from": "human", "value": "what stall is the man standing at?\n<image>"}, {"from": "gpt", "value": "a vegetable stall"}] |
00000001067.jpg | 1067_evjvqa | [{"from": "human", "value": "what is the color of the shirt the man wearing?\n<image>"}, {"from": "gpt", "value": "blue"}] |
00000001071.jpg | 1071_evjvqa | [{"from": "human", "value": "what products does this shelf mainly sell?\n<image>"}, {"from": "gpt", "value": "vegetables"}] |
00000001071.jpg | 1071_evjvqa | [{"from": "human", "value": "what products do the bottom two compartments of the shelf sell?\n<image>"}, {"from": "gpt", "value": "fresh vegetables"}] |
00000001071.jpg | 1071_evjvqa | [{"from": "human", "value": "what products are sold in the third compartment from the bottom?\n<image>"}, {"from": "gpt", "value": "tubers, fruits"}] |
00000001072.jpg | 1072_evjvqa | [{"from": "human", "value": "how many people does each cyclo carry?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001076.jpg | 1076_evjvqa | [{"from": "human", "value": "how many people in front of the restaurant is the girl sitting with?\n<image>"}, {"from": "gpt", "value": "the girl is sitting alone"}] |
00000001077.jpg | 1077_evjvqa | [{"from": "human", "value": "how many people are in front of the porch?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001077.jpg | 1077_evjvqa | [{"from": "human", "value": "how many canoes are on the river?\n<image>"}, {"from": "gpt", "value": "one"}] |
00000001077.jpg | 1077_evjvqa | [{"from": "human", "value": "what are the man and the woman in conical hats doing?\n<image>"}, {"from": "gpt", "value": "taking care of ornamental plants"}] |
00000001077.jpg | 1077_evjvqa | [{"from": "human", "value": "what are placed in front of the porch?\n<image>"}, {"from": "gpt", "value": "ornamental plant pots"}] |
00000001078.jpg | 1078_evjvqa | [{"from": "human", "value": "when is this scene taken on the street corner?\n<image>"}, {"from": "gpt", "value": "evening"}] |
00000001079.jpg | 1079_evjvqa | [{"from": "human", "value": "what are these two people doing?\n<image>"}, {"from": "gpt", "value": "canoeing on the sea"}] |
00000001079.jpg | 1079_evjvqa | [{"from": "human", "value": "where are these two people traveling?\n<image>"}, {"from": "gpt", "value": "sea"}] |
00000001079.jpg | 1079_evjvqa | [{"from": "human", "value": "how many people are canoeing on the water surface?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001080.jpg | 1080_evjvqa | [{"from": "human", "value": "how many employees are standing at the cashier counter?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001080.jpg | 1080_evjvqa | [{"from": "human", "value": "what is the man holding the baby doing?\n<image>"}, {"from": "gpt", "value": "waiting for payment"}] |
00000001080.jpg | 1080_evjvqa | [{"from": "human", "value": "how many people are waiting for payment?\n<image>"}, {"from": "gpt", "value": "two"}] |
00000001081.jpg | 1081_evjvqa | [{"from": "human", "value": "how many christmas trees is the outside of the house decorated with?\n<image>"}, {"from": "gpt", "value": "it is decorated with three christmas trees"}] |
00000001081.jpg | 1081_evjvqa | [{"from": "human", "value": "what are decorated at the foot of the christmas trees?\n<image>"}, {"from": "gpt", "value": "gift boxes"}] |
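
Each row above pairs an image file with one or more question–answer turns stored as a JSON string in the conversations column. Below is a minimal parsing sketch in plain Python, assuming the strict human/gpt alternation visible in every row shown here; the split_qa helper is illustrative, not part of the dataset.

```python
import json

# One "conversations" value copied from the preview above: a JSON list of
# turns, where "from" names the speaker ("human" or "gpt") and "value"
# holds the text; the <image> token marks where the image is attached.
raw = (
    '[{"from": "human", "value": "what is the man in the suit with the '
    'blue tie looking at?\\n<image>"}, '
    '{"from": "gpt", "value": "vegetable on the shelves"}]'
)

def split_qa(conversations_json: str) -> list[tuple[str, str]]:
    """Pair each human question with the gpt answer that follows it."""
    turns = json.loads(conversations_json)
    pairs = []
    # Assumes human and gpt turns strictly alternate, as in the rows above.
    for q, a in zip(turns[::2], turns[1::2]):
        question = q["value"].replace("<image>", "").strip()
        pairs.append((question, a["value"]))
    return pairs

print(split_qa(raw))
# [('what is the man in the suit with the blue tie looking at?', 'vegetable on the shelves')]
```

If a loader already decodes the JSON into a list of dicts, the json.loads step can simply be dropped and the same pairing logic applied directly.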