image (string, length 15) | id (string, length 8-12) | conversations (string, length 79-268) |
---|---|---|
00000007207.jpg | 7207_evjvqa | [{"from": "human", "value": "to which side of the female customer is the flower arrangement placed?\n<image>"}, {"from": "gpt", "value": "the flower arrangement is placed behind her on her right side. "}] |
00000007208.jpg | 7208_evjvqa | [{"from": "human", "value": "what is the man dressed in blue doing?\n<image>"}, {"from": "gpt", "value": "he is assisting the children in the tug of war game. "}] |
00000007208.jpg | 7208_evjvqa | [{"from": "human", "value": "what game are the students playing?\n<image>"}, {"from": "gpt", "value": "they are playing tug of war."}] |
00000007208.jpg | 7208_evjvqa | [{"from": "human", "value": "to which side of the students is the woman dressed in blue standing?\n<image>"}, {"from": "gpt", "value": "she is standing to the students' right-hand side."}] |
00000007209.jpg | 7209_evjvqa | [{"from": "human", "value": "what color is the helmet that the man driving by himself is wearing?\n<image>"}, {"from": "gpt", "value": "it is black."}] |
00000007209.jpg | 7209_evjvqa | [{"from": "human", "value": "what shop are they standing in front of?\n<image>"}, {"from": "gpt", "value": "a lantern shop."}] |
00000007209.jpg | 7209_evjvqa | [{"from": "human", "value": "what color is the woman sitting behind the male motorbike driver dressed in?\n<image>"}, {"from": "gpt", "value": "she is dressed in a white top."}] |
00000007212.jpg | 7212_evjvqa | [{"from": "human", "value": "how many people are standing in front of the pagoda gate?\n<image>"}, {"from": "gpt", "value": "there is one person standing in front of the pagoda gate."}] |
00000007213.jpg | 7213_evjvqa | [{"from": "human", "value": "how many people are standing near the chayote stall?\n<image>"}, {"from": "gpt", "value": "there is one person standing near the chayote stall."}] |
00000007213.jpg | 7213_evjvqa | [{"from": "human", "value": "what color are the shoes that the customer standing next to the chayote stall is wearing?\n<image>"}, {"from": "gpt", "value": "the customer is wearing a pair of white shoes."}] |
00000007218.jpg | 7218_evjvqa | [{"from": "human", "value": "which side of the person in yellow is the person in blue sitting on?\n<image>"}, {"from": "gpt", "value": "the person in blue is sitting on the person in yellow's right side."}] |
00000007218.jpg | 7218_evjvqa | [{"from": "human", "value": "what kind of goods does the punt next to the one with flowers ferry?\n<image>"}, {"from": "gpt", "value": "the punt next to the one with flowers ferries food."}] |
00000007218.jpg | 7218_evjvqa | [{"from": "human", "value": "how many colors does the bouquet of flowers in front of the person in yellow have?\n<image>"}, {"from": "gpt", "value": "the bouquet of flowers has three colors."}] |
00000007219.jpg | 7219_evjvqa | [{"from": "human", "value": "what color is the name of the restaurant?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007220.jpg | 7220_evjvqa | [{"from": "human", "value": "what is the man pulling?\n<image>"}, {"from": "gpt", "value": "he is pulling a shopping cart."}] |
00000007220.jpg | 7220_evjvqa | [{"from": "human", "value": "which section is he standing at?\n<image>"}, {"from": "gpt", "value": "he is standing at the produce section."}] |
00000007220.jpg | 7220_evjvqa | [{"from": "human", "value": "which hand does the man use to pull the shopping cart?\n<image>"}, {"from": "gpt", "value": "he uses his right hand to pull the shopping cart."}] |
00000007221.jpg | 7221_evjvqa | [{"from": "human", "value": "how many types of fruit are printed on the man's hat?\n<image>"}, {"from": "gpt", "value": "there are three types of fruit printed on the man's hat."}] |
00000007221.jpg | 7221_evjvqa | [{"from": "human", "value": "what is the man wearing on his wrist?\n<image>"}, {"from": "gpt", "value": "he is wearing a watch."}] |
00000007221.jpg | 7221_evjvqa | [{"from": "human", "value": "which hand does the man use to touch the hat?\n<image>"}, {"from": "gpt", "value": "he uses his left hand."}] |
00000007224.jpg | 7224_evjvqa | [{"from": "human", "value": "what type of goods does the store sell?\n<image>"}, {"from": "gpt", "value": "the store sells smart phones. "}] |
00000007225.jpg | 7225_evjvqa | [{"from": "human", "value": "what color is the flag hung from the clock tower?\n<image>"}, {"from": "gpt", "value": "the flag is red."}] |
00000007225.jpg | 7225_evjvqa | [{"from": "human", "value": "how many clock dials are built on the front of the market?\n<image>"}, {"from": "gpt", "value": "there is one clock dial built on the front of the market."}] |
00000007225.jpg | 7225_evjvqa | [{"from": "human", "value": "how many motorbikes are parked in front of the market entrance?\n<image>"}, {"from": "gpt", "value": "there is one motorbike parked in front of the market entrance."}] |
00000007226.jpg | 7226_evjvqa | [{"from": "human", "value": "how is the vietnamese flag placed in comparison with the chinese flag?\n<image>"}, {"from": "gpt", "value": "it is placed parallel to the chinese flag."}] |
00000007226.jpg | 7226_evjvqa | [{"from": "human", "value": "what are the two men doing?\n<image>"}, {"from": "gpt", "value": "they are shaking hands."}] |
00000007229.jpg | 7229_evjvqa | [{"from": "human", "value": "how many balloons are there?\n<image>"}, {"from": "gpt", "value": "there are two balloons."}] |
00000007229.jpg | 7229_evjvqa | [{"from": "human", "value": "what color are the words on the balloons?\n<image>"}, {"from": "gpt", "value": "they are red."}] |
00000007229.jpg | 7229_evjvqa | [{"from": "human", "value": "what color is the shirt that the person standing in front of the mirror on the right is wearing?\n<image>"}, {"from": "gpt", "value": "it is red. "}] |
00000007231.jpg | 7231_evjvqa | [{"from": "human", "value": "what kind of goods are displayed in this section?\n<image>"}, {"from": "gpt", "value": "this is the air conditioner section."}] |
00000007233.jpg | 7233_evjvqa | [{"from": "human", "value": "what is the little girl doing?\n<image>"}, {"from": "gpt", "value": "the little girl is pointing at a can of milk."}] |
00000007233.jpg | 7233_evjvqa | [{"from": "human", "value": "what color is the woman next to the little girl dressed in?\n<image>"}, {"from": "gpt", "value": "she is dressed in blue."}] |
00000007233.jpg | 7233_evjvqa | [{"from": "human", "value": "which hand is the little girl using to point at cans of milk?\n<image>"}, {"from": "gpt", "value": "she is using her right hand. "}] |
00000007235.jpg | 7235_evjvqa | [{"from": "human", "value": "what color is the female employee' shirt?\n<image>"}, {"from": "gpt", "value": "the female employee is wearing red."}] |
00000007235.jpg | 7235_evjvqa | [{"from": "human", "value": "to which direction of the employee is the woman dressed in blue standing?\n<image>"}, {"from": "gpt", "value": "she is standing in front of the employee."}] |
00000007235.jpg | 7235_evjvqa | [{"from": "human", "value": "which section is behind the man with the black fanny pack?\n<image>"}, {"from": "gpt", "value": "the mango section."}] |
00000007242.jpg | 7242_evjvqa | [{"from": "human", "value": "what color is the shirt that the man sitting on the motorbike is wearing?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007243.jpg | 7243_evjvqa | [{"from": "human", "value": "on which direction of the girl in the skirt is the girl with the black backpack walking?\n<image>"}, {"from": "gpt", "value": "she is walking on the right side of the girl in the skirt."}] |
00000007243.jpg | 7243_evjvqa | [{"from": "human", "value": "what color are the letters on the banner in the middle of the street?\n<image>"}, {"from": "gpt", "value": "they are yellow. "}] |
00000007243.jpg | 7243_evjvqa | [{"from": "human", "value": "what is the boy in the blue jacket doing?\n<image>"}, {"from": "gpt", "value": "he is driving the motorbike."}] |
00000007245.jpg | 7245_evjvqa | [{"from": "human", "value": "how many people are swimming in the pool?\n<image>"}, {"from": "gpt", "value": "there is one person swimming in the pool."}] |
00000007245.jpg | 7245_evjvqa | [{"from": "human", "value": "where is the girl in the black bikini sitting?\n<image>"}, {"from": "gpt", "value": "she is sitting on the poolside."}] |
00000007245.jpg | 7245_evjvqa | [{"from": "human", "value": "towards which direction is she swimming?\n<image>"}, {"from": "gpt", "value": "she is swimming towards where people are sitting. "}] |
00000007246.jpg | 7246_evjvqa | [{"from": "human", "value": "what color are the walls of the terrace of houses?\n<image>"}, {"from": "gpt", "value": "they are yellow."}] |
00000007246.jpg | 7246_evjvqa | [{"from": "human", "value": "what color is the flag hung from the balcony?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007246.jpg | 7246_evjvqa | [{"from": "human", "value": "what color is the door of the outermost house?\n<image>"}, {"from": "gpt", "value": "the door of the outermost house is white. "}] |
00000007250.jpg | 7250_evjvqa | [{"from": "human", "value": "what animal symbol is drawn between the two lines of chinese letters?\n<image>"}, {"from": "gpt", "value": "a fish symbol."}] |
00000007253.jpg | 7253_evjvqa | [{"from": "human", "value": "which hand is she raising?\n<image>"}, {"from": "gpt", "value": "she is raising her left hand."}] |
00000007253.jpg | 7253_evjvqa | [{"from": "human", "value": "to which side of the girl is the man standing?\n<image>"}, {"from": "gpt", "value": "the man is standing to the girl's right side. "}] |
00000007253.jpg | 7253_evjvqa | [{"from": "human", "value": "on which shoulder is he wearing his bag?\n<image>"}, {"from": "gpt", "value": "he is wearing the bag on his left shoulder."}] |
00000007256.jpg | 7256_evjvqa | [{"from": "human", "value": "how is the weather?\n<image>"}, {"from": "gpt", "value": "it is scorching hot. "}] |
00000007256.jpg | 7256_evjvqa | [{"from": "human", "value": "what color are the lower tree trunks painted?\n<image>"}, {"from": "gpt", "value": "the lower tree trunks are painted white."}] |
00000007256.jpg | 7256_evjvqa | [{"from": "human", "value": "where is the wastebasket placed?\n<image>"}, {"from": "gpt", "value": "the wastebasket is placed under the tree."}] |
00000007259.jpg | 7259_evjvqa | [{"from": "human", "value": "what is the man with the white hat doing?\n<image>"}, {"from": "gpt", "value": "he is monitoring the workers. "}] |
00000007259.jpg | 7259_evjvqa | [{"from": "human", "value": "how many wall-mounted fans are equipped in the workplace?\n<image>"}, {"from": "gpt", "value": "there is one wall-mounted fan equipped in the workplace."}] |
00000007259.jpg | 7259_evjvqa | [{"from": "human", "value": "which hand is she using to hold the knife?\n<image>"}, {"from": "gpt", "value": "she is using her right hand to hold the knife."}] |
00000007265.jpg | 7265_evjvqa | [{"from": "human", "value": "what color is her dress?\n<image>"}, {"from": "gpt", "value": "she is wearing a red dress."}] |
00000007265.jpg | 7265_evjvqa | [{"from": "human", "value": "which hand is she raising?\n<image>"}, {"from": "gpt", "value": "she is raising her left hand."}] |
00000007265.jpg | 7265_evjvqa | [{"from": "human", "value": "to which side of the girl is the handbag placed?\n<image>"}, {"from": "gpt", "value": "the handbag is placed to her right side."}] |
00000007267.jpg | 7267_evjvqa | [{"from": "human", "value": "why is she putting her arms up?\n<image>"}, {"from": "gpt", "value": "to cover her face."}] |
00000007267.jpg | 7267_evjvqa | [{"from": "human", "value": "what color is the hat on the snowman's head?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007267.jpg | 7267_evjvqa | [{"from": "human", "value": "on which side of the snowman is the christmas tree placed?\n<image>"}, {"from": "gpt", "value": "the christmas tree is placed on the right side of the snowman."}] |
00000007268.jpg | 7268_evjvqa | [{"from": "human", "value": "to which direction of the girl in yellow is the boy standing?\n<image>"}, {"from": "gpt", "value": "the boy is standing behind the girl in yellow."}] |
00000007268.jpg | 7268_evjvqa | [{"from": "human", "value": "where is the bicycle placed?\n<image>"}, {"from": "gpt", "value": "it is placed in front of the girl in yellow."}] |
00000007268.jpg | 7268_evjvqa | [{"from": "human", "value": "what is the girl standing at the rear of the bicycle is holding?\n<image>"}, {"from": "gpt", "value": "she is holding a little ball. "}] |
00000007269.jpg | 7269_evjvqa | [{"from": "human", "value": "how many people is the motorbike carrying?\n<image>"}, {"from": "gpt", "value": "the motorbike is carrying two people."}] |
00000007272.jpg | 7272_evjvqa | [{"from": "human", "value": "how many clock dials are built on the tower?\n<image>"}, {"from": "gpt", "value": "there are two clock dials built on the tower."}] |
00000007272.jpg | 7272_evjvqa | [{"from": "human", "value": "what color is the name of the market?\n<image>"}, {"from": "gpt", "value": "it is yellow."}] |
00000007272.jpg | 7272_evjvqa | [{"from": "human", "value": "what color are the lights on the tower of the market?\n<image>"}, {"from": "gpt", "value": "they are blue. "}] |
00000007274.jpg | 7274_evjvqa | [{"from": "human", "value": "what color is her dress?\n<image>"}, {"from": "gpt", "value": "it is yellow. "}] |
00000007274.jpg | 7274_evjvqa | [{"from": "human", "value": "to which direction is she turning her face?\n<image>"}, {"from": "gpt", "value": "she is turning her face to the left. "}] |
00000007274.jpg | 7274_evjvqa | [{"from": "human", "value": "where is she putting her hands?\n<image>"}, {"from": "gpt", "value": "she is putting her hands on the balcony railing."}] |
00000007275.jpg | 7275_evjvqa | [{"from": "human", "value": "what color is the conical hat?\n<image>"}, {"from": "gpt", "value": "it is white. "}] |
00000007277.jpg | 7277_evjvqa | [{"from": "human", "value": "what color is the trolley that the woman in the floral shirt is pushing?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007277.jpg | 7277_evjvqa | [{"from": "human", "value": "to which side of the woman in the floral shirt is the woman with the mask is standing?\n<image>"}, {"from": "gpt", "value": "she is standing in front of the woman in the floral shirt."}] |
00000007283.jpg | 7283_evjvqa | [{"from": "human", "value": "what is the woman wearing the conical hat doing?\n<image>"}, {"from": "gpt", "value": "she is pitching food."}] |
00000007283.jpg | 7283_evjvqa | [{"from": "human", "value": "what color is the shirt that the woman with the conical hat is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a purple shirt."}] |
00000007283.jpg | 7283_evjvqa | [{"from": "human", "value": "which hand is she holding the basket of food with?\n<image>"}, {"from": "gpt", "value": "she is holding the basket of food with both hands."}] |
00000007284.jpg | 7284_evjvqa | [{"from": "human", "value": "what is the company logo?\n<image>"}, {"from": "gpt", "value": "the company logo is a doctor. "}] |
00000007284.jpg | 7284_evjvqa | [{"from": "human", "value": "what color is the background of the company' signboard?\n<image>"}, {"from": "gpt", "value": "the background of the company' signboard is green."}] |
00000007284.jpg | 7284_evjvqa | [{"from": "human", "value": "what color are the letters on the sign?\n<image>"}, {"from": "gpt", "value": "the letters on the sign are white. "}] |
00000007285.jpg | 7285_evjvqa | [{"from": "human", "value": "how many motorbikes that stop at the traffic lights?\n<image>"}, {"from": "gpt", "value": "there is one motorbike that stops at the traffic lights. "}] |
00000007285.jpg | 7285_evjvqa | [{"from": "human", "value": "what color is the motorbike that stops at the middle of the road?\n<image>"}, {"from": "gpt", "value": "it is red. "}] |
00000007286.jpg | 7286_evjvqa | [{"from": "human", "value": "how many people are standing in front of the convenience store?\n<image>"}, {"from": "gpt", "value": "there are three people standing in front of the convenience store. "}] |
00000007286.jpg | 7286_evjvqa | [{"from": "human", "value": "where is the boy with the red hat standing?\n<image>"}, {"from": "gpt", "value": "he is standing between the man and woman. "}] |
00000007287.jpg | 7287_evjvqa | [{"from": "human", "value": "what is the woman in the red t-shirt doing?\n<image>"}, {"from": "gpt", "value": "she is pouring sauce over the dish."}] |
00000007287.jpg | 7287_evjvqa | [{"from": "human", "value": "what is the woman standing next to the girl in the red t-shirt doing?\n<image>"}, {"from": "gpt", "value": "she is putting the food in the plastic bag."}] |
00000007287.jpg | 7287_evjvqa | [{"from": "human", "value": "which hand is she using to hold the bottle of sauce?\n<image>"}, {"from": "gpt", "value": "her right hand. "}] |
00000007288.jpg | 7288_evjvqa | [{"from": "human", "value": "what color are the hulls of the boats?\n<image>"}, {"from": "gpt", "value": "they are blue."}] |
00000007288.jpg | 7288_evjvqa | [{"from": "human", "value": "what color is the flag hung from each boat?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007288.jpg | 7288_evjvqa | [{"from": "human", "value": "what national is the flag on each boat?\n<image>"}, {"from": "gpt", "value": "the flag on each boat is vietnamese. "}] |
00000007289.jpg | 7289_evjvqa | [{"from": "human", "value": "how many people are standing in front of the fridges?\n<image>"}, {"from": "gpt", "value": "there are two peole standing in front of the fridges."}] |
00000007289.jpg | 7289_evjvqa | [{"from": "human", "value": "where is the trolley placed?\n<image>"}, {"from": "gpt", "value": "it is behind the man. "}] |
00000007289.jpg | 7289_evjvqa | [{"from": "human", "value": "to which side of the man is the girl standing?\n<image>"}, {"from": "gpt", "value": "she is standing to his right side. "}] |
00000007290.jpg | 7290_evjvqa | [{"from": "human", "value": "what is hung above the sink?\n<image>"}, {"from": "gpt", "value": "there is a mirror hung above the sink."}] |
00000007290.jpg | 7290_evjvqa | [{"from": "human", "value": "on which side of the mirror is the shower head installed?\n<image>"}, {"from": "gpt", "value": "the shower head is installed on the right side of the mirror. "}] |
00000007290.jpg | 7290_evjvqa | [{"from": "human", "value": "to which side of the stairs is the bathroom built?\n<image>"}, {"from": "gpt", "value": "the bathroom is built to the right side of the stairs. "}] |
00000007291.jpg | 7291_evjvqa | [{"from": "human", "value": "how many dip bowls are there?\n<image>"}, {"from": "gpt", "value": "there are two dip bawls."}] |
00000007291.jpg | 7291_evjvqa | [{"from": "human", "value": "what side dish is served next to the dip bowls?\n<image>"}, {"from": "gpt", "value": "a cucumber dish."}] |
00000007291.jpg | 7291_evjvqa | [{"from": "human", "value": "what does the cutlery set on the rice plate consist of?\n<image>"}, {"from": "gpt", "value": "the cutlery set on the rice plate consists of a spoon and a fork."}] |